Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions src/BUILD
Original file line number Diff line number Diff line change
Expand Up @@ -643,6 +643,7 @@ ovms_cc_library(
"//src/dags:pipelinedefinition",
"//src/filesystem:libovmsfilesystem",
"//src/filesystem:libovmsfilesystemfactory",
"//src/graph_export:graph_export",
"//src/metrics:libovms_metric_provider",
"//src/metrics:libovmsmetrics",
"@com_github_tencent_rapidjson//:rapidjson",
Expand Down Expand Up @@ -931,6 +932,7 @@ ovms_cc_library(
"//src/kfserving_api:kfserving_api_cpp",
"capimodule",
"//src/pull_module:hf_pull_model_module",
"//src/graph_export:graph_export",
"//src/servables_config_manager_module:servablesconfigmanagermodule",
"predict_request_validation_utils", # to be removed when capi has its own lib and added there @atobisze
"kfs_backend_impl",
Expand Down
3 changes: 2 additions & 1 deletion src/capi_frontend/server_settings.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -90,6 +90,7 @@ enum OvmsServerMode : int {
HF_PULL_AND_START_MODE,
LIST_MODELS_MODE,
MODIFY_CONFIG_MODE,
GENAI_CONFIGURE_AND_START,
UNKNOWN_MODE
};

Expand Down Expand Up @@ -171,7 +172,7 @@ struct HFSettingsImpl {
std::string downloadPath = "";
bool overwriteModels = false;
ModelDownlaodType downloadType = GIT_CLONE_DOWNLOAD;
GraphExportType task = TEXT_GENERATION_GRAPH;
GraphExportType task = UNKNOWN_GRAPH;
std::variant<TextGenGraphSettingsImpl, RerankGraphSettingsImpl, EmbeddingsGraphSettingsImpl, TextToSpeechGraphSettingsImpl, SpeechToTextGraphSettingsImpl, ImageGenerationGraphSettingsImpl> graphSettings;
};

Expand Down
40 changes: 31 additions & 9 deletions src/cli_parser.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -344,8 +344,8 @@ std::variant<bool, std::pair<int, std::string>> CLIParser::parse(int argc, char*

result = std::make_unique<cxxopts::ParseResult>(options->parse(argc, argv));

// HF pull mode or pull and start mode
if (isHFPullOrPullAndStart(this->result)) {
// HF pull mode or pull and start mode or starting from local folder with graph created in memory
if (isHFPullOrPullAndStart(this->result) || isGenAIConfigureAndStart(this->result)) {
Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

GenAI name, but it has nothing to do with OpenVINO GenAI; I don't think it's good naming.

std::vector<std::string> unmatchedOptions;
GraphExportType task;
if (result->count("task")) {
Expand Down Expand Up @@ -692,13 +692,23 @@ void CLIParser::prepareModel(ModelsSettingsImpl& modelsSettings, HFSettingsImpl&
}

// Returns true when OVMS runs in HF pull (or pull-and-start) mode:
// either --pull was given, or --task was given together with --source_model.
// Note: the previous version treated --task alone as pull mode; that case is
// now handled by isGenAIConfigureAndStart().
bool CLIParser::isHFPullOrPullAndStart(const std::unique_ptr<cxxopts::ParseResult>& result) {
    return (result->count("pull") || (result->count("task") && result->count("source_model")));
}

// True when the server should generate the graph in memory and start from a
// local folder: --task is present but neither --source_model nor --pull is.
bool CLIParser::isGenAIConfigureAndStart(const std::unique_ptr<cxxopts::ParseResult>& result) {
    const bool taskGiven = result->count("task") > 0;
    const bool sourceModelGiven = result->count("source_model") > 0;
    const bool pullGiven = result->count("pull") > 0;
    return taskGiven && !sourceModelGiven && !pullGiven;
}

void CLIParser::prepareGraph(ServerSettingsImpl& serverSettings, HFSettingsImpl& hfSettings, const std::string& modelName) {
// Always propagate source_model so validation can detect misuse
if (result->count("source_model")) {
hfSettings.sourceModel = result->operator[]("source_model").as<std::string>();
}
// Ovms Pull models mode || pull and start models mode
if (isHFPullOrPullAndStart(this->result)) {
if (result->count("pull")) {
if (isHFPullOrPullAndStart(this->result) || isGenAIConfigureAndStart(this->result)) {
if (isGenAIConfigureAndStart(this->result)) {
serverSettings.serverMode = GENAI_CONFIGURE_AND_START;
} else if (result->count("pull")) {
serverSettings.serverMode = HF_PULL_MODE;
} else {
serverSettings.serverMode = HF_PULL_AND_START_MODE;
Expand All @@ -711,8 +721,11 @@ void CLIParser::prepareGraph(ServerSettingsImpl& serverSettings, HFSettingsImpl&
hfSettings.overwriteModels = result->operator[]("overwrite_models").as<bool>();
}
if (result->count("source_model")) {
// Already set above, but keep the original flow for downloadType logic
hfSettings.sourceModel = result->operator[]("source_model").as<std::string>();
} else if (result->count("model_name")) {
} else if (result->count("model_name") && !result->count("model_path")) {
// Only use model_name as source_model when model_path is not set
// (when model_path is set, user wants to use local model without HF pull)
hfSettings.sourceModel = result->operator[]("model_name").as<std::string>();
}
if ((result->count("weight-format") || result->count("extra_quantization_params")) && isOptimumCliDownload(hfSettings.sourceModel, hfSettings.ggufFilename)) {
Expand All @@ -732,6 +745,11 @@ void CLIParser::prepareGraph(ServerSettingsImpl& serverSettings, HFSettingsImpl&
if (result->count("vocoder"))
hfSettings.exportSettings.vocoder = result->operator[]("vocoder").as<std::string>();
hfSettings.downloadPath = result->operator[]("model_repository_path").as<std::string>();
// When --task is used with --model_path but without --pull/--source_model,
// use model_path as the model location (no HF download needed)
if (!result->count("pull") && !result->count("source_model") && result->count("model_path")) {
hfSettings.exportSettings.modelPath = result->operator[]("model_path").as<std::string>();
}
if (result->count("task")) {
hfSettings.task = stringToEnum(result->operator[]("task").as<std::string>());
switch (hfSettings.task) {
Expand Down Expand Up @@ -798,7 +816,8 @@ void CLIParser::prepareGraph(ServerSettingsImpl& serverSettings, HFSettingsImpl&
if (!serverSettings.cacheDir.empty()) {
hfSettings.exportSettings.pluginConfig.cacheDir = serverSettings.cacheDir;
}
// No pull nor pull and start mode

// No pull nor pull and start mode and no start with local model_path
} else {
if (result->count("weight-format")) {
throw std::logic_error("--weight-format parameter unsupported for Openvino huggingface organization models.");
Expand Down Expand Up @@ -840,11 +859,14 @@ void CLIParser::prepareGraphStart(HFSettingsImpl& hfSettings, ModelsSettingsImpl
// Model settings
if (result->count("model_name")) {
modelsSettings.modelName = result->operator[]("model_name").as<std::string>();
} else {
} else if (!hfSettings.sourceModel.empty()) {
Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

why this change? what happens with modelsSettings.modelName on else path now?

modelsSettings.modelName = hfSettings.sourceModel;
}

modelsSettings.modelPath = FileSystem::joinPath({hfSettings.downloadPath, hfSettings.sourceModel});
// Only override modelPath if it wasn't already set via --model_path
if (!result->count("model_path")) {
modelsSettings.modelPath = FileSystem::joinPath({hfSettings.downloadPath, hfSettings.sourceModel});
}
}

void CLIParser::prepare(ServerSettingsImpl* serverSettings, ModelsSettingsImpl* modelsSettings) {
Expand Down
1 change: 1 addition & 0 deletions src/cli_parser.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@ class CLIParser {
void prepareGraphStart(HFSettingsImpl& hfSettings, ModelsSettingsImpl& modelsSettings);
void prepareConfigExport(ModelsSettingsImpl& modelsSettings);
bool isHFPullOrPullAndStart(const std::unique_ptr<cxxopts::ParseResult>& result);
bool isGenAIConfigureAndStart(const std::unique_ptr<cxxopts::ParseResult>& result);
};

} // namespace ovms
26 changes: 15 additions & 11 deletions src/config.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -148,18 +148,22 @@ bool Config::validateUserSettingsInConfigAddRemoveModel(const ModelsSettingsImpl
}

bool Config::validate() {
if (!this->serverSettings.hfSettings.sourceModel.empty() && this->serverSettings.hfSettings.task == UNKNOWN_GRAPH) {
std::cerr << "--source_model should be used combined with --task" << std::endl;
return false;
}
if (this->serverSettings.serverMode == HF_PULL_MODE || this->serverSettings.serverMode == HF_PULL_AND_START_MODE) {
if (!serverSettings.hfSettings.sourceModel.size()) {
std::cerr << "source_model parameter is required for pull mode";
return false;
}
if (!serverSettings.hfSettings.downloadPath.size()) {
std::cerr << "model_repository_path parameter is required for pull mode";
return false;
}
if (this->serverSettings.hfSettings.task == UNKNOWN_GRAPH) {
std::cerr << "Error: --task parameter not set." << std::endl;
return false;
// When --task is used with --model_path (no HF pulling), sourceModel and downloadPath are not required
bool taskWithModelPath = this->serverSettings.serverMode == HF_PULL_AND_START_MODE && !this->modelsSettings.modelPath.empty();
if (!taskWithModelPath) {
if (!serverSettings.hfSettings.sourceModel.size()) {
std::cerr << "source_model parameter is required for pull mode";
return false;
}
if (!serverSettings.hfSettings.downloadPath.size()) {
std::cerr << "model_repository_path parameter is required for pull mode";
return false;
}
}
if (this->serverSettings.hfSettings.task == TEXT_GENERATION_GRAPH) {
if (!std::holds_alternative<TextGenGraphSettingsImpl>(this->serverSettings.hfSettings.graphSettings)) {
Expand Down
64 changes: 45 additions & 19 deletions src/graph_export/graph_export.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,20 @@
#endif
namespace ovms {

static std::string s_inMemoryGraphContent;
Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Why s_ prefix? That's inconsistent with camelCase convention we use.


// Returns true when a generated graph config is currently held in memory
// (populated when createServableConfig runs with writeToFile == false)
// instead of having been written to graph.pbtxt on disk.
bool GraphExport::hasInMemoryGraphContent() {
    return !s_inMemoryGraphContent.empty();
}

// Read-only access to the in-memory graph config; empty string when no
// graph has been generated in memory.
const std::string& GraphExport::getInMemoryGraphContent() {
    return s_inMemoryGraphContent;
}

// Drops any previously generated in-memory graph config so stale content
// cannot shadow a file-based graph.pbtxt in a later run (concern raised in
// review: the storage is a process-wide global, not tied to a servable).
void GraphExport::clearInMemoryGraphContent() {
    s_inMemoryGraphContent.clear();
}

static const std::string OVMS_VERSION_GRAPH_LINE = std::string("# File created with: ") + PROJECT_NAME + std::string(" ") + PROJECT_VERSION + std::string("\n");

static std::string constructModelsPath(const std::string& modelPath, const std::optional<std::string>& ggufFilenameOpt) {
Expand Down Expand Up @@ -91,22 +105,26 @@ std::string GraphExport::getDraftModelDirectoryPath(const std::string& directory
} \
auto pluginConfigOpt = std::get<std::optional<std::string>>(pluginConfigOrStatus)

// Validates the generated pbtxt graph config and either writes it to
// <directoryPath>/graph.pbtxt (writeToFile == true) or stores it in the
// process-wide s_inMemoryGraphContent for the configure-and-start flow.
// Returns MEDIAPIPE_GRAPH_CONFIG_FILE_INVALID when the content fails to parse.
static Status createPbtxtFile(const std::string& directoryPath, const std::string& pbtxtContent, bool writeToFile) {
#if (MEDIAPIPE_DISABLE == 0)
    ::mediapipe::CalculatorGraphConfig config;
    SPDLOG_TRACE("Generated pbtxt: \n{}", pbtxtContent);
    // Parse the generated content up-front so invalid task parameter values
    // surface here rather than later at graph load time.
    bool success = ::google::protobuf::TextFormat::ParseFromString(pbtxtContent, &config);
    if (!success) {
        SPDLOG_ERROR("Created graph config file couldn't be parsed - check used task parameters values.");
        return StatusCode::MEDIAPIPE_GRAPH_CONFIG_FILE_INVALID;
    }
#endif
    if (!writeToFile) {
        s_inMemoryGraphContent = pbtxtContent;
        return StatusCode::OK;
    }
    // clang-format on
    std::string fullPath = FileSystem::joinPath({directoryPath, "graph.pbtxt"});
    return FileSystem::createFileOverwrite(fullPath, pbtxtContent);
}

static Status createTextGenerationGraphTemplate(const std::string& directoryPath, const HFSettingsImpl& hfSettings) {
static Status createTextGenerationGraphTemplate(const std::string& directoryPath, const HFSettingsImpl& hfSettings, bool writeToFile) {
if (!std::holds_alternative<TextGenGraphSettingsImpl>(hfSettings.graphSettings)) {
SPDLOG_ERROR("Graph options not initialized for text generation.");
return StatusCode::INTERNAL_ERROR;
Expand Down Expand Up @@ -198,10 +216,10 @@ static Status createTextGenerationGraphTemplate(const std::string& directoryPath
}
}
})";
return createPbtxtFile(directoryPath, oss.str());
return createPbtxtFile(directoryPath, oss.str(), writeToFile);
}

static Status createRerankGraphTemplate(const std::string& directoryPath, const HFSettingsImpl& hfSettings) {
static Status createRerankGraphTemplate(const std::string& directoryPath, const HFSettingsImpl& hfSettings, bool writeToFile) {
if (!std::holds_alternative<RerankGraphSettingsImpl>(hfSettings.graphSettings)) {
SPDLOG_ERROR("Graph options not initialized for reranking.");
return StatusCode::INTERNAL_ERROR;
Expand Down Expand Up @@ -242,10 +260,10 @@ node {
}
}
})";
return createPbtxtFile(directoryPath, oss.str());
return createPbtxtFile(directoryPath, oss.str(), writeToFile);
}

static Status createEmbeddingsGraphTemplate(const std::string& directoryPath, const HFSettingsImpl& hfSettings) {
static Status createEmbeddingsGraphTemplate(const std::string& directoryPath, const HFSettingsImpl& hfSettings, bool writeToFile) {
if (!std::holds_alternative<EmbeddingsGraphSettingsImpl>(hfSettings.graphSettings)) {
SPDLOG_ERROR("Graph options not initialized for embeddings.");
return StatusCode::INTERNAL_ERROR;
Expand Down Expand Up @@ -289,10 +307,10 @@ node {
oss << R"(}
}
})";
return createPbtxtFile(directoryPath, oss.str());
return createPbtxtFile(directoryPath, oss.str(), writeToFile);
}

static Status createTextToSpeechGraphTemplate(const std::string& directoryPath, const HFSettingsImpl& hfSettings) {
static Status createTextToSpeechGraphTemplate(const std::string& directoryPath, const HFSettingsImpl& hfSettings, bool writeToFile) {
if (!std::holds_alternative<TextToSpeechGraphSettingsImpl>(hfSettings.graphSettings)) {
SPDLOG_ERROR("Graph options not initialized for speech generation.");
return StatusCode::INTERNAL_ERROR;
Expand Down Expand Up @@ -339,11 +357,15 @@ node {
}
#endif
// clang-format on
if (!writeToFile) {
s_inMemoryGraphContent = oss.str();
return StatusCode::OK;
}
std::string fullPath = FileSystem::joinPath({directoryPath, "graph.pbtxt"});
return FileSystem::createFileOverwrite(fullPath, oss.str());
}

static Status createSpeechToTextGraphTemplate(const std::string& directoryPath, const HFSettingsImpl& hfSettings) {
static Status createSpeechToTextGraphTemplate(const std::string& directoryPath, const HFSettingsImpl& hfSettings, bool writeToFile) {
if (!std::holds_alternative<SpeechToTextGraphSettingsImpl>(hfSettings.graphSettings)) {
SPDLOG_ERROR("Graph options not initialized for speech to text.");
return StatusCode::INTERNAL_ERROR;
Expand Down Expand Up @@ -405,11 +427,15 @@ node {
}
#endif
// clang-format on
if (!writeToFile) {
s_inMemoryGraphContent = oss.str();
return StatusCode::OK;
}
std::string fullPath = FileSystem::joinPath({directoryPath, "graph.pbtxt"});
return FileSystem::createFileOverwrite(fullPath, oss.str());
}

static Status createImageGenerationGraphTemplate(const std::string& directoryPath, const HFSettingsImpl& hfSettings) {
static Status createImageGenerationGraphTemplate(const std::string& directoryPath, const HFSettingsImpl& hfSettings, bool writeToFile) {
if (!std::holds_alternative<ImageGenerationGraphSettingsImpl>(hfSettings.graphSettings)) {
SPDLOG_ERROR("Graph options not initialized for image generation.");
return StatusCode::INTERNAL_ERROR;
Expand Down Expand Up @@ -489,13 +515,13 @@ node: {
}
)";
// clang-format on
return createPbtxtFile(directoryPath, oss.str());
return createPbtxtFile(directoryPath, oss.str(), writeToFile);
}

GraphExport::GraphExport() {
}

Status GraphExport::createServableConfig(const std::string& directoryPath, const HFSettingsImpl& hfSettings) {
Status GraphExport::createServableConfig(const std::string& directoryPath, const HFSettingsImpl& hfSettings, bool writeToFile) {
if (directoryPath.empty()) {
SPDLOG_ERROR("Directory path empty: {}", directoryPath);
return StatusCode::PATH_INVALID;
Expand All @@ -518,17 +544,17 @@ Status GraphExport::createServableConfig(const std::string& directoryPath, const
}
}
if (hfSettings.task == TEXT_GENERATION_GRAPH) {
return createTextGenerationGraphTemplate(directoryPath, hfSettings);
return createTextGenerationGraphTemplate(directoryPath, hfSettings, writeToFile);
} else if (hfSettings.task == EMBEDDINGS_GRAPH) {
return createEmbeddingsGraphTemplate(directoryPath, hfSettings);
return createEmbeddingsGraphTemplate(directoryPath, hfSettings, writeToFile);
} else if (hfSettings.task == RERANK_GRAPH) {
return createRerankGraphTemplate(directoryPath, hfSettings);
return createRerankGraphTemplate(directoryPath, hfSettings, writeToFile);
} else if (hfSettings.task == IMAGE_GENERATION_GRAPH) {
return createImageGenerationGraphTemplate(directoryPath, hfSettings);
return createImageGenerationGraphTemplate(directoryPath, hfSettings, writeToFile);
} else if (hfSettings.task == TEXT_TO_SPEECH_GRAPH) {
return createTextToSpeechGraphTemplate(directoryPath, hfSettings);
return createTextToSpeechGraphTemplate(directoryPath, hfSettings, writeToFile);
} else if (hfSettings.task == SPEECH_TO_TEXT_GRAPH) {
return createSpeechToTextGraphTemplate(directoryPath, hfSettings);
return createSpeechToTextGraphTemplate(directoryPath, hfSettings, writeToFile);
} else if (hfSettings.task == UNKNOWN_GRAPH) {
SPDLOG_ERROR("Graph options not initialized.");
return StatusCode::INTERNAL_ERROR;
Expand Down
6 changes: 5 additions & 1 deletion src/graph_export/graph_export.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -27,9 +27,13 @@ class Status;
class GraphExport {
public:
    GraphExport();
    // Generates the servable graph config for graphSettings.task; writes it
    // to <directoryPath>/graph.pbtxt unless writeToFile is false, in which
    // case the content is kept in memory (see accessors below). The stale
    // two-parameter overload was removed: together with the defaulted
    // three-parameter form it made every two-argument call ambiguous.
    Status createServableConfig(const std::string& directoryPath, const HFSettingsImpl& graphSettings, bool writeToFile = true);
    static std::variant<std::optional<std::string>, Status> createPluginString(const ExportSettings& exportSettings);
    static std::string getDraftModelDirectoryName(std::string draftModel);
    static std::string getDraftModelDirectoryPath(const std::string& directoryPath, const std::string& draftModel);

    // Accessors for the graph config generated in memory (writeToFile == false).
    // NOTE(review): storage is process-global, not per-servable — only one
    // in-memory graph is supported at a time.
    static bool hasInMemoryGraphContent();
    static const std::string& getInMemoryGraphContent();
    static void clearInMemoryGraphContent();
};
} // namespace ovms
1 change: 1 addition & 0 deletions src/mediapipe_internal/BUILD
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,7 @@ ovms_cc_library(
"//src:libovms_servable_name_checker",
"//src/metrics:libovms_metric_provider",
"//src/filesystem:libovmsfilesystem",
"//src/graph_export:graph_export",
"//src:libovms_version",
"//src:libovms_execution_context",
"//src:libovmstimer",
Expand Down
5 changes: 5 additions & 0 deletions src/mediapipe_internal/mediapipegraphconfig.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
#include <spdlog/spdlog.h>

#include "src/filesystem/filesystem.hpp"
#include "src/graph_export/graph_export.hpp"
#include "../status.hpp"

namespace ovms {
Expand Down Expand Up @@ -129,6 +130,10 @@ Status MediapipeGraphConfig::parseNode(const rapidjson::Value& v) {
}

void MediapipeGraphConfig::logGraphConfigContent() const {
if (GraphExport::hasInMemoryGraphContent()) {
SPDLOG_DEBUG("Content of in-memory graph config:\n{}", GraphExport::getInMemoryGraphContent());
return;
}
std::ifstream fileStream(this->graphPath);
if (!fileStream.is_open()) {
SPDLOG_ERROR("Failed to open file: {}", this->graphPath);
Expand Down
8 changes: 8 additions & 0 deletions src/mediapipe_internal/mediapipegraphdefinition.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@

#include "../execution_context.hpp"
#include "src/filesystem/filesystem.hpp"
#include "src/graph_export/graph_export.hpp"
#include "src/metrics/metric.hpp"
#include "../model_metric_reporter.hpp"
#include "../ov_utils.hpp"
Expand Down Expand Up @@ -60,6 +61,13 @@ const tensor_map_t MediapipeGraphDefinition::getOutputsInfo() const {
}

Status MediapipeGraphDefinition::validateForConfigFileExistence() {
if (GraphExport::hasInMemoryGraphContent()) {
const std::string& content = GraphExport::getInMemoryGraphContent();
Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is it global? Not tied to any specific servable object?

Copy link
Copy Markdown
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yes — the assumption is that only one graph can be stored in memory. Using multiple models requires creating graph.pbtxt in the model folder via the --pull command.

this->chosenConfig = content;
this->mgconfig.setCurrentGraphPbTxtMD5(ovms::FileSystem::getStringMD5(content));
SPDLOG_LOGGER_DEBUG(modelmanager_logger, "Using in-memory graph content for mediapipe graph definition: {}", this->getName());
return StatusCode::OK;
}
Comment on lines 63 to +70
Copy link

Copilot AI Apr 15, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

validateForConfigFileExistence() unconditionally prefers GraphExport in-memory content whenever it is set. Since that content is global and not scoped to a particular graph/model, this can cause the wrong graph to be loaded (e.g., if a previous startup/test left in-memory content set) and will also bypass filesystem-based reload semantics. Consider scoping the in-memory graph to the specific startup flow (or at least clearing it after it has been consumed) so normal file-based graphs aren’t shadowed.

Copilot uses AI. Check for mistakes.
Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Good point.

std::ifstream ifs(this->mgconfig.getGraphPath());
if (!ifs.is_open()) {
SPDLOG_LOGGER_ERROR(modelmanager_logger, "Failed to open mediapipe graph definition: {}, file: {}\n", this->getName(), this->mgconfig.getGraphPath());
Expand Down
Loading