@@ -14,12 +14,11 @@
 namespace commands {
 
 std::optional<std::string> SelectLocalModel(std::string host, int port,
-                                            const std::string& model_handle) {
+                                            const std::string& model_handle,
+                                            DatabaseService& db_service) {
   std::optional<std::string> model_id = model_handle;
-  cortex::db::Models modellist_handler;
-
   if (model_handle.empty()) {
-    auto all_local_models = modellist_handler.LoadModelList();
+    auto all_local_models = db_service.LoadModelList();
     if (all_local_models.has_error() || all_local_models.value().empty()) {
       CLI_LOG("No local models available!");
       return std::nullopt;
@@ -42,7 +41,7 @@ std::optional<std::string> SelectLocalModel(std::string host, int port,
       CLI_LOG("Selected: " << selection.value());
     }
   } else {
-    auto related_models_ids = modellist_handler.FindRelatedModel(model_handle);
+    auto related_models_ids = db_service.FindRelatedModel(model_handle);
     if (related_models_ids.has_error() || related_models_ids.value().empty()) {
       auto result = ModelPullCmd().Exec(host, port, model_handle);
       if (!result) {
@@ -69,19 +68,18 @@ std::optional<std::string> SelectLocalModel(std::string host, int port,
 void RunCmd::Exec(bool run_detach,
                   const std::unordered_map<std::string, std::string>& options) {
   std::optional<std::string> model_id =
-      SelectLocalModel(host_, port_, model_handle_);
+      SelectLocalModel(host_, port_, model_handle_, *db_service_);
   if (!model_id.has_value()) {
     return;
   }
 
-  cortex::db::Models modellist_handler;
   config::YamlHandler yaml_handler;
   auto address = host_ + ":" + std::to_string(port_);
 
   try {
     namespace fs = std::filesystem;
     namespace fmu = file_manager_utils;
-    auto model_entry = modellist_handler.GetModelInfo(*model_id);
+    auto model_entry = db_service_->GetModelInfo(*model_id);
     if (model_entry.has_error()) {
       CLI_LOG("Error: " + model_entry.error());
       return;
@@ -128,7 +126,7 @@ void RunCmd::Exec(bool run_detach,
          mc.engine.find(kLlamaEngine) == std::string::npos) ||
         !commands::ModelStatusCmd().IsLoaded(host_, port_, *model_id)) {
 
-      auto res = commands::ModelStartCmd()
+      auto res = commands::ModelStartCmd(db_service_)
                      .Exec(host_, port_, *model_id, options,
                            false /*print_success_log*/);
       if (!res) {
@@ -144,7 +142,7 @@ void RunCmd::Exec(bool run_detach,
                           << commands::GetCortexBinary() << " run " << *model_id
                           << "` for interactive chat shell");
       } else {
-        ChatCompletionCmd().Exec(host_, port_, *model_id, mc, "");
+        ChatCompletionCmd(db_service_).Exec(host_, port_, *model_id, mc, "");
       }
     }
   } catch (const std::exception& e) {
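
Read together, the hunks replace the per-call-site `cortex::db::Models modellist_handler` with a single injected `DatabaseService`: `SelectLocalModel` now takes the service by reference, and `RunCmd` forwards its member `db_service_` (a pointer, judging by the `*db_service_` and `db_service_->` uses) into `ModelStartCmd` and `ChatCompletionCmd`. The service's declaration is not part of this diff; the sketch below shows one plausible shape inferred purely from the calls above. The `Result` stand-in, the `ModelEntry` fields, and the `shared_ptr` ownership are assumptions, not the repo's actual definitions.

```cpp
#include <memory>
#include <string>
#include <utility>
#include <variant>
#include <vector>

// Stand-in for the result type implied by the has_error()/value()/error()
// calls in the diff; the repo's own type is assumed to live elsewhere.
template <typename T, typename E>
class Result {
 public:
  Result(T v) : data_(std::move(v)) {}
  Result(E e) : data_(std::move(e)) {}
  bool has_error() const { return std::holds_alternative<E>(data_); }
  const T& value() const { return std::get<T>(data_); }
  const E& error() const { return std::get<E>(data_); }

 private:
  std::variant<T, E> data_;
};

struct ModelEntry {  // fields assumed; the diff only passes entries around
  std::string model_id;
  std::string path_to_model_yaml;
};

// Inferred interface: exactly the three accessors this diff calls.
// Bodies are stubbed so the sketch compiles standalone.
class DatabaseService {
 public:
  Result<std::vector<ModelEntry>, std::string> LoadModelList() {
    return std::vector<ModelEntry>{};
  }
  Result<std::vector<std::string>, std::string> FindRelatedModel(
      const std::string& /*model_handle*/) {
    return std::vector<std::string>{};
  }
  Result<ModelEntry, std::string> GetModelInfo(const std::string& identifier) {
    return std::string("model not found: " + identifier);
  }
};

// Constructor injection as the command classes appear to use it: one shared
// service instance instead of a fresh cortex::db::Models per command.
class RunCmd {
 public:
  RunCmd(std::string host, int port, std::string model_handle,
         std::shared_ptr<DatabaseService> db_service)
      : host_(std::move(host)),
        port_(port),
        model_handle_(std::move(model_handle)),
        db_service_(std::move(db_service)) {}

 private:
  std::string host_;
  int port_;
  std::string model_handle_;
  std::shared_ptr<DatabaseService> db_service_;  // used as *db_service_ / db_service_->
};
```

Passing `*db_service_` by reference into the free function while the command objects share the pointer keeps a single service instance per CLI invocation, which is presumably what lets tests substitute a fake database; the diff alone does not show where that instance is constructed.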