Thanks for visiting codestin.com
Credit goes to github.com

Skip to content

Commit 00342b1

Browse files
authored
Merge pull request #17 from db-agent/gemini
Added ollama changes and supported model
2 parents 5eaad15 + f86ab9b commit 00342b1

File tree

2 files changed

+17
-0
lines changed

2 files changed

+17
-0
lines changed

helpers/supported_models.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@
88
"defog/sqlcoder-70b-alpha",
99
"meta-llama/Llama-3.3-70B-Instruct"],
1010
"ollama": ["llama3.2:1b",
11+
"llama3.2:latest",
1112
"llama3.3",
1213
"hf.co/defog/sqlcoder-7b-2"],
1314
"vllm": ["microsoft/Phi-3.5-mini-instruct",

textgen/ollama.py

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,8 @@ def construct_sql_payload(self, user_question, db_schema):
1313
prompt = (
1414
"<|begin_of_text|><|start_header_id|>user<|end_header_id|>\n\n"
1515
f"Generate a SQL query only to answer this question without explanation: `{user_question}`\n"
16+
"considering values like true,TRUE,yes,Yes,YES in a case-insensitive manner.\n"
17+
"considering values like false,FALSE,no,No,NO in a case-insensitive manner.\n"
1618
"DDL statements:\n"
1719
f"{db_schema}<|start_header_id|>assistant<|end_header_id|>\n\n"
1820
)
@@ -22,7 +24,21 @@ def construct_sql_payload(self, user_question, db_schema):
2224
"stream": False,
2325
"temperature": 0
2426
}
27+
def construct_generic_payload(self, user_question):
    """Build a generic chat-completion request payload for the configured model.

    The user question is forwarded verbatim as a single ``user``-role message;
    all sampling and length knobs are fixed defaults.

    Args:
        user_question: Free-form text to send as the user message.

    Returns:
        dict: Request body with ``model``, ``messages``, and generation settings.
    """
    payload = {"model": self.model_name}
    payload["messages"] = [{"role": "user", "content": user_question}]
    # Fixed generation defaults.
    # NOTE(review): max_new_tokens/max_length (4096) disagree with max_tokens
    # (1024) — confirm which limit the serving backend actually honors.
    payload.update({
        "temperature": 0.7,
        "max_new_tokens": 4096,
        "max_length": 4096,
        "top_p": 0.9,
        "repetition_penalty": 1.1,
        "stream": False,
        "max_tokens": 1024,
    })
    return payload
41+
2642
def parse_response(self, response):
    """Extract the assistant message text from an Ollama chat response.

    Args:
        response: Decoded JSON response dict from the Ollama chat endpoint.

    Returns:
        str: The ``message.content`` field, stripped of surrounding
        whitespace; empty string when either key is absent.
    """
    # Log the raw payload before extraction for debugging.
    logger.info(f"Parsing response for Ollama {response}")
    message = response.get('message', {})
    content = message.get('content', '')
    return content.strip()

0 commit comments

Comments (0)