Commit 3561ebf

Merge branch 'main' of https://github.com/abetlen/llama-cpp-python into main

2 parents: 32efed7 + 3921e10

3 files changed: +38, -8

  llama_cpp/_internals.py
  llama_cpp/llama.py
  llama_cpp/llama_grammar.py

llama_cpp/_internals.py (+6)

@@ -51,6 +51,9 @@ def __init__(
             self.path_model.encode("utf-8"), self.params
         )

+        if self.model is None:
+            raise ValueError(f"Failed to load model from file: {path_model}")
+
     def __del__(self):
         if self.model is not None and self._llama_free_model is not None:
             self._llama_free_model(self.model)

@@ -258,6 +261,9 @@ def __init__(
             self.model.model, self.params
         )

+        if self.ctx is None:
+            raise ValueError("Failed to create llama_context")
+
     def __del__(self):
         if self.ctx is not None and self._llama_free is not None:
             self._llama_free(self.ctx)
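The effect of these two checks: a NULL return from llama.cpp's loader now surfaces as a Python exception at construction time instead of an invalid model or context pointer that fails later. A minimal sketch of the new failure mode, assuming a file that exists but cannot be parsed as a GGUF model (the path is hypothetical):

from llama_cpp import Llama

# Hypothetical scenario: the file exists but llama.cpp cannot load it, so
# llama_load_model_from_file returns NULL. With this commit the wrapper
# raises ValueError immediately rather than carrying a NULL pointer around.
try:
    llm = Llama(model_path="./broken-model.gguf")  # hypothetical path
except ValueError as err:
    print(err)  # e.g. "Failed to load model from file: ./broken-model.gguf"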

llama_cpp/llama.py (+17, -5)

@@ -1885,8 +1885,9 @@ def from_pretrained(
         cls,
         repo_id: str,
         filename: Optional[str],
-        local_dir: Optional[Union[str, os.PathLike[str]]] = ".",
+        local_dir: Optional[Union[str, os.PathLike[str]]] = None,
         local_dir_use_symlinks: Union[bool, Literal["auto"]] = "auto",
+        cache_dir: Optional[Union[str, os.PathLike[str]]] = None,
         **kwargs: Any,
     ) -> "Llama":
         """Create a Llama model from a pretrained model name or path.

@@ -1945,18 +1946,29 @@ def from_pretrained(
             subfolder = str(Path(matching_file).parent)
             filename = Path(matching_file).name

-        local_dir = "."
-
         # download the file
         hf_hub_download(
             repo_id=repo_id,
-            local_dir=local_dir,
             filename=filename,
             subfolder=subfolder,
+            local_dir=local_dir,
             local_dir_use_symlinks=local_dir_use_symlinks,
+            cache_dir=cache_dir,
         )

-        model_path = os.path.join(local_dir, filename)
+        if local_dir is None:
+            model_path = hf_hub_download(
+                repo_id=repo_id,
+                filename=filename,
+                subfolder=subfolder,
+                local_dir=local_dir,
+                local_dir_use_symlinks=local_dir_use_symlinks,
+                cache_dir=cache_dir,
+                local_files_only=True,
+            )
+        else:
+            model_path = os.path.join(local_dir, filename)

         return cls(
             model_path=model_path,
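In practice, leaving local_dir at its new default of None means from_pretrained downloads into the standard huggingface_hub cache (or cache_dir, if given) instead of the current working directory; the second hf_hub_download call, with local_files_only=True, only re-resolves the just-downloaded file's cached path. A usage sketch, with an illustrative repo and filename:

from llama_cpp import Llama

# local_dir=None (the new default): the GGUF file lands in the
# huggingface_hub cache rather than being copied into the CWD.
llm = Llama.from_pretrained(
    repo_id="Qwen/Qwen2-0.5B-Instruct-GGUF",  # illustrative repo
    filename="*q8_0.gguf",                    # glob matched against repo files
)

# Passing local_dir explicitly keeps the old behavior of materializing
# the file in a directory you choose.
llm_local = Llama.from_pretrained(
    repo_id="Qwen/Qwen2-0.5B-Instruct-GGUF",
    filename="*q8_0.gguf",
    local_dir="./models",
)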

llama_cpp/llama_grammar.py (+15, -3)

@@ -1498,9 +1498,21 @@ def visit(self, schema: Dict[str, Any], name: str) -> str:
             item_rule_name = self.visit(
                 schema["items"], f'{name}{"-" if name else ""}item'
             )
-            rule = (
-                f'"[" space ({item_rule_name} ("," space {item_rule_name})*)? "]" space'
-            )
+            list_item_operator = f'("," space {item_rule_name})'
+            successive_items = ""
+            min_items = schema.get("minItems", 0)
+            if min_items > 0:
+                first_item = f"({item_rule_name})"
+                successive_items = list_item_operator * (min_items - 1)
+                min_items -= 1
+            else:
+                first_item = f"({item_rule_name})?"
+            max_items = schema.get("maxItems")
+            if max_items is not None and max_items > min_items:
+                successive_items += (list_item_operator + "?") * (max_items - min_items - 1)
+            else:
+                successive_items += list_item_operator + "*"
+            rule = f'"[" space {first_item} {successive_items} "]" space'
             return self._add_rule(rule_name, rule)

         else:
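Before this change the array rule was always zero-or-more items; minItems and maxItems were ignored. A worked example of the new branch: for a schema with minItems: 2 and maxItems: 4, the first item becomes mandatory, one comma-prefixed repetition is required, and two more are optional. A standalone re-creation of the string construction (names mirror the diff; the item rule is assumed to be named "item"):

# Re-creation of the rule construction for a hypothetical schema
# {"type": "array", "items": ..., "minItems": 2, "maxItems": 4}.
item_rule_name = "item"
schema = {"minItems": 2, "maxItems": 4}

list_item_operator = f'("," space {item_rule_name})'
successive_items = ""
min_items = schema.get("minItems", 0)
if min_items > 0:
    first_item = f"({item_rule_name})"                  # first item required
    successive_items = list_item_operator * (min_items - 1)  # required repeats
    min_items -= 1
else:
    first_item = f"({item_rule_name})?"                 # empty array allowed
max_items = schema.get("maxItems")
if max_items is not None and max_items > min_items:
    # bounded tail: each extra item is individually optional
    successive_items += (list_item_operator + "?") * (max_items - min_items - 1)
else:
    # unbounded tail
    successive_items += list_item_operator + "*"

rule = f'"[" space {first_item} {successive_items} "]" space'
print(rule)
# "[" space (item) ("," space item)("," space item)?("," space item)? "]" space

The printed rule accepts arrays of two to four items, which is exactly the minItems/maxItems constraint expressed as a GBNF production.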
