@@ -214,6 +214,8 @@ class _LlamaModel:
     NOTE: For stability it's recommended you use the Llama class instead."""

     _llama_free_model = None
+    # NOTE: this must be "saved" here to avoid exceptions when calling __del__
+    suppress_stdout_stderr = suppress_stdout_stderr

     def __init__(
         self,
@@ -237,7 +239,7 @@ def __init__(
         )

     def __del__(self):
-        with suppress_stdout_stderr(disable=self.verbose):
+        with self.suppress_stdout_stderr(disable=self.verbose):
             if self.model is not None and self._llama_free_model is not None:
                 self._llama_free_model(self.model)
                 self.model = None
@@ -399,6 +401,8 @@ class _LlamaContext:
     NOTE: For stability it's recommended you use the Llama class instead."""

     _llama_free = None
+    # NOTE: this must be "saved" here to avoid exceptions when calling __del__
+    suppress_stdout_stderr = suppress_stdout_stderr

     def __init__(
         self,
@@ -419,7 +423,7 @@ def __init__(
         )

     def __del__(self):
-        with suppress_stdout_stderr(disable=self.verbose):
+        with self.suppress_stdout_stderr(disable=self.verbose):
             if self.ctx is not None and self._llama_free is not None:
                 self._llama_free(self.ctx)
                 self.ctx = None
@@ -650,6 +654,8 @@ def default_params():

 class _LlamaBatch:
     _llama_batch_free = None
+    # NOTE: this must be "saved" here to avoid exceptions when calling __del__
+    suppress_stdout_stderr = suppress_stdout_stderr

     def __init__(
         self, *, n_tokens: int, embd: int, n_seq_max: int, verbose: bool = True
@@ -667,7 +673,7 @@ def __init__(
         )

     def __del__(self):
-        with suppress_stdout_stderr(disable=self.verbose):
+        with self.suppress_stdout_stderr(disable=self.verbose):
             if self.batch is not None and self._llama_batch_free is not None:
                 self._llama_batch_free(self.batch)
                 self.batch = None
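The same fix is applied to all three wrappers. During interpreter shutdown, module-level globals may already have been cleared to None by the time `__del__` runs, so referencing `suppress_stdout_stderr` as a global inside `__del__` can raise. Binding it as a class attribute keeps a live reference reachable through `self`. Below is a minimal sketch of the pattern with illustrative names (`_Resource`, `handle`, `free_fn` are not part of the actual llama-cpp-python API); the no-op `suppress_stdout_stderr` stands in for the real output-silencing helper:

```python
from typing import Any, Callable, Optional


class suppress_stdout_stderr:
    """Stand-in for the real helper: a no-op context manager here.
    The actual one temporarily silences the C library's stdout/stderr."""

    def __init__(self, disable: bool = False):
        self.disable = disable

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc, tb):
        return False


class _Resource:
    _free: Optional[Callable[[Any], None]] = None

    # Saved as a class attribute so __del__ never has to look up the module
    # global, which may already be None during interpreter shutdown.
    # (Attribute access stays a plain call because the helper is a class,
    # not a function, so it is not turned into a bound method.)
    suppress_stdout_stderr = suppress_stdout_stderr

    def __init__(self, handle: Any, free_fn: Callable[[Any], None], verbose: bool = True):
        self.handle = handle
        self._free = free_fn
        self.verbose = verbose

    def __del__(self):
        with self.suppress_stdout_stderr(disable=self.verbose):
            if self.handle is not None and self._free is not None:
                self._free(self.handle)
                self.handle = None


# Usage sketch: the finalizer frees the handle exactly once.
r = _Resource(handle=object(), free_fn=lambda h: print("freed"), verbose=False)
del r
```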