diff --git a/google/generativeai/client.py b/google/generativeai/client.py index 938018b96..d2eb6b1c9 100644 --- a/google/generativeai/client.py +++ b/google/generativeai/client.py @@ -8,6 +8,7 @@ from typing import Any, cast from collections.abc import Sequence import httplib2 +from io import IOBase import google.ai.generativelanguage as glm import google.generativeai.protos as protos @@ -88,7 +89,7 @@ def _setup_discovery_api(self, metadata: dict | Sequence[tuple[str, str]] = ()): def create_file( self, - path: str | pathlib.Path | os.PathLike, + path: str | pathlib.Path | os.PathLike | IOBase, *, mime_type: str | None = None, name: str | None = None, @@ -105,9 +106,15 @@ def create_file( if display_name is not None: file["displayName"] = display_name - media = googleapiclient.http.MediaFileUpload( - filename=path, mimetype=mime_type, resumable=resumable - ) + if isinstance(path, IOBase): + media = googleapiclient.http.MediaIoBaseUpload( + fd=path, mimetype=mime_type, resumable=resumable + ) + else: + media = googleapiclient.http.MediaFileUpload( + filename=path, mimetype=mime_type, resumable=resumable + ) + request = self._discovery_api.media().upload(body={"file": file}, media_body=media) for key, value in metadata: request.headers[key] = value diff --git a/google/generativeai/files.py b/google/generativeai/files.py index c0d8e1e0a..b2581bdcd 100644 --- a/google/generativeai/files.py +++ b/google/generativeai/files.py @@ -21,6 +21,7 @@ import logging from google.generativeai import protos from itertools import islice +from io import IOBase from google.generativeai.types import file_types @@ -32,7 +33,7 @@ def upload_file( - path: str | pathlib.Path | os.PathLike, + path: str | pathlib.Path | os.PathLike | IOBase, *, mime_type: str | None = None, name: str | None = None, @@ -42,7 +43,7 @@ def upload_file( """Calls the API to upload a file using a supported file service. Args: - path: The path to the file to be uploaded. 
+ path: The path to the file or a file-like object (e.g., BytesIO) to be uploaded. mime_type: The MIME type of the file. If not provided, it will be inferred from the file extension. name: The name of the file in the destination (e.g., 'files/sample-image'). @@ -57,17 +58,30 @@ def upload_file( """ client = get_default_file_client() - path = pathlib.Path(os.fspath(path)) + if isinstance(path, IOBase): + if mime_type is None: + raise ValueError( + "Unknown mime type: When passing a file like object to `path` (instead of a\n" + " path-like object) you must set the `mime_type` argument" + ) + else: + path = pathlib.Path(os.fspath(path)) - if mime_type is None: - mime_type, _ = mimetypes.guess_type(path) + if display_name is None: + display_name = path.name + + if mime_type is None: + mime_type, _ = mimetypes.guess_type(path) + + if mime_type is None: + raise ValueError( + "Unknown mime type: Could not determine the mimetype for your file\n" + " please set the `mime_type` argument" + ) if name is not None and "/" not in name: name = f"files/{name}" - if display_name is None: - display_name = path.name - response = client.create_file( path=path, mime_type=mime_type, name=name, display_name=display_name, resumable=resumable ) diff --git a/google/generativeai/generative_models.py b/google/generativeai/generative_models.py index 50b15261a..134430b2e 100644 --- a/google/generativeai/generative_models.py +++ b/google/generativeai/generative_models.py @@ -72,7 +72,7 @@ class GenerativeModel: def __init__( self, - model_name: str = "gemini-pro", + model_name: str = "gemini-1.5-flash-002", safety_settings: safety_types.SafetySettingOptions | None = None, generation_config: generation_types.GenerationConfigType | None = None, tools: content_types.FunctionLibraryType | None = None, diff --git a/google/generativeai/types/content_types.py b/google/generativeai/types/content_types.py index b925967c8..f3db610e1 100644 --- a/google/generativeai/types/content_types.py +++ 
b/google/generativeai/types/content_types.py @@ -19,6 +19,7 @@ import io import inspect import mimetypes +import pathlib import typing from typing import Any, Callable, Union from typing_extensions import TypedDict @@ -30,7 +31,7 @@ if typing.TYPE_CHECKING: import PIL.Image - import PIL.PngImagePlugin + import PIL.ImageFile import IPython.display IMAGE_TYPES = (PIL.Image.Image, IPython.display.Image) @@ -38,7 +39,7 @@ IMAGE_TYPES = () try: import PIL.Image - import PIL.PngImagePlugin + import PIL.ImageFile IMAGE_TYPES = IMAGE_TYPES + (PIL.Image.Image,) except ImportError: @@ -71,47 +72,61 @@ "FunctionLibraryType", ] +Mode = protos.DynamicRetrievalConfig.Mode -def pil_to_blob(img): - # When you load an image with PIL you get a subclass of PIL.Image - # The subclass knows what file type it was loaded from it has a `.format` class attribute - # and the `get_format_mimetype` method. Convert these back to the same file type. - # - # The base image class doesn't know its file type, it just knows its mode. - # RGBA converts to PNG easily, P[allet] converts to GIF, RGB to GIF. - # But for anything else I'm not going to bother mapping it out (for now) let's just convert to RGB and send it. - # - # References: - # - file formats: https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html - # - image modes: https://pillow.readthedocs.io/en/stable/handbook/concepts.html#modes - - bytesio = io.BytesIO() - - get_mime = getattr(img, "get_format_mimetype", None) - if get_mime is not None: - # If the image is created from a file, convert back to the same file type. 
- img.save(bytesio, format=img.format) - mime_type = img.get_format_mimetype() - elif img.mode == "RGBA": - img.save(bytesio, format="PNG") - mime_type = "image/png" - elif img.mode == "P": - img.save(bytesio, format="GIF") - mime_type = "image/gif" - else: - if img.mode != "RGB": - img = img.convert("RGB") - img.save(bytesio, format="JPEG") - mime_type = "image/jpeg" - bytesio.seek(0) - data = bytesio.read() - return protos.Blob(mime_type=mime_type, data=data) +ModeOptions = Union[int, str, Mode] + +_MODE: dict[ModeOptions, Mode] = { + Mode.MODE_UNSPECIFIED: Mode.MODE_UNSPECIFIED, + 0: Mode.MODE_UNSPECIFIED, + "mode_unspecified": Mode.MODE_UNSPECIFIED, + "unspecified": Mode.MODE_UNSPECIFIED, + Mode.MODE_DYNAMIC: Mode.MODE_DYNAMIC, + 1: Mode.MODE_DYNAMIC, + "mode_dynamic": Mode.MODE_DYNAMIC, + "dynamic": Mode.MODE_DYNAMIC, +} + + +def to_mode(x: ModeOptions) -> Mode: + if isinstance(x, str): + x = x.lower() + return _MODE[x] + + +def _pil_to_blob(image: PIL.Image.Image) -> protos.Blob: + # If the image is a local file, return a file-based blob without any modification. + # Otherwise, return a lossless WebP blob (same quality with optimized size). 
+ def file_blob(image: PIL.Image.Image) -> protos.Blob | None: + if not isinstance(image, PIL.ImageFile.ImageFile) or image.filename is None: + return None + filename = str(image.filename) + if not pathlib.Path(filename).is_file(): + return None + + mime_type = image.get_format_mimetype() + image_bytes = pathlib.Path(filename).read_bytes() + + return protos.Blob(mime_type=mime_type, data=image_bytes) + + def webp_blob(image: PIL.Image.Image) -> protos.Blob: + # Reference: https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html#webp + image_io = io.BytesIO() + image.save(image_io, format="webp", lossless=True) + image_io.seek(0) + + mime_type = "image/webp" + image_bytes = image_io.read() + + return protos.Blob(mime_type=mime_type, data=image_bytes) + + return file_blob(image) or webp_blob(image) def image_to_blob(image) -> protos.Blob: if PIL is not None: if isinstance(image, PIL.Image.Image): - return pil_to_blob(image) + return _pil_to_blob(image) if IPython is not None: if isinstance(image, IPython.display.Image): @@ -650,16 +665,54 @@ def _encode_fd(fd: FunctionDeclaration | protos.FunctionDeclaration) -> protos.F return fd.to_proto() +class DynamicRetrievalConfigDict(TypedDict): + mode: protos.DynamicRetrievalConfig.mode + dynamic_threshold: float + + +DynamicRetrievalConfig = Union[protos.DynamicRetrievalConfig, DynamicRetrievalConfigDict] + + +class GoogleSearchRetrievalDict(TypedDict): + dynamic_retrieval_config: DynamicRetrievalConfig + + +GoogleSearchRetrievalType = Union[protos.GoogleSearchRetrieval, GoogleSearchRetrievalDict] + + +def _make_google_search_retrieval(gsr: GoogleSearchRetrievalType): + if isinstance(gsr, protos.GoogleSearchRetrieval): + return gsr + elif isinstance(gsr, Mapping): + drc = gsr.get("dynamic_retrieval_config", None) + if drc is not None and isinstance(drc, Mapping): + mode = drc.get("mode", None) + if mode is not None: + mode = to_mode(mode) + gsr = gsr.copy() + gsr["dynamic_retrieval_config"]["mode"] = mode + 
return protos.GoogleSearchRetrieval(gsr) + else: + raise TypeError( + "Invalid input type. Expected an instance of `genai.GoogleSearchRetrieval`.\n" + f"However, received an object of type: {type(gsr)}.\n" + f"Object Value: {gsr}" + ) + + class Tool: - """A wrapper for `protos.Tool`, Contains a collection of related `FunctionDeclaration` objects.""" + """A wrapper for `protos.Tool`, Contains a collection of related `FunctionDeclaration` objects, + protos.CodeExecution object, and protos.GoogleSearchRetrieval object.""" def __init__( self, + *, function_declarations: Iterable[FunctionDeclarationType] | None = None, + google_search_retrieval: GoogleSearchRetrievalType | None = None, code_execution: protos.CodeExecution | None = None, ): # The main path doesn't use this but is seems useful. - if function_declarations: + if function_declarations is not None: self._function_declarations = [ _make_function_declaration(f) for f in function_declarations ] @@ -674,8 +727,14 @@ def __init__( self._function_declarations = [] self._index = {} + if google_search_retrieval is not None: + self._google_search_retrieval = _make_google_search_retrieval(google_search_retrieval) + else: + self._google_search_retrieval = None + self._proto = protos.Tool( function_declarations=[_encode_fd(fd) for fd in self._function_declarations], + google_search_retrieval=self._google_search_retrieval, code_execution=code_execution, ) @@ -683,6 +742,10 @@ def __init__( def function_declarations(self) -> list[FunctionDeclaration | protos.FunctionDeclaration]: return self._function_declarations + @property + def google_search_retrieval(self) -> protos.GoogleSearchRetrieval: + return self._google_search_retrieval + @property def code_execution(self) -> protos.CodeExecution: return self._proto.code_execution @@ -711,7 +774,7 @@ class ToolDict(TypedDict): ToolType = Union[ - Tool, protos.Tool, ToolDict, Iterable[FunctionDeclarationType], FunctionDeclarationType + str, Tool, protos.Tool, ToolDict,
Iterable[FunctionDeclarationType], FunctionDeclarationType ] @@ -723,9 +786,23 @@ def _make_tool(tool: ToolType) -> Tool: code_execution = tool.code_execution else: code_execution = None - return Tool(function_declarations=tool.function_declarations, code_execution=code_execution) + + if "google_search_retrieval" in tool: + google_search_retrieval = tool.google_search_retrieval + else: + google_search_retrieval = None + + return Tool( + function_declarations=tool.function_declarations, + google_search_retrieval=google_search_retrieval, + code_execution=code_execution, + ) elif isinstance(tool, dict): - if "function_declarations" in tool or "code_execution" in tool: + if ( + "function_declarations" in tool + or "google_search_retrieval" in tool + or "code_execution" in tool + ): return Tool(**tool) else: fd = tool @@ -733,10 +810,17 @@ def _make_tool(tool: ToolType) -> Tool: elif isinstance(tool, str): if tool.lower() == "code_execution": return Tool(code_execution=protos.CodeExecution()) + # Check to see if one of the mode enums matches + elif tool.lower() == "google_search_retrieval": + return Tool(google_search_retrieval=protos.GoogleSearchRetrieval()) else: - raise ValueError("The only string that can be passed as a tool is 'code_execution'.") + raise ValueError( + "The only string that can be passed as a tool is 'code_execution', or one of the specified values for the `mode` parameter for google_search_retrieval." 
+ ) elif isinstance(tool, protos.CodeExecution): return Tool(code_execution=tool) + elif isinstance(tool, protos.GoogleSearchRetrieval): + return Tool(google_search_retrieval=tool) elif isinstance(tool, Iterable): return Tool(function_declarations=tool) else: @@ -792,7 +876,7 @@ def to_proto(self): def _make_tools(tools: ToolsType) -> list[Tool]: if isinstance(tools, str): - if tools.lower() == "code_execution": + if tools.lower() == "code_execution" or tools.lower() == "google_search_retrieval": return [_make_tool(tools)] else: raise ValueError("The only string that can be passed as a tool is 'code_execution' or 'google_search_retrieval'.") diff --git a/google/generativeai/types/generation_types.py b/google/generativeai/types/generation_types.py index 8bd0a7736..8602d69a4 100644 --- a/google/generativeai/types/generation_types.py +++ b/google/generativeai/types/generation_types.py @@ -473,8 +473,6 @@ def text(self): "`Part`, but none were returned. The candidate's " f"[finish_reason](https://ai.google.dev/api/generate-content#finishreason) is {fr}." ) - if candidate.finish_message: - msg += 'The `finish_message` is "{candidate.finish_message}".' if fr is FinishReason.FINISH_REASON_UNSPECIFIED: raise ValueError(msg) diff --git a/google/generativeai/version.py b/google/generativeai/version.py index f6bf29d13..c612b5034 100644 --- a/google/generativeai/version.py +++ b/google/generativeai/version.py @@ -14,4 +14,4 @@ # limitations under the License. from __future__ import annotations -__version__ = "0.8.2" +__version__ = "0.8.3" diff --git a/samples/files.py b/samples/files.py index cbed68a1e..8f98365aa 100644 --- a/samples/files.py +++ b/samples/files.py @@ -83,6 +83,18 @@ def test_files_create_pdf(self): print(response.text) # [END files_create_pdf] + def test_files_create_from_IO(self): + # [START files_create_io] + # You can pass a file-like object, instead of a path. + # Useful for streaming.
+ model = genai.GenerativeModel("gemini-1.5-flash") + fpath = media / "test.pdf" + with open(fpath, "rb") as f: + sample_pdf = genai.upload_file(f, mime_type="application/pdf") + response = model.generate_content(["Give me a summary of this pdf file.", sample_pdf]) + print(response.text) + # [END files_create_io] + def test_files_list(self): # [START files_list] print("My files:") diff --git a/samples/rest/tuned_models.sh b/samples/rest/tuned_models.sh index 1e105377e..9b652febd 100644 --- a/samples/rest/tuned_models.sh +++ b/samples/rest/tuned_models.sh @@ -1,14 +1,9 @@ set -eu -access_token=$(gcloud auth application-default print-access-token) - - echo "[START tuned_models_create]" # [START tuned_models_create] -curl -X POST https://generativelanguage.googleapis.com/v1beta/tunedModels \ +curl -X POST "https://generativelanguage.googleapis.com/v1beta/tunedModels?key=$GOOGLE_API_KEY" \ -H 'Content-Type: application/json' \ - -H "Authorization: Bearer ${access_token}" \ - -H "x-goog-user-project: ${project_id}" \ -d ' { "display_name": "number generator model", @@ -82,10 +77,9 @@ tuning_done=false while [[ "$tuning_done" != "true" ]]; do sleep 5 - curl -X GET https://generativelanguage.googleapis.com/v1/${operation} \ + curl -X GET "https://generativelanguage.googleapis.com/v1/${operation}?key=$GOOGLE_API_KEY" \ -H 'Content-Type: application/json' \ - -H "Authorization: Bearer ${access_token}" \ - -H "x-goog-user-project: ${project_id}" 2> /dev/null > tuning_operation.json + 2> /dev/null > tuning_operation.json complete=$(jq .metadata.completedPercent < tuning_operation.json) tput cuu1 @@ -96,10 +90,8 @@ done # Or get the TunedModel and check it's state. The model is ready to use if the state is active. 
modelname=$(cat tunemodel.json | jq ".metadata.tunedModel" | tr -d '"') -curl -X GET https://generativelanguage.googleapis.com/v1beta/${modelname} \ - -H 'Content-Type: application/json' \ - -H "Authorization: Bearer ${access_token}" \ - -H "x-goog-user-project: ${project_id}" > tuned_model.json +curl -X GET https://generativelanguage.googleapis.com/v1beta/${modelname}?key=$GOOGLE_API_KEY \ + -H 'Content-Type: application/json' > tuned_model.json cat tuned_model.json | jq ".state" # [END tuned_models_create] @@ -107,10 +99,8 @@ cat tuned_model.json | jq ".state" echo "[START tuned_models_generate_content]" # [START tuned_models_generate_content] -curl -X POST https://generativelanguage.googleapis.com/v1beta/$modelname:generateContent \ +curl -X POST https://generativelanguage.googleapis.com/v1beta/$modelname:generateContent?key=$GOOGLE_API_KEY \ -H 'Content-Type: application/json' \ - -H "Authorization: Bearer ${access_token}" \ - -H "x-goog-user-project: ${project_id}" \ -d '{ "contents": [{ "parts": [{ @@ -122,10 +112,8 @@ curl -X POST https://generativelanguage.googleapis.com/v1beta/$modelname:generat echo "[START tuned_models_get]" # [START tuned_models_get] -curl -X GET https://generativelanguage.googleapis.com/v1beta/${modelname} \ - -H 'Content-Type: application/json' \ - -H "Authorization: Bearer ${access_token}" \ - -H "x-goog-user-project: ${project_id}" | grep state +curl -X GET https://generativelanguage.googleapis.com/v1beta/${modelname}?key=$GOOGLE_API_KEY \ + -H 'Content-Type: application/json' | grep state # [END tuned_models_get] echo "[START tuned_models_list]" @@ -142,18 +130,14 @@ jq .tunedModels[].name < tuned_models.json page_token=$(jq .nextPageToken < tuned_models.json | tr -d '"') if [[ "$page_token" != "null"" ]]; then -curl -X GET https://generativelanguage.googleapis.com/v1beta/tunedModels?page_size=5\&page_token=${page_token} \ - -H "Content-Type: application/json" \ - -H "Authorization: Bearer ${access_token}" \ - -H 
"x-goog-user-project: ${project_id}" > tuned_models2.json +curl -X GET https://generativelanguage.googleapis.com/v1beta/tunedModels?page_size=5\&page_token=${page_token}\&key=$GOOGLE_API_KEY \ +  -H "Content-Type: application/json" > tuned_models2.json  jq .tunedModels[].name < tuned_models.json fi # [END tuned_models_list]  echo "[START tuned_models_delete]" # [START tuned_models_delete] -curl -X DELETE https://generativelanguage.googleapis.com/v1beta/${modelname} \ -  -H 'Content-Type: application/json' \ -  -H "Authorization: Bearer ${access_token}" \ -  -H "x-goog-user-project: ${project_id}" +curl -X DELETE https://generativelanguage.googleapis.com/v1beta/${modelname}?key=$GOOGLE_API_KEY \ +  -H 'Content-Type: application/json' # [END tuned_models_delete] \ No newline at end of file diff --git a/tests/test_content.py b/tests/test_content.py index dc62e997b..2031e40ae 100644 --- a/tests/test_content.py +++ b/tests/test_content.py @@ -83,9 +83,20 @@ class HasEnum:   class UnitTests(parameterized.TestCase): + @parameterized.named_parameters( - ["PIL", PIL.Image.open(TEST_PNG_PATH)], ["RGBA", PIL.Image.fromarray(np.zeros([6, 6, 4], dtype=np.uint8))], + ["RGB", PIL.Image.fromarray(np.zeros([6, 6, 3], dtype=np.uint8))], + ["P", PIL.Image.fromarray(np.zeros([6, 6, 3], dtype=np.uint8)).convert("P")], + ) + def test_numpy_to_blob(self, image): + blob = content_types.image_to_blob(image) + self.assertIsInstance(blob, protos.Blob) + self.assertEqual(blob.mime_type, "image/webp") + self.assertStartsWith(blob.data, b"RIFF \x00\x00\x00WEBPVP8L") + + @parameterized.named_parameters( + ["PIL", PIL.Image.open(TEST_PNG_PATH)], ["IPython", IPython.display.Image(filename=TEST_PNG_PATH)], ) def test_png_to_blob(self, image): @@ -96,7 +107,6 @@ def test_png_to_blob(self, image):  @parameterized.named_parameters( ["PIL", PIL.Image.open(TEST_JPG_PATH)], - ["RGB", PIL.Image.fromarray(np.zeros([6, 6, 3], dtype=np.uint8))], ["IPython", IPython.display.Image(filename=TEST_JPG_PATH)], ) def
test_jpg_to_blob(self, image): @@ -107,7 +117,6 @@ def test_jpg_to_blob(self, image): @parameterized.named_parameters( ["PIL", PIL.Image.open(TEST_GIF_PATH)], - ["P", PIL.Image.fromarray(np.zeros([6, 6, 3], dtype=np.uint8)).convert("P")], ["IPython", IPython.display.Image(filename=TEST_GIF_PATH)], ) def test_gif_to_blob(self, image): @@ -426,12 +435,78 @@ def no_args(): ["empty_dictionary_list", [{"code_execution": {}}]], ) def test_code_execution(self, tools): - if isinstance(tools, Iterable): - t = content_types._make_tools(tools) - self.assertIsInstance(t[0].code_execution, protos.CodeExecution) - else: - t = content_types._make_tool(tools) # Pass code execution into tools - self.assertIsInstance(t.code_execution, protos.CodeExecution) + t = content_types._make_tools(tools) + self.assertIsInstance(t[0].code_execution, protos.CodeExecution) + + @parameterized.named_parameters( + ["string", "google_search_retrieval"], + ["empty_dictionary", {"google_search_retrieval": {}}], + [ + "empty_dictionary_with_dynamic_retrieval_config", + {"google_search_retrieval": {"dynamic_retrieval_config": {}}}, + ], + [ + "dictionary_with_mode_integer", + {"google_search_retrieval": {"dynamic_retrieval_config": {"mode": 0}}}, + ], + [ + "dictionary_with_mode_string", + {"google_search_retrieval": {"dynamic_retrieval_config": {"mode": "DYNAMIC"}}}, + ], + [ + "dictionary_with_dynamic_retrieval_config", + { + "google_search_retrieval": { + "dynamic_retrieval_config": {"mode": "unspecified", "dynamic_threshold": 0.5} + } + }, + ], + [ + "proto_object", + protos.GoogleSearchRetrieval( + dynamic_retrieval_config=protos.DynamicRetrievalConfig( + mode="MODE_UNSPECIFIED", dynamic_threshold=0.5 + ) + ), + ], + [ + "proto_passed_in", + protos.Tool( + google_search_retrieval=protos.GoogleSearchRetrieval( + dynamic_retrieval_config=protos.DynamicRetrievalConfig( + mode="MODE_UNSPECIFIED", dynamic_threshold=0.5 + ) + ) + ), + ], + [ + "proto_object_list", + [ + protos.GoogleSearchRetrieval( + 
dynamic_retrieval_config=protos.DynamicRetrievalConfig( + mode="MODE_UNSPECIFIED", dynamic_threshold=0.5 + ) + ) + ], + ], + [ + "proto_passed_in_list", + [ + protos.Tool( + google_search_retrieval=protos.GoogleSearchRetrieval( + dynamic_retrieval_config=protos.DynamicRetrievalConfig( + mode="MODE_UNSPECIFIED", dynamic_threshold=0.5 + ) + ) + ) + ], + ], + ) + def test_search_grounding(self, tools): + if self._testMethodName == "test_search_grounding_empty_dictionary": + pass + t = content_types._make_tools(tools) + self.assertIsInstance(t[0].google_search_retrieval, protos.GoogleSearchRetrieval) def test_two_fun_is_one_tool(self): def a(): diff --git a/tests/test_files.py b/tests/test_files.py index cb48316bd..0f7ca5707 100644 --- a/tests/test_files.py +++ b/tests/test_files.py @@ -18,6 +18,7 @@ import collections import datetime +import io import os from typing import Iterable, Sequence import pathlib @@ -38,7 +39,7 @@ def __init__(self, test): def create_file( self, - path: str | pathlib.Path | os.PathLike, + path: str | io.IOBase | os.PathLike, *, mime_type: str | None = None, name: str | None = None, @@ -102,12 +103,13 @@ def test_video_metadata(self): protos.File( uri="https://test", state="ACTIVE", + mime_type="video/quicktime", video_metadata=dict(video_duration=datetime.timedelta(seconds=30)), error=dict(code=7, message="ok?"), ) ) - f = genai.upload_file(path="dummy") + f = genai.upload_file(path="dummy.mov") self.assertEqual(google.rpc.status_pb2.Status(code=7, message="ok?"), f.error) self.assertEqual( protos.VideoMetadata(dict(video_duration=datetime.timedelta(seconds=30))),