Thanks for visiting codestin.com
Credit goes to github.com

Skip to content

Commit 9b1a539

Browse files
authored
feat: add code sample and test for concatenating two input videos (GoogleCloudPlatform#7197)
Fixes 197546828
1 parent e931200 commit 9b1a539

File tree

4 files changed

+263
-20
lines changed

4 files changed

+263
-20
lines changed

media/testdata/ChromeCast.mp4

-2.69 MB
Binary file not shown.

media/testdata/overlay.jpg

-23.2 KB
Binary file not shown.
Lines changed: 203 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,203 @@
1+
#!/usr/bin/env python
2+
3+
# Copyright 2022 Google LLC
4+
#
5+
# Licensed under the Apache License, Version 2.0 (the "License");
6+
# you may not use this file except in compliance with the License.
7+
# You may obtain a copy of the License at
8+
#
9+
# http://www.apache.org/licenses/LICENSE-2.0
10+
#
11+
# Unless required by applicable law or agreed to in writing, software
12+
# distributed under the License is distributed on an "AS IS" BASIS,
13+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14+
# See the License for the specific language governing permissions and
15+
# limitations under the License.
16+
17+
"""Google Cloud Transcoder sample for creating a job based on concatenating two input videos.
18+
19+
Example usage:
20+
python create_job_with_concatenated_inputs.py --project_id <project-id> --location <location> \
21+
--input1_uri <uri> --start_time_input1 <sec> --end_time_input1 <sec> \
22+
--input2_uri <uri> --start_time_input2 <sec> --end_time_input2 <sec> \
23+
--output_uri <uri>
24+
"""
25+
26+
# [START transcoder_create_job_with_concatenated_inputs]
27+
28+
import argparse
29+
30+
from google.cloud.video import transcoder_v1
31+
from google.cloud.video.transcoder_v1.services.transcoder_service import (
32+
TranscoderServiceClient,
33+
)
34+
from google.protobuf import duration_pb2 as duration
35+
36+
37+
def create_job_with_concatenated_inputs(
    project_id,
    location,
    input1_uri,
    start_time_input1,
    end_time_input1,
    input2_uri,
    start_time_input2,
    end_time_input2,
    output_uri,
):
    """Create a Transcoder job that concatenates two trimmed input videos.

    The ad-hoc job configuration registers both inputs, trims each one with
    an edit-list atom, and muxes a single 640x360 H.264/AAC MP4 output.

    Args:
        project_id (str): The GCP project ID.
        location (str): The location to start the job in.
        input1_uri (str): Uri of the first video in the Cloud Storage bucket.
        start_time_input1 (str): Start time, in fractional seconds ending in 's'
            (e.g., '0s'), relative to the first input video timeline.
        end_time_input1 (str): End time, in fractional seconds ending in 's'
            (e.g., '8.1s'), relative to the first input video timeline.
        input2_uri (str): Uri of the second video in the Cloud Storage bucket.
        start_time_input2 (str): Start time, in fractional seconds ending in 's'
            (e.g., '3.5s'), relative to the second input video timeline.
        end_time_input2 (str): End time, in fractional seconds ending in 's'
            (e.g., '15s'), relative to the second input video timeline.
        output_uri (str): Uri of the video output folder in the Cloud Storage
            bucket.

    Returns:
        The job resource created by the Transcoder API.
    """

    def _parse_offset(value):
        # Strings like '0s' / '8.1s' parse directly as protobuf JSON durations.
        offset = duration.Duration()
        offset.FromJsonString(value)
        return offset

    trim1_start = _parse_offset(start_time_input1)
    trim1_end = _parse_offset(end_time_input1)
    trim2_start = _parse_offset(start_time_input2)
    trim2_end = _parse_offset(end_time_input2)

    job_config = transcoder_v1.types.JobConfig(
        inputs=[
            transcoder_v1.types.Input(key="input1", uri=input1_uri),
            transcoder_v1.types.Input(key="input2", uri=input2_uri),
        ],
        # Atoms are rendered in list order: trimmed input1, then trimmed input2.
        edit_list=[
            transcoder_v1.types.EditAtom(
                key="atom1",
                inputs=["input1"],
                start_time_offset=trim1_start,
                end_time_offset=trim1_end,
            ),
            transcoder_v1.types.EditAtom(
                key="atom2",
                inputs=["input2"],
                start_time_offset=trim2_start,
                end_time_offset=trim2_end,
            ),
        ],
        elementary_streams=[
            transcoder_v1.types.ElementaryStream(
                key="video-stream0",
                video_stream=transcoder_v1.types.VideoStream(
                    h264=transcoder_v1.types.VideoStream.H264CodecSettings(
                        height_pixels=360,
                        width_pixels=640,
                        bitrate_bps=550000,
                        frame_rate=60,
                    ),
                ),
            ),
            transcoder_v1.types.ElementaryStream(
                key="audio-stream0",
                audio_stream=transcoder_v1.types.AudioStream(
                    codec="aac", bitrate_bps=64000
                ),
            ),
        ],
        mux_streams=[
            transcoder_v1.types.MuxStream(
                key="sd",
                container="mp4",
                elementary_streams=["video-stream0", "audio-stream0"],
            ),
        ],
    )

    job = transcoder_v1.types.Job()
    job.output_uri = output_uri
    job.config = job_config

    client = TranscoderServiceClient()
    parent = f"projects/{project_id}/locations/{location}"
    response = client.create_job(parent=parent, job=job)
    print(f"Job: {response.name}")
    return response
137+
138+
# [END transcoder_create_job_with_concatenated_inputs]
139+
140+
if __name__ == "__main__":
    # CLI wrapper around create_job_with_concatenated_inputs().
    parser = argparse.ArgumentParser()
    parser.add_argument("--project_id", help="Your Cloud project ID.", required=True)
    parser.add_argument(
        "--location",
        default="us-central1",
        help="The location to start this job in.",
    )
    parser.add_argument(
        "--input1_uri",
        required=True,
        help="Uri of the first video in the Cloud Storage bucket.",
    )
    parser.add_argument(
        "--start_time_input1",
        required=True,
        help=(
            "Start time, in fractional seconds ending in 's' (e.g., '1.1s'), "
            "relative to the first input video timeline. Use this field to trim "
            "content from the beginning of the first video."
        ),
    )
    parser.add_argument(
        "--end_time_input1",
        required=True,
        help=(
            "End time, in fractional seconds ending in 's' (e.g., '9.5s'), "
            "relative to the first input video timeline. Use this field to trim "
            "content from the end of the first video."
        ),
    )
    parser.add_argument(
        "--input2_uri",
        required=True,
        help="Uri of the second video in the Cloud Storage bucket.",
    )
    parser.add_argument(
        "--start_time_input2",
        required=True,
        help=(
            "Start time, in fractional seconds ending in 's' (e.g., '1.1s'), "
            "relative to the second input video timeline. Use this field to trim "
            "content from the beginning of the second video."
        ),
    )
    parser.add_argument(
        "--end_time_input2",
        required=True,
        help=(
            "End time, in fractional seconds ending in 's' (e.g., '9.5s'), "
            "relative to the second input video timeline. Use this field to trim "
            "content from the end of the second video."
        ),
    )
    parser.add_argument(
        "--output_uri",
        required=True,
        help=(
            "Uri of the video output folder in the Cloud Storage bucket. "
            "Must end in '/'."
        ),
    )
    args = parser.parse_args()
    create_job_with_concatenated_inputs(
        args.project_id,
        args.location,
        args.input1_uri,
        args.start_time_input1,
        args.end_time_input1,
        args.input2_uri,
        args.start_time_input2,
        args.end_time_input2,
        args.output_uri,
    )

media/transcoder/job_test.py

Lines changed: 60 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,7 @@
2626
import create_job_from_template
2727
import create_job_template
2828
import create_job_with_animated_overlay
29+
import create_job_with_concatenated_inputs
2930
import create_job_with_periodic_images_spritesheet
3031
import create_job_with_set_number_images_spritesheet
3132
import create_job_with_static_overlay
@@ -40,47 +41,46 @@
4041
project_number = os.environ["GOOGLE_CLOUD_PROJECT_NUMBER"]
4142
template_id = f"my-python-test-template-{uuid.uuid4()}"
4243

43-
bucket_name = f"python-samples-transcoder-{uuid.uuid4()}"
44+
input_bucket_name = "cloud-samples-data/media/"
45+
output_bucket_name = f"python-samples-transcoder-{uuid.uuid4()}"
4446
test_video_file_name = "ChromeCast.mp4"
4547
test_overlay_image_file_name = "overlay.jpg"
46-
input_uri = "gs://" + bucket_name + "/" + test_video_file_name
47-
overlay_image_uri = "gs://" + bucket_name + "/" + test_overlay_image_file_name
48-
output_uri_for_preset = "gs://" + bucket_name + "/test-output-preset/"
49-
output_uri_for_template = "gs://" + bucket_name + "/test-output-template/"
50-
output_uri_for_adhoc = "gs://" + bucket_name + "/test-output-adhoc/"
51-
output_uri_for_static_overlay = "gs://" + bucket_name + "/test-output-static-overlay/"
48+
test_concat1_file_name = "ForBiggerEscapes.mp4"
49+
test_concat2_file_name = "ForBiggerJoyrides.mp4"
50+
51+
input_uri = f"gs://{input_bucket_name}{test_video_file_name}"
52+
overlay_image_uri = f"gs://{input_bucket_name}{test_overlay_image_file_name}"
53+
concat1_uri = f"gs://{input_bucket_name}{test_concat1_file_name}"
54+
concat2_uri = f"gs://{input_bucket_name}{test_concat2_file_name}"
55+
output_uri_for_preset = f"gs://{output_bucket_name}/test-output-preset/"
56+
output_uri_for_template = f"gs://{output_bucket_name}/test-output-template/"
57+
output_uri_for_adhoc = f"gs://{output_bucket_name}/test-output-adhoc/"
58+
output_uri_for_static_overlay = f"gs://{output_bucket_name}/test-output-static-overlay/"
5259
output_uri_for_animated_overlay = (
53-
"gs://" + bucket_name + "/test-output-animated-overlay/"
60+
f"gs://{output_bucket_name}/test-output-animated-overlay/"
5461
)
5562
small_spritesheet_file_prefix = "small-sprite-sheet"
5663
large_spritesheet_file_prefix = "large-sprite-sheet"
5764
spritesheet_file_suffix = "0000000000.jpeg"
5865

5966
output_dir_for_set_number_spritesheet = "test-output-set-number-spritesheet/"
6067
output_uri_for_set_number_spritesheet = (
61-
"gs://" + bucket_name + "/" + output_dir_for_set_number_spritesheet
68+
f"gs://{output_bucket_name}/{output_dir_for_set_number_spritesheet}"
6269
)
6370
output_dir_for_periodic_spritesheet = "test-output-periodic-spritesheet/"
6471
output_uri_for_periodic_spritesheet = (
65-
"gs://" + bucket_name + "/" + output_dir_for_periodic_spritesheet
72+
f"gs://{output_bucket_name}/{output_dir_for_periodic_spritesheet}"
6673
)
74+
output_uri_for_concat = f"gs://{output_bucket_name}/test-output-concat/"
75+
6776
preset = "preset/web-hd"
6877
job_succeeded_state = "ProcessingState.SUCCEEDED"
69-
test_data = os.path.join(os.path.dirname(__file__), "..", "testdata")
70-
test_file = os.path.join(test_data, test_video_file_name)
71-
test_overlay_file = os.path.join(test_data, test_overlay_image_file_name)
7278

7379

7480
@pytest.fixture(scope="module")
def test_bucket():
    """Yield a module-scoped Cloud Storage bucket for transcoder job outputs."""
    client = storage.Client()
    bucket = client.create_bucket(output_bucket_name)

    yield bucket
    # force=True also removes any objects the tests wrote into the bucket.
    bucket.delete(force=True)
@@ -359,6 +359,46 @@ def test_create_job_with_periodic_spritesheet(capsys, test_bucket):
359359
assert "Deleted job" in out
360360

361361

362+
def test_create_job_with_concatenated_inputs(capsys, test_bucket):
    """End-to-end check of the concatenated-inputs sample: create, get, poll, list, delete."""
    create_job_with_concatenated_inputs.create_job_with_concatenated_inputs(
        project_id,
        location,
        concat1_uri,
        "0s",
        "8.1s",
        concat2_uri,
        "3.5s",
        "15s",
        output_uri_for_concat,
    )
    out, _ = capsys.readouterr()
    job_name_prefix = f"projects/{project_number}/locations/{location}/jobs/"
    assert job_name_prefix in out

    # The sample prints the full resource name; the job ID is its last segment.
    job_id = out.split("/")[-1].rstrip("\n")
    job_name = f"projects/{project_number}/locations/{location}/jobs/{job_id}"
    assert job_name in out

    get_job.get_job(project_id, location, job_id)
    out, _ = capsys.readouterr()
    assert job_name in out

    # Transcoding jobs need time to complete. Once the job completes, check the job state.
    time.sleep(30)

    _assert_job_state_succeeded(capsys, job_id)

    list_jobs.list_jobs(project_id, location)
    out, _ = capsys.readouterr()
    assert job_name in out

    delete_job.delete_job(project_id, location, job_id)
    out, _ = capsys.readouterr()
    assert "Deleted job" in out
400+
401+
362402
# Retrying up to 10 mins.
363403
@backoff.on_exception(backoff.expo, AssertionError, max_time=600)
364404
def _assert_job_state_succeeded(capsys, job_id):

0 commit comments

Comments
 (0)