This page explains how to generate a spritesheet that contains frames from a
video you transcode. These reduced-size frames, also known as thumbnails, are
useful for organizing and previewing content. To generate the spritesheet, use
the spriteSheets array in the JobConfig template.
You have two options for generating the spritesheet: generate a set number of
images from the video, or generate an image periodically at a set time interval.
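To see the difference at a glance, here is a minimal sketch of the two kinds of
spriteSheets entries as they might appear in a JobConfig. The field names follow
the Transcoder API v1 REST reference and mirror the code samples later on this
page; the first entry requests a set number of images (100 in a 10x10 grid), and
the second requests an image every 7 seconds. The two entries are shown together
only for comparison; the examples on this page configure one mode at a time.

{
  "spriteSheets": [
    {
      "filePrefix": "small-sprite-sheet",
      "spriteWidthPixels": 64,
      "spriteHeightPixels": 32,
      "columnCount": 10,
      "rowCount": 10,
      "totalCount": 100
    },
    {
      "filePrefix": "large-sprite-sheet",
      "spriteWidthPixels": 128,
      "spriteHeightPixels": 72,
      "interval": "7s"
    }
  ]
}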
Generate a set number of images
The following configuration generates a spritesheet of small images and a
spritesheet of large images. Each spritesheet contains 100 thumbnails generated
from the input video.
Before using any of the request data, make the following replacements:
PROJECT_ID: Your Google Cloud project ID listed in
the IAM Settings.
LOCATION: The location where your job will run. Use
one of the supported regions:
us-central1
us-west1
us-west2
us-east1
us-east4
southamerica-east1
asia-east1
asia-south1
asia-southeast1
europe-west1
europe-west2
europe-west4
STORAGE_BUCKET_NAME: The name of the Cloud Storage
bucket you created.
STORAGE_INPUT_VIDEO: The name of the video in your
Cloud Storage bucket that you are transcoding, such as my-vid.mp4.
This field should take into account any folders that you created in the
bucket (for example, input/my-vid.mp4).
STORAGE_OUTPUT_FOLDER: The Cloud Storage
folder name where you want to save the encoded video outputs.
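With those replacements made, the job is created by sending the JSON request
body (see the request.json sketch after the gcloud replacements below) to the
jobs.create endpoint. A sketch of the call, assuming the standard v1 endpoint;
the curl flags here are illustrative:

curl -X POST \
  -H "Authorization: Bearer $(gcloud auth print-access-token)" \
  -H "Content-Type: application/json; charset=utf-8" \
  -d @request.json \
  "https://transcoder.googleapis.com/v1/projects/PROJECT_ID/locations/LOCATION/jobs"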
Create a request.json file that defines the job fields. Make the following
replacements for the gcloud command:
STORAGE_BUCKET_NAME: The
name of the Cloud Storage bucket you created.
STORAGE_INPUT_VIDEO: The
name of the video in your Cloud Storage bucket that you are
transcoding, such as my-vid.mp4. This field should take into
account any folders that you created in the bucket (for example,
input/my-vid.mp4).
LOCATION: The location where
your job will run. Use a location from the following list:
us-central1
us-west1
us-west2
us-east1
us-east4
southamerica-east1
asia-east1
asia-south1
asia-southeast1
europe-west1
europe-west2
europe-west4
STORAGE_OUTPUT_FOLDER: The
Cloud Storage folder name where you want to save the encoded video
outputs.
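A sketch of what request.json might contain for this configuration, assuming
the REST fields mirror the proto fields used in the code samples that follow
(replace the placeholders as described above):

{
  "config": {
    "inputs": [
      {
        "key": "input0",
        "uri": "gs://STORAGE_BUCKET_NAME/STORAGE_INPUT_VIDEO"
      }
    ],
    "output": {
      "uri": "gs://STORAGE_BUCKET_NAME/STORAGE_OUTPUT_FOLDER/"
    },
    "elementaryStreams": [
      {
        "key": "video-stream0",
        "videoStream": {
          "h264": {
            "heightPixels": 360,
            "widthPixels": 640,
            "bitrateBps": 550000,
            "frameRate": 60
          }
        }
      },
      {
        "key": "audio-stream0",
        "audioStream": {
          "codec": "aac",
          "bitrateBps": 64000
        }
      }
    ],
    "muxStreams": [
      {
        "key": "sd",
        "container": "mp4",
        "elementaryStreams": ["video-stream0", "audio-stream0"]
      }
    ],
    "spriteSheets": [
      {
        "filePrefix": "small-sprite-sheet",
        "spriteWidthPixels": 64,
        "spriteHeightPixels": 32,
        "columnCount": 10,
        "rowCount": 10,
        "totalCount": 100
      },
      {
        "filePrefix": "large-sprite-sheet",
        "spriteWidthPixels": 128,
        "spriteHeightPixels": 72,
        "columnCount": 10,
        "rowCount": 10,
        "totalCount": 100
      }
    ]
  }
}

The job can then typically be created with gcloud transcoder jobs create
--location=LOCATION --file="request.json"; the flag names here are assumed from
the gcloud transcoder command group rather than quoted from this page.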
using Google.Api.Gax.ResourceNames;
using Google.Cloud.Video.Transcoder.V1;
public class CreateJobWithSetNumberImagesSpritesheetSample
{
public const string SmallSpritesheetFilePrefix = "small-sprite-sheet";
public const string LargeSpritesheetFilePrefix = "large-sprite-sheet";
public Job CreateJobWithSetNumberImagesSpritesheet(
string projectId, string location, string inputUri, string outputUri)
{
// Create the client.
TranscoderServiceClient client = TranscoderServiceClient.Create();
// Build the parent location name.
LocationName parent = new LocationName(projectId, location);
// Build the job config.
VideoStream videoStream0 = new VideoStream
{
H264 = new VideoStream.Types.H264CodecSettings
{
BitrateBps = 550000,
FrameRate = 60,
HeightPixels = 360,
WidthPixels = 640
}
};
AudioStream audioStream0 = new AudioStream
{
Codec = "aac",
BitrateBps = 64000
};
// Generates a 10x10 spritesheet of small images from the input video.
// To preserve the source aspect ratio, you should set the
// SpriteWidthPixels field or the SpriteHeightPixels field, but not
// both (the API will automatically calculate the missing field). For
// this sample, we don't care about the aspect ratio so we set both
// fields.
SpriteSheet smallSpriteSheet = new SpriteSheet
{
FilePrefix = SmallSpritesheetFilePrefix,
SpriteHeightPixels = 32,
SpriteWidthPixels = 64,
ColumnCount = 10,
RowCount = 10,
TotalCount = 100
};
        // Generates a 10x10 spritesheet of larger images from the input video.
        // To preserve the source aspect ratio, you should set the
// SpriteWidthPixels field or the SpriteHeightPixels field, but not
// both (the API will automatically calculate the missing field). For
// this sample, we don't care about the aspect ratio so we set both
// fields.
SpriteSheet largeSpriteSheet = new SpriteSheet
{
FilePrefix = LargeSpritesheetFilePrefix,
SpriteHeightPixels = 72,
SpriteWidthPixels = 128,
ColumnCount = 10,
RowCount = 10,
TotalCount = 100
};
ElementaryStream elementaryStream0 = new ElementaryStream
{
Key = "video_stream0",
VideoStream = videoStream0
};
ElementaryStream elementaryStream1 = new ElementaryStream
{
Key = "audio_stream0",
AudioStream = audioStream0
};
MuxStream muxStream0 = new MuxStream
{
Key = "sd",
Container = "mp4",
ElementaryStreams = { "video_stream0", "audio_stream0" }
};
Input input = new Input
{
Key = "input0",
Uri = inputUri
};
Output output = new Output
{
Uri = outputUri
};
JobConfig jobConfig = new JobConfig
{
Inputs = { input },
Output = output,
ElementaryStreams = { elementaryStream0, elementaryStream1 },
MuxStreams = { muxStream0 },
SpriteSheets = { smallSpriteSheet, largeSpriteSheet }
};
// Build the job.
Job newJob = new Job();
newJob.InputUri = inputUri;
newJob.OutputUri = outputUri;
newJob.Config = jobConfig;
// Call the API.
Job job = client.CreateJob(parent, newJob);
// Return the result.
return job;
}
}
import com.google.cloud.video.transcoder.v1.AudioStream;
import com.google.cloud.video.transcoder.v1.CreateJobRequest;
import com.google.cloud.video.transcoder.v1.ElementaryStream;
import com.google.cloud.video.transcoder.v1.Input;
import com.google.cloud.video.transcoder.v1.Job;
import com.google.cloud.video.transcoder.v1.JobConfig;
import com.google.cloud.video.transcoder.v1.LocationName;
import com.google.cloud.video.transcoder.v1.MuxStream;
import com.google.cloud.video.transcoder.v1.Output;
import com.google.cloud.video.transcoder.v1.SpriteSheet;
import com.google.cloud.video.transcoder.v1.TranscoderServiceClient;
import com.google.cloud.video.transcoder.v1.VideoStream;
import java.io.IOException;
public class CreateJobWithSetNumberImagesSpritesheet {
public static final String smallSpritesheetFilePrefix = "small-sprite-sheet";
public static final String largeSpritesheetFilePrefix = "large-sprite-sheet";
public static final String spritesheetFileSuffix = "0000000000.jpeg";
public static void main(String[] args) throws IOException {
// TODO(developer): Replace these variables before running the sample.
String projectId = "my-project-id";
String location = "us-central1";
String inputUri = "gs://my-bucket/my-video-file";
String outputUri = "gs://my-bucket/my-output-folder/";
createJobWithSetNumberImagesSpritesheet(projectId, location, inputUri, outputUri);
}
// Creates a job from an ad-hoc configuration and generates two spritesheets from the input video.
// Each spritesheet contains a set number of images.
public static void createJobWithSetNumberImagesSpritesheet(
String projectId, String location, String inputUri, String outputUri) throws IOException {
// Initialize client that will be used to send requests. This client only needs to be created
// once, and can be reused for multiple requests.
try (TranscoderServiceClient transcoderServiceClient = TranscoderServiceClient.create()) {
VideoStream videoStream0 =
VideoStream.newBuilder()
.setH264(
VideoStream.H264CodecSettings.newBuilder()
.setBitrateBps(550000)
.setFrameRate(60)
.setHeightPixels(360)
.setWidthPixels(640))
.build();
AudioStream audioStream0 =
AudioStream.newBuilder().setCodec("aac").setBitrateBps(64000).build();
// Generates a 10x10 spritesheet of small images from the input video. To preserve the source
// aspect ratio, you should set the spriteWidthPixels field or the spriteHeightPixels
// field, but not both.
SpriteSheet smallSpriteSheet =
SpriteSheet.newBuilder()
.setFilePrefix(smallSpritesheetFilePrefix)
.setSpriteHeightPixels(32)
.setSpriteWidthPixels(64)
.setColumnCount(10)
.setRowCount(10)
.setTotalCount(100)
.build();
// Generates a 10x10 spritesheet of larger images from the input video.
SpriteSheet largeSpriteSheet =
SpriteSheet.newBuilder()
.setFilePrefix(largeSpritesheetFilePrefix)
.setSpriteHeightPixels(72)
.setSpriteWidthPixels(128)
.setColumnCount(10)
.setRowCount(10)
.setTotalCount(100)
.build();
JobConfig config =
JobConfig.newBuilder()
.addInputs(Input.newBuilder().setKey("input0").setUri(inputUri))
.setOutput(Output.newBuilder().setUri(outputUri))
.addElementaryStreams(
ElementaryStream.newBuilder()
.setKey("video_stream0")
.setVideoStream(videoStream0))
.addElementaryStreams(
ElementaryStream.newBuilder()
.setKey("audio_stream0")
.setAudioStream(audioStream0))
.addMuxStreams(
MuxStream.newBuilder()
.setKey("sd")
.setContainer("mp4")
.addElementaryStreams("video_stream0")
.addElementaryStreams("audio_stream0")
.build())
.addSpriteSheets(smallSpriteSheet) // Add the spritesheet config to the job config
.addSpriteSheets(largeSpriteSheet) // Add the spritesheet config to the job config
.build();
var createJobRequest =
CreateJobRequest.newBuilder()
.setJob(
Job.newBuilder()
.setInputUri(inputUri)
.setOutputUri(outputUri)
.setConfig(config)
.build())
.setParent(LocationName.of(projectId, location).toString())
.build();
// Send the job creation request and process the response.
Job job = transcoderServiceClient.createJob(createJobRequest);
System.out.println("Job: " + job.getName());
}
}
}
use Google\Cloud\Video\Transcoder\V1\AudioStream;
use Google\Cloud\Video\Transcoder\V1\ElementaryStream;
use Google\Cloud\Video\Transcoder\V1\Job;
use Google\Cloud\Video\Transcoder\V1\JobConfig;
use Google\Cloud\Video\Transcoder\V1\MuxStream;
use Google\Cloud\Video\Transcoder\V1\SpriteSheet;
use Google\Cloud\Video\Transcoder\V1\TranscoderServiceClient;
use Google\Cloud\Video\Transcoder\V1\VideoStream;
/**
* Creates a job that generates two spritesheets from the input video. Each
* spritesheet contains a set number of images.
*
* @param string $projectId The ID of your Google Cloud Platform project.
* @param string $location The location of the job.
* @param string $inputUri Uri of the video in the Cloud Storage bucket.
* @param string $outputUri Uri of the video output folder in the Cloud Storage bucket.
*/
function create_job_with_set_number_images_spritesheet($projectId, $location, $inputUri, $outputUri)
{
// Instantiate a client.
$transcoderServiceClient = new TranscoderServiceClient();
$formattedParent = $transcoderServiceClient->locationName($projectId, $location);
$jobConfig =
(new JobConfig())->setElementaryStreams([
(new ElementaryStream())
->setKey('video-stream0')
->setVideoStream(
(new VideoStream())
->setH264(
(new VideoStream\H264CodecSettings())
->setBitrateBps(550000)
->setFrameRate(60)
->setHeightPixels(360)
->setWidthPixels(640)
)
),
(new ElementaryStream())
->setKey('audio-stream0')
->setAudioStream(
(new AudioStream())
->setCodec('aac')
->setBitrateBps(64000)
)
])->setMuxStreams([
(new MuxStream())
->setKey('sd')
->setContainer('mp4')
->setElementaryStreams(['video-stream0', 'audio-stream0'])
])->setSpriteSheets([
(new SpriteSheet())
->setFilePrefix('small-sprite-sheet')
->setSpriteWidthPixels(64)
->setSpriteHeightPixels(32)
->setColumnCount(10)
->setRowCount(10)
->setTotalCount(100),
(new SpriteSheet())
->setFilePrefix('large-sprite-sheet')
->setSpriteWidthPixels(128)
->setSpriteHeightPixels(72)
->setColumnCount(10)
->setRowCount(10)
->setTotalCount(100)
]);
$job = (new Job())
->setInputUri($inputUri)
->setOutputUri($outputUri)
->setConfig($jobConfig);
$response = $transcoderServiceClient->createJob($formattedParent, $job);
// Print job name.
printf('Job: %s' . PHP_EOL, $response->getName());
}
import argparse
from google.cloud.video import transcoder_v1
from google.cloud.video.transcoder_v1.services.transcoder_service import (
TranscoderServiceClient,
)
def create_job_with_set_number_images_spritesheet(
project_id, location, input_uri, output_uri
):
"""Creates a job based on an ad-hoc job configuration that generates two spritesheets.
Args:
project_id: The GCP project ID.
location: The location to start the job in.
input_uri: Uri of the video in the Cloud Storage bucket.
output_uri: Uri of the video output folder in the Cloud Storage bucket."""
client = TranscoderServiceClient()
parent = f"projects/{project_id}/locations/{location}"
job = transcoder_v1.types.Job()
job.input_uri = input_uri
job.output_uri = output_uri
job.config = transcoder_v1.types.JobConfig(
# Create an ad-hoc job. For more information, see https://cloud.google.com/transcoder/docs/how-to/jobs#create_jobs_ad_hoc.
# See all options for the job config at https://cloud.google.com/transcoder/docs/reference/rest/v1/JobConfig.
elementary_streams=[
# This section defines the output video stream.
transcoder_v1.types.ElementaryStream(
key="video-stream0",
video_stream=transcoder_v1.types.VideoStream(
h264=transcoder_v1.types.VideoStream.H264CodecSettings(
height_pixels=360,
width_pixels=640,
bitrate_bps=550000,
frame_rate=60,
),
),
),
# This section defines the output audio stream.
transcoder_v1.types.ElementaryStream(
key="audio-stream0",
audio_stream=transcoder_v1.types.AudioStream(
codec="aac", bitrate_bps=64000
),
),
],
# This section multiplexes the output audio and video together into a container.
mux_streams=[
transcoder_v1.types.MuxStream(
key="sd",
container="mp4",
elementary_streams=["video-stream0", "audio-stream0"],
),
],
# Generate two sprite sheets from the input video into the GCS bucket. For more information, see
# https://cloud.google.com/transcoder/docs/how-to/generate-spritesheet#generate_set_number_of_images.
sprite_sheets=[
# Generate a 10x10 sprite sheet with 64x32px images.
transcoder_v1.types.SpriteSheet(
file_prefix="small-sprite-sheet",
sprite_width_pixels=64,
sprite_height_pixels=32,
column_count=10,
row_count=10,
total_count=100,
),
# Generate a 10x10 sprite sheet with 128x72px images.
transcoder_v1.types.SpriteSheet(
file_prefix="large-sprite-sheet",
sprite_width_pixels=128,
sprite_height_pixels=72,
column_count=10,
row_count=10,
total_count=100,
),
],
)
response = client.create_job(parent=parent, job=job)
print(f"Job: {response.name}")
return response
Use this configuration on the sample video
to generate the following spritesheets:
Figure 1. Small image spritesheet (100 thumbnails)
Figure 2. Large image spritesheet (100 thumbnails)
Generate a thumbnail image periodically
The following configuration generates a spritesheet of small images
and a spritesheet of large images. Each spritesheet contains
thumbnails that were generated every 7 seconds from the input video.
Before using any of the request data,
make the following replacements:
PROJECT_ID: Your Google Cloud project ID listed in
the IAM Settings.
LOCATION: The location where your job will run. Use
one of the supported regions:
us-central1
us-west1
us-west2
us-east1
us-east4
southamerica-east1
asia-east1
asia-south1
asia-southeast1
europe-west1
europe-west2
europe-west4
STORAGE_BUCKET_NAME: The name of the Cloud Storage
bucket you created.
STORAGE_INPUT_VIDEO: The name of the video in your
Cloud Storage bucket that you are transcoding, such as my-vid.mp4.
This field should take into account any folders that you created in the
bucket (for example, input/my-vid.mp4).
STORAGE_OUTPUT_FOLDER: The Cloud Storage
folder name where you want to save the encoded video outputs.
Create a request.json file that defines the job fields. Make the following
replacements for the gcloud command:
STORAGE_BUCKET_NAME: The
name of the Cloud Storage bucket you created.
STORAGE_INPUT_VIDEO: The
name of the video in your Cloud Storage bucket that you are
transcoding, such as my-vid.mp4. This field should take into
account any folders that you created in the bucket (for example,
input/my-vid.mp4).
LOCATION: The location where
your job will run. Use a location from the following list:
us-central1
us-west1
us-west2
us-east1
us-east4
southamerica-east1
asia-east1
asia-south1
asia-southeast1
europe-west1
europe-west2
europe-west4
STORAGE_OUTPUT_FOLDER: The
Cloud Storage folder name where you want to save the encoded video
outputs.
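For this periodic configuration, request.json differs from the earlier sketch
only in its spriteSheets entries: the columnCount, rowCount, and totalCount
fields are replaced by an interval. A sketch of that portion, again assuming
the REST fields mirror the proto fields in the samples below:

"spriteSheets": [
  {
    "filePrefix": "small-sprite-sheet",
    "spriteWidthPixels": 64,
    "spriteHeightPixels": 32,
    "interval": "7s"
  },
  {
    "filePrefix": "large-sprite-sheet",
    "spriteWidthPixels": 128,
    "spriteHeightPixels": 72,
    "interval": "7s"
  }
]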
using Google.Api.Gax.ResourceNames;
using Google.Cloud.Video.Transcoder.V1;
using Google.Protobuf.WellKnownTypes;
using System;
public class CreateJobWithPeriodicImagesSpritesheetSample
{
public const string SmallSpritesheetFilePrefix = "small-sprite-sheet";
public const string LargeSpritesheetFilePrefix = "large-sprite-sheet";
public Job CreateJobWithPeriodicImagesSpritesheet(
string projectId, string location, string inputUri, string outputUri)
{
// Create the client.
TranscoderServiceClient client = TranscoderServiceClient.Create();
// Build the parent location name.
LocationName parent = new LocationName(projectId, location);
// Build the job config.
VideoStream videoStream0 = new VideoStream
{
H264 = new VideoStream.Types.H264CodecSettings
{
BitrateBps = 550000,
FrameRate = 60,
HeightPixels = 360,
WidthPixels = 640
}
};
AudioStream audioStream0 = new AudioStream
{
Codec = "aac",
BitrateBps = 64000
};
// Generates a spritesheet of small images taken periodically from the
// input video. To preserve the source aspect ratio, you should set the
// SpriteWidthPixels field or the SpriteHeightPixels field, but not
// both (the API will automatically calculate the missing field). For
// this sample, we don't care about the aspect ratio so we set both
// fields.
SpriteSheet smallSpriteSheet = new SpriteSheet
{
FilePrefix = SmallSpritesheetFilePrefix,
SpriteHeightPixels = 32,
SpriteWidthPixels = 64,
Interval = Duration.FromTimeSpan(TimeSpan.FromSeconds(7))
};
// Generates a spritesheet of larger images taken periodically from the
// input video. To preserve the source aspect ratio, you should set the
// SpriteWidthPixels field or the SpriteHeightPixels field, but not
// both (the API will automatically calculate the missing field). For
// this sample, we don't care about the aspect ratio so we set both
// fields.
SpriteSheet largeSpriteSheet = new SpriteSheet
{
FilePrefix = LargeSpritesheetFilePrefix,
SpriteHeightPixels = 72,
SpriteWidthPixels = 128,
Interval = Duration.FromTimeSpan(TimeSpan.FromSeconds(7))
};
ElementaryStream elementaryStream0 = new ElementaryStream
{
Key = "video_stream0",
VideoStream = videoStream0
};
ElementaryStream elementaryStream1 = new ElementaryStream
{
Key = "audio_stream0",
AudioStream = audioStream0
};
MuxStream muxStream0 = new MuxStream
{
Key = "sd",
Container = "mp4",
ElementaryStreams = { "video_stream0", "audio_stream0" }
};
Input input = new Input
{
Key = "input0",
Uri = inputUri
};
Output output = new Output
{
Uri = outputUri
};
JobConfig jobConfig = new JobConfig
{
Inputs = { input },
Output = output,
ElementaryStreams = { elementaryStream0, elementaryStream1 },
MuxStreams = { muxStream0 },
SpriteSheets = { smallSpriteSheet, largeSpriteSheet }
};
// Build the job.
Job newJob = new Job
{
InputUri = inputUri,
OutputUri = outputUri,
Config = jobConfig
};
// Call the API.
Job job = client.CreateJob(parent, newJob);
// Return the result.
return job;
}
}
import com.google.cloud.video.transcoder.v1.AudioStream;
import com.google.cloud.video.transcoder.v1.CreateJobRequest;
import com.google.cloud.video.transcoder.v1.ElementaryStream;
import com.google.cloud.video.transcoder.v1.Input;
import com.google.cloud.video.transcoder.v1.Job;
import com.google.cloud.video.transcoder.v1.JobConfig;
import com.google.cloud.video.transcoder.v1.LocationName;
import com.google.cloud.video.transcoder.v1.MuxStream;
import com.google.cloud.video.transcoder.v1.Output;
import com.google.cloud.video.transcoder.v1.SpriteSheet;
import com.google.cloud.video.transcoder.v1.TranscoderServiceClient;
import com.google.cloud.video.transcoder.v1.VideoStream;
import com.google.protobuf.Duration;
import java.io.IOException;
public class CreateJobWithPeriodicImagesSpritesheet {
public static final String smallSpritesheetFilePrefix = "small-sprite-sheet";
public static final String largeSpritesheetFilePrefix = "large-sprite-sheet";
public static final String spritesheetFileSuffix = "0000000000.jpeg";
public static void main(String[] args) throws IOException {
// TODO(developer): Replace these variables before running the sample.
String projectId = "my-project-id";
String location = "us-central1";
String inputUri = "gs://my-bucket/my-video-file";
String outputUri = "gs://my-bucket/my-output-folder/";
createJobWithPeriodicImagesSpritesheet(projectId, location, inputUri, outputUri);
}
// Creates a job from an ad-hoc configuration and generates two spritesheets from the input video.
// Each spritesheet contains images that are captured periodically based on a user-defined time
// interval.
public static void createJobWithPeriodicImagesSpritesheet(
String projectId, String location, String inputUri, String outputUri) throws IOException {
// Initialize client that will be used to send requests. This client only needs to be created
// once, and can be reused for multiple requests.
try (TranscoderServiceClient transcoderServiceClient = TranscoderServiceClient.create()) {
VideoStream videoStream0 =
VideoStream.newBuilder()
.setH264(
VideoStream.H264CodecSettings.newBuilder()
.setBitrateBps(550000)
.setFrameRate(60)
.setHeightPixels(360)
.setWidthPixels(640))
.build();
AudioStream audioStream0 =
AudioStream.newBuilder().setCodec("aac").setBitrateBps(64000).build();
// Generates a spritesheet of small images taken periodically from the input video. To
// preserve the source aspect ratio, you should set the spriteWidthPixels field or the
// spriteHeightPixels field, but not both.
SpriteSheet smallSpriteSheet =
SpriteSheet.newBuilder()
.setFilePrefix(smallSpritesheetFilePrefix)
.setSpriteHeightPixels(32)
.setSpriteWidthPixels(64)
.setInterval(Duration.newBuilder().setSeconds(7).build())
.build();
      // Generates a spritesheet of larger images taken periodically from the input video. To
      // preserve the source aspect ratio, you should set the spriteWidthPixels field or the
      // spriteHeightPixels field, but not both.
SpriteSheet largeSpriteSheet =
SpriteSheet.newBuilder()
.setFilePrefix(largeSpritesheetFilePrefix)
.setSpriteHeightPixels(72)
.setSpriteWidthPixels(128)
.setInterval(Duration.newBuilder().setSeconds(7).build())
.build();
JobConfig config =
JobConfig.newBuilder()
.addInputs(Input.newBuilder().setKey("input0").setUri(inputUri))
.setOutput(Output.newBuilder().setUri(outputUri))
.addElementaryStreams(
ElementaryStream.newBuilder()
.setKey("video_stream0")
.setVideoStream(videoStream0))
.addElementaryStreams(
ElementaryStream.newBuilder()
.setKey("audio_stream0")
.setAudioStream(audioStream0))
.addMuxStreams(
MuxStream.newBuilder()
.setKey("sd")
.setContainer("mp4")
.addElementaryStreams("video_stream0")
.addElementaryStreams("audio_stream0")
.build())
.addSpriteSheets(smallSpriteSheet) // Add the spritesheet config to the job config
.addSpriteSheets(largeSpriteSheet) // Add the spritesheet config to the job config
.build();
var createJobRequest =
CreateJobRequest.newBuilder()
.setJob(
Job.newBuilder()
.setInputUri(inputUri)
.setOutputUri(outputUri)
.setConfig(config)
.build())
.setParent(LocationName.of(projectId, location).toString())
.build();
// Send the job creation request and process the response.
Job job = transcoderServiceClient.createJob(createJobRequest);
System.out.println("Job: " + job.getName());
}
}
}
use Google\Cloud\Video\Transcoder\V1\AudioStream;
use Google\Cloud\Video\Transcoder\V1\ElementaryStream;
use Google\Cloud\Video\Transcoder\V1\Job;
use Google\Cloud\Video\Transcoder\V1\JobConfig;
use Google\Cloud\Video\Transcoder\V1\MuxStream;
use Google\Cloud\Video\Transcoder\V1\SpriteSheet;
use Google\Cloud\Video\Transcoder\V1\TranscoderServiceClient;
use Google\Cloud\Video\Transcoder\V1\VideoStream;
use Google\Protobuf\Duration;
/**
* Creates a job that generates two spritesheets from the input video. Each
* spritesheet contains images that are captured periodically.
*
* @param string $projectId The ID of your Google Cloud Platform project.
* @param string $location The location of the job.
* @param string $inputUri Uri of the video in the Cloud Storage bucket.
* @param string $outputUri Uri of the video output folder in the Cloud Storage bucket.
*/
function create_job_with_periodic_images_spritesheet($projectId, $location, $inputUri, $outputUri)
{
// Instantiate a client.
$transcoderServiceClient = new TranscoderServiceClient();
$formattedParent = $transcoderServiceClient->locationName($projectId, $location);
$jobConfig =
(new JobConfig())->setElementaryStreams([
(new ElementaryStream())
->setKey('video-stream0')
->setVideoStream(
(new VideoStream())
->setH264(
(new VideoStream\H264CodecSettings())
->setBitrateBps(550000)
->setFrameRate(60)
->setHeightPixels(360)
->setWidthPixels(640)
)
),
(new ElementaryStream())
->setKey('audio-stream0')
->setAudioStream(
(new AudioStream())
->setCodec('aac')
->setBitrateBps(64000)
)
])->setMuxStreams([
(new MuxStream())
->setKey('sd')
->setContainer('mp4')
->setElementaryStreams(['video-stream0', 'audio-stream0'])
])->setSpriteSheets([
(new SpriteSheet())
->setFilePrefix('small-sprite-sheet')
->setSpriteWidthPixels(64)
->setSpriteHeightPixels(32)
->setInterval(
(new Duration())
->setSeconds(7)
),
(new SpriteSheet())
->setFilePrefix('large-sprite-sheet')
->setSpriteWidthPixels(128)
->setSpriteHeightPixels(72)
->setInterval(new Duration(['seconds' => 7]))
]);
$job = (new Job())
->setInputUri($inputUri)
->setOutputUri($outputUri)
->setConfig($jobConfig);
$response = $transcoderServiceClient->createJob($formattedParent, $job);
// Print job name.
printf('Job: %s' . PHP_EOL, $response->getName());
}
import argparse
from google.cloud.video import transcoder_v1
from google.cloud.video.transcoder_v1.services.transcoder_service import (
TranscoderServiceClient,
)
from google.protobuf import duration_pb2 as duration
def create_job_with_periodic_images_spritesheet(
project_id, location, input_uri, output_uri
):
"""Creates a job based on an ad-hoc job configuration that generates two spritesheets.
Args:
project_id: The GCP project ID.
location: The location to start the job in.
input_uri: Uri of the video in the Cloud Storage bucket.
output_uri: Uri of the video output folder in the Cloud Storage bucket."""
client = TranscoderServiceClient()
parent = f"projects/{project_id}/locations/{location}"
job = transcoder_v1.types.Job()
job.input_uri = input_uri
job.output_uri = output_uri
job.config = transcoder_v1.types.JobConfig(
# Create an ad-hoc job. For more information, see https://cloud.google.com/transcoder/docs/how-to/jobs#create_jobs_ad_hoc.
# See all options for the job config at https://cloud.google.com/transcoder/docs/reference/rest/v1/JobConfig.
elementary_streams=[
# This section defines the output video stream.
transcoder_v1.types.ElementaryStream(
key="video-stream0",
video_stream=transcoder_v1.types.VideoStream(
h264=transcoder_v1.types.VideoStream.H264CodecSettings(
height_pixels=360,
width_pixels=640,
bitrate_bps=550000,
frame_rate=60,
),
),
),
# This section defines the output audio stream.
transcoder_v1.types.ElementaryStream(
key="audio-stream0",
audio_stream=transcoder_v1.types.AudioStream(
codec="aac", bitrate_bps=64000
),
),
],
# This section multiplexes the output audio and video together into a container.
mux_streams=[
transcoder_v1.types.MuxStream(
key="sd",
container="mp4",
elementary_streams=["video-stream0", "audio-stream0"],
),
],
# Generate two sprite sheets from the input video into the GCS bucket. For more information, see
# https://cloud.google.com/transcoder/docs/how-to/generate-spritesheet#generate_image_periodically.
sprite_sheets=[
# Generate a sprite sheet with 64x32px images. An image is taken every 7 seconds from the video.
transcoder_v1.types.SpriteSheet(
file_prefix="small-sprite-sheet",
sprite_width_pixels=64,
sprite_height_pixels=32,
interval=duration.Duration(
seconds=7,
),
),
# Generate a sprite sheet with 128x72px images. An image is taken every 7 seconds from the video.
transcoder_v1.types.SpriteSheet(
file_prefix="large-sprite-sheet",
sprite_width_pixels=128,
sprite_height_pixels=72,
interval=duration.Duration(
seconds=7,
),
),
],
)
response = client.create_job(parent=parent, job=job)
print(f"Job: {response.name}")
return response