public async Task HandleAsync(CloudEvent cloudEvent, StorageObjectData data, CancellationToken cancellationToken)
{
    // Validate parameters
    if (data.Bucket is null || data.Name is null)
    {
        _logger.LogError("Malformed GCS event.");
        return;
    }

    // Construct URI to GCS bucket and file.
    string gcsUri = $"gs://{data.Bucket}/{data.Name}";
    _logger.LogInformation("Analyzing {uri}", gcsUri);

    // Perform safe search detection using the Vision API.
    Image image = Image.FromUri(gcsUri);
    SafeSearchAnnotation annotation;
    try
    {
        annotation = await _visionClient.DetectSafeSearchAsync(image);
    }
    // If the call to the Vision API fails, log the error but let the function complete normally.
    // If the exceptions weren't caught (and just propagated) the event would be retried.
    // See the "Best Practices" section in the documentation for more details about retry.
    catch (AnnotateImageException e)
    {
        _logger.LogError(e, "Vision API reported an error while performing safe search detection");
        return;
    }
    catch (RpcException e)
    {
        _logger.LogError(e, "Error communicating with the Vision API");
        return;
    }

    if (annotation.Adult == Likelihood.VeryLikely || annotation.Violence == Likelihood.VeryLikely)
    {
        _logger.LogInformation("Detected {uri} as inappropriate.", gcsUri);
        await BlurImageAsync(data, cancellationToken);
    }
    else
    {
        _logger.LogInformation("Detected {uri} as OK.", gcsUri);
    }
}
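The BlurImageAsync helper called above is not part of this excerpt. A minimal sketch of what such a helper could look like, assuming the Google.Cloud.Storage.V1 client and the Magick.NET (ImageMagick) library, and assuming the blurred copy is re-uploaded under a "blurred-" prefix; the blur settings and the destination object name are illustrative, not necessarily the tutorial's exact implementation:

// Sketch of a blur helper (assumes the Google.Cloud.Storage.V1 and Magick.NET packages;
// namespaces used: Google.Cloud.Storage.V1, ImageMagick, System.IO).
private async Task BlurImageAsync(StorageObjectData data, CancellationToken cancellationToken)
{
    var storageClient = await StorageClient.CreateAsync();

    // Download the flagged object into memory.
    using var original = new MemoryStream();
    await storageClient.DownloadObjectAsync(data.Bucket, data.Name, original, cancellationToken: cancellationToken);
    original.Position = 0;

    // Blur the image with Magick.NET (radius 0, sigma 16 is an arbitrary illustrative setting).
    using var image = new MagickImage(original);
    image.Blur(0, 16);

    // Upload the blurred bytes under a "blurred-" prefix (an assumption made for this sketch).
    using var blurred = new MemoryStream();
    image.Write(blurred);
    blurred.Position = 0;
    await storageClient.UploadObjectAsync(data.Bucket, $"blurred-{data.Name}", data.ContentType, blurred,
        cancellationToken: cancellationToken);
    _logger.LogInformation("Uploaded blurred image as blurred-{name}", data.Name);
}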
# Blurs uploaded images that are flagged as Adult or Violence.
FunctionsFramework.cloud_event "blur_offensive_images" do |event|
  # Event-triggered Ruby functions receive a CloudEvents::Event::V1 object.
  # See https://cloudevents.github.io/sdk-ruby/latest/CloudEvents/Event/V1.html
  # The storage event payload can be obtained from the event data.
  payload = event.data
  file_name = payload["name"]
  bucket_name = payload["bucket"]

  # Ignore already-blurred files
  if file_name.start_with? "blurred-"
    logger.info "The image #{file_name} is already blurred."
    return
  end

  # Get image annotations from the Vision service
  logger.info "Analyzing #{file_name}."
  gs_uri = "gs://#{bucket_name}/#{file_name}"
  result = global(:vision_client).safe_search_detection image: gs_uri
  annotation = result.responses.first.safe_search_annotation

  # Respond to annotations by possibly blurring the image
  if annotation.adult == :VERY_LIKELY || annotation.violence == :VERY_LIKELY
    logger.info "The image #{file_name} was detected as inappropriate."
    blur_image bucket_name, file_name
  else
    logger.info "The image #{file_name} was detected as OK."
  end
end
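The Ruby handler relies on a global(:vision_client) created at startup and on a blur_image helper, neither of which appears in this excerpt. A minimal sketch under stated assumptions: the google-cloud-vision and google-cloud-storage gems, ImageMagick's convert binary on the PATH, and re-uploading the blurred copy under the "blurred-" prefix that the guard above checks. The tutorial's actual helper may differ in names and details.

require "functions_framework"
require "google/cloud/storage"
require "google/cloud/vision"
require "tmpdir"

# Create the API clients once, when the function instance starts.
FunctionsFramework.on_startup do
  set_global :vision_client do
    Google::Cloud::Vision.image_annotator
  end
  set_global :storage_client do
    Google::Cloud::Storage.new
  end
end

# Download the flagged image, blur it with ImageMagick, and upload the result
# under a "blurred-" prefix so the handler above skips it on the next event.
def blur_image bucket_name, file_name
  local_path = File.join Dir.tmpdir, File.basename(file_name)
  bucket = global(:storage_client).bucket bucket_name
  bucket.file(file_name).download local_path

  # Shell out to ImageMagick; "0x16" (radius x sigma) is an illustrative setting.
  system "convert", local_path, "-blur", "0x16", local_path, exception: true

  bucket.create_file local_path, "blurred-#{file_name}"
  logger.info "Uploaded blurred image as blurred-#{file_name}."
ensure
  File.delete local_path if local_path && File.exist?(local_path)
end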