ImageMagick tutorial step 1: Analyze an image

Demonstrates how to use ImageMagick and the Google Cloud Vision API to detect and blur offensive images uploaded to a Cloud Storage bucket.
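
All of the samples below read the same two fields from the triggering Cloud Storage event: the name of the bucket and the name of the uploaded object. As an illustration (the field values here are placeholders, and most fields of the real payload are omitted), the event data for a finalized object looks like this:

{
  "bucket": "YOUR_INPUT_BUCKET",
  "name": "uploaded-image.jpg",
  "contentType": "image/jpeg",
  "timeCreated": "2024-01-01T00:00:00.000Z"
}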

Code sample

C#

public async Task HandleAsync(CloudEvent cloudEvent, StorageObjectData data, CancellationToken cancellationToken)
{
    // Validate parameters
    if (data.Bucket is null || data.Name is null)
    {
        _logger.LogError("Malformed GCS event.");
        return;
    }

    // Construct URI to GCS bucket and file.
    string gcsUri = $"gs://{data.Bucket}/{data.Name}";
    _logger.LogInformation("Analyzing {uri}", gcsUri);

    // Perform safe search detection using the Vision API.
    Image image = Image.FromUri(gcsUri);
    SafeSearchAnnotation annotation;
    try
    {
        annotation = await _visionClient.DetectSafeSearchAsync(image);
    }
    // If the call to the Vision API fails, log the error but let the function complete normally.
    // If the exceptions weren't caught (and instead propagated), the event would be retried.
    // See the "Best Practices" section in the documentation for more details about retries;
    // a deploy-time example follows this sample.
    catch (AnnotateImageException e)
    {
        _logger.LogError(e, "Vision API reported an error while performing safe search detection");
        return;
    }
    catch (RpcException e)
    {
        _logger.LogError(e, "Error communicating with the Vision API");
        return;
    }

    if (annotation.Adult == Likelihood.VeryLikely || annotation.Violence == Likelihood.VeryLikely)
    {
        _logger.LogInformation("Detected {uri} as inappropriate.", gcsUri);
        await BlurImageAsync(data, cancellationToken);
    }
    else
    {
        _logger.LogInformation("Detected {uri} as OK.", gcsUri);
    }
}
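
As the comments in the C# sample note, letting an exception propagate only causes the event to be retried if retries are enabled for the function. With first-generation Cloud Functions, retries are opted into at deploy time with the --retry flag. A hedged example (the function and bucket names are placeholders, and other deploy flags such as the runtime are omitted):

gcloud functions deploy YOUR_FUNCTION_NAME --trigger-bucket YOUR_INPUT_BUCKET --retry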

Go

// GCSEvent is the payload of a GCS event.
type GCSEvent struct {
	Bucket string `json:"bucket"`
	Name   string `json:"name"`
}

// BlurOffensiveImages blurs offensive images uploaded to GCS.
func BlurOffensiveImages(ctx context.Context, e GCSEvent) error {
	outputBucket := os.Getenv("BLURRED_BUCKET_NAME")
	if outputBucket == "" {
		return errors.New("BLURRED_BUCKET_NAME must be set")
	}

	img := vision.NewImageFromURI(fmt.Sprintf("gs://%s/%s", e.Bucket, e.Name))

	resp, err := visionClient.DetectSafeSearch(ctx, img, nil)
	if err != nil {
		return fmt.Errorf("DetectSafeSearch: %w", err)
	}

	if resp.GetAdult() == visionpb.Likelihood_VERY_LIKELY ||
		resp.GetViolence() == visionpb.Likelihood_VERY_LIKELY {
		return blur(ctx, e.Bucket, outputBucket, e.Name)
	}
	log.Printf("The image %q was detected as OK.", e.Name)
	return nil
}

Java

@Override
// Blurs uploaded images that are flagged as Adult or Violence.
public void accept(GcsEvent event, Context context) {
  // Validate parameters
  if (event.getBucket() == null || event.getName() == null) {
    logger.severe("Error: Malformed GCS event.");
    return;
  }

  BlobInfo blobInfo = BlobInfo.newBuilder(event.getBucket(), event.getName()).build();

  // Construct URI to GCS bucket and file.
  String gcsPath = String.format("gs://%s/%s", event.getBucket(), event.getName());
  logger.info(String.format("Analyzing %s", event.getName()));

  // Construct request.
  ImageSource imgSource = ImageSource.newBuilder().setImageUri(gcsPath).build();
  Image img = Image.newBuilder().setSource(imgSource).build();
  Feature feature = Feature.newBuilder().setType(Type.SAFE_SEARCH_DETECTION).build();
  AnnotateImageRequest request =
      AnnotateImageRequest.newBuilder().addFeatures(feature).setImage(img).build();
  List<AnnotateImageRequest> requests = List.of(request);

  // Send request to the Vision API.
  try (ImageAnnotatorClient client = ImageAnnotatorClient.create()) {
    BatchAnnotateImagesResponse response = client.batchAnnotateImages(requests);
    List<AnnotateImageResponse> responses = response.getResponsesList();
    for (AnnotateImageResponse res : responses) {
      if (res.hasError()) {
        logger.severe(String.format("Error: %s", res.getError().getMessage()));
        return;
      }
      // Get the Safe Search annotation.
      SafeSearchAnnotation annotation = res.getSafeSearchAnnotation();
      // Blur the image if it is very likely to contain adult or violent content.
      if (annotation.getAdult() == Likelihood.VERY_LIKELY
          || annotation.getViolence() == Likelihood.VERY_LIKELY) {
        logger.info(String.format("Detected %s as inappropriate.", event.getName()));
        blur(blobInfo);
      } else {
        logger.info(String.format("Detected %s as OK.", event.getName()));
      }
    }
  } catch (IOException e) {
    logger.log(Level.SEVERE, "Error with Vision API: " + e.getMessage(), e);
  }
}

Node.js

// Blurs uploaded images that are flagged as Adult or Violence.
exports.blurOffensiveImages = async event => {
  // This event represents the triggering Cloud Storage object.
  const object = event;

  const file = storage.bucket(object.bucket).file(object.name);
  const filePath = `gs://${object.bucket}/${object.name}`;

  console.log(`Analyzing ${file.name}.`);

  try {
    const [result] = await client.safeSearchDetection(filePath);
    const detections = result.safeSearchAnnotation || {};

    if (
      // Levels are defined in https://cloud.google.com/vision/docs/reference/rest/v1/AnnotateImageResponse#likelihood
      detections.adult === 'VERY_LIKELY' ||
      detections.violence === 'VERY_LIKELY'
    ) {
      console.log(`Detected ${file.name} as inappropriate.`);
      return await blurImage(file, BLURRED_BUCKET_NAME);
    } else {
      console.log(`Detected ${file.name} as OK.`);
    }
  } catch (err) {
    console.error(`Failed to analyze ${file.name}.`, err);
    throw err;
  }
};

PHP

function blurOffensiveImages(CloudEvent $cloudevent): void
{
    $log = fopen(getenv('LOGGER_OUTPUT') ?: 'php://stderr', 'wb');
    $storage = new StorageClient();
    $data = $cloudevent->getData();

    $file = $storage->bucket($data['bucket'])->object($data['name']);
    $filePath = 'gs://' . $data['bucket'] . '/' . $data['name'];
    fwrite($log, 'Analyzing ' . $filePath . PHP_EOL);

    $annotator = new ImageAnnotatorClient();

    try {
        $request = $annotator->safeSearchDetection($filePath);
        $response = $request->getSafeSearchAnnotation();

        // Handle missing files
        // (This is uncommon, but can happen if race conditions occur)
        if ($response === null) {
            fwrite($log, 'Could not find ' . $filePath . PHP_EOL);
            return;
        }

        $isInappropriate =
            $response->getAdult() === Likelihood::VERY_LIKELY ||
            $response->getViolence() === Likelihood::VERY_LIKELY;

        if ($isInappropriate) {
            fwrite($log, 'Detected ' . $data['name'] . ' as inappropriate.' . PHP_EOL);
            $blurredBucketName = getenv('BLURRED_BUCKET_NAME');

            blurImage($log, $file, $blurredBucketName);
        } else {
            fwrite($log, 'Detected ' . $data['name'] . ' as OK.' . PHP_EOL);
        }
    } catch (Exception $e) {
        fwrite($log, 'Failed to analyze ' . $data['name'] . PHP_EOL);
        fwrite($log, $e->getMessage() . PHP_EOL);
    }
}

Python

# Blurs uploaded images that are flagged as Adult or Violence.
def blur_offensive_images(data, context):
    file_data = data

    file_name = file_data["name"]
    bucket_name = file_data["bucket"]

    blob = storage_client.bucket(bucket_name).get_blob(file_name)
    blob_uri = f"gs://{bucket_name}/{file_name}"
    blob_source = vision.Image(source=vision.ImageSource(gcs_image_uri=blob_uri))

    # Ignore already-blurred files
    if file_name.startswith("blurred-"):
        print(f"The image {file_name} is already blurred.")
        return

    print(f"Analyzing {file_name}.")

    result = vision_client.safe_search_detection(image=blob_source)
    detected = result.safe_search_annotation

    # Blur the image if it is very likely to contain adult or violent content.
    if (
        detected.adult == vision.Likelihood.VERY_LIKELY
        or detected.violence == vision.Likelihood.VERY_LIKELY
    ):
        print(f"The image {file_name} was detected as inappropriate.")
        return __blur_image(blob)
    else:
        print(f"The image {file_name} was detected as OK.")

Ruby

# Blurs uploaded images that are flagged as Adult or Violence.
FunctionsFramework.cloud_event "blur_offensive_images" do |event|
  # Event-triggered Ruby functions receive a CloudEvents::Event::V1 object.
  # See https://cloudevents.github.io/sdk-ruby/latest/CloudEvents/Event/V1.html
  # The storage event payload can be obtained from the event data.
  payload = event.data
  file_name = payload["name"]
  bucket_name = payload["bucket"]

  # Ignore already-blurred files
  if file_name.start_with? "blurred-"
    logger.info "The image #{file_name} is already blurred."
    return
  end

  # Get image annotations from the Vision service
  logger.info "Analyzing #{file_name}."
  gs_uri = "gs://#{bucket_name}/#{file_name}"
  result = global(:vision_client).safe_search_detection image: gs_uri
  annotation = result.responses.first.safe_search_annotation

  # Respond to annotations by possibly blurring the image
  if annotation.adult == :VERY_LIKELY || annotation.violence == :VERY_LIKELY
    logger.info "The image #{file_name} was detected as inappropriate."
    blur_image bucket_name, file_name
  else
    logger.info "The image #{file_name} was detected as OK."
  end
end
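
Each sample above delegates the actual blurring to a helper (BlurImageAsync, blur, blurImage, __blur_image, or blur_image, depending on the language) that is covered in the next step of this tutorial. As a minimal sketch of what that helper does (download the flagged object, blur it with ImageMagick, and upload the result to the bucket named by the BLURRED_BUCKET_NAME environment variable), a Python version might look like the following. The helper name, the blurred- output prefix, the 0x16 blur radius, and the call to ImageMagick's convert command are illustrative assumptions, not the tutorial's exact implementation:

import os
import subprocess
import tempfile

from google.cloud import storage

storage_client = storage.Client()

def __blur_image(blob):
    """Sketch of the blur helper; assumes ImageMagick's `convert` is installed."""
    # Download the flagged image to a temporary file.
    _, temp_path = tempfile.mkstemp()
    blob.download_to_filename(temp_path)

    # Blur the image in place with ImageMagick (0x16 is an illustrative radius).
    subprocess.run(["convert", temp_path, "-blur", "0x16", temp_path], check=True)

    # Upload the blurred image to the output bucket, then clean up.
    output_bucket = storage_client.bucket(os.environ["BLURRED_BUCKET_NAME"])
    output_bucket.blob(f"blurred-{blob.name}").upload_from_filename(temp_path)
    os.remove(temp_path)

    print(f"Blurred image uploaded to gs://{output_bucket.name}/blurred-{blob.name}")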

What's next

To browse and filter code samples for other Google Cloud products, see the Google Cloud sample browser.