ImageMagick Tutorial Step 1: Analyze images

Shows how to use the Google Cloud Vision API and ImageMagick to detect and blur offensive images that have been uploaded to a Cloud Storage bucket.

Further information

For detailed documentation that includes this code sample, see the following:

Code sample

C#

public async Task HandleAsync(CloudEvent cloudEvent, StorageObjectData data, CancellationToken cancellationToken)
{
    // Validate parameters
    if (data.Bucket is null || data.Name is null)
    {
        _logger.LogError("Malformed GCS event.");
        return;
    }

    // Construct URI to GCS bucket and file.
    string gcsUri = $"gs://{data.Bucket}/{data.Name}";
    _logger.LogInformation("Analyzing {uri}", gcsUri);

    // Perform safe search detection using the Vision API.
    Image image = Image.FromUri(gcsUri);
    SafeSearchAnnotation annotation;
    try
    {
        annotation = await _visionClient.DetectSafeSearchAsync(image);
    }
    // If the call to the Vision API fails, log the error but let the function complete normally.
    // If the exceptions weren't caught (and just propagated) the event would be retried.
    // See the "Best Practices" section in the documentation for more details about retry.
    catch (AnnotateImageException e)
    {
        _logger.LogError(e, "Vision API reported an error while performing safe search detection");
        return;
    }
    catch (RpcException e)
    {
        _logger.LogError(e, "Error communicating with the Vision API");
        return;
    }

    if (annotation.Adult == Likelihood.VeryLikely || annotation.Violence == Likelihood.VeryLikely)
    {
        _logger.LogInformation("Detected {uri} as inappropriate.", gcsUri);
        await BlurImageAsync(data, cancellationToken);
    }
    else
    {
        _logger.LogInformation("Detected {uri} as OK.", gcsUri);
    }
}

Go

// GCSEvent is the payload of a GCS event.
// Additional fields are documented at
// https://cloud.google.com/storage/docs/json_api/v1/objects#resource
type GCSEvent struct {
	Bucket string `json:"bucket"`
	Name   string `json:"name"`
}

// blurOffensiveImages blurs offensive images uploaded to GCS.
func blurOffensiveImages(ctx context.Context, e cloudevents.Event) error {
	outputBucket := os.Getenv("BLURRED_BUCKET_NAME")
	if outputBucket == "" {
		return errors.New("environment variable BLURRED_BUCKET_NAME must be set")
	}

	gcsEvent := &GCSEvent{}
	if err := e.DataAs(gcsEvent); err != nil {
		return fmt.Errorf("e.DataAs: failed to decode event data: %v", err)
	}
	img := vision.NewImageFromURI(fmt.Sprintf("gs://%s/%s", gcsEvent.Bucket, gcsEvent.Name))

	resp, err := visionClient.DetectSafeSearch(ctx, img, nil)
	if err != nil {
		return fmt.Errorf("visionClient.DetectSafeSearch: %v", err)
	}

	if resp.GetAdult() == visionpb.Likelihood_VERY_LIKELY ||
		resp.GetViolence() == visionpb.Likelihood_VERY_LIKELY {
		return blur(ctx, gcsEvent.Bucket, outputBucket, gcsEvent.Name)
	}
	log.Printf("The image %q was detected as OK.", gcsEvent.Name)
	return nil
}

Java

@Override
// Blurs uploaded images that are flagged as Adult or Violence.
public void accept(CloudEvent event) {
  // Extract the GCS Event data from the CloudEvent's data payload.
  GcsEvent data = getEventData(event);
  // Validate parameters
  if (data.getBucket() == null || data.getName() == null) {
    logger.severe("Error: Malformed GCS event.");
    return;
  }

  BlobInfo blobInfo = BlobInfo.newBuilder(data.getBucket(), data.getName()).build();

  // Construct URI to GCS bucket and file.
  String gcsPath = String.format("gs://%s/%s", data.getBucket(), data.getName());
  logger.info(String.format("Analyzing %s", data.getName()));

  // Construct request.
  ImageSource imgSource = ImageSource.newBuilder().setImageUri(gcsPath).build();
  Image img = Image.newBuilder().setSource(imgSource).build();
  Feature feature = Feature.newBuilder().setType(Type.SAFE_SEARCH_DETECTION).build();
  AnnotateImageRequest request =
      AnnotateImageRequest.newBuilder().addFeatures(feature).setImage(img).build();
  List<AnnotateImageRequest> requests = List.of(request);

  // Send request to the Vision API.
  try (ImageAnnotatorClient client = ImageAnnotatorClient.create()) {
    BatchAnnotateImagesResponse response = client.batchAnnotateImages(requests);
    List<AnnotateImageResponse> responses = response.getResponsesList();
    for (AnnotateImageResponse res : responses) {
      if (res.hasError()) {
        logger.info(String.format("Error: %s", res.getError().getMessage()));
        return;
      }
      // Get Safe Search Annotations
      SafeSearchAnnotation annotation = res.getSafeSearchAnnotation();
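      // A likelihood value of 5 corresponds to VERY_LIKELY.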
      if (annotation.getAdultValue() == 5 || annotation.getViolenceValue() == 5) {
        logger.info(String.format("Detected %s as inappropriate.", data.getName()));
        blur(blobInfo);
      } else {
        logger.info(String.format("Detected %s as OK.", data.getName()));
      }
    }
  } catch (IOException e) {
    logger.log(Level.SEVERE, "Error with Vision API: " + e.getMessage(), e);
  }
}

Node.js

// Blurs uploaded images that are flagged as Adult or Violence.
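// BLURRED_BUCKET_NAME is assumed to be defined elsewhere in this file, e.g. read from the environment.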
exports.blurOffensiveImages = async event => {
  // This event represents the triggering Cloud Storage object.
  const object = event;

  const file = storage.bucket(object.bucket).file(object.name);
  const filePath = `gs://${object.bucket}/${object.name}`;

  console.log(`Analyzing ${file.name}.`);

  try {
    const [result] = await client.safeSearchDetection(filePath);
    const detections = result.safeSearchAnnotation || {};

    if (
      // Levels are defined in https://cloud.google.com/vision/docs/reference/rest/v1/AnnotateImageResponse#likelihood
      detections.adult === 'VERY_LIKELY' ||
      detections.violence === 'VERY_LIKELY'
    ) {
      console.log(`Detected ${file.name} as inappropriate.`);
      return await blurImage(file, BLURRED_BUCKET_NAME);
    } else {
      console.log(`Detected ${file.name} as OK.`);
    }
  } catch (err) {
    console.error(`Failed to analyze ${file.name}.`, err);
    throw err;
  }
};

PHP

function blurOffensiveImages(CloudEvent $cloudevent): void
{
    $log = fopen(getenv('LOGGER_OUTPUT') ?: 'php://stderr', 'wb');

    $storage = new StorageClient();
    $data = $cloudevent->getData();

    $file = $storage->bucket($data['bucket'])->object($data['name']);
    $filePath = 'gs://' . $data['bucket'] . '/' . $data['name'];
    fwrite($log, 'Analyzing ' . $filePath . PHP_EOL);

    $annotator = new ImageAnnotatorClient();

    try {
        $response = $annotator->safeSearchDetection($filePath);

        // Handle error
        if ($response->hasError()) {
            $code = Code::name($response->getError()->getCode());
            $message = $response->getError()->getMessage();
            fwrite($log, sprintf('%s: %s' . PHP_EOL, $code, $message));
            return;
        }

        $annotation = $response->getSafeSearchAnnotation();

        $isInappropriate =
            $annotation->getAdult() === Likelihood::VERY_LIKELY ||
            $annotation->getViolence() === Likelihood::VERY_LIKELY;

        if ($isInappropriate) {
            fwrite($log, 'Detected ' . $data['name'] . ' as inappropriate.' . PHP_EOL);
            $blurredBucketName = getenv('BLURRED_BUCKET_NAME');

            blurImage($log, $file, $blurredBucketName);
        } else {
            fwrite($log, 'Detected ' . $data['name'] . ' as OK.' . PHP_EOL);
        }
    } catch (Exception $e) {
        fwrite($log, 'Failed to analyze ' . $data['name'] . PHP_EOL);
        fwrite($log, $e->getMessage() . PHP_EOL);
    }
}

Python

# Blurs uploaded images that are flagged as Adult or Violent imagery.
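# storage_client and vision_client are assumed to be initialized at module level elsewhere in this file.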
@functions_framework.cloud_event
def blur_offensive_images(cloud_event):
    file_data = cloud_event.data

    file_name = file_data["name"]
    bucket_name = file_data["bucket"]

    blob = storage_client.bucket(bucket_name).get_blob(file_name)
    blob_uri = f"gs://{bucket_name}/{file_name}"
    blob_source = vision.Image(source=vision.ImageSource(gcs_image_uri=blob_uri))

    # Ignore already-blurred files
    if file_name.startswith("blurred-"):
        print(f"The image {file_name} is already blurred.")
        return

    print(f"Analyzing {file_name}.")

    result = vision_client.safe_search_detection(image=blob_source)
    detected = result.safe_search_annotation

    # Process image
    # 5 maps to VERY_LIKELY
    if detected.adult == 5 or detected.violence == 5:
        print(f"The image {file_name} was detected as inappropriate.")
        return __blur_image(blob)
    else:
        print(f"The image {file_name} was detected as OK.")

Ruby

# Blurs uploaded images that are flagged as Adult or Violence.
FunctionsFramework.cloud_event "blur_offensive_images" do |event|
  # Event-triggered Ruby functions receive a CloudEvents::Event::V1 object.
  # See https://cloudevents.github.io/sdk-ruby/latest/CloudEvents/Event/V1.html
  # The storage event payload can be obtained from the event data.
  payload = event.data
  file_name = payload["name"]
  bucket_name = payload["bucket"]

  # Ignore already-blurred files
  if file_name.start_with? "blurred-"
    logger.info "The image #{file_name} is already blurred."
    return
  end

  # Get image annotations from the Vision service
  logger.info "Analyzing #{file_name}."
  gs_uri = "gs://#{bucket_name}/#{file_name}"
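  # The :vision_client global is assumed to be set up elsewhere, e.g. in an on_startup block.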
  result = global(:vision_client).safe_search_detection image: gs_uri
  annotation = result.responses.first.safe_search_annotation

  # Respond to annotations by possibly blurring the image
  if annotation.adult == :VERY_LIKELY || annotation.violence == :VERY_LIKELY
    logger.info "The image #{file_name} was detected as inappropriate."
    blur_image bucket_name, file_name
  else
    logger.info "The image #{file_name} was detected as OK."
  end
end
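
Each of the samples above hands flagged images off to a blur helper (BlurImageAsync, blur, blurImage, __blur_image, blur_image) that is not shown on this page. The following is a minimal Python sketch of what such a helper could look like, assuming the wand ImageMagick binding and the google-cloud-storage client; the helper name and the BLURRED_BUCKET_NAME environment variable mirror the snippets above, and the exact blur parameters are illustrative only.

# Hypothetical sketch only: downloads a flagged image, blurs it with
# ImageMagick (via the "wand" binding), and uploads the result to the
# bucket named by BLURRED_BUCKET_NAME. Names and parameters are assumptions,
# not taken from this page.
import os
import tempfile

from google.cloud import storage
from wand.image import Image

storage_client = storage.Client()


def __blur_image(current_blob):
    """Blurs the given Cloud Storage blob and uploads the blurred copy."""
    file_name = current_blob.name
    _, temp_local_filename = tempfile.mkstemp()

    # Download the flagged image to a temporary local file.
    current_blob.download_to_filename(temp_local_filename)

    # Re-sample the image with a heavy blur factor so it is unrecognizable.
    with Image(filename=temp_local_filename) as image:
        image.resize(*image.size, blur=16, filter="hamming")
        image.save(filename=temp_local_filename)

    # Upload the blurred copy to a separate bucket so this upload does not
    # re-trigger the analysis function.
    blurred_bucket = storage_client.bucket(os.environ["BLURRED_BUCKET_NAME"])
    blurred_bucket.blob(file_name).upload_from_filename(temp_local_filename)

    # Remove the temporary file.
    os.remove(temp_local_filename)

Writing the blurred copy to a separate bucket (or prefixing its name, as the Python and Ruby samples check for) keeps the analysis function from being triggered again by its own output.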

Next steps

To search and filter code samples for other Google Cloud products, see the Google Cloud sample browser.