Analyzing sentiment

After your model has been successfully trained, you call the AutoML API predict method to analyze content with the model.

Web UI

  1. Open the AutoML Natural Language Sentiment Analysis UI by selecting the Launch app link in the AutoML Sentiment Analysis box, then click the lightbulb icon in the left navigation bar to display the available models.

    To view the models for a different project, select the project from the drop-down list in the upper right of the title bar.

  2. Click the row for the model you want to use to evaluate sentiment.

  3. Click the Predict tab just below the title bar.

  4. Enter the content whose sentiment you want to analyze into the text box and click Predict.

Command-line

Note: The sample command below uses the gcloud auth application-default print-access-token command to obtain a valid authorization token for the request. Make sure that you have set the GOOGLE_APPLICATION_CREDENTIALS environment variable before you execute this command.
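
For example, a minimal setup might look like the following sketch (the key file path is a placeholder for your own service account key):

# Placeholder path: point this at the JSON key file for your service account.
export GOOGLE_APPLICATION_CREDENTIALS="/path/to/service-account-key.json"

# Confirm that a token can be obtained before running the predict request below.
gcloud auth application-default print-access-token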

  • Replace model-name with the full name of your model, from the response when you created the model. The full name has the format: projects/{project-id}/locations/us-central1/models/{model-id}
curl -X POST \
  -H "Authorization: Bearer $(gcloud auth application-default print-access-token)" \
  -H "Content-Type: application/json" \
  https://automl.googleapis.com/v1beta1/model-name:predict \
  -d '{
        "payload" : {
          "textSnippet": {
               "content": "Enjoy your vacation!",
                "mime_type": "text/plain"
           },
        }
      }'

You should see output similar to the following. The textSentiment element reports the predicted sentiment value on the scale you used to label your training data. The sentiment_score in the metadata evaluates the sentiment on a normalized scale from -1 (extremely negative) to +1 (extremely positive).

{
    "payload": [
        {
            "textSentiment": {
                "sentiment": 3
            }
        }
    ],
    "metadata": {
        "sentiment_score": "0.6036837"
    }
}
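
If you only need the normalized score from the command line, one option is to pipe the response through a JSON processor. The sketch below assumes the jq utility is installed and uses the same model-name placeholder as above:

curl -s -X POST \
  -H "Authorization: Bearer $(gcloud auth application-default print-access-token)" \
  -H "Content-Type: application/json" \
  https://automl.googleapis.com/v1beta1/model-name:predict \
  -d '{"payload": {"textSnippet": {"content": "Enjoy your vacation!", "mimeType": "text/plain"}}}' \
  | jq -r '.metadata.sentiment_score'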

Java

import com.google.cloud.automl.v1beta1.AnnotationPayload;
import com.google.cloud.automl.v1beta1.ExamplePayload;
import com.google.cloud.automl.v1beta1.ModelName;
import com.google.cloud.automl.v1beta1.PredictResponse;
import com.google.cloud.automl.v1beta1.PredictionServiceClient;
import com.google.cloud.automl.v1beta1.TextSnippet;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;

/**
 * Demonstrates using the AutoML client to detect the content sentiment.
 *
 * @param projectId the Id of the project.
 * @param computeRegion the Region name. (e.g., "us-central1")
 * @param modelId the Id of the model which will be used for text sentiment.
 * @param filePath the Local text file path of the content to be predicted.
 * @throws IOException
 */
public static void predict(
    String projectId, String computeRegion, String modelId, String filePath) throws IOException {

  // Create client for prediction service.
  PredictionServiceClient predictionClient = PredictionServiceClient.create();

  // Get full path of model
  ModelName modelName = ModelName.of(projectId, computeRegion, modelId);

  // Read the file content for prediction.
  String content = new String(Files.readAllBytes(Paths.get(filePath)));

  // Set the payload by giving the content and type of the file.
  TextSnippet textSnippet =
      TextSnippet.newBuilder().setContent(content).setMimeType("text/plain").build();
  ExamplePayload payload = ExamplePayload.newBuilder().setTextSnippet(textSnippet).build();

  // params contains additional domain-specific parameters.
  // Currently no additional parameters are supported.
  Map<String, String> params = new HashMap<String, String>();
  PredictResponse response = predictionClient.predict(modelName, payload, params);

  System.out.println("Prediction results:");
  for (AnnotationPayload annotationPayload : response.getPayloadList()) {
    System.out.println(
        "\tPredicted sentiment label: " + annotationPayload.getTextSentiment().getSentiment());
  }
  System.out.println(
      "\tNormalized sentiment score: " + response.getMetadataOrThrow("sentiment_score"));
}

Node.js

const automl = require(`@google-cloud/automl`);
const fs = require(`fs`);

// Create client for prediction service.
const client = new automl.v1beta1.PredictionServiceClient();

/**
 * Demonstrates using the AutoML client to detect the content sentiment.
 * TODO(developer): Uncomment the following lines before running the sample.
 */
// const projectId = '[PROJECT_ID]' e.g., "my-gcloud-project";
// const computeRegion = '[REGION_NAME]' e.g., "us-central1";
// const modelId = '[MODEL_ID]' e.g., "TST5200971474357190656";
// const filePath = '[LOCAL_FILE_PATH]' e.g., "./resource/sentimentInput.txt",
// `local text file path of content to be predicted`;

// Get the full path of the model.
const modelFullId = client.modelPath(projectId, computeRegion, modelId);

// Read the file content for prediction.
const snippet = fs.readFileSync(filePath, `utf8`);

// Set the payload by giving the content and type of the file.
const payload = {
  textSnippet: {
    content: snippet,
    mimeType: `text/plain`,
  },
};

// params contains additional domain-specific parameters.
// Currently no additional parameters are supported.
client
  .predict({name: modelFullId, payload: payload, params: {}})
  .then(responses => {
    console.log(`Prediction results:`);
    for (const result of responses[0].payload) {
      console.log(
        `\tPredicted sentiment label: ${result.textSentiment.sentiment}`
      );
    }
    if (responses[0].metadata.sentiment_score) {
      console.log(
        `\tNormalized sentiment score: ${
          responses[0].metadata.sentiment_score
        }`
      );
    }
  })
  .catch(err => {
    console.error(err);
  });

Python

    # TODO(developer): Uncomment and set the following variables
    # project_id = '[PROJECT_ID]'
    # compute_region = '[COMPUTE_REGION]'
    # model_id = '[MODEL_ID]'
    # file_path = '/local/path/to/file'

    from google.cloud import automl_v1beta1 as automl

    automl_client = automl.AutoMlClient()

    # Create client for prediction service.
    prediction_client = automl.PredictionServiceClient()

    # Get the full path of the model.
    model_full_id = automl_client.model_path(
        project_id, compute_region, model_id
    )

    # Read the file content for prediction.
    with open(file_path, "rb") as content_file:
        snippet = content_file.read()

    # Set the payload by giving the content and type of the file.
    payload = {"text_snippet": {"content": snippet, "mime_type": "text/plain"}}

    # params contains additional domain-specific parameters.
    # Currently no additional parameters are supported.
    params = {}
    response = prediction_client.predict(model_full_id, payload, params)
    print("Prediction results:")
    for result in response.payload:
        print("Predicted sentiment label: {}".format(result.text_sentiment.sentiment))
    for key, data in response.metadata.items():
        if key == 'sentiment_score':
            print("Normalized sentiment score: {}".format(data))
