Class ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder (2.53.0)

public static final class ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder extends GeneratedMessageV3.Builder<ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder> implements ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntryOrBuilder

Metrics for a single confidence threshold.

Protobuf type google.cloud.automl.v1beta1.ClassificationEvaluationMetrics.ConfidenceMetricsEntry

Inheritance

Object > AbstractMessageLite.Builder<MessageType,BuilderType> > AbstractMessage.Builder<BuilderType> > GeneratedMessageV3.Builder > ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder
com.google.protobuf.GeneratedMessageV3.Builder.getUnknownFieldSetBuilder()
com.google.protobuf.GeneratedMessageV3.Builder.internalGetMapFieldReflection(int)
com.google.protobuf.GeneratedMessageV3.Builder.internalGetMutableMapFieldReflection(int)
com.google.protobuf.GeneratedMessageV3.Builder.mergeUnknownLengthDelimitedField(int,com.google.protobuf.ByteString)
com.google.protobuf.GeneratedMessageV3.Builder.mergeUnknownVarintField(int,int)
com.google.protobuf.GeneratedMessageV3.Builder.parseUnknownField(com.google.protobuf.CodedInputStream,com.google.protobuf.ExtensionRegistryLite,int)
com.google.protobuf.GeneratedMessageV3.Builder.setUnknownFieldSetBuilder(com.google.protobuf.UnknownFieldSet.Builder)

Static Methods

getDescriptor()

public static final Descriptors.Descriptor getDescriptor()
Returns
Type Description
Descriptor

Methods

addRepeatedField(Descriptors.FieldDescriptor field, Object value)

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder addRepeatedField(Descriptors.FieldDescriptor field, Object value)
Parameters
Name Description
field FieldDescriptor
value Object
Overrides

build()

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry build()

buildPartial()

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry buildPartial()

clear()

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder clear()
Overrides

clearConfidenceThreshold()

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder clearConfidenceThreshold()

Output only. Metrics are computed with an assumption that the model never returns predictions with score lower than this value.

float confidence_threshold = 1;

clearF1Score()

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder clearF1Score()

Output only. The harmonic mean of recall and precision.

float f1_score = 4;

clearF1ScoreAt1()

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder clearF1ScoreAt1()

Output only. The harmonic mean of recall_at1 and precision_at1.

float f1_score_at1 = 7;

clearFalseNegativeCount()

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder clearFalseNegativeCount()

Output only. The number of ground truth labels that are not matched by a model created label.

int64 false_negative_count = 12;

clearFalsePositiveCount()

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder clearFalsePositiveCount()

Output only. The number of model created labels that do not match a ground truth label.

int64 false_positive_count = 11;

clearFalsePositiveRate()

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder clearFalsePositiveRate()

Output only. False Positive Rate for the given confidence threshold.

float false_positive_rate = 8;

clearFalsePositiveRateAt1()

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder clearFalsePositiveRateAt1()

Output only. The False Positive Rate when only considering the label that has the highest prediction score and not below the confidence threshold for each example.

float false_positive_rate_at1 = 9;

clearField(Descriptors.FieldDescriptor field)

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder clearField(Descriptors.FieldDescriptor field)
Parameter
Name Description
field FieldDescriptor
Overrides

clearOneof(Descriptors.OneofDescriptor oneof)

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder clearOneof(Descriptors.OneofDescriptor oneof)
Parameter
Name Description
oneof OneofDescriptor
Overrides

clearPositionThreshold()

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder clearPositionThreshold()

Output only. Metrics are computed with an assumption that the model always returns at most this many predictions (ordered by their score, descending), but they all still need to meet the confidence_threshold.

int32 position_threshold = 14;

clearPrecision()

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder clearPrecision()

Output only. Precision for the given confidence threshold.

float precision = 3;

clearPrecisionAt1()

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder clearPrecisionAt1()

Output only. The precision when only considering the label that has the highest prediction score and not below the confidence threshold for each example.

float precision_at1 = 6;

clearRecall()

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder clearRecall()

Output only. Recall (True Positive Rate) for the given confidence threshold.

float recall = 2;

clearRecallAt1()

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder clearRecallAt1()

Output only. The Recall (True Positive Rate) when only considering the label that has the highest prediction score and not below the confidence threshold for each example.

float recall_at1 = 5;

clearTrueNegativeCount()

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder clearTrueNegativeCount()

Output only. The number of labels that were not created by the model, but which, had they been created, would not have matched a ground truth label.

int64 true_negative_count = 13;

clearTruePositiveCount()

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder clearTruePositiveCount()

Output only. The number of model created labels that match a ground truth label.

int64 true_positive_count = 10;

clone()

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder clone()
Overrides

getConfidenceThreshold()

public float getConfidenceThreshold()

Output only. Metrics are computed with an assumption that the model never returns predictions with score lower than this value.

float confidence_threshold = 1;

Returns
Type Description
float

The confidenceThreshold.

getDefaultInstanceForType()

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry getDefaultInstanceForType()

getDescriptorForType()

public Descriptors.Descriptor getDescriptorForType()
Returns
Type Description
Descriptor
Overrides

getF1Score()

public float getF1Score()

Output only. The harmonic mean of recall and precision.

float f1_score = 4;

Returns
Type Description
float

The f1Score.

getF1ScoreAt1()

public float getF1ScoreAt1()

Output only. The harmonic mean of recall_at1 and precision_at1.

float f1_score_at1 = 7;

Returns
Type Description
float

The f1ScoreAt1.

getFalseNegativeCount()

public long getFalseNegativeCount()

Output only. The number of ground truth labels that are not matched by a model created label.

int64 false_negative_count = 12;

Returns
Type Description
long

The falseNegativeCount.

getFalsePositiveCount()

public long getFalsePositiveCount()

Output only. The number of model created labels that do not match a ground truth label.

int64 false_positive_count = 11;

Returns
Type Description
long

The falsePositiveCount.

getFalsePositiveRate()

public float getFalsePositiveRate()

Output only. False Positive Rate for the given confidence threshold.

float false_positive_rate = 8;

Returns
Type Description
float

The falsePositiveRate.

getFalsePositiveRateAt1()

public float getFalsePositiveRateAt1()

Output only. The False Positive Rate when only considering the label that has the highest prediction score and not below the confidence threshold for each example.

float false_positive_rate_at1 = 9;

Returns
Type Description
float

The falsePositiveRateAt1.

getPositionThreshold()

public int getPositionThreshold()

Output only. Metrics are computed with an assumption that the model always returns at most this many predictions (ordered by their score, descending), but they all still need to meet the confidence_threshold.

int32 position_threshold = 14;

Returns
Type Description
int

The positionThreshold.

getPrecision()

public float getPrecision()

Output only. Precision for the given confidence threshold.

float precision = 3;

Returns
Type Description
float

The precision.

getPrecisionAt1()

public float getPrecisionAt1()

Output only. The precision when only considering the label that has the highest prediction score and not below the confidence threshold for each example.

float precision_at1 = 6;

Returns
Type Description
float

The precisionAt1.

getRecall()

public float getRecall()

Output only. Recall (True Positive Rate) for the given confidence threshold.

float recall = 2;

Returns
Type Description
float

The recall.

getRecallAt1()

public float getRecallAt1()

Output only. The Recall (True Positive Rate) when only considering the label that has the highest prediction score and not below the confidence threshold for each example.

float recall_at1 = 5;

Returns
Type Description
float

The recallAt1.

getTrueNegativeCount()

public long getTrueNegativeCount()

Output only. The number of labels that were not created by the model, but which, had they been created, would not have matched a ground truth label.

int64 true_negative_count = 13;

Returns
Type Description
long

The trueNegativeCount.

getTruePositiveCount()

public long getTruePositiveCount()

Output only. The number of model created labels that match a ground truth label.

int64 true_positive_count = 10;

Returns
Type Description
long

The truePositiveCount.

internalGetFieldAccessorTable()

protected GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable()
Returns
Type Description
FieldAccessorTable
Overrides

isInitialized()

public final boolean isInitialized()
Returns
Type Description
boolean
Overrides

mergeFrom(ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry other)

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder mergeFrom(ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry other)

mergeFrom(CodedInputStream input, ExtensionRegistryLite extensionRegistry)

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder mergeFrom(CodedInputStream input, ExtensionRegistryLite extensionRegistry)
Parameters
Name Description
input CodedInputStream
extensionRegistry ExtensionRegistryLite
Overrides
Exceptions
Type Description
IOException

mergeFrom(Message other)

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder mergeFrom(Message other)
Parameter
Name Description
other Message
Overrides

mergeUnknownFields(UnknownFieldSet unknownFields)

public final ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder mergeUnknownFields(UnknownFieldSet unknownFields)
Parameter
Name Description
unknownFields UnknownFieldSet
Overrides

setConfidenceThreshold(float value)

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder setConfidenceThreshold(float value)

Output only. Metrics are computed with an assumption that the model never returns predictions with score lower than this value.

float confidence_threshold = 1;

Parameter
Name Description
value float

The confidenceThreshold to set.

setF1Score(float value)

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder setF1Score(float value)

Output only. The harmonic mean of recall and precision.

float f1_score = 4;

Parameter
Name Description
value float

The f1Score to set.

setF1ScoreAt1(float value)

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder setF1ScoreAt1(float value)

Output only. The harmonic mean of recall_at1 and precision_at1.

float f1_score_at1 = 7;

Parameter
Name Description
value float

The f1ScoreAt1 to set.

setFalseNegativeCount(long value)

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder setFalseNegativeCount(long value)

Output only. The number of ground truth labels that are not matched by a model created label.

int64 false_negative_count = 12;

Parameter
Name Description
value long

The falseNegativeCount to set.

setFalsePositiveCount(long value)

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder setFalsePositiveCount(long value)

Output only. The number of model created labels that do not match a ground truth label.

int64 false_positive_count = 11;

Parameter
Name Description
value long

The falsePositiveCount to set.

setFalsePositiveRate(float value)

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder setFalsePositiveRate(float value)

Output only. False Positive Rate for the given confidence threshold.

float false_positive_rate = 8;

Parameter
Name Description
value float

The falsePositiveRate to set.

setFalsePositiveRateAt1(float value)

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder setFalsePositiveRateAt1(float value)

Output only. The False Positive Rate when only considering the label that has the highest prediction score and not below the confidence threshold for each example.

float false_positive_rate_at1 = 9;

Parameter
Name Description
value float

The falsePositiveRateAt1 to set.

setField(Descriptors.FieldDescriptor field, Object value)

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder setField(Descriptors.FieldDescriptor field, Object value)
Parameters
Name Description
field FieldDescriptor
value Object
Overrides

setPositionThreshold(int value)

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder setPositionThreshold(int value)

Output only. Metrics are computed with an assumption that the model always returns at most this many predictions (ordered by their score, descending), but they all still need to meet the confidence_threshold.

int32 position_threshold = 14;

Parameter
Name Description
value int

The positionThreshold to set.

setPrecision(float value)

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder setPrecision(float value)

Output only. Precision for the given confidence threshold.

float precision = 3;

Parameter
Name Description
value float

The precision to set.

setPrecisionAt1(float value)

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder setPrecisionAt1(float value)

Output only. The precision when only considering the label that has the highest prediction score and not below the confidence threshold for each example.

float precision_at1 = 6;

Parameter
Name Description
value float

The precisionAt1 to set.

setRecall(float value)

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder setRecall(float value)

Output only. Recall (True Positive Rate) for the given confidence threshold.

float recall = 2;

Parameter
Name Description
value float

The recall to set.

setRecallAt1(float value)

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder setRecallAt1(float value)

Output only. The Recall (True Positive Rate) when only considering the label that has the highest prediction score and not below the confidence threshold for each example.

float recall_at1 = 5;

Parameter
Name Description
value float

The recallAt1 to set.

setRepeatedField(Descriptors.FieldDescriptor field, int index, Object value)

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder setRepeatedField(Descriptors.FieldDescriptor field, int index, Object value)
Parameters
Name Description
field FieldDescriptor
index int
value Object
Overrides

setTrueNegativeCount(long value)

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder setTrueNegativeCount(long value)

Output only. The number of labels that were not created by the model, but which, had they been created, would not have matched a ground truth label.

int64 true_negative_count = 13;

Parameter
Name Description
value long

The trueNegativeCount to set.

setTruePositiveCount(long value)

public ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder setTruePositiveCount(long value)

Output only. The number of model created labels that match a ground truth label.

int64 true_positive_count = 10;

Parameter
Name Description
value long

The truePositiveCount to set.

setUnknownFields(UnknownFieldSet unknownFields)

public final ClassificationProto.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.Builder setUnknownFields(UnknownFieldSet unknownFields)
Parameter
Name Description
unknownFields UnknownFieldSet
Overrides