Class LlmModelSettings.Parameters (0.89.0)

public static final class LlmModelSettings.Parameters extends GeneratedMessageV3 implements LlmModelSettings.ParametersOrBuilder

Generative model parameters to control the model behavior.

Protobuf type google.cloud.dialogflow.cx.v3beta1.LlmModelSettings.Parameters

Static Fields

INPUT_TOKEN_LIMIT_FIELD_NUMBER

public static final int INPUT_TOKEN_LIMIT_FIELD_NUMBER
Field Value
Type Description
int

OUTPUT_TOKEN_LIMIT_FIELD_NUMBER

public static final int OUTPUT_TOKEN_LIMIT_FIELD_NUMBER
Field Value
Type Description
int

TEMPERATURE_FIELD_NUMBER

public static final int TEMPERATURE_FIELD_NUMBER
Field Value
Type Description
int

Static Methods

getDefaultInstance()

public static LlmModelSettings.Parameters getDefaultInstance()
Returns
Type Description
LlmModelSettings.Parameters

getDescriptor()

public static final Descriptors.Descriptor getDescriptor()
Returns
Type Description
Descriptor

newBuilder()

public static LlmModelSettings.Parameters.Builder newBuilder()
Returns
Type Description
LlmModelSettings.Parameters.Builder

newBuilder(LlmModelSettings.Parameters prototype)

public static LlmModelSettings.Parameters.Builder newBuilder(LlmModelSettings.Parameters prototype)
Parameter
Name Description
prototype LlmModelSettings.Parameters
Returns
Type Description
LlmModelSettings.Parameters.Builder

parseDelimitedFrom(InputStream input)

public static LlmModelSettings.Parameters parseDelimitedFrom(InputStream input)
Parameter
Name Description
input InputStream
Returns
Type Description
LlmModelSettings.Parameters
Exceptions
Type Description
IOException

parseDelimitedFrom(InputStream input, ExtensionRegistryLite extensionRegistry)

public static LlmModelSettings.Parameters parseDelimitedFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
Parameters
Name Description
input InputStream
extensionRegistry ExtensionRegistryLite
Returns
Type Description
LlmModelSettings.Parameters
Exceptions
Type Description
IOException

parseFrom(byte[] data)

public static LlmModelSettings.Parameters parseFrom(byte[] data)
Parameter
Name Description
data byte[]
Returns
Type Description
LlmModelSettings.Parameters
Exceptions
Type Description
InvalidProtocolBufferException

parseFrom(byte[] data, ExtensionRegistryLite extensionRegistry)

public static LlmModelSettings.Parameters parseFrom(byte[] data, ExtensionRegistryLite extensionRegistry)
Parameters
Name Description
data byte[]
extensionRegistry ExtensionRegistryLite
Returns
Type Description
LlmModelSettings.Parameters
Exceptions
Type Description
InvalidProtocolBufferException

parseFrom(ByteString data)

public static LlmModelSettings.Parameters parseFrom(ByteString data)
Parameter
Name Description
data ByteString
Returns
Type Description
LlmModelSettings.Parameters
Exceptions
Type Description
InvalidProtocolBufferException

parseFrom(ByteString data, ExtensionRegistryLite extensionRegistry)

public static LlmModelSettings.Parameters parseFrom(ByteString data, ExtensionRegistryLite extensionRegistry)
Parameters
Name Description
data ByteString
extensionRegistry ExtensionRegistryLite
Returns
Type Description
LlmModelSettings.Parameters
Exceptions
Type Description
InvalidProtocolBufferException

parseFrom(CodedInputStream input)

public static LlmModelSettings.Parameters parseFrom(CodedInputStream input)
Parameter
Name Description
input CodedInputStream
Returns
Type Description
LlmModelSettings.Parameters
Exceptions
Type Description
IOException

parseFrom(CodedInputStream input, ExtensionRegistryLite extensionRegistry)

public static LlmModelSettings.Parameters parseFrom(CodedInputStream input, ExtensionRegistryLite extensionRegistry)
Parameters
Name Description
input CodedInputStream
extensionRegistry ExtensionRegistryLite
Returns
Type Description
LlmModelSettings.Parameters
Exceptions
Type Description
IOException

parseFrom(InputStream input)

public static LlmModelSettings.Parameters parseFrom(InputStream input)
Parameter
Name Description
input InputStream
Returns
Type Description
LlmModelSettings.Parameters
Exceptions
Type Description
IOException

parseFrom(InputStream input, ExtensionRegistryLite extensionRegistry)

public static LlmModelSettings.Parameters parseFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
Parameters
Name Description
input InputStream
extensionRegistry ExtensionRegistryLite
Returns
Type Description
LlmModelSettings.Parameters
Exceptions
Type Description
IOException

parseFrom(ByteBuffer data)

public static LlmModelSettings.Parameters parseFrom(ByteBuffer data)
Parameter
Name Description
data ByteBuffer
Returns
Type Description
LlmModelSettings.Parameters
Exceptions
Type Description
InvalidProtocolBufferException

parseFrom(ByteBuffer data, ExtensionRegistryLite extensionRegistry)

public static LlmModelSettings.Parameters parseFrom(ByteBuffer data, ExtensionRegistryLite extensionRegistry)
Parameters
Name Description
data ByteBuffer
extensionRegistry ExtensionRegistryLite
Returns
Type Description
LlmModelSettings.Parameters
Exceptions
Type Description
InvalidProtocolBufferException

parser()

public static Parser<LlmModelSettings.Parameters> parser()
Returns
Type Description
Parser<LlmModelSettings.Parameters>

Methods

equals(Object obj)

public boolean equals(Object obj)
Parameter
Name Description
obj Object
Returns
Type Description
boolean
Overrides

getDefaultInstanceForType()

public LlmModelSettings.Parameters getDefaultInstanceForType()
Returns
Type Description
LlmModelSettings.Parameters

getInputTokenLimit()

public LlmModelSettings.Parameters.InputTokenLimit getInputTokenLimit()

The input token limit. This setting is currently only supported by playbooks.

optional .google.cloud.dialogflow.cx.v3beta1.LlmModelSettings.Parameters.InputTokenLimit input_token_limit = 2;

Returns
Type Description
LlmModelSettings.Parameters.InputTokenLimit

The inputTokenLimit.

getInputTokenLimitValue()

public int getInputTokenLimitValue()

The input token limit. This setting is currently only supported by playbooks.

optional .google.cloud.dialogflow.cx.v3beta1.LlmModelSettings.Parameters.InputTokenLimit input_token_limit = 2;

Returns
Type Description
int

The enum numeric value on the wire for inputTokenLimit.

getOutputTokenLimit()

public LlmModelSettings.Parameters.OutputTokenLimit getOutputTokenLimit()

The output token limit. This setting is currently only supported by playbooks. Only one of output_token_limit and max_output_tokens is allowed to be set.

optional .google.cloud.dialogflow.cx.v3beta1.LlmModelSettings.Parameters.OutputTokenLimit output_token_limit = 3;

Returns
Type Description
LlmModelSettings.Parameters.OutputTokenLimit

The outputTokenLimit.

getOutputTokenLimitValue()

public int getOutputTokenLimitValue()

The output token limit. This setting is currently only supported by playbooks. Only one of output_token_limit and max_output_tokens is allowed to be set.

optional .google.cloud.dialogflow.cx.v3beta1.LlmModelSettings.Parameters.OutputTokenLimit output_token_limit = 3;

Returns
Type Description
int

The enum numeric value on the wire for outputTokenLimit.

getParserForType()

public Parser<LlmModelSettings.Parameters> getParserForType()
Returns
Type Description
Parser<LlmModelSettings.Parameters>
Overrides

getSerializedSize()

public int getSerializedSize()
Returns
Type Description
int
Overrides

getTemperature()

public float getTemperature()

The temperature used for sampling during response generation. Temperature controls the degree of randomness in token selection: lower values mean less randomness, while higher values mean more randomness. Valid range: [0.0, 1.0].

optional float temperature = 1;

Returns
Type Description
float

The temperature.

hasInputTokenLimit()

public boolean hasInputTokenLimit()

The input token limit. This setting is currently only supported by playbooks.

optional .google.cloud.dialogflow.cx.v3beta1.LlmModelSettings.Parameters.InputTokenLimit input_token_limit = 2;

Returns
Type Description
boolean

Whether the inputTokenLimit field is set.

hasOutputTokenLimit()

public boolean hasOutputTokenLimit()

The output token limit. This setting is currently only supported by playbooks. Only one of output_token_limit and max_output_tokens is allowed to be set.

optional .google.cloud.dialogflow.cx.v3beta1.LlmModelSettings.Parameters.OutputTokenLimit output_token_limit = 3;

Returns
Type Description
boolean

Whether the outputTokenLimit field is set.

hasTemperature()

public boolean hasTemperature()

The temperature used for sampling during response generation. Temperature controls the degree of randomness in token selection: lower values mean less randomness, while higher values mean more randomness. Valid range: [0.0, 1.0].

optional float temperature = 1;

Returns
Type Description
boolean

Whether the temperature field is set.

hashCode()

public int hashCode()
Returns
Type Description
int
Overrides

internalGetFieldAccessorTable()

protected GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable()
Returns
Type Description
FieldAccessorTable
Overrides

isInitialized()

public final boolean isInitialized()
Returns
Type Description
boolean
Overrides

newBuilderForType()

public LlmModelSettings.Parameters.Builder newBuilderForType()
Returns
Type Description
LlmModelSettings.Parameters.Builder

newBuilderForType(GeneratedMessageV3.BuilderParent parent)

protected LlmModelSettings.Parameters.Builder newBuilderForType(GeneratedMessageV3.BuilderParent parent)
Parameter
Name Description
parent BuilderParent
Returns
Type Description
LlmModelSettings.Parameters.Builder
Overrides

newInstance(GeneratedMessageV3.UnusedPrivateParameter unused)

protected Object newInstance(GeneratedMessageV3.UnusedPrivateParameter unused)
Parameter
Name Description
unused UnusedPrivateParameter
Returns
Type Description
Object
Overrides

toBuilder()

public LlmModelSettings.Parameters.Builder toBuilder()
Returns
Type Description
LlmModelSettings.Parameters.Builder

writeTo(CodedOutputStream output)

public void writeTo(CodedOutputStream output)
Parameter
Name Description
output CodedOutputStream
Overrides
Exceptions
Type Description
IOException