public static final class LlmModelSettings.Parameters.Builder extends GeneratedMessageV3.Builder<LlmModelSettings.Parameters.Builder> implements LlmModelSettings.ParametersOrBuilder

Generative model parameters to control the model behavior.

Protobuf type google.cloud.dialogflow.cx.v3beta1.LlmModelSettings.Parameters
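A builder instance is typically obtained from LlmModelSettings.Parameters.newBuilder() and turned into an immutable Parameters message with build(). The following is a minimal sketch of that flow; it only sets the temperature field documented on this page, and it assumes the standard generated package for this client library.

```java
import com.google.cloud.dialogflow.cx.v3beta1.LlmModelSettings;

public class ParametersBuilderSketch {
  public static void main(String[] args) {
    // Configure generative model parameters; every field on Parameters is optional.
    LlmModelSettings.Parameters parameters =
        LlmModelSettings.Parameters.newBuilder()
            .setTemperature(0.2f) // documented valid range: [0.0, 1.0]
            .build();

    // The resulting message is immutable; use toBuilder() to derive a modified copy.
    System.out.println(parameters.getTemperature()); // 0.2
  }
}
```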
Inheritance
Object > AbstractMessageLite.Builder<MessageType,BuilderType> > AbstractMessage.Builder<BuilderType> > GeneratedMessageV3.Builder > LlmModelSettings.Parameters.Builder

Implements
LlmModelSettings.ParametersOrBuilder

Static Methods
getDescriptor()
public static final Descriptors.Descriptor getDescriptor()

| Returns | |
|---|---|
| Type | Description |
| Descriptor | |
Methods
addRepeatedField(Descriptors.FieldDescriptor field, Object value)
public LlmModelSettings.Parameters.Builder addRepeatedField(Descriptors.FieldDescriptor field, Object value)

| Parameters | |
|---|---|
| Name | Description |
| field | FieldDescriptor |
| value | Object |

| Returns | |
|---|---|
| Type | Description |
| LlmModelSettings.Parameters.Builder | |
build()
public LlmModelSettings.Parameters build()

| Returns | |
|---|---|
| Type | Description |
| LlmModelSettings.Parameters | |

buildPartial()
public LlmModelSettings.Parameters buildPartial()

| Returns | |
|---|---|
| Type | Description |
| LlmModelSettings.Parameters | |

clear()
public LlmModelSettings.Parameters.Builder clear()

| Returns | |
|---|---|
| Type | Description |
| LlmModelSettings.Parameters.Builder | |
clearField(Descriptors.FieldDescriptor field)
public LlmModelSettings.Parameters.Builder clearField(Descriptors.FieldDescriptor field)

| Parameter | |
|---|---|
| Name | Description |
| field | FieldDescriptor |

| Returns | |
|---|---|
| Type | Description |
| LlmModelSettings.Parameters.Builder | |
clearInputTokenLimit()
public LlmModelSettings.Parameters.Builder clearInputTokenLimit()

The input token limit. This setting is currently only supported by playbooks.

optional .google.cloud.dialogflow.cx.v3beta1.LlmModelSettings.Parameters.InputTokenLimit input_token_limit = 2;

| Returns | |
|---|---|
| Type | Description |
| LlmModelSettings.Parameters.Builder | This builder for chaining. |
clearOneof(Descriptors.OneofDescriptor oneof)
public LlmModelSettings.Parameters.Builder clearOneof(Descriptors.OneofDescriptor oneof)

| Parameter | |
|---|---|
| Name | Description |
| oneof | OneofDescriptor |

| Returns | |
|---|---|
| Type | Description |
| LlmModelSettings.Parameters.Builder | |
clearOutputTokenLimit()
public LlmModelSettings.Parameters.Builder clearOutputTokenLimit()

The output token limit. This setting is currently only supported by playbooks. Only one of output_token_limit and max_output_tokens is allowed to be set.

optional .google.cloud.dialogflow.cx.v3beta1.LlmModelSettings.Parameters.OutputTokenLimit output_token_limit = 3;

| Returns | |
|---|---|
| Type | Description |
| LlmModelSettings.Parameters.Builder | This builder for chaining. |
clearTemperature()
public LlmModelSettings.Parameters.Builder clearTemperature()

The temperature used for sampling during response generation. Temperature controls the degree of randomness in token selection: lower temperature means less randomness, higher temperature means more randomness. Valid range: [0.0, 1.0].

optional float temperature = 1;

| Returns | |
|---|---|
| Type | Description |
| LlmModelSettings.Parameters.Builder | This builder for chaining. |
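Because temperature is declared optional, the builder tracks explicit presence: clearing the field makes hasTemperature() return false and the accessor falls back to the proto default. A small sketch of that behavior, under the same import assumption as the example above:

```java
import com.google.cloud.dialogflow.cx.v3beta1.LlmModelSettings;

public class ClearTemperatureSketch {
  public static void main(String[] args) {
    LlmModelSettings.Parameters.Builder builder =
        LlmModelSettings.Parameters.newBuilder().setTemperature(0.7f);

    System.out.println(builder.hasTemperature()); // true
    System.out.println(builder.getTemperature()); // 0.7

    // Reset the field; the builder no longer reports it as set.
    builder.clearTemperature();

    System.out.println(builder.hasTemperature()); // false
    System.out.println(builder.getTemperature()); // 0.0 (proto default for float)
  }
}
```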
clone()
public LlmModelSettings.Parameters.Builder clone()

| Returns | |
|---|---|
| Type | Description |
| LlmModelSettings.Parameters.Builder | |

getDefaultInstanceForType()
public LlmModelSettings.Parameters getDefaultInstanceForType()

| Returns | |
|---|---|
| Type | Description |
| LlmModelSettings.Parameters | |

getDescriptorForType()
public Descriptors.Descriptor getDescriptorForType()

| Returns | |
|---|---|
| Type | Description |
| Descriptor | |
getInputTokenLimit()
public LlmModelSettings.Parameters.InputTokenLimit getInputTokenLimit()

The input token limit. This setting is currently only supported by playbooks.

optional .google.cloud.dialogflow.cx.v3beta1.LlmModelSettings.Parameters.InputTokenLimit input_token_limit = 2;

| Returns | |
|---|---|
| Type | Description |
| LlmModelSettings.Parameters.InputTokenLimit | The inputTokenLimit. |

getInputTokenLimitValue()
public int getInputTokenLimitValue()

The input token limit. This setting is currently only supported by playbooks.

optional .google.cloud.dialogflow.cx.v3beta1.LlmModelSettings.Parameters.InputTokenLimit input_token_limit = 2;

| Returns | |
|---|---|
| Type | Description |
| int | The enum numeric value on the wire for inputTokenLimit. |
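The two accessors differ only in representation: getInputTokenLimit() returns the generated InputTokenLimit enum constant, while getInputTokenLimitValue() returns the raw integer written on the wire, which can be useful when a client is older than the server and receives numbers it has no constant for. A brief sketch reading both forms from an otherwise empty builder:

```java
import com.google.cloud.dialogflow.cx.v3beta1.LlmModelSettings;

public class TokenLimitAccessorsSketch {
  public static void main(String[] args) {
    LlmModelSettings.Parameters.Builder builder = LlmModelSettings.Parameters.newBuilder();

    // Enum view: the constant for the current wire value (the zero value here,
    // since nothing has been set).
    LlmModelSettings.Parameters.InputTokenLimit limit = builder.getInputTokenLimit();

    // Wire view: the raw number, 0 for an unset field.
    int wireValue = builder.getInputTokenLimitValue();

    System.out.println(limit + " = " + wireValue);
  }
}
```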
getOutputTokenLimit()
public LlmModelSettings.Parameters.OutputTokenLimit getOutputTokenLimit()

The output token limit. This setting is currently only supported by playbooks. Only one of output_token_limit and max_output_tokens is allowed to be set.

optional .google.cloud.dialogflow.cx.v3beta1.LlmModelSettings.Parameters.OutputTokenLimit output_token_limit = 3;

| Returns | |
|---|---|
| Type | Description |
| LlmModelSettings.Parameters.OutputTokenLimit | The outputTokenLimit. |

getOutputTokenLimitValue()
public int getOutputTokenLimitValue()

The output token limit. This setting is currently only supported by playbooks. Only one of output_token_limit and max_output_tokens is allowed to be set.

optional .google.cloud.dialogflow.cx.v3beta1.LlmModelSettings.Parameters.OutputTokenLimit output_token_limit = 3;

| Returns | |
|---|---|
| Type | Description |
| int | The enum numeric value on the wire for outputTokenLimit. |

getTemperature()
public float getTemperature()

The temperature used for sampling during response generation. Temperature controls the degree of randomness in token selection: lower temperature means less randomness, higher temperature means more randomness. Valid range: [0.0, 1.0].

optional float temperature = 1;

| Returns | |
|---|---|
| Type | Description |
| float | The temperature. |
hasInputTokenLimit()
public boolean hasInputTokenLimit()

The input token limit. This setting is currently only supported by playbooks.

optional .google.cloud.dialogflow.cx.v3beta1.LlmModelSettings.Parameters.InputTokenLimit input_token_limit = 2;

| Returns | |
|---|---|
| Type | Description |
| boolean | Whether the inputTokenLimit field is set. |

hasOutputTokenLimit()
public boolean hasOutputTokenLimit()

The output token limit. This setting is currently only supported by playbooks. Only one of output_token_limit and max_output_tokens is allowed to be set.

optional .google.cloud.dialogflow.cx.v3beta1.LlmModelSettings.Parameters.OutputTokenLimit output_token_limit = 3;

| Returns | |
|---|---|
| Type | Description |
| boolean | Whether the outputTokenLimit field is set. |

hasTemperature()
public boolean hasTemperature()

The temperature used for sampling during response generation. Temperature controls the degree of randomness in token selection: lower temperature means less randomness, higher temperature means more randomness. Valid range: [0.0, 1.0].

optional float temperature = 1;

| Returns | |
|---|---|
| Type | Description |
| boolean | Whether the temperature field is set. |
internalGetFieldAccessorTable()
protected GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable()

| Returns | |
|---|---|
| Type | Description |
| FieldAccessorTable | |

isInitialized()
public final boolean isInitialized()

| Returns | |
|---|---|
| Type | Description |
| boolean | |
mergeFrom(LlmModelSettings.Parameters other)
public LlmModelSettings.Parameters.Builder mergeFrom(LlmModelSettings.Parameters other)

| Parameter | |
|---|---|
| Name | Description |
| other | LlmModelSettings.Parameters |

| Returns | |
|---|---|
| Type | Description |
| LlmModelSettings.Parameters.Builder | |
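mergeFrom(LlmModelSettings.Parameters other) copies the fields that are set on other into this builder, overwriting scalar fields that are set in both, which makes it convenient for layering per-request overrides on top of a shared baseline. A sketch of that pattern (the baseline and override values are illustrative only):

```java
import com.google.cloud.dialogflow.cx.v3beta1.LlmModelSettings;

public class MergeParametersSketch {
  public static void main(String[] args) {
    // A shared baseline configuration.
    LlmModelSettings.Parameters defaults =
        LlmModelSettings.Parameters.newBuilder().setTemperature(0.3f).build();

    // A per-request override for temperature.
    LlmModelSettings.Parameters override =
        LlmModelSettings.Parameters.newBuilder().setTemperature(0.9f).build();

    // Start from the defaults, then merge the override on top.
    LlmModelSettings.Parameters effective =
        defaults.toBuilder().mergeFrom(override).build();

    System.out.println(effective.getTemperature()); // 0.9
  }
}
```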
mergeFrom(CodedInputStream input, ExtensionRegistryLite extensionRegistry)
public LlmModelSettings.Parameters.Builder mergeFrom(CodedInputStream input, ExtensionRegistryLite extensionRegistry)

| Parameters | |
|---|---|
| Name | Description |
| input | CodedInputStream |
| extensionRegistry | ExtensionRegistryLite |

| Returns | |
|---|---|
| Type | Description |
| LlmModelSettings.Parameters.Builder | |

| Exceptions | |
|---|---|
| Type | Description |
| IOException | |
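This overload reads serialized message data from a stream into the builder and throws IOException if the input is malformed or the stream fails. A minimal round-trip sketch: serialize a Parameters message to bytes, then parse those bytes back through a fresh builder (the empty extension registry is enough here, since this message declares no extensions):

```java
import com.google.cloud.dialogflow.cx.v3beta1.LlmModelSettings;
import com.google.protobuf.CodedInputStream;
import com.google.protobuf.ExtensionRegistryLite;
import java.io.IOException;

public class ParseParametersSketch {
  public static void main(String[] args) throws IOException {
    // Serialize a message to its wire format.
    byte[] bytes =
        LlmModelSettings.Parameters.newBuilder().setTemperature(0.5f).build().toByteArray();

    // Re-read the serialized bytes into a new builder.
    LlmModelSettings.Parameters.Builder builder = LlmModelSettings.Parameters.newBuilder();
    builder.mergeFrom(CodedInputStream.newInstance(bytes), ExtensionRegistryLite.getEmptyRegistry());

    System.out.println(builder.getTemperature()); // 0.5
  }
}
```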
mergeFrom(Message other)
public LlmModelSettings.Parameters.Builder mergeFrom(Message other)

| Parameter | |
|---|---|
| Name | Description |
| other | Message |

| Returns | |
|---|---|
| Type | Description |
| LlmModelSettings.Parameters.Builder | |

mergeUnknownFields(UnknownFieldSet unknownFields)
public final LlmModelSettings.Parameters.Builder mergeUnknownFields(UnknownFieldSet unknownFields)

| Parameter | |
|---|---|
| Name | Description |
| unknownFields | UnknownFieldSet |

| Returns | |
|---|---|
| Type | Description |
| LlmModelSettings.Parameters.Builder | |
setField(Descriptors.FieldDescriptor field, Object value)
public LlmModelSettings.Parameters.Builder setField(Descriptors.FieldDescriptor field, Object value)

| Parameters | |
|---|---|
| Name | Description |
| field | FieldDescriptor |
| value | Object |

| Returns | |
|---|---|
| Type | Description |
| LlmModelSettings.Parameters.Builder | |
setInputTokenLimit(LlmModelSettings.Parameters.InputTokenLimit value)
public LlmModelSettings.Parameters.Builder setInputTokenLimit(LlmModelSettings.Parameters.InputTokenLimit value)

The input token limit. This setting is currently only supported by playbooks.

optional .google.cloud.dialogflow.cx.v3beta1.LlmModelSettings.Parameters.InputTokenLimit input_token_limit = 2;

| Parameter | |
|---|---|
| Name | Description |
| value | LlmModelSettings.Parameters.InputTokenLimit. The inputTokenLimit to set. |

| Returns | |
|---|---|
| Type | Description |
| LlmModelSettings.Parameters.Builder | This builder for chaining. |

setInputTokenLimitValue(int value)
public LlmModelSettings.Parameters.Builder setInputTokenLimitValue(int value)

The input token limit. This setting is currently only supported by playbooks.

optional .google.cloud.dialogflow.cx.v3beta1.LlmModelSettings.Parameters.InputTokenLimit input_token_limit = 2;

| Parameter | |
|---|---|
| Name | Description |
| value | int. The enum numeric value on the wire for inputTokenLimit to set. |

| Returns | |
|---|---|
| Type | Description |
| LlmModelSettings.Parameters.Builder | This builder for chaining. |
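setInputTokenLimit takes a generated InputTokenLimit enum constant, while setInputTokenLimitValue takes the raw wire number directly; either call marks this optional field as present. The sketch below uses only the wire-value setter with 0 (the enum's guaranteed zero/unspecified value) so it does not depend on any particular named constant, which would be an assumption here; in real code, prefer the enum overload with a constant from LlmModelSettings.Parameters.InputTokenLimit.

```java
import com.google.cloud.dialogflow.cx.v3beta1.LlmModelSettings;

public class InputTokenLimitSetterSketch {
  public static void main(String[] args) {
    LlmModelSettings.Parameters.Builder builder = LlmModelSettings.Parameters.newBuilder();

    // Set the field via its wire value. 0 is the zero/unspecified enum value; concrete
    // limit values should come from the constants on the generated InputTokenLimit enum.
    builder.setInputTokenLimitValue(0);

    // Explicit presence: the field counts as set even though it holds the zero value.
    System.out.println(builder.hasInputTokenLimit()); // true
    System.out.println(builder.getInputTokenLimit()); // the enum constant for wire value 0
  }
}
```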
setOutputTokenLimit(LlmModelSettings.Parameters.OutputTokenLimit value)
public LlmModelSettings.Parameters.Builder setOutputTokenLimit(LlmModelSettings.Parameters.OutputTokenLimit value)

The output token limit. This setting is currently only supported by playbooks. Only one of output_token_limit and max_output_tokens is allowed to be set.

optional .google.cloud.dialogflow.cx.v3beta1.LlmModelSettings.Parameters.OutputTokenLimit output_token_limit = 3;

| Parameter | |
|---|---|
| Name | Description |
| value | LlmModelSettings.Parameters.OutputTokenLimit. The outputTokenLimit to set. |

| Returns | |
|---|---|
| Type | Description |
| LlmModelSettings.Parameters.Builder | This builder for chaining. |

setOutputTokenLimitValue(int value)
public LlmModelSettings.Parameters.Builder setOutputTokenLimitValue(int value)

The output token limit. This setting is currently only supported by playbooks. Only one of output_token_limit and max_output_tokens is allowed to be set.

optional .google.cloud.dialogflow.cx.v3beta1.LlmModelSettings.Parameters.OutputTokenLimit output_token_limit = 3;

| Parameter | |
|---|---|
| Name | Description |
| value | int. The enum numeric value on the wire for outputTokenLimit to set. |

| Returns | |
|---|---|
| Type | Description |
| LlmModelSettings.Parameters.Builder | This builder for chaining. |

setRepeatedField(Descriptors.FieldDescriptor field, int index, Object value)
public LlmModelSettings.Parameters.Builder setRepeatedField(Descriptors.FieldDescriptor field, int index, Object value)

| Parameters | |
|---|---|
| Name | Description |
| field | FieldDescriptor |
| index | int |
| value | Object |

| Returns | |
|---|---|
| Type | Description |
| LlmModelSettings.Parameters.Builder | |
setTemperature(float value)
public LlmModelSettings.Parameters.Builder setTemperature(float value)

The temperature used for sampling during response generation. Temperature controls the degree of randomness in token selection: lower temperature means less randomness, higher temperature means more randomness. Valid range: [0.0, 1.0].

optional float temperature = 1;

| Parameter | |
|---|---|
| Name | Description |
| value | float. The temperature to set. |

| Returns | |
|---|---|
| Type | Description |
| LlmModelSettings.Parameters.Builder | This builder for chaining. |
setUnknownFields(UnknownFieldSet unknownFields)
public final LlmModelSettings.Parameters.Builder setUnknownFields(UnknownFieldSet unknownFields)

| Parameter | |
|---|---|
| Name | Description |
| unknownFields | UnknownFieldSet |

| Returns | |
|---|---|
| Type | Description |
| LlmModelSettings.Parameters.Builder | |