
Commit d4d3af5

awstools committed
feat(client-bedrock-agent): Removing support for topK property in PromptModelInferenceConfiguration object, Making PromptTemplateConfiguration property as required, Limiting the maximum PromptVariant to 1
1 parent f4c5267 · commit d4d3af5

13 files changed (+6, -32 lines)
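
Taken together, the three API changes affect how prompts are created with this client: `topK` is gone from `PromptModelInferenceConfiguration`, each `PromptVariant` must now carry a `templateConfiguration`, and the `variants` list accepts at most one entry. The following is a minimal sketch of a `CreatePromptCommand` call that conforms to the updated model; it is not part of the commit, and the prompt name, template text, and variable names are illustrative:

```ts
import { BedrockAgentClient, CreatePromptCommand } from "@aws-sdk/client-bedrock-agent";

const client = new BedrockAgentClient({ region: "us-east-1" });

// One variant only (the new maximum), with the now-required
// templateConfiguration and no topK among the inference settings.
const response = await client.send(
  new CreatePromptCommand({
    name: "example-prompt", // illustrative
    variants: [
      {
        name: "default", // illustrative
        templateType: "TEXT",
        templateConfiguration: {
          text: {
            text: "Summarize the following text: {{input}}",
            inputVariables: [{ name: "input" }],
          },
        },
        inferenceConfiguration: {
          text: { temperature: 0.5, topP: 0.9, maxTokens: 512 },
        },
      },
    ],
  }),
);

console.log(response.id);
```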

clients/client-bedrock-agent/src/commands/CreateFlowCommand.ts (-2)

@@ -91,7 +91,6 @@ export interface CreateFlowCommandOutput extends CreateFlowResponse, __MetadataB
  * text: { // PromptModelInferenceConfiguration
  * temperature: Number("float"),
  * topP: Number("float"),
- * topK: Number("int"),
  * maxTokens: Number("int"),
  * stopSequences: [ // StopSequences
  * "STRING_VALUE",
@@ -221,7 +220,6 @@ export interface CreateFlowCommandOutput extends CreateFlowResponse, __MetadataB
  * // text: { // PromptModelInferenceConfiguration
  * // temperature: Number("float"),
  * // topP: Number("float"),
- * // topK: Number("int"),
  * // maxTokens: Number("int"),
  * // stopSequences: [ // StopSequences
  * // "STRING_VALUE",

clients/client-bedrock-agent/src/commands/CreateFlowVersionCommand.ts (-1)

@@ -102,7 +102,6 @@ export interface CreateFlowVersionCommandOutput extends CreateFlowVersionRespons
  * // text: { // PromptModelInferenceConfiguration
  * // temperature: Number("float"),
  * // topP: Number("float"),
- * // topK: Number("int"),
  * // maxTokens: Number("int"),
  * // stopSequences: [ // StopSequences
  * // "STRING_VALUE",

clients/client-bedrock-agent/src/commands/CreatePromptCommand.ts (-2)

@@ -64,7 +64,6 @@ export interface CreatePromptCommandOutput extends CreatePromptResponse, __Metad
  * text: { // PromptModelInferenceConfiguration
  * temperature: Number("float"),
  * topP: Number("float"),
- * topK: Number("int"),
  * maxTokens: Number("int"),
  * stopSequences: [ // StopSequences
  * "STRING_VALUE",
@@ -110,7 +109,6 @@ export interface CreatePromptCommandOutput extends CreatePromptResponse, __Metad
  * // text: { // PromptModelInferenceConfiguration
  * // temperature: Number("float"),
  * // topP: Number("float"),
- * // topK: Number("int"),
  * // maxTokens: Number("int"),
  * // stopSequences: [ // StopSequences
  * // "STRING_VALUE",

clients/client-bedrock-agent/src/commands/CreatePromptVersionCommand.ts (-1)

@@ -73,7 +73,6 @@ export interface CreatePromptVersionCommandOutput extends CreatePromptVersionRes
  * // text: { // PromptModelInferenceConfiguration
  * // temperature: Number("float"),
  * // topP: Number("float"),
- * // topK: Number("int"),
  * // maxTokens: Number("int"),
  * // stopSequences: [ // StopSequences
  * // "STRING_VALUE",

clients/client-bedrock-agent/src/commands/GetFlowCommand.ts (-1)

@@ -97,7 +97,6 @@ export interface GetFlowCommandOutput extends GetFlowResponse, __MetadataBearer
  * // text: { // PromptModelInferenceConfiguration
  * // temperature: Number("float"),
  * // topP: Number("float"),
- * // topK: Number("int"),
  * // maxTokens: Number("int"),
  * // stopSequences: [ // StopSequences
  * // "STRING_VALUE",

clients/client-bedrock-agent/src/commands/GetFlowVersionCommand.ts (-1)

@@ -101,7 +101,6 @@ export interface GetFlowVersionCommandOutput extends GetFlowVersionResponse, __M
  * // text: { // PromptModelInferenceConfiguration
  * // temperature: Number("float"),
  * // topP: Number("float"),
- * // topK: Number("int"),
  * // maxTokens: Number("int"),
  * // stopSequences: [ // StopSequences
  * // "STRING_VALUE",

clients/client-bedrock-agent/src/commands/GetIngestionJobCommand.ts (+1, -1)

@@ -28,7 +28,7 @@ export interface GetIngestionJobCommandInput extends GetIngestionJobRequest {}
 export interface GetIngestionJobCommandOutput extends GetIngestionJobResponse, __MetadataBearer {}

 /**
- * <p>Gets information about a data ingestion job. Data sources are ingested into your knowledge base so that Large Lanaguage Models (LLMs) can use your data.</p>
+ * <p>Gets information about a data ingestion job. Data sources are ingested into your knowledge base so that Large Language Models (LLMs) can use your data.</p>
  * @example
  * Use a bare-bones client and the command you need to make an API call.
  * ```javascript

clients/client-bedrock-agent/src/commands/GetPromptCommand.ts (-1)

@@ -65,7 +65,6 @@ export interface GetPromptCommandOutput extends GetPromptResponse, __MetadataBea
  * // text: { // PromptModelInferenceConfiguration
  * // temperature: Number("float"),
  * // topP: Number("float"),
- * // topK: Number("int"),
  * // maxTokens: Number("int"),
  * // stopSequences: [ // StopSequences
  * // "STRING_VALUE",

clients/client-bedrock-agent/src/commands/UpdateFlowCommand.ts (-2)

@@ -91,7 +91,6 @@ export interface UpdateFlowCommandOutput extends UpdateFlowResponse, __MetadataB
  * text: { // PromptModelInferenceConfiguration
  * temperature: Number("float"),
  * topP: Number("float"),
- * topK: Number("int"),
  * maxTokens: Number("int"),
  * stopSequences: [ // StopSequences
  * "STRING_VALUE",
@@ -218,7 +217,6 @@ export interface UpdateFlowCommandOutput extends UpdateFlowResponse, __MetadataB
  * // text: { // PromptModelInferenceConfiguration
  * // temperature: Number("float"),
  * // topP: Number("float"),
- * // topK: Number("int"),
  * // maxTokens: Number("int"),
  * // stopSequences: [ // StopSequences
  * // "STRING_VALUE",

clients/client-bedrock-agent/src/commands/UpdatePromptCommand.ts (-2)

@@ -64,7 +64,6 @@ export interface UpdatePromptCommandOutput extends UpdatePromptResponse, __Metad
  * text: { // PromptModelInferenceConfiguration
  * temperature: Number("float"),
  * topP: Number("float"),
- * topK: Number("int"),
  * maxTokens: Number("int"),
  * stopSequences: [ // StopSequences
  * "STRING_VALUE",
@@ -107,7 +106,6 @@ export interface UpdatePromptCommandOutput extends UpdatePromptResponse, __Metad
  * // text: { // PromptModelInferenceConfiguration
  * // temperature: Number("float"),
  * // topP: Number("float"),
- * // topK: Number("int"),
  * // maxTokens: Number("int"),
  * // stopSequences: [ // StopSequences
  * // "STRING_VALUE",

clients/client-bedrock-agent/src/models/models_0.ts (+1, -7)

@@ -3972,12 +3972,6 @@ export interface PromptModelInferenceConfiguration {
    */
   topP?: number;

-  /**
-   * <p>The number of most-likely candidates that the model considers for the next token during generation.</p>
-   * @public
-   */
-  topK?: number;
-
   /**
    * <p>The maximum number of tokens to return in the response.</p>
    * @public
@@ -7514,7 +7508,7 @@ export interface PromptVariant {
   * <p>Contains configurations for the prompt template.</p>
   * @public
   */
-  templateConfiguration?: PromptTemplateConfiguration;
+  templateConfiguration: PromptTemplateConfiguration | undefined;

  /**
   * <p>The unique identifier of the model or <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/cross-region-inference.html">inference profile</a> with which to run inference on the prompt.</p>
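
This is the type-level counterpart of the new `smithy.api#required` trait in the model below: the member goes from optional (`?:`) to required-but-possibly-`undefined`, the SDK's usual encoding for required input members. A sketch of the compile-time effect, with illustrative values:

```ts
import type { PromptVariant } from "@aws-sdk/client-bedrock-agent";

// OK after this commit: the key is present.
const variant: PromptVariant = {
  name: "default", // illustrative
  templateType: "TEXT",
  templateConfiguration: {
    text: { text: "Hello, {{name}}!", inputVariables: [{ name: "name" }] },
  },
};

// Compile error after this commit: property 'templateConfiguration'
// is missing, since it is no longer optional.
// const broken: PromptVariant = { name: "default", templateType: "TEXT" };
```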

clients/client-bedrock-agent/src/protocols/Aws_restJson1.ts (-2)

@@ -3533,7 +3533,6 @@ const se_PromptModelInferenceConfiguration = (
   maxTokens: [],
   stopSequences: _json,
   temperature: __serializeFloat,
-  topK: [],
   topP: __serializeFloat,
  });
 };
@@ -4484,7 +4483,6 @@ const de_PromptModelInferenceConfiguration = (
   maxTokens: __expectInt32,
   stopSequences: _json,
   temperature: __limitedParseFloat32,
-  topK: __expectInt32,
   topP: __limitedParseFloat32,
  }) as any;
 };
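
In the generated protocol code, these maps are consumed by the SDK runtime's `take` helper, which copies only the listed keys (an empty `[]` means pass-through, a function means transform). Dropping the `topK` entries therefore stops the field from being serialized into requests or parsed out of responses. A simplified stand-in for that mechanism, not the SDK's actual helper:

```ts
// Simplified sketch of the codegen'd take(): copy only the listed keys,
// applying a transform where one is given.
function take(
  input: Record<string, unknown>,
  spec: Record<string, ((v: any) => any) | []>,
): Record<string, unknown> {
  const out: Record<string, unknown> = {};
  for (const [key, fn] of Object.entries(spec)) {
    const value = input[key];
    if (value === undefined) continue;
    out[key] = Array.isArray(fn) ? value : fn(value);
  }
  return out;
}

// With topK removed from the spec, it is silently dropped even if present.
take(
  { temperature: 0.7, topP: 0.9, topK: 40, maxTokens: 256 },
  { temperature: (v) => v, topP: (v) => v, maxTokens: [] },
); // => { temperature: 0.7, topP: 0.9, maxTokens: 256 }
```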

codegen/sdk-codegen/aws-models/bedrock-agent.json (+4, -9)

@@ -7088,7 +7088,7 @@
       }
     ],
     "traits": {
-      "smithy.api#documentation": "<p>Gets information about a data ingestion job. Data sources are ingested into your knowledge base so that Large Lanaguage Models (LLMs) can use your data.</p>",
+      "smithy.api#documentation": "<p>Gets information about a data ingestion job. Data sources are ingested into your knowledge base so that Large Language Models (LLMs) can use your data.</p>",
       "smithy.api#http": {
         "code": 200,
         "method": "GET",
@@ -10520,12 +10520,6 @@
           "smithy.api#documentation": "<p>The percentage of most-likely candidates that the model considers for the next token.</p>"
         }
       },
-      "topK": {
-        "target": "com.amazonaws.bedrockagent#TopK",
-        "traits": {
-          "smithy.api#documentation": "<p>The number of most-likely candidates that the model considers for the next token during generation.</p>"
-        }
-      },
       "maxTokens": {
         "target": "com.amazonaws.bedrockagent#MaximumLength",
         "traits": {
@@ -10762,7 +10756,8 @@
       "templateConfiguration": {
         "target": "com.amazonaws.bedrockagent#PromptTemplateConfiguration",
         "traits": {
-          "smithy.api#documentation": "<p>Contains configurations for the prompt template.</p>"
+          "smithy.api#documentation": "<p>Contains configurations for the prompt template.</p>",
+          "smithy.api#required": {}
         }
       },
       "modelId": {
@@ -10796,7 +10791,7 @@
       },
       "traits": {
         "smithy.api#length": {
-          "max": 3
+          "max": 1
         },
         "smithy.api#sensitive": {}
       }