This repository was archived by the owner on Mar 6, 2024. It is now read-only.

Commit 8107663

rename summary and review models (#113)
1 parent 26d98cd · commit 8107663

File tree: 4 files changed (+31, -33 lines)


action.yml

Lines changed: 5 additions & 4 deletions
@@ -53,13 +53,14 @@ inputs:
       !**/_gen/**
       !**/generated/**
       !**/vendor/**
-  openai_summary_model:
+  openai_light_model:
     required: false
-    description: 'Model to use for summarization'
+    description:
+      'Model to use for simple tasks like summarizing diff on a file.'
     default: 'gpt-3.5-turbo'
-  openai_review_model:
+  openai_heavy_model:
     required: false
-    description: 'Model to use for review'
+    description: 'Model to use for complex tasks such as code reviews.'
     default: 'gpt-3.5-turbo'
   openai_model_temperature:
     required: false
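
For context, here is a minimal sketch of how a workflow consuming this action could set the renamed inputs. It is illustrative only: the uses: reference, trigger, secret names, and the 'gpt-4' value are placeholders and assumptions, not taken from this commit; only the two input names and their 'gpt-3.5-turbo' default come from action.yml above.

on:
  pull_request:

jobs:
  review:
    runs-on: ubuntu-latest
    steps:
      # Placeholder reference; substitute the real owner/repo and ref of this action.
      - uses: owner/repo@main
        env:
          # Assumed secret names, shown only for illustration.
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
        with:
          # Cheaper model for simple tasks such as per-file diff summaries.
          openai_light_model: 'gpt-3.5-turbo'
          # Stronger model for complex tasks such as the review itself (value is illustrative).
          openai_heavy_model: 'gpt-4'

Workflows that still pass the old openai_summary_model or openai_review_model inputs would need to switch to the new names; inputs not declared in action.yml are ignored by the runner with an "Unexpected input(s)" warning.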

dist/index.js

Lines changed: 12 additions & 12 deletions
Some generated files are not rendered by default.

src/main.ts

Lines changed: 4 additions & 7 deletions
@@ -12,8 +12,8 @@ async function run(): Promise<void> {
     core.getBooleanInput('review_comment_lgtm'),
     core.getMultilineInput('path_filters'),
     core.getInput('system_message'),
-    core.getInput('openai_summary_model'),
-    core.getInput('openai_review_model'),
+    core.getInput('openai_light_model'),
+    core.getInput('openai_heavy_model'),
     core.getInput('openai_model_temperature'),
     core.getInput('openai_retries'),
     core.getInput('openai_timeout_ms'),
@@ -38,7 +38,7 @@ async function run(): Promise<void> {
     lightBot = new Bot(
       options,
       new OpenAIOptions(
-        options.openai_summary_model,
+        options.openai_light_model,
         options.summary_token_limits
       )
     )
@@ -53,10 +53,7 @@ async function run(): Promise<void> {
   try {
     heavyBot = new Bot(
       options,
-      new OpenAIOptions(
-        options.openai_review_model,
-        options.review_token_limits
-      )
+      new OpenAIOptions(options.openai_heavy_model, options.review_token_limits)
     )
   } catch (e: any) {
     core.warning(

src/options.ts

Lines changed: 10 additions & 10 deletions
@@ -185,8 +185,8 @@ export class Options {
   review_comment_lgtm: boolean
   path_filters: PathFilter
   system_message: string
-  openai_summary_model: string
-  openai_review_model: string
+  openai_light_model: string
+  openai_heavy_model: string
   openai_model_temperature: number
   openai_retries: number
   openai_timeout_ms: number
@@ -201,8 +201,8 @@ export class Options {
     review_comment_lgtm = false,
     path_filters: string[] | null = null,
     system_message = '',
-    openai_summary_model = 'gpt-3.5-turbo',
-    openai_review_model = 'gpt-3.5-turbo',
+    openai_light_model = 'gpt-3.5-turbo',
+    openai_heavy_model = 'gpt-3.5-turbo',
     openai_model_temperature = '0.0',
     openai_retries = '3',
     openai_timeout_ms = '120000',
@@ -214,14 +214,14 @@ export class Options {
     this.review_comment_lgtm = review_comment_lgtm
     this.path_filters = new PathFilter(path_filters)
     this.system_message = system_message
-    this.openai_summary_model = openai_summary_model
-    this.openai_review_model = openai_review_model
+    this.openai_light_model = openai_light_model
+    this.openai_heavy_model = openai_heavy_model
     this.openai_model_temperature = parseFloat(openai_model_temperature)
     this.openai_retries = parseInt(openai_retries)
     this.openai_timeout_ms = parseInt(openai_timeout_ms)
     this.openai_concurrency_limit = parseInt(openai_concurrency_limit)
-    this.summary_token_limits = new TokenLimits(openai_summary_model)
-    this.review_token_limits = new TokenLimits(openai_review_model)
+    this.summary_token_limits = new TokenLimits(openai_light_model)
+    this.review_token_limits = new TokenLimits(openai_heavy_model)
   }

   // print all options using core.info
@@ -232,8 +232,8 @@ export class Options {
     core.info(`review_comment_lgtm: ${this.review_comment_lgtm}`)
     core.info(`path_filters: ${this.path_filters}`)
     core.info(`system_message: ${this.system_message}`)
-    core.info(`openai_summary_model: ${this.openai_summary_model}`)
-    core.info(`openai_review_model: ${this.openai_review_model}`)
+    core.info(`openai_light_model: ${this.openai_light_model}`)
+    core.info(`openai_heavy_model: ${this.openai_heavy_model}`)
     core.info(`openai_model_temperature: ${this.openai_model_temperature}`)
     core.info(`openai_retries: ${this.openai_retries}`)
     core.info(`openai_timeout_ms: ${this.openai_timeout_ms}`)
