-import "./fetch-polyfill.js";
+import './fetch-polyfill.js'

-import * as core from "@actions/core";
-import * as openai from "chatgpt";
-import * as optionsJs from "./options.js";
+import * as core from '@actions/core'
+import * as openai from 'chatgpt'
+import * as optionsJs from './options.js'

 // define type to save parentMessageId and conversationId
 export type Ids = {
-  parentMessageId?: string;
-  conversationId?: string;
-};
+  parentMessageId?: string
+  conversationId?: string
+}

 export class Bot {
-  private turbo: openai.ChatGPTAPI | null = null; // not free
+  private turbo: openai.ChatGPTAPI | null = null // not free

-  private options: optionsJs.Options;
+  private options: optionsJs.Options

   constructor(options: optionsJs.Options) {
-    this.options = options;
+    this.options = options
     if (process.env.OPENAI_API_KEY) {
       this.turbo = new openai.ChatGPTAPI({
         systemMessage: options.system_message,
         apiKey: process.env.OPENAI_API_KEY,
         debug: options.debug,
         completionParams: {
-          temperature: options.temperature,
-        },
+          temperature: options.temperature
+        }
         // assistantLabel: " ",
         // userLabel: " ",
-      });
+      })
     } else {
       const err =
-        "Unable to initialize the OpenAI API, both 'OPENAI_API_KEY' environment variable are not available";
-      throw new Error(err);
+        "Unable to initialize the OpenAI API, both 'OPENAI_API_KEY' environment variable are not available"
+      throw new Error(err)
     }
   }

   chat = async (message: string, ids: Ids): Promise<[string, Ids]> => {
-    let new_ids: Ids = {};
-    let response = "";
+    let new_ids: Ids = {}
+    let response = ''
     try {
-      [response, new_ids] = await this.chat_(message, ids);
+      ;[response, new_ids] = await this.chat_(message, ids)
     } catch (e: any) {
-      core.warning(`Failed to chat: ${e}, backtrace: ${e.stack}`);
+      core.warning(`Failed to chat: ${e}, backtrace: ${e.stack}`)
     } finally {
-      return [response, new_ids];
+      return [response, new_ids]
     }
-  };
+  }

   private chat_ = async (message: string, ids: Ids): Promise<[string, Ids]> => {
     if (!message) {
-      return ["", {}];
+      return ['', {}]
     }
     if (this.options.debug) {
-      core.info(`sending to openai: ${message}`);
+      core.info(`sending to openai: ${message}`)
     }

-    let response: openai.ChatMessage | null = null;
+    let response: openai.ChatMessage | null = null
     if (this.turbo) {
-      let opts: openai.SendMessageOptions = {};
+      let opts: openai.SendMessageOptions = {}
       if (ids.parentMessageId) {
-        opts.parentMessageId = ids.parentMessageId;
+        opts.parentMessageId = ids.parentMessageId
       }
-      response = await this.turbo.sendMessage(message, opts);
+      response = await this.turbo.sendMessage(message, opts)
       try {
-        core.info(`response: ${JSON.stringify(response)}`);
+        core.info(`response: ${JSON.stringify(response)}`)
       } catch (e: any) {
         core.info(
-          `response: ${response}, failed to stringify: ${e}, backtrace: ${e.stack}`,
-        );
+          `response: ${response}, failed to stringify: ${e}, backtrace: ${e.stack}`
+        )
       }
     } else {
-      core.setFailed("The OpenAI API is not initialized");
+      core.setFailed('The OpenAI API is not initialized')
     }
-    let response_text = "";
+    let response_text = ''
     if (response) {
-      response_text = response.text;
+      response_text = response.text
     } else {
-      core.warning("openai response is null");
+      core.warning('openai response is null')
     }
     // remove the prefix "with " in the response
-    if (response_text.startsWith("with ")) {
-      response_text = response_text.substring(5);
+    if (response_text.startsWith('with ')) {
+      response_text = response_text.substring(5)
     }
     if (this.options.debug) {
-      core.info(`openai responses: ${response_text}`);
+      core.info(`openai responses: ${response_text}`)
     }
     const new_ids: Ids = {
       parentMessageId: response?.id,
-      conversationId: response?.conversationId,
-    };
-    return [response_text, new_ids];
-  };
+      conversationId: response?.conversationId
+    }
+    return [response_text, new_ids]
+  }
 }
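For context, here is a minimal usage sketch of the Bot class this file defines. It assumes the file is importable as './bot.js' and that an optionsJs.Options instance is already available from the action's inputs (the Options constructor is not shown in this diff); the two-turn flow simply threads the returned Ids back into the next call so the follow-up request stays in the same conversation.

import {Bot, Ids} from './bot.js'
import * as optionsJs from './options.js'

// Hypothetical helper: two chat turns against the same conversation.
// Bot's constructor throws if OPENAI_API_KEY is not set in the environment.
async function twoTurnReview(options: optionsJs.Options): Promise<string> {
  const bot = new Bot(options)

  // First turn: an empty Ids object starts a fresh conversation.
  let ids: Ids = {}
  let reply = ''
  ;[reply, ids] = await bot.chat('Summarize the following diff: ...', ids)

  // Second turn: ids now carry parentMessageId, linking the follow-up
  // to the previous response.
  ;[reply, ids] = await bot.chat('Now list potential bugs.', ids)

  return reply
}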