From ffb4dcacfa55429695adaa3934cbb9384a7c97be Mon Sep 17 00:00:00 2001
From: HyunggyuJang
Date: Wed, 9 Aug 2023 18:05:13 +0900
Subject: [PATCH] Support gpt-3.5-turbo-16k model

`TokenLimits` is the only place that needs to be modified; the token
limits for the new model have been set accordingly.
---
 src/limits.ts | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/src/limits.ts b/src/limits.ts
index 7c2afc52..aca807f6 100644
--- a/src/limits.ts
+++ b/src/limits.ts
@@ -9,6 +9,9 @@ export class TokenLimits {
     if (model === 'gpt-4-32k') {
       this.maxTokens = 32600
       this.responseTokens = 4000
+    } else if (model === 'gpt-3.5-turbo-16k') {
+      this.maxTokens = 16300
+      this.responseTokens = 3000
     } else if (model === 'gpt-4') {
       this.maxTokens = 8000
       this.responseTokens = 2000
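
Note (illustrative, not part of the patch): a minimal sketch of how the
patched `TokenLimits` class might look and be used. The diff only shows the
branches around the insertion point, so the `requestTokens` field, the
default fallback branch, and the usage example below are assumptions added
for context, not code from the repository.

// Hypothetical reconstruction of src/limits.ts after the patch.
export class TokenLimits {
  maxTokens: number
  responseTokens: number
  requestTokens: number // assumed: tokens left for the request after reserving the response budget

  constructor(model = 'gpt-3.5-turbo') {
    if (model === 'gpt-4-32k') {
      this.maxTokens = 32600
      this.responseTokens = 4000
    } else if (model === 'gpt-3.5-turbo-16k') {
      // New branch from this patch: stay slightly below the 16k context
      // window and reserve 3000 tokens for the model's response.
      this.maxTokens = 16300
      this.responseTokens = 3000
    } else if (model === 'gpt-4') {
      this.maxTokens = 8000
      this.responseTokens = 2000
    } else {
      // Assumed default for the base 4k-context gpt-3.5-turbo model.
      this.maxTokens = 4000
      this.responseTokens = 1000
    }
    this.requestTokens = this.maxTokens - this.responseTokens
  }
}

// Example usage (hypothetical): pick limits for the 16k model.
const limits = new TokenLimits('gpt-3.5-turbo-16k')
console.log(limits.maxTokens, limits.responseTokens) // 16300 3000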