Skip to content

Commit efea71a

Browse files
authored
Merge pull request #9 from USEPA/AI24-4_Raise_context_limit
[WIP] Ai24 4 raise context limit
2 parents 0619b28 + e51864d commit efea71a

File tree

15 files changed

+170
-77
lines changed

15 files changed

+170
-77
lines changed

.gitignore

+46-45
Original file line numberDiff line numberDiff line change
@@ -1,45 +1,46 @@
1-
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
2-
3-
# dependencies
4-
/node_modules
5-
/.pnp
6-
.pnp.js
7-
8-
# testing
9-
/coverage
10-
/test-results
11-
12-
# next.js
13-
/.next/
14-
/out/
15-
/dist
16-
17-
# production
18-
/build
19-
20-
# misc
21-
.DS_Store
22-
*.pem
23-
24-
# debug
25-
npm-debug.log*
26-
yarn-debug.log*
27-
yarn-error.log*
28-
.pnpm-debug.log*
29-
30-
# local env files
31-
.env*.local
32-
33-
# vercel
34-
.vercel
35-
36-
# typescript
37-
*.tsbuildinfo
38-
next-env.d.ts
39-
.idea
40-
pnpm-lock.yaml
41-
.env
42-
manifest.yml
43-
44-
# Sentry Config File
45-
.env.sentry-build-plugin
1+
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
2+
3+
# dependencies
4+
/node_modules
5+
/.pnp
6+
.pnp.js
7+
8+
# testing
9+
/coverage
10+
/test-results
11+
12+
# next.js
13+
/.next/
14+
/out/
15+
/dist
16+
17+
# production
18+
/build
19+
20+
# misc
21+
.DS_Store
22+
*.pem
23+
24+
# debug
25+
npm-debug.log*
26+
yarn-debug.log*
27+
yarn-error.log*
28+
.pnpm-debug.log*
29+
30+
# local env files
31+
.env*.local
32+
33+
# vercel
34+
.vercel
35+
36+
# typescript
37+
*.tsbuildinfo
38+
next-env.d.ts
39+
.idea
40+
pnpm-lock.yaml
41+
.env
42+
manifest.yml
43+
44+
# Sentry Config File
45+
.env.sentry-build-plugin
46+
.vs/

components/Chat/ChatInput.tsx

+41-20
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@ import {
55
IconPlayerStop,
66
IconRepeat,
77
IconSend,
8+
IconHelpCircleFilled
89
} from '@tabler/icons-react';
910
import {
1011
KeyboardEvent,
@@ -18,7 +19,7 @@ import {
1819

1920
import { useTranslation } from 'next-i18next';
2021

21-
import { Message } from '@/types/chat';
22+
import { Message, Conversation } from '@/types/chat';
2223
import { Plugin } from '@/types/plugin';
2324
import { Prompt } from '@/types/prompt';
2425

@@ -28,6 +29,7 @@ import { PluginSelect } from './PluginSelect';
2829
import { PromptList } from './PromptList';
2930
import { VariableModal } from './VariableModal';
3031

32+
3133
interface Props {
3234
onSend: (message: Message, plugin: Plugin | null) => void;
3335
onRegenerate: () => void;
@@ -62,6 +64,8 @@ export const ChatInput = ({
6264
const [isModalVisible, setIsModalVisible] = useState(false);
6365
const [showPluginSelect, setShowPluginSelect] = useState(false);
6466
const [plugin, setPlugin] = useState<Plugin | null>(null);
67+
const [promptCharacterLength, setPromptCharacterLength] = useState(0);
68+
const [characterLength, setCharacterLength] = useState(0);
6569

6670
const promptListRef = useRef<HTMLUListElement | null>(null);
6771

@@ -71,22 +75,12 @@ export const ChatInput = ({
7175

7276
const handleChange = (e: React.ChangeEvent<HTMLTextAreaElement>) => {
7377
const value = e.target.value;
74-
const maxLength = selectedConversation?.model.maxLength;
75-
76-
if (maxLength && value.length > maxLength) {
77-
alert(
78-
t(
79-
`Message limit is {{maxLength}} characters. You have entered {{valueLength}} characters.`,
80-
{ maxLength, valueLength: value.length },
81-
),
82-
);
83-
return;
84-
}
85-
78+
setPromptCharacterLength((selectedConversation?.characterLength ?? 0) + value.length);
8679
setContent(value);
8780
updatePromptListVisibility(value);
8881
};
8982

83+
9084
const handleSend = () => {
9185
if (messageIsStreaming) {
9286
return;
@@ -234,11 +228,12 @@ export const ChatInput = ({
234228
textareaRef.current.style.height = 'inherit';
235229
textareaRef.current.style.height = `${textareaRef.current?.scrollHeight}px`;
236230
textareaRef.current.style.overflow = `${
237-
textareaRef?.current?.scrollHeight > 400 ? 'auto' : 'hidden'
238-
}`;
231+
textareaRef?.current?.scrollHeight > 400 ? 'auto' : 'hidden'
232+
}`;
239233
}
240234
}, [content]);
241235

236+
242237
useEffect(() => {
243238
const handleOutsideClick = (e: MouseEvent) => {
244239
if (
@@ -256,9 +251,11 @@ export const ChatInput = ({
256251
};
257252
}, []);
258253

254+
const maxLength = selectedConversation?.model.maxLength ?? 0;
255+
259256
return (
260257
<div className="absolute bottom-0 left-0 w-full border-transparent bg-gradient-to-b from-transparent via-white to-white pt-6 dark:border-white/20 dark:via-[#343541] dark:to-[#343541] md:pt-2">
261-
<div className="stretch mx-2 mt-4 flex flex-row gap-3 last:mb-2 md:mx-4 md:mt-[52px] md:last:mb-6 lg:mx-auto lg:max-w-3xl">
258+
<div className="stretch mx-2 mt-4 flex flex-row gap-3 last:mb-2 md:mx-4 md:mt-[52px] md:last:mb-12 lg:mx-auto lg:max-w-3xl">
262259
{messageIsStreaming && (
263260
<button
264261
className="absolute top-0 left-0 right-0 mx-auto mb-3 flex w-fit items-center gap-3 rounded border border-neutral-200 bg-white py-2 px-4 text-black hover:opacity-50 dark:border-neutral-600 dark:bg-[#343541] dark:text-white md:mb-0 md:mt-2"
@@ -293,10 +290,10 @@ export const ChatInput = ({
293290
bottom: `${textareaRef?.current?.scrollHeight}px`,
294291
maxHeight: '400px',
295292
overflow: `${
296-
textareaRef.current && textareaRef.current.scrollHeight > 400
297-
? 'auto'
298-
: 'hidden'
299-
}`,
293+
textareaRef.current && textareaRef.current.scrollHeight > 400
294+
? 'auto'
295+
: 'hidden'
296+
}`,
300297
}}
301298
placeholder={
302299
t('Type a message ') || ''
@@ -358,7 +355,31 @@ export const ChatInput = ({
358355
onClose={() => setIsModalVisible(false)}
359356
/>
360357
)}
358+
359+
360+
361+
{(promptCharacterLength <= maxLength && promptCharacterLength > maxLength * .75) && (
362+
<div className="text-orange-500 m-4">
363+
Warning: you are approaching the number of characters this model is able to handle. Consider starting a new conversation. Characters left: {maxLength - promptCharacterLength}
364+
365+
<span className="inline-block relative top-[2px] pl-1"
366+
title="Once past the context limit, the conversation will no longer produce responses relevant to content before the limit">
367+
<IconHelpCircleFilled stroke={2} size={16} />
368+
</span>
369+
</div>
370+
)}
371+
372+
{promptCharacterLength > maxLength && (
373+
<div className="text-red-500 m-4">
374+
This prompt or conversation is too large for this model. Approximate number of characters over: {promptCharacterLength - maxLength}
375+
<span className="inline-block relative top-[2px] pl-1"
376+
title="Once past the context limit, the conversation will no longer produce responses relevant to content before the limit">
377+
<IconHelpCircleFilled stroke={2} size={16} />
378+
</span>
379+
</div>
380+
)}
361381
</div>
382+
362383
</div>
363384
</div>
364385
);

components/Chatbar/components/Conversation.tsx

+16-1
Original file line numberDiff line numberDiff line change
@@ -37,6 +37,7 @@ export const ConversationComponent = ({ conversation }: Props) => {
3737
const [isDeleting, setIsDeleting] = useState(false);
3838
const [isRenaming, setIsRenaming] = useState(false);
3939
const [renameValue, setRenameValue] = useState('');
40+
const [conversationColor, setConversationColor] = useState('text-black'); // possible colors: text-black, text-orange-500, text-red-500
4041

4142
const handleEnterDown = (e: KeyboardEvent<HTMLDivElement>) => {
4243
if (e.key === 'Enter') {
@@ -100,6 +101,20 @@ export const ConversationComponent = ({ conversation }: Props) => {
100101
}
101102
}, [isRenaming, isDeleting]);
102103

104+
/*
105+
useEffect(() => {
106+
if (conversation.id == selectedConversation?.id) {
107+
if (selectedConversation?.characterLength > selectedConversation?.model.maxLength) {
108+
setConversationColor('text-red-500');
109+
} else if (selectedConversation?.characterLength > selectedConversation?.model.maxLength * .75) {
110+
setConversationColor('text-orange-500');
111+
} else {
112+
setConversationColor('text-black');
113+
}
114+
}
115+
}, [selectedConversation]);
116+
*/
117+
103118
return (
104119
<div className="relative flex items-center">
105120
{isRenaming && selectedConversation?.id === conversation.id ? (
@@ -118,7 +133,7 @@ export const ConversationComponent = ({ conversation }: Props) => {
118133
</div>
119134
) : (
120135
<button
121-
className={`flex w-full cursor-pointer items-center gap-3 rounded-lg p-3 text-sm text-black transition-colors duration-200 hover:bg-gray-500/10 ${
136+
className={`flex w-full cursor-pointer items-center gap-3 rounded-lg p-3 text-sm text-black transition-colors duration-200 hover:bg-gray-500/10 ${
122137
messageIsStreaming ? 'disabled:cursor-not-allowed' : ''
123138
} ${
124139
selectedConversation?.id === conversation.id

package-lock.json

+11-3
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

package.json

+1
Original file line numberDiff line numberDiff line change
@@ -40,6 +40,7 @@
4040
"@tailwindcss/typography": "^0.5.9",
4141
"@trivago/prettier-plugin-sort-imports": "^4.1.1",
4242
"@types/jsdom": "^21.1.1",
43+
"@types/lodash": "^4.17.16",
4344
"@types/node": "18.15.0",
4445
"@types/react": "18.0.28",
4546
"@types/react-dom": "18.0.11",

pages/api/chat.ts

+9-1
Original file line numberDiff line numberDiff line change
@@ -2,10 +2,18 @@ import { DEFAULT_SYSTEM_PROMPT, DEFAULT_TEMPERATURE } from '@/utils/app/const';
22
import { OpenAIError, OpenAIStream } from '@/utils/server';
33
import { ChatBody, Message } from '@/types/chat';
44

5-
import tiktokenModel from '@dqbd/tiktoken/encoders/cl100k_base.json';
5+
import tiktokenModel from '@dqbd/tiktoken/encoders/o200k_base.json';
66
import { NextApiRequest, NextApiResponse } from 'next';
77
import { Tiktoken } from '@dqbd/tiktoken';
88

9+
export const config = {
10+
api: {
11+
bodyParser: {
12+
sizeLimit: '2mb'
13+
}
14+
}
15+
}
16+
917
const handler = async (req: NextApiRequest, res: NextApiResponse<any>) => {
1018
try {
1119
const { model, messages, key, prompt, temperature } = req.body as ChatBody;

pages/home/home.tsx

+6
Original file line numberDiff line numberDiff line change
@@ -44,6 +44,9 @@ import { HomeInitialState, initialState } from '@/utils/home/home.state';
4444

4545
import { v4 as uuidv4 } from 'uuid';
4646

47+
48+
49+
4750
interface Props {
4851
serverSideApiKeyIsSet: boolean;
4952
serverSidePluginKeysSet: boolean;
@@ -195,6 +198,7 @@ const Home = ({
195198
prompt: DEFAULT_SYSTEM_PROMPT,
196199
temperature: lastConversation?.temperature ?? DEFAULT_TEMPERATURE,
197200
folderId: null,
201+
tokenLength: 0,
198202
};
199203

200204
const updatedConversations = [...conversations, newConversation];
@@ -316,8 +320,10 @@ const Home = ({
316320

317321
const selectedConversation = localStorage.getItem('selectedConversation');
318322
if (selectedConversation) {
323+
319324
const parsedSelectedConversation: Conversation =
320325
JSON.parse(selectedConversation);
326+
321327
const cleanedSelectedConversation = cleanSelectedConversation(
322328
parsedSelectedConversation,
323329
);

styles/globals.css

+1-1
Original file line numberDiff line numberDiff line change
@@ -54,4 +54,4 @@ pre:has(div.codeblock) {
5454
#rules_section a {
5555
text-decoration: underline;
5656
color: #0000EE;
57-
}
57+
}

types/chat.ts

+2
Original file line numberDiff line numberDiff line change
@@ -23,4 +23,6 @@ export interface Conversation {
2323
prompt: string;
2424
temperature: number;
2525
folderId: string | null;
26+
tokenLength: number;
27+
characterLength: number;
2628
}

types/openai.ts

+3-3
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,8 @@ import { OPENAI_API_TYPE } from '../utils/app/const';
33
export interface OpenAIModel {
44
id: string;
55
name: string;
6-
maxLength: number; // maximum length of a message
7-
tokenLimit: number;
6+
maxLength: number; // maximum length of a message in CHARACTERS
7+
tokenLimit: number; // context length for a conversation in TOKENS
88
}
99

1010
export enum OpenAIModelID {
@@ -18,7 +18,7 @@ export const OpenAIModels: Record<OpenAIModelID, OpenAIModel> = {
1818
[OpenAIModelID.GPT_4]: {
1919
id: OpenAIModelID.GPT_4,
2020
name: 'GPT-4',
21-
maxLength: 128_000*3,
21+
maxLength: 128_000 * 4,
2222
tokenLimit: 128_000,
2323
}
2424
};

0 commit comments

Comments
 (0)