feat(ai): agents can ask for input and continue (#14486)
* Allow custom chat agents to stop completing the response conditionally
* Introduce an orthogonal response state called `waitingForInput`
* Introduce `show` setting on progress messages to control visibility
  * 'untilFirstContent': Disappears when first response content appears
  * 'whileIncomplete': Remains visible while response is incomplete
  * 'forever': Remains visible even after the response completes
* Adds a `QuestionResponseContent` and `QuestionPartRenderer`
* Adds an API example agent 'AskAndContinue' that uses these features
* Introduces agent-specific content matchers (in contrast to globals)
* Adds redundant response completion & recording in `AbstractChatAgent`
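
A minimal usage sketch of the new progress-message `show` values and the `waitingForInput` state, assembled from the
diffs in this commit (`response` stands for `request.response` inside an agent; the message texts are illustrative):

    // shown only until the first response content streams in
    response.addProgressMessage({ content: 'Analyzing input...', show: 'untilFirstContent' });
    // instead of completing the response, keep it open and ask the user
    response.addProgressMessage({ content: 'Waiting for input...', show: 'whileIncomplete' });
    response.waitForInput();
    // once the user has answered, resume generating
    response.stopWaitingForInput();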

Co-authored-by: Stefan Dirix <[email protected]>

Contributed on behalf of STMicroelectronics.
planger authored Nov 26, 2024
1 parent 09dfb23 commit 03d8a36
Showing 15 changed files with 493 additions and 43 deletions.
2 changes: 2 additions & 0 deletions examples/api-samples/package.json
@@ -4,6 +4,8 @@
"version": "1.55.0",
"description": "Theia - Example code to demonstrate Theia API",
"dependencies": {
"@theia/ai-core": "1.55.0",
"@theia/ai-chat": "1.55.0",
"@theia/ai-chat-ui": "1.55.0",
"@theia/core": "1.55.0",
"@theia/file-search": "1.55.0",
@@ -31,13 +31,15 @@ import { bindSampleAppInfo } from './vsx/sample-frontend-app-info';
import { bindTestSample } from './test/sample-test-contribution';
import { bindSampleFileSystemCapabilitiesCommands } from './file-system/sample-file-system-capabilities';
import { bindChatNodeToolbarActionContribution } from './chat/chat-node-toolbar-action-contribution';
import { bindAskAndContinueChatAgentContribution } from './chat/ask-and-continue-chat-agent-contribution';

export default new ContainerModule((
bind: interfaces.Bind,
unbind: interfaces.Unbind,
isBound: interfaces.IsBound,
rebind: interfaces.Rebind,
) => {
bindAskAndContinueChatAgentContribution(bind);
bindChatNodeToolbarActionContribution(bind);
bindDynamicLabelProvider(bind);
bindSampleUnclosableView(bind);
@@ -0,0 +1,188 @@
// *****************************************************************************
// Copyright (C) 2024 STMicroelectronics and others.
//
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License v. 2.0 which is available at
// http://www.eclipse.org/legal/epl-2.0.
//
// This Source Code may also be made available under the following Secondary
// Licenses when the conditions for such availability set forth in the Eclipse
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
// with the GNU Classpath Exception which is available at
// https://www.gnu.org/software/classpath/license.html.
//
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************

import {
AbstractStreamParsingChatAgent,
ChatAgent,
ChatMessage,
ChatModel,
ChatRequestModelImpl,
lastProgressMessage,
QuestionResponseContentImpl,
SystemMessageDescription,
unansweredQuestions
} from '@theia/ai-chat';
import { Agent, PromptTemplate } from '@theia/ai-core';
import { injectable, interfaces, postConstruct } from '@theia/core/shared/inversify';

export function bindAskAndContinueChatAgentContribution(bind: interfaces.Bind): void {
bind(AskAndContinueChatAgent).toSelf().inSingletonScope();
bind(Agent).toService(AskAndContinueChatAgent);
bind(ChatAgent).toService(AskAndContinueChatAgent);
}

const systemPrompt: PromptTemplate = {
id: 'askAndContinue-system',
template: `
You are an agent demonstrating how to generate questions and continue the conversation based on the user's answers.
First answer the user's question or continue their story.
Then come up with an interesting question and 2-3 answers which will be presented to the user as multiple choice.
Use the following format exactly to define the questions and answers.
In particular, add the <question> and </question> tags around the JSON.
<question>
{
"question": "YOUR QUESTION HERE",
"options": [
{
"text": "OPTION 1"
},
{
"text": "OPTION 2"
}
]
}
</question>
Examples:
<question>
{
"question": "What is the capital of France?",
"options": [
{
"text": "Paris"
},
{
"text": "Lyon"
}
]
}
</question>
<question>
{
"question": "What does the fox say?",
"options": [
{
"text": "Ring-ding-ding-ding-dingeringeding!"
},
{
"text": "Wa-pa-pa-pa-pa-pa-pow!"
}
]
}
</question>
The user will answer the question and you can continue the conversation.
Once they have answered, the question will be replaced with a simple "Question/Answer" pair, for example
Question: What does the fox say?
Answer: Ring-ding-ding-ding-dingeringeding!
If the user did not answer the question, it will be marked with "No answer", for example
Question: What is the capital of France?
No answer
Do not generate such pairs yourself; instead, treat them as a signal of a past question.
Do not ask further questions once the text contains 5 or more "Question/Answer" pairs.
`
};

/**
* This is a very simple example agent that asks questions and continues the conversation based on the user's answers.
*/
@injectable()
export class AskAndContinueChatAgent extends AbstractStreamParsingChatAgent implements ChatAgent {
override id = 'AskAndContinue';
readonly name = 'AskAndContinue';
override defaultLanguageModelPurpose = 'chat';
readonly description = 'This chat asks questions related to the input and continues the conversation afterwards.';
readonly variables = [];
readonly agentSpecificVariables = [];
readonly functions = [];

@postConstruct()
addContentMatchers(): void {
this.contentMatchers.push({
start: /^<question>.*$/m,
end: /^<\/question>$/m,
contentFactory: (content: string, request: ChatRequestModelImpl) => {
const question = content.replace(/^<question>\n|<\/question>$/g, '');
const parsedQuestion = JSON.parse(question);
return new QuestionResponseContentImpl(parsedQuestion.question, parsedQuestion.options, request, selectedOption => {
this.handleAnswer(selectedOption, request);
});
}
});
}

override languageModelRequirements = [
{
purpose: 'chat',
identifier: 'openai/gpt-4o',
}
];

readonly promptTemplates = [systemPrompt];

protected override async getSystemMessageDescription(): Promise<SystemMessageDescription | undefined> {
const resolvedPrompt = await this.promptService.getPrompt(systemPrompt.id);
return resolvedPrompt ? SystemMessageDescription.fromResolvedPromptTemplate(resolvedPrompt) : undefined;
}

protected override async onResponseComplete(request: ChatRequestModelImpl): Promise<void> {
const unansweredQs = unansweredQuestions(request);
if (unansweredQs.length < 1) {
return super.onResponseComplete(request);
}
request.response.addProgressMessage({ content: 'Waiting for input...', show: 'whileIncomplete' });
request.response.waitForInput();
}

protected handleAnswer(selectedOption: { text: string; value?: string; }, request: ChatRequestModelImpl): void {
const progressMessage = lastProgressMessage(request);
if (progressMessage) {
request.response.updateProgressMessage({ ...progressMessage, show: 'untilFirstContent', status: 'completed' });
}
request.response.stopWaitingForInput();
// We're reusing the original request here as a shortcut. In combination with the override of 'getMessages' we continue generating.
// In a real-world scenario, you would likely manually interact with an LLM here to generate and append the next response.
this.invoke(request);
}

/**
* As the question and its answer are handled within the same response, we add an additional user message at the end
* to signal to the LLM that it should continue generating.
*/
protected override async getMessages(model: ChatModel): Promise<ChatMessage[]> {
const messages = await super.getMessages(model, true);
const requests = model.getRequests();
if (!requests[requests.length - 1].response.isComplete && requests[requests.length - 1].response.response?.content.length > 0) {
return [...messages,
{
type: 'text',
actor: 'user',
query: 'Continue generating based on the user\'s answer or finish the conversation if 5 or more questions were already answered.'
}];
}
return messages;
}
}
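
The waiting-for-input state used above is declared in @theia/ai-chat and is not part of this excerpt. A rough sketch of the
surface the agent relies on, inferred from its usage in this commit (names and details of the real declarations may differ):

// Sketch only: inferred from request.response.waitForInput()/stopWaitingForInput()
// and node.response.isWaitingForInput used elsewhere in this commit.
interface WaitingForInputStateSketch {
    /** True while the agent is blocked on a user answer; orthogonal to isComplete. */
    readonly isWaitingForInput: boolean;
    /** Marks the response as waiting for user input instead of completing it. */
    waitForInput(): void;
    /** Clears the waiting state once the user has provided the requested input. */
    stopWaitingForInput(): void;
}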

6 changes: 6 additions & 0 deletions examples/api-samples/tsconfig.json
@@ -12,9 +12,15 @@
{
"path": "../../dev-packages/ovsx-client"
},
{
"path": "../../packages/ai-chat"
},
{
"path": "../../packages/ai-chat-ui"
},
{
"path": "../../packages/ai-core"
},
{
"path": "../../packages/core"
},
2 changes: 2 additions & 0 deletions packages/ai-chat-ui/src/browser/ai-chat-ui-frontend-module.ts
@@ -36,6 +36,7 @@ import { ChatViewLanguageContribution } from './chat-view-language-contribution'
import { ChatViewWidget } from './chat-view-widget';
import { ChatViewWidgetToolbarContribution } from './chat-view-widget-toolbar-contribution';
import { EditorPreviewManager } from '@theia/editor-preview/lib/browser/editor-preview-manager';
import { QuestionPartRenderer } from './chat-response-renderer/question-part-renderer';

export default new ContainerModule((bind, _unbind, _isBound, rebind) => {
bindViewContribution(bind, AIChatContribution);
@@ -66,6 +67,7 @@ export default new ContainerModule((bind, _unbind, _isBound, rebind) => {
bind(ChatResponsePartRenderer).to(CommandPartRenderer).inSingletonScope();
bind(ChatResponsePartRenderer).to(ToolCallPartRenderer).inSingletonScope();
bind(ChatResponsePartRenderer).to(ErrorPartRenderer).inSingletonScope();
bind(ChatResponsePartRenderer).to(QuestionPartRenderer).inSingletonScope();
[CommandContribution, MenuContribution].forEach(serviceIdentifier =>
bind(serviceIdentifier).to(ChatViewMenuContribution).inSingletonScope()
);
@@ -0,0 +1,59 @@
// *****************************************************************************
// Copyright (C) 2024 EclipseSource GmbH.
//
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License v. 2.0 which is available at
// http://www.eclipse.org/legal/epl-2.0.
//
// This Source Code may also be made available under the following Secondary
// Licenses when the conditions for such availability set forth in the Eclipse
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
// with the GNU Classpath Exception which is available at
// https://www.gnu.org/software/classpath/license.html.
//
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************
import { ChatResponseContent, QuestionResponseContent } from '@theia/ai-chat';
import { injectable } from '@theia/core/shared/inversify';
import * as React from '@theia/core/shared/react';
import { ReactNode } from '@theia/core/shared/react';
import { ChatResponsePartRenderer } from '../chat-response-part-renderer';
import { ResponseNode } from '../chat-tree-view';

@injectable()
export class QuestionPartRenderer
implements ChatResponsePartRenderer<QuestionResponseContent> {

canHandle(response: ChatResponseContent): number {
if (QuestionResponseContent.is(response)) {
return 10;
}
return -1;
}

render(question: QuestionResponseContent, node: ResponseNode): ReactNode {
return (
<div className="theia-QuestionPartRenderer-root">
<div className="theia-QuestionPartRenderer-question">{question.question}</div>
<div className="theia-QuestionPartRenderer-options">
{
question.options.map((option, index) => (
<button
className={`theia-button theia-QuestionPartRenderer-option ${question.selectedOption === option ? 'selected' : ''}`}
onClick={() => {
question.selectedOption = option;
question.handler(option);
}}
disabled={question.selectedOption !== undefined || !node.response.isWaitingForInput}
key={index}
>
{option.text}
</button>
))
}
</div>
</div>
);
}

}
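
The QuestionResponseContent rendered here is declared in @theia/ai-chat and is not part of this excerpt. A minimal sketch
of the shape the renderer consumes, inferred from the property accesses above (the real interface may carry further members):

// Sketch only: fields inferred from question.question, question.options,
// question.selectedOption and question.handler used in the renderer above.
interface QuestionResponseContentSketch {
    question: string;
    options: { text: string; value?: string }[];
    selectedOption?: { text: string; value?: string };
    handler: (selectedOption: { text: string; value?: string }) => void;
}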
@@ -267,6 +267,7 @@ export class ChatViewTreeWidget extends TreeWidget {

private renderAgent(node: RequestNode | ResponseNode): React.ReactNode {
const inProgress = isResponseNode(node) && !node.response.isComplete && !node.response.isCanceled && !node.response.isError;
const waitingForInput = isResponseNode(node) && node.response.isWaitingForInput;
const toolbarContributions = !inProgress
? this.chatNodeToolbarActionContributions.getContributions()
.flatMap(c => c.getToolbarActions(node))
@@ -277,7 +278,8 @@
<div className='theia-ChatNodeHeader'>
<div className={`theia-AgentAvatar ${this.getAgentIconClassName(node)}`}></div>
<h3 className='theia-AgentLabel'>{this.getAgentLabel(node)}</h3>
{inProgress && <span className='theia-ChatContentInProgress'>Generating</span>}
{inProgress && !waitingForInput && <span className='theia-ChatContentInProgress'>Generating</span>}
{inProgress && waitingForInput && <span className='theia-ChatContentInProgress'>Waiting for input</span>}
<div className='theia-ChatNodeToolbar'>
{!inProgress &&
toolbarContributions.length > 0 &&
@@ -340,12 +342,28 @@
<div className={'theia-ResponseNode'}>
{!node.response.isComplete
&& node.response.response.content.length === 0
&& node.response.progressMessages.map((c, i) =>
<ProgressMessage {...c} key={`${node.id}-progress-${i}`} />
)}
&& node.response.progressMessages
.filter(c => c.show === 'untilFirstContent')
.map((c, i) =>
<ProgressMessage {...c} key={`${node.id}-progress-untilFirstContent-${i}`} />
)
}
{node.response.response.content.map((c, i) =>
<div className='theia-ResponseNode-Content' key={`${node.id}-content-${i}`}>{this.getChatResponsePartRenderer(c, node)}</div>
)}
{!node.response.isComplete
&& node.response.progressMessages
.filter(c => c.show === 'whileIncomplete')
.map((c, i) =>
<ProgressMessage {...c} key={`${node.id}-progress-whileIncomplete-${i}`} />
)
}
{node.response.progressMessages
.filter(c => c.show === 'forever')
.map((c, i) =>
<ProgressMessage {...c} key={`${node.id}-progress-afterComplete-${i}`} />
)
}
</div>
);
}
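
For reference, the progress-message fields this rendering logic relies on. A sketch inferred from the addProgressMessage
and updateProgressMessage calls in this commit; the real type may define additional fields and different status values:

// Sketch only: the 'show' values come from this commit's description; 'content' and
// 'status' appear in the agent's calls. 'inProgress' is an assumed default status.
type ProgressMessageShowSketch = 'untilFirstContent' | 'whileIncomplete' | 'forever';
interface ProgressMessageSketch {
    content: string;
    show: ProgressMessageShowSketch;
    status?: 'inProgress' | 'completed';
}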
29 changes: 28 additions & 1 deletion packages/ai-chat-ui/src/browser/style/index.css
@@ -231,7 +231,7 @@ div:last-child > .theia-ChatNode {
display: flex;
flex-direction: column;
gap: 2px;
border: 1px solid var(--theia-input-border);
border: var(--theia-border-width) solid var(--theia-input-border);
border-radius: 4px;
}

@@ -265,6 +265,33 @@ div:last-child > .theia-ChatNode {
background-color: var(--theia-input-border);
}

.theia-QuestionPartRenderer-root {
display: flex;
flex-direction: column;
gap: 8px;
border: var(--theia-border-width) solid
var(--theia-sideBarSectionHeader-border);
padding: 8px 12px 12px;
border-radius: 5px;
margin: 0 0 8px 0;
}
.theia-QuestionPartRenderer-options {
display: flex;
flex-wrap: wrap;
gap: 12px;
}
.theia-QuestionPartRenderer-option {
min-width: 100px;
flex: 1 1 auto;
margin: 0;
}
.theia-QuestionPartRenderer-option.selected:disabled:hover {
background-color: var(--theia-button-disabledBackground);
}
.theia-QuestionPartRenderer-option:disabled:not(.selected) {
background-color: var(--theia-button-secondaryBackground);
}

.theia-toolCall {
font-weight: normal;
color: var(--theia-descriptionForeground);