feat: Added ability to pass settings inside LLM chain.
HavenDV committed Jun 1, 2024
1 parent 1100736 commit f6f7457
Showing 4 changed files with 14 additions and 6 deletions.
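
For context, a minimal sketch of how the updated Chain.LLM could be called, based only on the signatures in this diff. The using directives, the Chain.Set wiring, the prompt text, and the stop sequence are assumptions for illustration, not part of the commit.

// Sketch only: namespace names are assumed, not confirmed by this commit.
using System;
using LangChain.Chains;      // assumed home of the static Chain helpers
using LangChain.Providers;   // assumed home of IChatModel, ChatSettings and Gpt35TurboModel

var apiKey =
    Environment.GetEnvironmentVariable("OPENAI_API_KEY") ??
    throw new InvalidOperationException("OpenAI API key is not set");
var llm = new Gpt35TurboModel(apiKey);

// New in this commit: ChatSettings can be passed straight through Chain.LLM
// instead of always using the model's defaults.
var chain =
    Chain.Set("What is 2 + 2?", "prompt")    // hypothetical prompt
    | Chain.LLM(llm, inputKey: "prompt", outputKey: "text", settings: new ChatSettings
    {
        StopSequences = ["\n\n"],            // hypothetical stop sequence
    });
// Running the chain is unchanged by this commit and omitted here.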
6 changes: 4 additions & 2 deletions src/Core/src/Chains/Chain.cs
@@ -75,13 +75,15 @@ public static DoChain Do(
/// <param name="llm"></param>
/// <param name="inputKey"></param>
/// <param name="outputKey"></param>
/// <param name="settings"></param>
/// <returns></returns>
public static LLMChain LLM(
IChatModel llm,
string inputKey = "text",
string outputKey = "text")
string outputKey = "text",
ChatSettings? settings = null)
{
return new LLMChain(llm, inputKey, outputKey);
return new LLMChain(llm, inputKey, outputKey, settings);
}

/// <inheritdoc cref="LLM"/>

@@ -108,7 +108,10 @@ private void InitializeChain()
             | Set(toolNames, "tool_names")
             | LoadMemory(_conversationBufferMemory, outputKey: "history")
             | Template(_reActPrompt)
-            | Chain.LLM(_model).UseCache(_useCache)
+            | Chain.LLM(_model, settings: new ChatSettings
+            {
+                StopSequences = ["Observation", "[END]"],
+            }).UseCache(_useCache)
             | UpdateMemory(_conversationBufferMemory, requestKey: "input", responseKey: "text")
             | ReActParser(inputKey: "text", outputKey: ReActAnswer);

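The stop sequences added above make the model halt before it writes its own "Observation" line, so the executor can run the selected tool and supply the real observation instead; "[END]" presumably marks a finished answer in the ReAct prompt. A hypothetical way to keep those settings in one reusable place (not part of this commit):

using LangChain.Providers;   // assumed namespace for ChatSettings

// Hypothetical helper, not in the commit.
public static class ReActDefaults
{
    // Cut generation at "Observation" so the model does not invent tool output;
    // "[END]" is assumed to be the terminator used by the ReAct prompt.
    public static ChatSettings Settings { get; } = new()
    {
        StopSequences = ["Observation", "[END]"],
    };
}

With a property like this, the pipeline line could read Chain.LLM(_model, settings: ReActDefaults.Settings).UseCache(_useCache).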
7 changes: 5 additions & 2 deletions src/Core/src/Chains/StackableChains/LLMChain.cs
@@ -10,19 +10,22 @@ public class LLMChain : BaseStackableChain
 {
     private readonly IChatModel _llm;
     private bool _useCache;
+    private ChatSettings _settings;
 
     private const string CACHE_DIR = "cache";
 
     /// <inheritdoc/>
     public LLMChain(
         IChatModel llm,
         string inputKey = "prompt",
-        string outputKey = "text"
+        string outputKey = "text",
+        ChatSettings? settings = null
         )
     {
         InputKeys = new[] { inputKey };
         OutputKeys = new[] { outputKey };
         _llm = llm;
+        _settings = settings ?? new ChatSettings();
     }
 
     string? GetCachedAnswer(string prompt)
@@ -63,7 +66,7 @@ protected override async Task<IChainValues> InternalCallAsync(
             }
         }
 
-        var response = await _llm.GenerateAsync(prompt, cancellationToken: cancellationToken).ConfigureAwait(false);
+        var response = await _llm.GenerateAsync(prompt, settings: _settings, cancellationToken: cancellationToken).ConfigureAwait(false);
         responseContent = response.Messages.Last().Content;
         if (_useCache)
             SaveCachedAnswer(prompt, responseContent);
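The settings can also be supplied when constructing LLMChain directly: when the argument is omitted the constructor substitutes new ChatSettings(), and whatever is stored is forwarded to GenerateAsync on every call. A minimal sketch, with namespaces and the stop sequence assumed:

// Sketch only: namespace names are assumed, not confirmed by this commit.
using System;
using LangChain.Chains.StackableChains;   // assumed namespace for LLMChain
using LangChain.Providers;                // assumed namespace for ChatSettings / Gpt35TurboModel

var apiKey =
    Environment.GetEnvironmentVariable("OPENAI_API_KEY") ??
    throw new InvalidOperationException("OpenAI API key is not set");
var llm = new Gpt35TurboModel(apiKey);

// Building the chain link directly instead of via Chain.LLM.
var llmChain = new LLMChain(
    llm,
    inputKey: "prompt",
    outputKey: "text",
    settings: new ChatSettings
    {
        StopSequences = ["###"],          // hypothetical stop sequence
    });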
2 changes: 1 addition & 1 deletion src/Meta/test/WikiTests.cs
@@ -50,7 +50,7 @@ public async Task AgentWithOllamaReact()
         var apiKey =
             Environment.GetEnvironmentVariable("OPENAI_API_KEY") ??
             throw new InvalidOperationException("OpenAI API key is not set");
-        var llm = new Gpt35TurboModel(apiKey);
+        var llm = new Gpt35TurboModel(apiKey).UseConsoleForDebug();
 
         // create a google search
         var searchApiKey =
