Skip to content

Commit

Permalink
Merge pull request #441 from betalgo/dev
Browse files Browse the repository at this point in the history
Release 7.4.2
  • Loading branch information
kayhantolga authored Dec 6, 2023
2 parents 6043c68 + 92d850f commit 3747353
Show file tree
Hide file tree
Showing 17 changed files with 738 additions and 72 deletions.
8 changes: 7 additions & 1 deletion OpenAI.Playground/Program.cs
Original file line number Diff line number Diff line change
Expand Up @@ -42,11 +42,17 @@
// | / \ / \ | \ /) | ( \ /o\ / ) | (\ / | / \ / \ |
// |-----------------------------------------------------------------------|


await ChatCompletionTestHelper.RunSimpleChatCompletionTest(sdk);
// Vision
//await VisionTestHelper.RunSimpleVisionTest(sdk);
//await VisionTestHelper.RunSimpleVisionStreamTest(sdk);
//await VisionTestHelper.RunSimpleVisionTestUsingBase64EncodedImage(sdk);

//await ChatCompletionTestHelper.RunSimpleCompletionStreamTest(sdk);
//await ChatCompletionTestHelper.RunChatFunctionCallTest(sdk);
//await ChatCompletionTestHelper.RunChatFunctionCallTestAsStream(sdk);
//await FineTuningJobTestHelper.RunCaseStudyIsTheModelMakingUntrueStatements(sdk);

// Whisper
//await AudioTestHelper.RunSimpleAudioCreateTranscriptionTest(sdk);
//await AudioTestHelper.RunSimpleAudioCreateTranslationTest(sdk);
Expand Down
86 changes: 68 additions & 18 deletions OpenAI.Playground/TestHelpers/ChatCompletionTestHelper.cs
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ public static async Task RunSimpleCompletionStreamTest(IOpenAIService sdk)

public static async Task RunChatFunctionCallTest(IOpenAIService sdk)
{
ConsoleExtensions.WriteLine("Chat Function Call Testing is starting:", ConsoleColor.Cyan);
ConsoleExtensions.WriteLine("Chat Tool Functions Call Testing is starting:", ConsoleColor.Cyan);

// example taken from:
// https://github.com/openai/openai-cookbook/blob/main/examples/How_to_call_functions_with_chat_models.ipynb
Expand Down Expand Up @@ -130,9 +130,11 @@ public static async Task RunChatFunctionCallTest(IOpenAIService sdk)
ChatMessage.FromSystem("Don't make assumptions about what values to plug into functions. Ask for clarification if a user request is ambiguous."),
ChatMessage.FromUser("Give me a weather report for Chicago, USA, for the next 5 days.")
},
Functions = new List<FunctionDefinition> {fn1, fn2, fn3, fn4},
Tools = new List<ToolDefinition> { ToolDefinition.DefineFunction(fn1), ToolDefinition.DefineFunction(fn2) ,ToolDefinition.DefineFunction(fn3) ,ToolDefinition.DefineFunction(fn4) },
// optionally, to force a specific function:
// FunctionCall = new Dictionary<string, string> { { "name", "get_current_weather" } },
//ToolChoice = ToolChoice.FunctionChoice("get_current_weather"),
// or auto tool choice:
//ToolChoice = ToolChoice.Auto,
MaxTokens = 50,
Model = Models.Gpt_3_5_Turbo
});
Expand All @@ -152,13 +154,23 @@ public static async Task RunChatFunctionCallTest(IOpenAIService sdk)
var choice = completionResult.Choices.First();
Console.WriteLine($"Message: {choice.Message.Content}");

var fn = choice.Message.FunctionCall;
if (fn != null)
var tools = choice.Message.ToolCalls;
if (tools != null)
{
Console.WriteLine($"Function call: {fn.Name}");
foreach (var entry in fn.ParseArguments())
Console.WriteLine($"Tools: {tools.Count}");
foreach (var toolCall in tools)
{
Console.WriteLine($" {entry.Key}: {entry.Value}");
Console.WriteLine($" {toolCall.Id}: {toolCall.FunctionCall}");

var fn = toolCall.FunctionCall;
if (fn != null)
{
Console.WriteLine($" Function call: {fn.Name}");
foreach (var entry in fn.ParseArguments())
{
Console.WriteLine($" {entry.Key}: {entry.Value}");
}
}
}
}
}
Expand All @@ -181,7 +193,7 @@ public static async Task RunChatFunctionCallTest(IOpenAIService sdk)

public static async Task RunChatFunctionCallTestAsStream(IOpenAIService sdk)
{
ConsoleExtensions.WriteLine("Chat Function Call Testing is starting:", ConsoleColor.Cyan);
ConsoleExtensions.WriteLine("Chat Tool Functions Call Stream Testing is starting:", ConsoleColor.Cyan);

// example taken from:
// https://github.com/openai/openai-cookbook/blob/main/examples/How_to_call_functions_with_chat_models.ipynb
Expand Down Expand Up @@ -221,11 +233,13 @@ public static async Task RunChatFunctionCallTestAsStream(IOpenAIService sdk)
// or to test array functions, use this instead:
// ChatMessage.FromUser("The combination is: One. Two. Three. Four. Five."),
},
Functions = new List<FunctionDefinition> {fn1, fn2, fn3, fn4},
Tools = new List<ToolDefinition> { ToolDefinition.DefineFunction(fn1), ToolDefinition.DefineFunction(fn2), ToolDefinition.DefineFunction(fn3), ToolDefinition.DefineFunction(fn4) },
// optionally, to force a specific function:
// FunctionCall = new Dictionary<string, string> { { "name", "get_current_weather" } },
ToolChoice = ToolChoice.FunctionChoice("get_current_weather"),
// or auto tool choice:
// ToolChoice = ToolChoice.Auto,
MaxTokens = 50,
Model = Models.Gpt_3_5_Turbo_0613
Model = Models.Gpt_4_1106_preview
});

/* when testing weather forecasts, expected output should be along the lines of:
Expand All @@ -243,21 +257,57 @@ public static async Task RunChatFunctionCallTestAsStream(IOpenAIService sdk)
Function call: identify_number_sequence
values: [1, 2, 3, 4, 5]
*/

var functionArguments = new Dictionary<int, string>();
await foreach (var completionResult in completionResults)
{
if (completionResult.Successful)
{
var choice = completionResult.Choices.First();
Console.WriteLine($"Message: {choice.Message.Content}");

var fn = choice.Message.FunctionCall;
if (fn != null)
var tools = choice.Message.ToolCalls;
if (tools != null)
{
Console.WriteLine($"Function call: {fn.Name}");
foreach (var entry in fn.ParseArguments())
Console.WriteLine($"Tools: {tools.Count}");
for (int i = 0; i < tools.Count; i++)
{
Console.WriteLine($" {entry.Key}: {entry.Value}");
var toolCall = tools[i];
Console.WriteLine($" {toolCall.Id}: {toolCall.FunctionCall}");

var fn = toolCall.FunctionCall;
if (fn != null)
{
if (!string.IsNullOrEmpty(fn.Name))
{
Console.WriteLine($" Function call: {fn.Name}");
}

if (!string.IsNullOrEmpty(fn.Arguments))
{
if (functionArguments.TryGetValue(i, out var currentArguments))
{
currentArguments += fn.Arguments;
}
else
{
currentArguments = fn.Arguments;
}
functionArguments[i] = currentArguments;
fn.Arguments = currentArguments;

try
{
foreach (var entry in fn.ParseArguments())
{
Console.WriteLine($" {entry.Key}: {entry.Value}");
}
}
catch (Exception)
{
// ignore
}
}
}
}
}
}
Expand Down
185 changes: 185 additions & 0 deletions OpenAI.Playground/TestHelpers/VisionTestHelper.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,185 @@
using OpenAI.Interfaces;
using OpenAI.ObjectModels;
using OpenAI.ObjectModels.RequestModels;
using static OpenAI.ObjectModels.StaticValues;

namespace OpenAI.Playground.TestHelpers;

/// <summary>
///     Playground helpers exercising the GPT-4 vision-preview chat completion API:
///     one-shot completion with an image URL, streamed completion with an image URL,
///     and one-shot completion with a base64-encoded (binary) image.
/// </summary>
internal static class VisionTestHelper
{
    // Sample image shared by the URL-based tests; hoisted so the two methods stay in sync.
    private const string SampleImageUrl =
        "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg";

    /// <summary>
    ///     Sends a single (non-streaming) vision chat completion containing a text prompt
    ///     plus an image URL, then prints the model's reply or the API error to the console.
    /// </summary>
    /// <param name="sdk">OpenAI service used to create the completion.</param>
    public static async Task RunSimpleVisionTest(IOpenAIService sdk)
    {
        // Fix: banner previously read "VIsion" — now consistent with the other helpers.
        ConsoleExtensions.WriteLine("Vision Testing is starting:", ConsoleColor.Cyan);

        try
        {
            ConsoleExtensions.WriteLine("Vision Test:", ConsoleColor.DarkCyan);

            var completionResult = await sdk.ChatCompletion.CreateCompletion(
                new ChatCompletionCreateRequest
                {
                    Messages = new List<ChatMessage>
                    {
                        ChatMessage.FromSystem("You are an image analyzer assistant."),
                        ChatMessage.FromUser(
                            new List<MessageContent>
                            {
                                MessageContent.TextContent("What is on the picture in details?"),
                                // High detail requests the model inspect the image closely (more tokens).
                                MessageContent.ImageUrlContent(
                                    SampleImageUrl,
                                    ImageStatics.ImageDetailTypes.High
                                )
                            }
                        ),
                    },
                    MaxTokens = 300,
                    Model = Models.Gpt_4_vision_preview,
                    N = 1
                }
            );

            if (completionResult.Successful)
            {
                Console.WriteLine(completionResult.Choices.First().Message.Content);
            }
            else
            {
                // Unsuccessful result with no error payload is unexpected — surface it loudly.
                if (completionResult.Error == null)
                {
                    throw new Exception("Unknown Error");
                }

                Console.WriteLine(
                    $"{completionResult.Error.Code}: {completionResult.Error.Message}"
                );
            }
        }
        catch (Exception e)
        {
            Console.WriteLine(e);
            throw;
        }
    }

    /// <summary>
    ///     Streams a vision chat completion (text prompt plus image URL) and writes each
    ///     partial chunk to the console as it arrives.
    /// </summary>
    /// <param name="sdk">OpenAI service used to create the streamed completion.</param>
    public static async Task RunSimpleVisionStreamTest(IOpenAIService sdk)
    {
        ConsoleExtensions.WriteLine("Vision Stream Testing is starting:", ConsoleColor.Cyan);
        try
        {
            ConsoleExtensions.WriteLine("Vision Stream Test:", ConsoleColor.DarkCyan);

            var completionResult = sdk.ChatCompletion.CreateCompletionAsStream(
                new ChatCompletionCreateRequest
                {
                    Messages = new List<ChatMessage>
                    {
                        ChatMessage.FromSystem("You are an image analyzer assistant."),
                        ChatMessage.FromUser(
                            new List<MessageContent>
                            {
                                MessageContent.TextContent("What’s in this image?"),
                                // Low detail keeps the streamed test cheap and fast.
                                MessageContent.ImageUrlContent(
                                    SampleImageUrl,
                                    ImageStatics.ImageDetailTypes.Low
                                )
                            }
                        ),
                    },
                    MaxTokens = 300,
                    Model = Models.Gpt_4_vision_preview,
                    N = 1
                }
            );

            await foreach (var completion in completionResult)
            {
                if (completion.Successful)
                {
                    // Write (not WriteLine): chunks concatenate into one continuous message.
                    Console.Write(completion.Choices.First().Message.Content);
                }
                else
                {
                    if (completion.Error == null)
                    {
                        throw new Exception("Unknown Error");
                    }

                    Console.WriteLine(
                        $"{completion.Error.Code}: {completion.Error.Message}"
                    );
                }
            }

            Console.WriteLine("");
            Console.WriteLine("Complete");
        }
        catch (Exception e)
        {
            Console.WriteLine(e);
            throw;
        }
    }

    /// <summary>
    ///     Sends a single vision chat completion where the image is supplied as a
    ///     base64-encoded PNG read from the local sample-data folder, then prints the
    ///     model's reply or the API error to the console.
    /// </summary>
    /// <param name="sdk">OpenAI service used to create the completion.</param>
    public static async Task RunSimpleVisionTestUsingBase64EncodedImage(IOpenAIService sdk)
    {
        ConsoleExtensions.WriteLine("Vision Testing is starting:", ConsoleColor.Cyan);

        try
        {
            ConsoleExtensions.WriteLine(
                "Vision with base64 encoded image Test:",
                ConsoleColor.DarkCyan
            );

            // Reuses the playground's image-edit sample as the vision input.
            const string originalFileName = "image_edit_original.png";
            var originalFile = await FileExtensions.ReadAllBytesAsync(
                $"SampleData/{originalFileName}"
            );

            var completionResult = await sdk.ChatCompletion.CreateCompletion(
                new ChatCompletionCreateRequest
                {
                    Messages = new List<ChatMessage>
                    {
                        ChatMessage.FromSystem("You are an image analyzer assistant."),
                        ChatMessage.FromUser(
                            new List<MessageContent>
                            {
                                MessageContent.TextContent("What is on the picture in details?"),
                                // Binary content is base64-encoded by the SDK; file type must match the bytes.
                                MessageContent.ImageBinaryContent(
                                    originalFile,
                                    ImageStatics.ImageFileTypes.Png,
                                    ImageStatics.ImageDetailTypes.High
                                )
                            }
                        ),
                    },
                    MaxTokens = 300,
                    Model = Models.Gpt_4_vision_preview,
                    N = 1
                }
            );

            if (completionResult.Successful)
            {
                Console.WriteLine(completionResult.Choices.First().Message.Content);
            }
            else
            {
                if (completionResult.Error == null)
                {
                    throw new Exception("Unknown Error");
                }

                Console.WriteLine(
                    $"{completionResult.Error.Code}: {completionResult.Error.Message}"
                );
            }
        }
        catch (Exception e)
        {
            Console.WriteLine(e);
            throw;
        }
    }
}
Loading

0 comments on commit 3747353

Please sign in to comment.