Skip to content

Commit

Permalink
feat: add azure openai format
Browse files Browse the repository at this point in the history
  • Loading branch information
zmh-program committed Dec 28, 2023
1 parent 261e500 commit a1f4845
Show file tree
Hide file tree
Showing 11 changed files with 673 additions and 32 deletions.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@
- [x] Anthropic Claude (claude-2, claude-instant)
- [x] Slack Claude (deprecated)
- [x] Sparkdesk (v1.5, v2, v3)
- [x] Google PaLM2
- [x] Google Gemini (PaLM2)
- [x] New Bing (creative, balanced, precise)
- [x] ChatGLM (turbo, pro, std, lite)
- [x] DashScope Tongyi (plus, turbo)
Expand Down
19 changes: 19 additions & 0 deletions adapter/adapter.go
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package adapter

import (
"chat/adapter/azure"
"chat/adapter/baichuan"
"chat/adapter/bing"
"chat/adapter/chatgpt"
Expand Down Expand Up @@ -67,6 +68,24 @@ func createChatRequest(conf globals.ChannelConfig, props *ChatProps, hook global
Buffer: props.Buffer,
}, hook)

case globals.AzureOpenAIChannelType:
return azure.NewChatInstanceFromConfig(conf).CreateStreamChatRequest(&azure.ChatProps{
Model: model,
Message: props.Message,
Token: utils.Multi(
props.Token == 0,
utils.Multi(props.Infinity || props.Plan, nil, utils.ToPtr(2500)),
&props.Token,
),
PresencePenalty: props.PresencePenalty,
FrequencyPenalty: props.FrequencyPenalty,
Temperature: props.Temperature,
TopP: props.TopP,
Tools: props.Tools,
ToolChoice: props.ToolChoice,
Buffer: props.Buffer,
}, hook)

case globals.ClaudeChannelType:
return claude.NewChatInstanceFromConfig(conf).CreateStreamChatRequest(&claude.ChatProps{
Model: model,
Expand Down
147 changes: 147 additions & 0 deletions adapter/azure/chat.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,147 @@
package azure

import (
"chat/globals"
"chat/utils"
"fmt"
"strings"
)

// ChatProps carries one chat (or completion) request's parameters into the
// Azure OpenAI adapter. Pointer fields are optional: nil means "omit from
// the request body and let the upstream default apply".
type ChatProps struct {
	Model   string            // target model name; dots are stripped to form the deployment id
	Message []globals.Message // conversation history, oldest first
	Token   *int              // max tokens to generate; nil = provider default
	PresencePenalty  *float32
	FrequencyPenalty *float32
	Temperature      *float32
	TopP             *float32
	Tools      *globals.FunctionTools // function-calling tool declarations, if any
	ToolChoice *interface{}           // tool selection strategy; shape defined by the OpenAI API
	Buffer     utils.Buffer           // accumulates streamed output across chunks
}

// GetChatEndpoint builds the Azure OpenAI endpoint URL for the request.
// The deployment id is the model name with dots removed; the instruct model
// is routed to the legacy /completions path, all others to /chat/completions.
func (c *ChatInstance) GetChatEndpoint(props *ChatProps) string {
	deployment := strings.ReplaceAll(props.Model, ".", "")
	path := "chat/completions"
	if props.Model == globals.GPT3TurboInstruct {
		path = "completions"
	}
	return fmt.Sprintf("%s/openai/deployments/%s/%s?api-version=%s", c.GetResource(), deployment, path, c.GetEndpoint())
}

func (c *ChatInstance) GetCompletionPrompt(messages []globals.Message) string {
result := ""
for _, message := range messages {
result += fmt.Sprintf("%s: %s\n", message.Role, message.Content)
}
return result
}

// GetLatestPrompt returns the content of the most recent message, or the
// empty string when the conversation is empty.
func (c *ChatInstance) GetLatestPrompt(props *ChatProps) string {
	messages := props.Message
	if count := len(messages); count > 0 {
		return messages[count-1].Content
	}
	return ""
}

// GetChatBody assembles the JSON request payload. The instruct model takes
// the legacy completions shape (single flattened prompt); every other model
// takes the chat-completions shape with the full message list.
func (c *ChatInstance) GetChatBody(props *ChatProps, stream bool) interface{} {
	if props.Model == globals.GPT3TurboInstruct {
		// legacy completions payload
		return CompletionRequest{
			Stream:   stream,
			MaxToken: props.Token,
			Prompt:   c.GetCompletionPrompt(props.Message),
		}
	}

	// standard chat-completions payload; nil optionals are omitted upstream
	return ChatRequest{
		Stream:           stream,
		Messages:         formatMessages(props),
		MaxToken:         props.Token,
		Temperature:      props.Temperature,
		TopP:             props.TopP,
		PresencePenalty:  props.PresencePenalty,
		FrequencyPenalty: props.FrequencyPenalty,
		Tools:            props.Tools,
		ToolChoice:       props.ToolChoice,
	}
}

// CreateChatRequest performs a blocking (non-streaming) chat call and returns
// the first choice's content. Dalle models are redirected to image generation.
func (c *ChatInstance) CreateChatRequest(props *ChatProps) (string, error) {
	if globals.IsDalleModel(props.Model) {
		return c.CreateImage(props)
	}

	res, err := utils.Post(
		c.GetChatEndpoint(props),
		c.GetHeader(),
		c.GetChatBody(props, false),
	)
	if err != nil {
		return "", fmt.Errorf("chatgpt error: %s", err.Error())
	}
	// checked separately: the original dereferenced err.Error() even when
	// only res was nil, which panics on a nil error
	if res == nil {
		return "", fmt.Errorf("chatgpt error: empty response")
	}

	data := utils.MapToStruct[ChatResponse](res)
	if data == nil {
		return "", fmt.Errorf("chatgpt error: cannot parse response")
	} else if data.Error.Message != "" {
		return "", fmt.Errorf("chatgpt error: %s", data.Error.Message)
	}
	// guard the index: an empty choices array would otherwise panic
	if len(data.Choices) == 0 {
		return "", fmt.Errorf("chatgpt error: no choices in response")
	}
	return data.Choices[0].Message.Content, nil
}

// CreateStreamChatRequest runs a streaming chat call, invoking callback for
// each decoded chunk of content. Dalle models short-circuit to a single image
// generation. Returns an error if the stream fails or produces no output.
func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback globals.Hook) error {
	if globals.IsDalleModel(props.Model) {
		if url, err := c.CreateImage(props); err != nil {
			return err
		} else {
			return callback(url)
		}
	}

	buf := ""        // carries a partial SSE line over to the next event for re-parse
	cursor := 0      // count of chunks delivered to the callback
	chunk := ""      // everything decoded so far, used only for the empty-response check
	instruct := props.Model == globals.GPT3TurboInstruct // legacy completions response format

	err := utils.EventSource(
		"POST",
		c.GetChatEndpoint(props),
		c.GetHeader(),
		c.GetChatBody(props, true),
		func(data string) error {
			data, err := c.ProcessLine(props.Buffer, instruct, buf, data)
			chunk += data

			if err != nil {
				// upstream API errors are fatal; anything else is assumed to be
				// a line broken mid-event, so stash it and retry with the next one
				if strings.HasPrefix(err.Error(), "chatgpt error") {
					return err
				}

				// error when break line
				buf = buf + data
				return nil
			}

			// parsed cleanly: clear the carry-over buffer and forward content
			buf = ""
			if data != "" {
				cursor += 1
				if err := callback(data); err != nil {
					return err
				}
			}
			return nil
		},
	)

	if err != nil {
		return err
	} else if len(chunk) == 0 {
		// stream completed but never produced content
		return fmt.Errorf("empty response")
	}

	return nil
}
62 changes: 62 additions & 0 deletions adapter/azure/image.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
package azure

import (
"chat/globals"
"chat/utils"
"fmt"
"strings"
)

// ImageProps carries one image-generation request's parameters.
type ImageProps struct {
	Model  string    // dalle model name; dots are stripped to form the deployment id
	Prompt string    // text prompt for the image
	Size   ImageSize // requested output resolution
}

// GetImageEndpoint builds the Azure OpenAI image-generation URL for the
// given model; dots are removed to form the deployment id.
func (c *ChatInstance) GetImageEndpoint(model string) string {
	deployment := strings.ReplaceAll(model, ".", "")
	return fmt.Sprintf("%s/openai/deployments/%s/images/generations?api-version=%s", c.GetResource(), deployment, c.GetEndpoint())
}

// CreateImageRequest will create a dalle image from prompt, returning the url
// of the generated image. The requested size defaults to 1024 for dalle-3 and
// 512 otherwise; props.Size is currently not consulted.
func (c *ChatInstance) CreateImageRequest(props ImageProps) (string, error) {
	res, err := utils.Post(
		c.GetImageEndpoint(props.Model),
		c.GetHeader(), ImageRequest{
			Prompt: props.Prompt,
			Size: utils.Multi[ImageSize](
				props.Model == globals.Dalle3,
				ImageSize1024,
				ImageSize512,
			),
			N: 1,
		})
	if err != nil {
		return "", fmt.Errorf("chatgpt error: %s", err.Error())
	}
	// checked separately: the original dereferenced err.Error() even when
	// only res was nil, which panics on a nil error
	if res == nil {
		return "", fmt.Errorf("chatgpt error: empty response")
	}

	data := utils.MapToStruct[ImageResponse](res)
	if data == nil {
		return "", fmt.Errorf("chatgpt error: cannot parse response")
	} else if data.Error.Message != "" {
		return "", fmt.Errorf("chatgpt error: %s", data.Error.Message)
	}
	// guard the index: an empty data array would otherwise panic
	if len(data.Data) == 0 {
		return "", fmt.Errorf("chatgpt error: no image in response")
	}

	return data.Data[0].Url, nil
}

// CreateImage will create a dalle image from the latest prompt in the
// conversation and return it as a markdown image link.
func (c *ChatInstance) CreateImage(props *ChatProps) (string, error) {
	url, err := c.CreateImageRequest(ImageProps{
		Model:  props.Model,
		Prompt: c.GetLatestPrompt(props),
	})
	if err == nil {
		return utils.GetImageMarkdown(url), nil
	}

	// content-filter ("safety") rejections are shown to the user as a normal
	// reply rather than surfaced as a hard error
	if strings.Contains(err.Error(), "safety") {
		return err.Error(), nil
	}
	return "", err
}
Loading

0 comments on commit a1f4845

Please sign in to comment.