diff --git a/README.md b/README.md index e26f83f6f37..e795919c52b 100644 --- a/README.md +++ b/README.md @@ -105,6 +105,9 @@ - 配置自定义接口地址(可选) `GOOGLE_BASE_URL`,可以使用我的这个项目搭建一个基于 vercel 的代理服务:[google-gemini-vercel-proxy](https://github.com/Hk-Gosuto/google-gemini-vercel-proxy) - 常见问题参考:[Gemini Prompting FAQs](https://js.langchain.com/docs/integrations/chat/google_generativeai#gemini-prompting-faqs) - gemini-pro-vision 模型需要配置对象存储服务,请参考 [对象存储服务配置指南](./docs/s3-oss.md) 配置 + - ⚠ gemini-pro-vision 注意事项 (https://github.com/Hk-Gosuto/ChatGPT-Next-Web-LangChain/issues/203): + - 每次对话必须包含图像数据,不然会出现 `Add an image to use models/gemini-pro-vision, or switch your model to a text model.` 错误。 + - 只支持单轮对话,多轮对话会出现 `Multiturn chat is not enabled for models/gemini-pro-vision` 错误。 - 非 Vercel 运行环境下支持本地存储 diff --git a/app/client/platforms/google.ts b/app/client/platforms/google.ts index 11f77120ab1..c19c7ece5ad 100644 --- a/app/client/platforms/google.ts +++ b/app/client/platforms/google.ts @@ -172,7 +172,10 @@ export class GeminiProApi implements LLMApi { // start animaion animateResponseText(); fetch(streamChatPath, chatPayload) - .then((response) => { + .then(async (response) => { + if (!response.ok) { + throw new Error(await response?.text()); + } const reader = response?.body?.getReader(); const decoder = new TextDecoder(); let partialData = ""; @@ -220,6 +223,7 @@ export class GeminiProApi implements LLMApi { }) .catch((error) => { console.error("Error:", error); + options.onError?.(error as Error); }); } else { const res = await fetch(chatPath, chatPayload); diff --git a/app/constant.ts b/app/constant.ts index 577b59ecf7f..f76cebbf038 100644 --- a/app/constant.ts +++ b/app/constant.ts @@ -105,6 +105,7 @@ Latex block: $$e=mc^2$$ `; export const SUMMARIZE_MODEL = "gpt-3.5-turbo"; +export const GOOGLE_SUMMARIZE_MODEL = "gemini-pro"; export const KnowledgeCutOffDate: Record = { default: "2021-09", diff --git a/app/store/chat.ts b/app/store/chat.ts index 0e8326c36f9..393348a7600 100644 ---
a/app/store/chat.ts +++ b/app/store/chat.ts @@ -8,6 +8,7 @@ import { DEFAULT_INPUT_TEMPLATE, DEFAULT_MODELS, DEFAULT_SYSTEM_TEMPLATE, + GOOGLE_SUMMARIZE_MODEL, KnowledgeCutOffDate, ModelProvider, StoreKey, @@ -96,6 +97,7 @@ function getSummarizeModel(currentModel: string) { const model = DEFAULT_MODELS.find((m) => m.name === currentModel); console.log("model", model); if (!model) return currentModel; + if (model.provider.providerType === "google") return GOOGLE_SUMMARIZE_MODEL; // if it is using gpt-* models, force to use 3.5 to summarize return currentModel.startsWith("gpt") ? SUMMARIZE_MODEL : currentModel; }