From 19c99fb268d6d6c7fc7aaa66475c35f45d12b4bd Mon Sep 17 00:00:00 2001
From: Stainless Bot <107565488+stainless-bot@users.noreply.github.com>
Date: Tue, 29 Aug 2023 23:18:41 +0100
Subject: [PATCH 1/7] feat(types): fix ambiguous auto-import for chat
completions params (#266)
This renames the following types and deprecates the old names:
- `CompletionCreateParams` -> `ChatCompletionCreateParams`
- `CompletionCreateParamsStreaming` -> `ChatCompletionCreateParamsStreaming`
- `CompletionCreateParamsNonStreaming` -> `ChatCompletionCreateParamsNonStreaming`
- `CreateChatCompletionRequestMessage` -> `ChatCompletionMessageParam`
---
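For reviewers, a minimal usage sketch based on the README and examples updated below: the renamed types are drop-in replacements, and the old names keep compiling as deprecated aliases.

    import OpenAI from 'openai';

    // New, unambiguous param type names:
    const message: OpenAI.Chat.ChatCompletionMessageParam = { role: 'user', content: 'Say this is a test' };
    const params: OpenAI.Chat.ChatCompletionCreateParams = {
      model: 'gpt-3.5-turbo',
      messages: [message],
    };

    // The old names remain as deprecated aliases, so existing code keeps compiling:
    const legacy: OpenAI.Chat.CompletionCreateParams = params;

    const openai = new OpenAI();
    async function main() {
      const completion = await openai.chat.completions.create(params);
      console.log(completion);
    }
    main();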
README.md | 2 +-
api.md | 1 +
examples/chat-params-types.ts | 22 ++++++-----
examples/function-call-stream.ts | 6 +--
examples/function-call.ts | 6 +--
src/resources/chat/chat.ts | 4 ++
src/resources/chat/completions.ts | 63 ++++++++++++++++++++++---------
src/resources/chat/index.ts | 4 ++
8 files changed, 74 insertions(+), 34 deletions(-)
diff --git a/README.md b/README.md
index e280c2354..e97b7078c 100644
--- a/README.md
+++ b/README.md
@@ -78,7 +78,7 @@ const openai = new OpenAI({
});
async function main() {
- const params: OpenAI.Chat.CompletionCreateParams = {
+ const params: OpenAI.Chat.ChatCompletionCreateParams = {
messages: [{ role: 'user', content: 'Say this is a test' }],
model: 'gpt-3.5-turbo',
};
diff --git a/api.md b/api.md
index 1c443b4f1..14b70dffe 100644
--- a/api.md
+++ b/api.md
@@ -19,6 +19,7 @@ Types:
- ChatCompletion
- ChatCompletionChunk
- ChatCompletionMessage
+- ChatCompletionMessageParam
- CreateChatCompletionRequestMessage
Methods:
diff --git a/examples/chat-params-types.ts b/examples/chat-params-types.ts
index 66f27ed31..86c28fc8b 100755
--- a/examples/chat-params-types.ts
+++ b/examples/chat-params-types.ts
@@ -9,7 +9,7 @@ const openai = new OpenAI();
async function main() {
// ---------------- Explicit non-streaming params ------------
- const params: OpenAI.Chat.CompletionCreateParams = {
+ const params: OpenAI.Chat.ChatCompletionCreateParams = {
model: 'gpt-4',
messages: [{ role: 'user', content: 'Say this is a test!' }],
};
@@ -18,7 +18,7 @@ async function main() {
// ---------------- Explicit streaming params ----------------
- const streamingParams: OpenAI.Chat.CompletionCreateParams = {
+ const streamingParams: OpenAI.Chat.ChatCompletionCreateParams = {
model: 'gpt-4',
messages: [{ role: 'user', content: 'Say this is a test!' }],
stream: true,
@@ -32,12 +32,12 @@ async function main() {
// ---------------- Explicit (non)streaming types ----------------
- const params1: OpenAI.Chat.CompletionCreateParamsNonStreaming = {
+ const params1: OpenAI.Chat.ChatCompletionCreateParamsNonStreaming = {
model: 'gpt-4',
messages: [{ role: 'user', content: 'Say this is a test!' }],
};
- const params2: OpenAI.Chat.CompletionCreateParamsStreaming = {
+ const params2: OpenAI.Chat.ChatCompletionCreateParamsStreaming = {
model: 'gpt-4',
messages: [{ role: 'user', content: 'Say this is a test!' }],
stream: true,
@@ -52,9 +52,9 @@ async function main() {
// `role: string` is not assignable.
const streamingParams2 = {
model: 'gpt-4',
- messages: [{ role: 'user', content: 'Say this is a test!' }],
- stream: true,
- } as const;
+ messages: [{ role: 'user' as const, content: 'Say this is a test!' }],
+ stream: true as const,
+ };
// TS knows this is a Stream instance.
const stream2 = await openai.chat.completions.create(streamingParams2);
@@ -95,11 +95,13 @@ async function main() {
// not the response will be streamed.
export async function createCompletionParams(
stream: true,
-): Promise<OpenAI.Chat.CompletionCreateParamsStreaming>;
+): Promise<OpenAI.Chat.ChatCompletionCreateParamsStreaming>;
export async function createCompletionParams(
stream: false,
-): Promise<OpenAI.Chat.CompletionCreateParamsNonStreaming>;
-export async function createCompletionParams(stream: boolean): Promise<OpenAI.Chat.CompletionCreateParams> {
+): Promise<OpenAI.Chat.ChatCompletionCreateParamsNonStreaming>;
+export async function createCompletionParams(
+ stream: boolean,
+): Promise<OpenAI.Chat.ChatCompletionCreateParams> {
const params = {
model: 'gpt-3.5-turbo',
messages: [{ role: 'user' as const, content: 'Hello!' }],
diff --git a/examples/function-call-stream.ts b/examples/function-call-stream.ts
index 6126a7ff2..be4688aa7 100755
--- a/examples/function-call-stream.ts
+++ b/examples/function-call-stream.ts
@@ -5,13 +5,13 @@ import OpenAI from 'openai';
import {
ChatCompletionMessage,
ChatCompletionChunk,
- CreateChatCompletionRequestMessage,
+ ChatCompletionMessageParam,
} from 'openai/resources/chat';
// gets API Key from environment variable OPENAI_API_KEY
const openai = new OpenAI();
-const functions: OpenAI.Chat.CompletionCreateParams.Function[] = [
+const functions: OpenAI.Chat.ChatCompletionCreateParams.Function[] = [
{
name: 'list',
description: 'list queries books by genre, and returns a list of names of books',
@@ -63,7 +63,7 @@ async function callFunction(function_call: ChatCompletionMessage.FunctionCall):
}
async function main() {
- const messages: CreateChatCompletionRequestMessage[] = [
+ const messages: ChatCompletionMessageParam[] = [
{
role: 'system',
content:
diff --git a/examples/function-call.ts b/examples/function-call.ts
index 158437e68..ce12431b0 100755
--- a/examples/function-call.ts
+++ b/examples/function-call.ts
@@ -1,12 +1,12 @@
#!/usr/bin/env -S npm run tsn -T
import OpenAI from 'openai';
-import { ChatCompletionMessage, CreateChatCompletionRequestMessage } from 'openai/resources/chat';
+import { ChatCompletionMessage, ChatCompletionMessageParam } from 'openai/resources/chat';
// gets API Key from environment variable OPENAI_API_KEY
const openai = new OpenAI();
-const functions: OpenAI.Chat.CompletionCreateParams.Function[] = [
+const functions: OpenAI.Chat.ChatCompletionCreateParams.Function[] = [
{
name: 'list',
description: 'list queries books by genre, and returns a list of names of books',
@@ -58,7 +58,7 @@ async function callFunction(function_call: ChatCompletionMessage.FunctionCall):
}
async function main() {
- const messages: CreateChatCompletionRequestMessage[] = [
+ const messages: ChatCompletionMessageParam[] = [
{
role: 'system',
content:
diff --git a/src/resources/chat/chat.ts b/src/resources/chat/chat.ts
index 9a256b596..5d10f2f4d 100644
--- a/src/resources/chat/chat.ts
+++ b/src/resources/chat/chat.ts
@@ -13,8 +13,12 @@ export namespace Chat {
export import ChatCompletion = API.ChatCompletion;
export import ChatCompletionChunk = API.ChatCompletionChunk;
export import ChatCompletionMessage = API.ChatCompletionMessage;
+ export import ChatCompletionMessageParam = API.ChatCompletionMessageParam;
export import CreateChatCompletionRequestMessage = API.CreateChatCompletionRequestMessage;
+ export import ChatCompletionCreateParams = API.ChatCompletionCreateParams;
export import CompletionCreateParams = API.CompletionCreateParams;
+ export import ChatCompletionCreateParamsNonStreaming = API.ChatCompletionCreateParamsNonStreaming;
export import CompletionCreateParamsNonStreaming = API.CompletionCreateParamsNonStreaming;
+ export import ChatCompletionCreateParamsStreaming = API.ChatCompletionCreateParamsStreaming;
export import CompletionCreateParamsStreaming = API.CompletionCreateParamsStreaming;
}
diff --git a/src/resources/chat/completions.ts b/src/resources/chat/completions.ts
index b05ab1eb4..ddfe1344b 100644
--- a/src/resources/chat/completions.ts
+++ b/src/resources/chat/completions.ts
@@ -11,17 +11,20 @@ export class Completions extends APIResource {
/**
* Creates a model response for the given chat conversation.
*/
- create(body: CompletionCreateParamsNonStreaming, options?: Core.RequestOptions): APIPromise<ChatCompletion>;
create(
- body: CompletionCreateParamsStreaming,
+ body: ChatCompletionCreateParamsNonStreaming,
+ options?: Core.RequestOptions,
+ ): APIPromise<ChatCompletion>;
+ create(
+ body: ChatCompletionCreateParamsStreaming,
options?: Core.RequestOptions,
): APIPromise<Stream<ChatCompletionChunk>>;
create(
- body: CompletionCreateParamsBase,
+ body: ChatCompletionCreateParamsBase,
options?: Core.RequestOptions,
): APIPromise<Stream<ChatCompletionChunk> | ChatCompletion>;
create(
- body: CompletionCreateParams,
+ body: ChatCompletionCreateParams,
options?: Core.RequestOptions,
): APIPromise<ChatCompletion> | APIPromise<Stream<ChatCompletionChunk>> {
return this.post('/chat/completions', { body, ...options, stream: body.stream ?? false }) as
@@ -229,7 +232,7 @@ export namespace ChatCompletionMessage {
}
}
-export interface CreateChatCompletionRequestMessage {
+export interface ChatCompletionMessageParam {
/**
* The contents of the message. `content` is required for all messages, and may be
* null for assistant messages with function calls.
@@ -246,7 +249,7 @@ export interface CreateChatCompletionRequestMessage {
* The name and arguments of a function that should be called, as generated by the
* model.
*/
- function_call?: CreateChatCompletionRequestMessage.FunctionCall;
+ function_call?: ChatCompletionMessageParam.FunctionCall;
/**
* The name of the author of this message. `name` is required if role is
@@ -257,7 +260,7 @@ export interface CreateChatCompletionRequestMessage {
name?: string;
}
-export namespace CreateChatCompletionRequestMessage {
+export namespace ChatCompletionMessageParam {
/**
* The name and arguments of a function that should be called, as generated by the
* model.
@@ -278,14 +281,21 @@ export namespace CreateChatCompletionRequestMessage {
}
}
-export type CompletionCreateParams = CompletionCreateParamsNonStreaming | CompletionCreateParamsStreaming;
+/**
+ * @deprecated ChatCompletionMessageParam should be used instead
+ */
+export type CreateChatCompletionRequestMessage = ChatCompletionMessageParam;
-export interface CompletionCreateParamsBase {
+export type ChatCompletionCreateParams =
+ | ChatCompletionCreateParamsNonStreaming
+ | ChatCompletionCreateParamsStreaming;
+
+export interface ChatCompletionCreateParamsBase {
/**
* A list of messages comprising the conversation so far.
* [Example Python code](https://github.com/openai/openai-cookbook/blob/main/examples/How_to_format_inputs_to_ChatGPT_models.ipynb).
*/
- messages: Array<CreateChatCompletionRequestMessage>;
+ messages: Array<ChatCompletionMessageParam>;
/**
* ID of the model to use. See the
@@ -323,12 +333,12 @@ export interface CompletionCreateParamsBase {
* the default when no functions are present. "auto" is the default if functions
* are present.
*/
- function_call?: 'none' | 'auto' | CompletionCreateParams.FunctionCallOption;
+ function_call?: 'none' | 'auto' | ChatCompletionCreateParams.FunctionCallOption;
/**
* A list of functions the model may generate JSON inputs for.
*/
- functions?: Array<CompletionCreateParams.Function>;
+ functions?: Array<ChatCompletionCreateParams.Function>;
/**
* Modify the likelihood of specified tokens appearing in the completion.
@@ -406,7 +416,7 @@ export interface CompletionCreateParamsBase {
user?: string;
}
-export namespace CompletionCreateParams {
+export namespace ChatCompletionCreateParams {
export interface FunctionCallOption {
/**
* The name of the function to call.
@@ -439,11 +449,16 @@ export namespace CompletionCreateParams {
description?: string;
}
- export type CompletionCreateParamsNonStreaming = API.CompletionCreateParamsNonStreaming;
- export type CompletionCreateParamsStreaming = API.CompletionCreateParamsStreaming;
+ export type ChatCompletionCreateParamsNonStreaming = API.ChatCompletionCreateParamsNonStreaming;
+ export type ChatCompletionCreateParamsStreaming = API.ChatCompletionCreateParamsStreaming;
}
-export interface CompletionCreateParamsNonStreaming extends CompletionCreateParamsBase {
+/**
+ * @deprecated Use ChatCompletionCreateParams instead
+ */
+export type CompletionCreateParams = ChatCompletionCreateParams;
+
+export interface ChatCompletionCreateParamsNonStreaming extends ChatCompletionCreateParamsBase {
/**
* If set, partial message deltas will be sent, like in ChatGPT. Tokens will be
* sent as data-only
@@ -455,7 +470,12 @@ export interface CompletionCreateParamsNonStreaming extends CompletionCreatePara
stream?: false | null;
}
-export interface CompletionCreateParamsStreaming extends CompletionCreateParamsBase {
+/**
+ * @deprecated Use ChatCompletionCreateParamsNonStreaming instead
+ */
+export type CompletionCreateParamsNonStreaming = ChatCompletionCreateParamsNonStreaming;
+
+export interface ChatCompletionCreateParamsStreaming extends ChatCompletionCreateParamsBase {
/**
* If set, partial message deltas will be sent, like in ChatGPT. Tokens will be
* sent as data-only
@@ -467,12 +487,21 @@ export interface CompletionCreateParamsStreaming extends CompletionCreateParamsB
stream: true;
}
+/**
+ * @deprecated Use ChatCompletionCreateParamsStreaming instead
+ */
+export type CompletionCreateParamsStreaming = ChatCompletionCreateParamsStreaming;
+
export namespace Completions {
export import ChatCompletion = API.ChatCompletion;
export import ChatCompletionChunk = API.ChatCompletionChunk;
export import ChatCompletionMessage = API.ChatCompletionMessage;
+ export import ChatCompletionMessageParam = API.ChatCompletionMessageParam;
export import CreateChatCompletionRequestMessage = API.CreateChatCompletionRequestMessage;
+ export import ChatCompletionCreateParams = API.ChatCompletionCreateParams;
export import CompletionCreateParams = API.CompletionCreateParams;
+ export import ChatCompletionCreateParamsNonStreaming = API.ChatCompletionCreateParamsNonStreaming;
export import CompletionCreateParamsNonStreaming = API.CompletionCreateParamsNonStreaming;
+ export import ChatCompletionCreateParamsStreaming = API.ChatCompletionCreateParamsStreaming;
export import CompletionCreateParamsStreaming = API.CompletionCreateParamsStreaming;
}
diff --git a/src/resources/chat/index.ts b/src/resources/chat/index.ts
index f9232bffe..ea9d1d1b9 100644
--- a/src/resources/chat/index.ts
+++ b/src/resources/chat/index.ts
@@ -5,9 +5,13 @@ export {
ChatCompletion,
ChatCompletionChunk,
ChatCompletionMessage,
+ ChatCompletionMessageParam,
CreateChatCompletionRequestMessage,
+ ChatCompletionCreateParams,
CompletionCreateParams,
+ ChatCompletionCreateParamsNonStreaming,
CompletionCreateParamsNonStreaming,
+ ChatCompletionCreateParamsStreaming,
CompletionCreateParamsStreaming,
Completions,
} from './completions';
From 1a71a39421828fdde7b8605094363a5047d2fdc9 Mon Sep 17 00:00:00 2001
From: Stainless Bot <107565488+stainless-bot@users.noreply.github.com>
Date: Wed, 30 Aug 2023 13:07:12 -0400
Subject: [PATCH 2/7] feat: re-export chat completion types at the top level
(#268)
---
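A rough sketch of what the re-export enables, assuming the namespace additions below: chat completion types previously addressed under OpenAI.Chat also resolve at the top level of the OpenAI namespace.

    import OpenAI from 'openai';

    // Previously: OpenAI.Chat.ChatCompletionMessageParam
    const message: OpenAI.ChatCompletionMessageParam = { role: 'user', content: 'Hello!' };

    // Previously: OpenAI.Chat.ChatCompletionCreateParams
    const params: OpenAI.ChatCompletionCreateParams = {
      model: 'gpt-3.5-turbo',
      messages: [message],
    };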
src/index.ts | 8 ++++++++
src/resources/index.ts | 2 +-
2 files changed, 9 insertions(+), 1 deletion(-)
diff --git a/src/index.ts b/src/index.ts
index e8013a515..4965cd4f7 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -220,6 +220,14 @@ export namespace OpenAI {
export import Chat = API.Chat;
+ export import ChatCompletion = API.ChatCompletion;
+ export import ChatCompletionChunk = API.ChatCompletionChunk;
+ export import ChatCompletionMessage = API.ChatCompletionMessage;
+ export import ChatCompletionMessageParam = API.ChatCompletionMessageParam;
+ export import ChatCompletionCreateParams = API.ChatCompletionCreateParams;
+ export import ChatCompletionCreateParamsNonStreaming = API.ChatCompletionCreateParamsNonStreaming;
+ export import ChatCompletionCreateParamsStreaming = API.ChatCompletionCreateParamsStreaming;
+
export import Edits = API.Edits;
export import Edit = API.Edit;
export import EditCreateParams = API.EditCreateParams;
diff --git a/src/resources/index.ts b/src/resources/index.ts
index 0d9ab9935..ced5114fa 100644
--- a/src/resources/index.ts
+++ b/src/resources/index.ts
@@ -1,7 +1,7 @@
// File generated from our OpenAPI spec by Stainless.
+export * from './chat';
export { Audio } from './audio/audio';
-export { Chat } from './chat/chat';
export {
Completion,
CompletionChoice,
From 16f239c6b4e8526371b01c511d2e0ebba4c5c8c6 Mon Sep 17 00:00:00 2001
From: Stainless Bot <107565488+stainless-bot@users.noreply.github.com>
Date: Wed, 30 Aug 2023 20:20:45 -0400
Subject: [PATCH 3/7] feat(package): add Bun export map (#269)
---
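Consumer-side sketch, assuming a Bun project depending on this package: Bun includes "bun" in its export-condition list, so the new "bun" entry in the exports map below is matched ahead of "default" when Bun resolves the package's shim files.

    import OpenAI from 'openai';
    import { test, expect } from 'bun:test';

    // Minimal smoke check: if Bun resolved the export map (including the new
    // "bun" condition), the package imports cleanly and the client constructs.
    test('client can be constructed under Bun', () => {
      const client = new OpenAI({ apiKey: 'not-a-real-key' });
      expect(client.apiKey).toBe('not-a-real-key');
    });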
ecosystem-tests/bun/openai.test.ts | 100 +++++++++++++++++++++++++++--
ecosystem-tests/bun/package.json | 6 +-
ecosystem-tests/cli.ts | 12 ++--
package.json | 5 ++
4 files changed, 106 insertions(+), 17 deletions(-)
diff --git a/ecosystem-tests/bun/openai.test.ts b/ecosystem-tests/bun/openai.test.ts
index 243bc19a3..399b40c84 100644
--- a/ecosystem-tests/bun/openai.test.ts
+++ b/ecosystem-tests/bun/openai.test.ts
@@ -1,9 +1,26 @@
import OpenAI, { toFile } from 'openai';
+import fs from 'fs';
import { distance } from 'fastest-levenshtein';
import { test, expect } from 'bun:test';
+const url = 'https://audio-samples.github.io/samples/mp3/blizzard_biased/sample-1.mp3';
+const filename = 'sample-1.mp3';
+
+const correctAnswer =
+ 'It was anxious to find him no one that expectation of a man who were giving his father enjoyment. But he was avoided in sight in the minister to which indeed,';
+const model = 'whisper-1';
+
const client = new OpenAI();
+async function typeTests() {
+ // @ts-expect-error this should error if the `Uploadable` type was resolved correctly
+ await client.audio.transcriptions.create({ file: { foo: true }, model: 'whisper-1' });
+ // @ts-expect-error this should error if the `Uploadable` type was resolved correctly
+ await client.audio.transcriptions.create({ file: null, model: 'whisper-1' });
+ // @ts-expect-error this should error if the `Uploadable` type was resolved correctly
+ await client.audio.transcriptions.create({ file: 'test', model: 'whisper-1' });
+}
+
function expectSimilar(received: any, comparedTo: string, expectedDistance: number) {
const message = () =>
[
@@ -38,11 +55,80 @@ test(`streaming works`, async function () {
expectSimilar(chunks.map((c) => c.choices[0]?.delta.content || '').join(''), 'This is a test', 10);
});
-test(`toFile rejects`, async function () {
- try {
- await toFile(new TextEncoder().encode('foo'), 'foo.txt');
- throw new Error(`expected toFile to reject`);
- } catch (error) {
- expect((error as any).message).toEqual(`file uploads aren't supported in this environment yet`);
- }
+// @ts-ignore avoid DOM lib for testing purposes
+if (typeof File !== 'undefined') {
+ test.todo('handles builtinFile', async function () {
+ const file = await fetch(url)
+ .then((x) => x.arrayBuffer())
+ // @ts-ignore avoid DOM lib for testing purposes
+ .then((x) => new File([x], filename));
+
+ const result = await client.audio.transcriptions.create({ file, model });
+ expectSimilar(result.text, correctAnswer, 12);
+ });
+}
+
+test.todo('handles Response', async function () {
+ const file = await fetch(url);
+
+ const result = await client.audio.transcriptions.create({ file, model });
+ expectSimilar(result.text, correctAnswer, 12);
+});
+
+test.todo('handles fs.ReadStream', async function () {
+ const result = await client.audio.transcriptions.create({
+ file: fs.createReadStream('sample1.mp3'),
+ model,
+ });
+ expectSimilar(result.text, correctAnswer, 12);
+});
+
+const fineTune = `{"prompt": "", "completion": ""}`;
+
+// @ts-ignore avoid DOM lib for testing purposes
+if (typeof Blob !== 'undefined') {
+ test.todo('toFile handles builtin Blob', async function () {
+ const result = await client.files.create({
+ file: await toFile(
+ // @ts-ignore avoid DOM lib for testing purposes
+ new Blob([new TextEncoder().encode(fineTune)]),
+ 'finetune.jsonl',
+ ),
+ purpose: 'fine-tune',
+ });
+ expect(result.status).toEqual('uploaded');
+ });
+}
+test.todo('toFile handles Uint8Array', async function () {
+ const result = await client.files.create({
+ file: await toFile(
+ // @ts-ignore avoid DOM lib for testing purposes
+ new TextEncoder().encode(fineTune),
+ 'finetune.jsonl',
+ ),
+ purpose: 'fine-tune',
+ });
+ expect(result.status).toEqual('uploaded');
+});
+test.todo('toFile handles ArrayBuffer', async function () {
+ const result = await client.files.create({
+ file: await toFile(
+ // @ts-ignore avoid DOM lib for testing purposes
+ new TextEncoder().encode(fineTune).buffer,
+ 'finetune.jsonl',
+ ),
+ purpose: 'fine-tune',
+ });
+ expect(result.status).toEqual('uploaded');
+});
+test.todo('toFile handles DataView', async function () {
+ const result = await client.files.create({
+ file: await toFile(
+ // @ts-ignore avoid DOM lib for testing purposes
+ new DataView(new TextEncoder().encode(fineTune).buffer),
+ 'finetune.jsonl',
+ ),
+ purpose: 'fine-tune',
+ });
+ expect(result.status).toEqual('uploaded');
});
diff --git a/ecosystem-tests/bun/package.json b/ecosystem-tests/bun/package.json
index 3cedd94f7..1465be93a 100644
--- a/ecosystem-tests/bun/package.json
+++ b/ecosystem-tests/bun/package.json
@@ -7,9 +7,7 @@
},
"devDependencies": {
"fastest-levenshtein": "^1.0.16",
- "bun-types": "latest"
- },
- "peerDependencies": {
- "typescript": "^5.0.0"
+ "bun-types": "latest",
+ "typescript": "^5.1.0"
}
}
diff --git a/ecosystem-tests/cli.ts b/ecosystem-tests/cli.ts
index 195dbf020..d6ffcb872 100644
--- a/ecosystem-tests/cli.ts
+++ b/ecosystem-tests/cli.ts
@@ -268,7 +268,7 @@ async function main() {
console.error('\n');
try {
- await withRetry(fn, project, state.retry)
+ await withRetry(fn, project, state.retry);
console.error(`✅ - Successfully ran ${project}`);
} catch (err) {
if (err && (err as any).shortMessage) {
@@ -294,13 +294,13 @@ async function main() {
async function withRetry(fn: () => Promise<void>, identifier: string, retryAmount: number): Promise<void> {
do {
try {
- return await fn()
+ return await fn();
} catch (err) {
- console.error(`${identifier} failed due to ${err}; retries left ${retryAmount}`)
+ retryAmount--;
+ if (retryAmount === 0) throw err;
+ console.error(`${identifier} failed due to ${err}; retries left ${retryAmount}`);
}
-
- retryAmount--;
- } while (retryAmount > 0)
+ } while (retryAmount > 0);
}
function centerPad(text: string, width = text.length, char = ' '): string {
diff --git a/package.json b/package.json
index b4847bed5..8a1440f63 100644
--- a/package.json
+++ b/package.json
@@ -16,6 +16,11 @@
"require": "./dist/_shims/*.js",
"default": "./dist/_shims/*.mjs"
},
+ "bun": {
+ "types": "./dist/_shims/*.d.ts",
+ "require": "./dist/_shims/*.js",
+ "default": "./dist/_shims/*.mjs"
+ },
"browser": {
"types": "./dist/_shims/*.d.ts",
"require": "./dist/_shims/*.js",
From 7e7242110a522d5b6df1af9d4645d190abcd2cbb Mon Sep 17 00:00:00 2001
From: Stainless Bot <107565488+stainless-bot@users.noreply.github.com>
Date: Thu, 31 Aug 2023 06:01:49 -0400
Subject: [PATCH 4/7] ci: remove GitHub workflow open-release-prs.yml (#272)
---
.github/workflows/open-release-prs.yml | 21 ---------------------
1 file changed, 21 deletions(-)
delete mode 100644 .github/workflows/open-release-prs.yml
diff --git a/.github/workflows/open-release-prs.yml b/.github/workflows/open-release-prs.yml
deleted file mode 100644
index ca04b9e2d..000000000
--- a/.github/workflows/open-release-prs.yml
+++ /dev/null
@@ -1,21 +0,0 @@
-name: Open release PRs
-on:
- push:
- branches:
- - next
-
-jobs:
- release:
- name: release
- if: github.ref == 'refs/heads/next' && github.repository == 'openai/openai-node'
- runs-on: ubuntu-latest
-
- steps:
- - uses: actions/checkout@v3
-
- - uses: stainless-api/trigger-release-please@v1
- id: release
- with:
- repo: ${{ github.event.repository.full_name }}
- stainless-api-key: ${{ secrets.STAINLESS_API_KEY }}
- branch-with-changes: next
From 6534e3620d7e2983e98b42cf95fa966deab1ab1d Mon Sep 17 00:00:00 2001
From: Stainless Bot <107565488+stainless-bot@users.noreply.github.com>
Date: Thu, 31 Aug 2023 18:24:16 -0400
Subject: [PATCH 5/7] fix: revert import change which triggered circular import
bug in webpack (#274)
---
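For context, a generic illustration of the kind of cycle involved, not the exact module graph in this repository: a wildcard re-export from the package index pulls in a module that itself imports from the index, so one side of the cycle is evaluated before the other has finished initializing when bundled with webpack.

    // index.ts
    export * from './chat';              // evaluates chat.ts while index.ts is still initializing
    export class APIClient { /* ... */ }

    // chat.ts
    import { APIClient } from './index'; // back-edge: chat.ts -> index.ts -> chat.ts
    export class Chat {
      constructor(private readonly client: APIClient) {}
    }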
src/index.ts | 8 --------
src/resources/index.ts | 2 +-
2 files changed, 1 insertion(+), 9 deletions(-)
diff --git a/src/index.ts b/src/index.ts
index 4965cd4f7..e8013a515 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -220,14 +220,6 @@ export namespace OpenAI {
export import Chat = API.Chat;
- export import ChatCompletion = API.ChatCompletion;
- export import ChatCompletionChunk = API.ChatCompletionChunk;
- export import ChatCompletionMessage = API.ChatCompletionMessage;
- export import ChatCompletionMessageParam = API.ChatCompletionMessageParam;
- export import ChatCompletionCreateParams = API.ChatCompletionCreateParams;
- export import ChatCompletionCreateParamsNonStreaming = API.ChatCompletionCreateParamsNonStreaming;
- export import ChatCompletionCreateParamsStreaming = API.ChatCompletionCreateParamsStreaming;
-
export import Edits = API.Edits;
export import Edit = API.Edit;
export import EditCreateParams = API.EditCreateParams;
diff --git a/src/resources/index.ts b/src/resources/index.ts
index ced5114fa..0d9ab9935 100644
--- a/src/resources/index.ts
+++ b/src/resources/index.ts
@@ -1,7 +1,7 @@
// File generated from our OpenAPI spec by Stainless.
-export * from './chat';
export { Audio } from './audio/audio';
+export { Chat } from './chat/chat';
export {
Completion,
CompletionChoice,
From 47d3e18a3ee987d04b958dad1a51821ad5472d54 Mon Sep 17 00:00:00 2001
From: Stainless Bot <107565488+stainless-bot@users.noreply.github.com>
Date: Fri, 1 Sep 2023 10:33:53 -0400
Subject: [PATCH 6/7] feat(tests): unskip multipart form data tests (#275)
---
.../api-resources/audio/transcriptions.test.ts | 6 ++----
tests/api-resources/audio/translations.test.ts | 6 ++----
tests/api-resources/images.test.ts | 18 ++++++------------
3 files changed, 10 insertions(+), 20 deletions(-)
diff --git a/tests/api-resources/audio/transcriptions.test.ts b/tests/api-resources/audio/transcriptions.test.ts
index 79ee351e6..f9b2aade4 100644
--- a/tests/api-resources/audio/transcriptions.test.ts
+++ b/tests/api-resources/audio/transcriptions.test.ts
@@ -6,8 +6,7 @@ import { Response } from 'node-fetch';
const openai = new OpenAI({ apiKey: 'something1234', baseURL: 'http://127.0.0.1:4010' });
describe('resource transcriptions', () => {
- // Prism doesn't support multipart/form-data
- test.skip('create: only required params', async () => {
+ test('create: only required params', async () => {
const responsePromise = openai.audio.transcriptions.create({
file: await toFile(Buffer.from('# my file contents'), 'README.md'),
model: 'whisper-1',
@@ -21,8 +20,7 @@ describe('resource transcriptions', () => {
expect(dataAndResponse.response).toBe(rawResponse);
});
- // Prism doesn't support multipart/form-data
- test.skip('create: required and optional params', async () => {
+ test('create: required and optional params', async () => {
const response = await openai.audio.transcriptions.create({
file: await toFile(Buffer.from('# my file contents'), 'README.md'),
model: 'whisper-1',
diff --git a/tests/api-resources/audio/translations.test.ts b/tests/api-resources/audio/translations.test.ts
index 4f6389d2e..92cffa51b 100644
--- a/tests/api-resources/audio/translations.test.ts
+++ b/tests/api-resources/audio/translations.test.ts
@@ -6,8 +6,7 @@ import { Response } from 'node-fetch';
const openai = new OpenAI({ apiKey: 'something1234', baseURL: 'http://127.0.0.1:4010' });
describe('resource translations', () => {
- // Prism doesn't support multipart/form-data
- test.skip('create: only required params', async () => {
+ test('create: only required params', async () => {
const responsePromise = openai.audio.translations.create({
file: await toFile(Buffer.from('# my file contents'), 'README.md'),
model: 'whisper-1',
@@ -21,8 +20,7 @@ describe('resource translations', () => {
expect(dataAndResponse.response).toBe(rawResponse);
});
- // Prism doesn't support multipart/form-data
- test.skip('create: required and optional params', async () => {
+ test('create: required and optional params', async () => {
const response = await openai.audio.translations.create({
file: await toFile(Buffer.from('# my file contents'), 'README.md'),
model: 'whisper-1',
diff --git a/tests/api-resources/images.test.ts b/tests/api-resources/images.test.ts
index c10b19aef..f9dabcb3e 100644
--- a/tests/api-resources/images.test.ts
+++ b/tests/api-resources/images.test.ts
@@ -6,8 +6,7 @@ import { Response } from 'node-fetch';
const openai = new OpenAI({ apiKey: 'something1234', baseURL: 'http://127.0.0.1:4010' });
describe('resource images', () => {
- // Prism doesn't support multipart/form-data
- test.skip('createVariation: only required params', async () => {
+ test('createVariation: only required params', async () => {
const responsePromise = openai.images.createVariation({
image: await toFile(Buffer.from('# my file contents'), 'README.md'),
});
@@ -20,8 +19,7 @@ describe('resource images', () => {
expect(dataAndResponse.response).toBe(rawResponse);
});
- // Prism doesn't support multipart/form-data
- test.skip('createVariation: required and optional params', async () => {
+ test('createVariation: required and optional params', async () => {
const response = await openai.images.createVariation({
image: await toFile(Buffer.from('# my file contents'), 'README.md'),
n: 1,
@@ -31,8 +29,7 @@ describe('resource images', () => {
});
});
- // Prism doesn't support multipart/form-data
- test.skip('edit: only required params', async () => {
+ test('edit: only required params', async () => {
const responsePromise = openai.images.edit({
image: await toFile(Buffer.from('# my file contents'), 'README.md'),
prompt: 'A cute baby sea otter wearing a beret',
@@ -46,8 +43,7 @@ describe('resource images', () => {
expect(dataAndResponse.response).toBe(rawResponse);
});
- // Prism doesn't support multipart/form-data
- test.skip('edit: required and optional params', async () => {
+ test('edit: required and optional params', async () => {
const response = await openai.images.edit({
image: await toFile(Buffer.from('# my file contents'), 'README.md'),
prompt: 'A cute baby sea otter wearing a beret',
@@ -59,8 +55,7 @@ describe('resource images', () => {
});
});
- // Prism doesn't support multipart/form-data
- test.skip('generate: only required params', async () => {
+ test('generate: only required params', async () => {
const responsePromise = openai.images.generate({ prompt: 'A cute baby sea otter' });
const rawResponse = await responsePromise.asResponse();
expect(rawResponse).toBeInstanceOf(Response);
@@ -71,8 +66,7 @@ describe('resource images', () => {
expect(dataAndResponse.response).toBe(rawResponse);
});
- // Prism doesn't support multipart/form-data
- test.skip('generate: required and optional params', async () => {
+ test('generate: required and optional params', async () => {
const response = await openai.images.generate({
prompt: 'A cute baby sea otter',
n: 1,
From fca6fd9745432052a9b6bcbfec26173baae5ac1a Mon Sep 17 00:00:00 2001
From: Stainless Bot <107565488+stainless-bot@users.noreply.github.com>
Date: Fri, 1 Sep 2023 10:34:09 -0400
Subject: [PATCH 7/7] chore(next => master): release 4.4.0
---
.release-please-manifest.json | 2 +-
CHANGELOG.md | 16 ++++++++++++++++
package.json | 2 +-
src/version.ts | 2 +-
4 files changed, 19 insertions(+), 3 deletions(-)
diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index 2276f3d26..fb1f343c6 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
{
- ".": "4.3.1"
+ ".": "4.4.0"
}
diff --git a/CHANGELOG.md b/CHANGELOG.md
index cb610f0d9..f0c19da3e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,21 @@
# Changelog
+## 4.4.0 (2023-09-01)
+
+Full Changelog: [v4.3.1...v4.4.0](https://github.com/openai/openai-node/compare/v4.3.1...v4.4.0)
+
+### Features
+
+* **package:** add Bun export map ([#269](https://github.com/openai/openai-node/issues/269)) ([16f239c](https://github.com/openai/openai-node/commit/16f239c6b4e8526371b01c511d2e0ebba4c5c8c6))
+* re-export chat completion types at the top level ([#268](https://github.com/openai/openai-node/issues/268)) ([1a71a39](https://github.com/openai/openai-node/commit/1a71a39421828fdde7b8605094363a5047d2fdc9))
+* **tests:** unskip multipart form data tests ([#275](https://github.com/openai/openai-node/issues/275)) ([47d3e18](https://github.com/openai/openai-node/commit/47d3e18a3ee987d04b958dad1a51821ad5472d54))
+* **types:** fix ambiguous auto-import for chat completions params ([#266](https://github.com/openai/openai-node/issues/266)) ([19c99fb](https://github.com/openai/openai-node/commit/19c99fb268d6d6c7fc7aaa66475c35f45d12b4bd))
+
+
+### Bug Fixes
+
+* revert import change which triggered circular import bug in webpack ([#274](https://github.com/openai/openai-node/issues/274)) ([6534e36](https://github.com/openai/openai-node/commit/6534e3620d7e2983e98b42cf95fa966deab1ab1d))
+
## 4.3.1 (2023-08-29)
Full Changelog: [v4.3.0...v4.3.1](https://github.com/openai/openai-node/compare/v4.3.0...v4.3.1)
diff --git a/package.json b/package.json
index 8a1440f63..8e6f07882 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "openai",
- "version": "4.3.1",
+ "version": "4.4.0",
"description": "Client library for the OpenAI API",
"author": "OpenAI ",
"types": "dist/index.d.ts",
diff --git a/src/version.ts b/src/version.ts
index 42e880c67..8ab25148f 100644
--- a/src/version.ts
+++ b/src/version.ts
@@ -1 +1 @@
-export const VERSION = '4.3.1'; // x-release-please-version
+export const VERSION = '4.4.0'; // x-release-please-version