diff --git a/.github/workflows/create-releases.yml b/.github/workflows/create-releases.yml
new file mode 100644
index 0000000000..7dbae006c0
--- /dev/null
+++ b/.github/workflows/create-releases.yml
@@ -0,0 +1,37 @@
+name: Create releases
+on:
+ push:
+ branches:
+ - main
+
+jobs:
+ release:
+ name: release
+ if: github.ref == 'refs/heads/main' && github.repository == 'openai/openai-python'
+ runs-on: ubuntu-latest
+ environment: publish
+
+ steps:
+ - uses: actions/checkout@v3
+
+ - uses: stainless-api/trigger-release-please@v1
+ id: release
+ with:
+ repo: ${{ github.event.repository.full_name }}
+ stainless-api-key: ${{ secrets.STAINLESS_API_KEY }}
+
+ - name: Install Rye
+ if: ${{ steps.release.outputs.releases_created }}
+ run: |
+ curl -sSf https://rye-up.com/get | bash
+ echo "$HOME/.rye/shims" >> $GITHUB_PATH
+ env:
+ RYE_VERSION: 0.15.2
+ RYE_INSTALL_OPTION: "--yes"
+
+ - name: Publish to PyPI
+ if: ${{ steps.release.outputs.releases_created }}
+ run: |
+ bash ./bin/publish-pypi
+ env:
+ PYPI_TOKEN: ${{ secrets.OPENAI_PYPI_TOKEN || secrets.PYPI_TOKEN }}
diff --git a/.github/workflows/publish-pypi.yml b/.github/workflows/publish-pypi.yml
new file mode 100644
index 0000000000..026ed29c22
--- /dev/null
+++ b/.github/workflows/publish-pypi.yml
@@ -0,0 +1,27 @@
+# workflow for re-running publishing to PyPI in case it fails for some reason
+# you can run this workflow by navigating to https://www.github.com/openai/openai-python/actions/workflows/publish-pypi.yml
+name: Publish PyPI
+on:
+ workflow_dispatch:
+
+jobs:
+ publish:
+ name: publish
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v3
+
+ - name: Install Rye
+ run: |
+ curl -sSf https://rye-up.com/get | bash
+ echo "$HOME/.rye/shims" >> $GITHUB_PATH
+ env:
+ RYE_VERSION: 0.15.2
+ RYE_INSTALL_OPTION: "--yes"
+
+ - name: Publish to PyPI
+ run: |
+ bash ./bin/publish-pypi
+ env:
+ PYPI_TOKEN: ${{ secrets.OPENAI_PYPI_TOKEN || secrets.PYPI_TOKEN }}
diff --git a/.github/workflows/release-doctor.yml b/.github/workflows/release-doctor.yml
new file mode 100644
index 0000000000..108aa5973a
--- /dev/null
+++ b/.github/workflows/release-doctor.yml
@@ -0,0 +1,23 @@
+name: Release Doctor
+on:
+ push:
+ branches:
+ - main
+ workflow_dispatch:
+
+jobs:
+ release_doctor:
+ name: release doctor
+ runs-on: ubuntu-latest
+ environment: publish
+ if: github.repository == 'openai/openai-python' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch' || startsWith(github.head_ref, 'release-please') || github.head_ref == 'next')
+
+ steps:
+ - uses: actions/checkout@v3
+
+ - name: Check release environment
+ run: |
+ bash ./bin/check-release-environment
+ env:
+ STAINLESS_API_KEY: ${{ secrets.STAINLESS_API_KEY }}
+ PYPI_TOKEN: ${{ secrets.OPENAI_PYPI_TOKEN || secrets.PYPI_TOKEN }}
diff --git a/.release-please-manifest.json b/.release-please-manifest.json
new file mode 100644
index 0000000000..9c6a481f5b
--- /dev/null
+++ b/.release-please-manifest.json
@@ -0,0 +1,3 @@
+{
+ ".": "1.1.2"
+}
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000000..8c97964977
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,16 @@
+# Changelog
+
+## 1.1.2 (2023-11-08)
+
+Full Changelog: [v1.1.1...v1.1.2](https://github.com/openai/openai-python/compare/v1.1.1...v1.1.2)
+
+### Bug Fixes
+
+* **api:** accidentally required params, add new models & other fixes ([#729](https://github.com/openai/openai-python/issues/729)) ([03c3e03](https://github.com/openai/openai-python/commit/03c3e03fc758cf4e59b81edf73a2618d80b560b7))
+* asssitant_deleted -> assistant_deleted ([#711](https://github.com/openai/openai-python/issues/711)) ([287b51e](https://github.com/openai/openai-python/commit/287b51e4f7cede9667c118007de1275eb04772c6))
+
+
+### Chores
+
+* **docs:** fix github links ([#719](https://github.com/openai/openai-python/issues/719)) ([0cda8ca](https://github.com/openai/openai-python/commit/0cda8cab718d53d7dc0604d9fac52838c9391565))
+* **internal:** fix some typos ([#718](https://github.com/openai/openai-python/issues/718)) ([894ad87](https://github.com/openai/openai-python/commit/894ad874aaa5d74530f561896ff31f68693418da))
diff --git a/README.md b/README.md
index 8904d9ed52..cedbc72337 100644
--- a/README.md
+++ b/README.md
@@ -410,7 +410,7 @@ completion = response.parse() # get the object that `chat.completions.create()`
print(completion)
```
-These methods return an [`APIResponse`](https://github.com/openai/openai-python/src/openai/_response.py) object.
+These methods return an [`APIResponse`](https://github.com/openai/openai-python/tree/main/src/openai/_response.py) object.
### Configuring the HTTP client
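
For readers who only see the tail of this README snippet in the hunk: the `APIResponse` object referenced here comes from the `.with_raw_response` accessor. A minimal sketch of that flow, assuming the standard v1 client; the prompt text and header name are illustrative, not taken from the README:

```python
from openai import OpenAI

client = OpenAI()

# Call the endpoint via `.with_raw_response` to get the raw APIResponse wrapper.
response = client.chat.completions.with_raw_response.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Say this is a test"}],
)
print(response.headers.get("x-request-id"))  # raw response headers are available here

completion = response.parse()  # get the object that `chat.completions.create()` would have returned
print(completion)
```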
diff --git a/api.md b/api.md
index 818ae73b31..95e9922129 100644
--- a/api.md
+++ b/api.md
@@ -197,7 +197,7 @@ Methods:
Types:
```python
-from openai.types.beta import Assistant, AsssitantDeleted
+from openai.types.beta import Assistant, AssistantDeleted
```
Methods:
@@ -206,7 +206,7 @@ Methods:
- client.beta.assistants.retrieve(assistant_id) -> Assistant
- client.beta.assistants.update(assistant_id, \*\*params) -> Assistant
- client.beta.assistants.list(\*\*params) -> SyncCursorPage[Assistant]
-- client.beta.assistants.delete(assistant_id) -> AsssitantDeleted
+- client.beta.assistants.delete(assistant_id) -> AssistantDeleted
### Files
diff --git a/bin/check-release-environment b/bin/check-release-environment
new file mode 100644
index 0000000000..b0c8d34f0c
--- /dev/null
+++ b/bin/check-release-environment
@@ -0,0 +1,25 @@
+#!/usr/bin/env bash
+
+errors=()
+
+if [ -z "${STAINLESS_API_KEY}" ]; then
+ errors+=("The STAINLESS_API_KEY secret has not been set. Please contact Stainless for an API key & set it in your organization secrets on GitHub.")
+fi
+
+if [ -z "${PYPI_TOKEN}" ]; then
+ errors+=("The OPENAI_PYPI_TOKEN secret has not been set. Please set it in either this repository's secrets or your organization secrets.")
+fi
+
+len=${#errors[@]}
+
+if [[ len -gt 0 ]]; then
+ echo -e "Found the following errors in the release environment:\n"
+
+ for error in "${errors[@]}"; do
+ echo -e "- $error\n"
+ done
+
+ exit 1
+fi
+
+echo "The environment is ready to push releases!"
diff --git a/bin/publish-pypi b/bin/publish-pypi
new file mode 100644
index 0000000000..826054e924
--- /dev/null
+++ b/bin/publish-pypi
@@ -0,0 +1,6 @@
+#!/usr/bin/env bash
+
+set -eux
+mkdir -p dist
+rye build --clean
+rye publish --yes --token=$PYPI_TOKEN
diff --git a/examples/audio.py b/examples/audio.py
new file mode 100755
index 0000000000..a5f535dcd6
--- /dev/null
+++ b/examples/audio.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+
+from pathlib import Path
+
+from openai import OpenAI
+
+# gets OPENAI_API_KEY from your environment variables
+openai = OpenAI()
+
+speech_file_path = Path(__file__).parent / "speech.mp3"
+
+
+def main() -> None:
+ # Create text-to-speech audio file
+ response = openai.audio.speech.create(
+ model="tts-1", voice="alloy", input="the quick brown fox jumped over the lazy dogs"
+ )
+
+ response.stream_to_file(speech_file_path)
+
+ # Create transcription from audio file
+ transcription = openai.audio.transcriptions.create(model="whisper-1", file=speech_file_path)
+ print(transcription.text)
+
+ # Create translation from audio file
+ translation = openai.audio.translations.create(
+ model="whisper-1",
+ file=speech_file_path,
+ )
+ print(translation.text)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/pyproject.toml b/pyproject.toml
index c5dd666475..0861b1278b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "openai"
-version = "1.1.1"
+version = "1.1.2"
description = "Client library for the openai API"
readme = "README.md"
license = "Apache-2.0"
diff --git a/release-please-config.json b/release-please-config.json
new file mode 100644
index 0000000000..5c66d801f5
--- /dev/null
+++ b/release-please-config.json
@@ -0,0 +1,64 @@
+{
+ "packages": {
+ ".": {}
+ },
+ "$schema": "https://raw.githubusercontent.com/stainless-api/release-please/main/schemas/config.json",
+ "include-v-in-tag": true,
+ "include-component-in-tag": false,
+ "bump-minor-pre-major": true,
+ "bump-patch-for-minor-pre-major": false,
+ "pull-request-header": "Automated Release PR",
+ "pull-request-title-pattern": "release: ${version}",
+ "changelog-sections": [
+ {
+ "type": "feat",
+ "section": "Features"
+ },
+ {
+ "type": "fix",
+ "section": "Bug Fixes"
+ },
+ {
+ "type": "perf",
+ "section": "Performance Improvements"
+ },
+ {
+ "type": "revert",
+ "section": "Reverts"
+ },
+ {
+ "type": "chore",
+ "section": "Chores"
+ },
+ {
+ "type": "docs",
+ "section": "Documentation"
+ },
+ {
+ "type": "style",
+ "section": "Styles"
+ },
+ {
+ "type": "refactor",
+ "section": "Refactors"
+ },
+ {
+ "type": "test",
+ "section": "Tests",
+ "hidden": true
+ },
+ {
+ "type": "build",
+ "section": "Build System"
+ },
+ {
+ "type": "ci",
+ "section": "Continuous Integration",
+ "hidden": true
+ }
+ ],
+ "release-type": "python",
+ "extra-files": [
+ "src/openai/_version.py"
+ ]
+}
\ No newline at end of file
diff --git a/src/openai/_files.py b/src/openai/_files.py
index 94cd553135..bebfb19501 100644
--- a/src/openai/_files.py
+++ b/src/openai/_files.py
@@ -29,7 +29,7 @@ def assert_is_file_content(obj: object, *, key: str | None = None) -> None:
if not is_file_content(obj):
prefix = f"Expected entry at `{key}`" if key is not None else f"Expected file input `{obj!r}`"
raise RuntimeError(
- f"{prefix} to be bytes, an io.IOBase instance, PathLike or a tuple but received {type(obj)} instead. See https://github.com/openai/openai-python#file-uploads"
+ f"{prefix} to be bytes, an io.IOBase instance, PathLike or a tuple but received {type(obj)} instead. See https://github.com/openai/openai-python/tree/main#file-uploads"
) from None
diff --git a/src/openai/_utils/_transform.py b/src/openai/_utils/_transform.py
index db40bff27f..dc497ea329 100644
--- a/src/openai/_utils/_transform.py
+++ b/src/openai/_utils/_transform.py
@@ -95,7 +95,7 @@ class Params(TypedDict, total=False):
return cast(_T, transformed)
-def _get_annoted_type(type_: type) -> type | None:
+def _get_annotated_type(type_: type) -> type | None:
"""If the given type is an `Annotated` type then it is returned, if not `None` is returned.
This also unwraps the type when applicable, e.g. `Required[Annotated[T, ...]]`
@@ -115,7 +115,7 @@ def _maybe_transform_key(key: str, type_: type) -> str:
Note: this function only looks at `Annotated` types that contain `PropertInfo` metadata.
"""
- annotated_type = _get_annoted_type(type_)
+ annotated_type = _get_annotated_type(type_)
if annotated_type is None:
# no `Annotated` definition for this type, no transformation needed
return key
@@ -174,7 +174,7 @@ def _transform_recursive(
def _transform_value(data: object, type_: type) -> object:
- annotated_type = _get_annoted_type(type_)
+ annotated_type = _get_annotated_type(type_)
if annotated_type is None:
return data
diff --git a/src/openai/_version.py b/src/openai/_version.py
index b4ed828270..848573b8a1 100644
--- a/src/openai/_version.py
+++ b/src/openai/_version.py
@@ -1,4 +1,4 @@
# File generated from our OpenAPI spec by Stainless.
__title__ = "openai"
-__version__ = "1.1.1"
+__version__ = "1.1.2" # x-release-please-version
diff --git a/src/openai/resources/beta/assistants/assistants.py b/src/openai/resources/beta/assistants/assistants.py
index 6b81dc97f3..efa711ecf4 100644
--- a/src/openai/resources/beta/assistants/assistants.py
+++ b/src/openai/resources/beta/assistants/assistants.py
@@ -15,7 +15,7 @@
from ....pagination import SyncCursorPage, AsyncCursorPage
from ....types.beta import (
Assistant,
- AsssitantDeleted,
+ AssistantDeleted,
assistant_list_params,
assistant_create_params,
assistant_update_params,
@@ -298,7 +298,7 @@ def delete(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> AsssitantDeleted:
+ ) -> AssistantDeleted:
"""
Delete an assistant.
@@ -317,7 +317,7 @@ def delete(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=AsssitantDeleted,
+ cast_to=AssistantDeleted,
)
@@ -591,7 +591,7 @@ async def delete(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> AsssitantDeleted:
+ ) -> AssistantDeleted:
"""
Delete an assistant.
@@ -610,7 +610,7 @@ async def delete(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=AsssitantDeleted,
+ cast_to=AssistantDeleted,
)
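
A quick sketch of the corrected return type from the caller's side, matching the `client.beta.assistants.delete(assistant_id) -> AssistantDeleted` signature documented in `api.md`; the assistant ID below is a placeholder:

```python
from openai import OpenAI

client = OpenAI()

# `delete` now returns the correctly spelled `AssistantDeleted` model.
deleted = client.beta.assistants.delete("asst_abc123")  # placeholder ID
print(deleted.id, deleted.deleted)  # e.g. "asst_abc123" True
```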
diff --git a/src/openai/resources/chat/completions.py b/src/openai/resources/chat/completions.py
index a46e7e70d6..75e0d66d58 100644
--- a/src/openai/resources/chat/completions.py
+++ b/src/openai/resources/chat/completions.py
@@ -43,6 +43,8 @@ def create(
model: Union[
str,
Literal[
+ "gpt-4-1106-preview",
+ "gpt-4-vision-preview",
"gpt-4",
"gpt-4-0314",
"gpt-4-0613",
@@ -201,6 +203,8 @@ def create(
model: Union[
str,
Literal[
+ "gpt-4-1106-preview",
+ "gpt-4-vision-preview",
"gpt-4",
"gpt-4-0314",
"gpt-4-0613",
@@ -359,6 +363,8 @@ def create(
model: Union[
str,
Literal[
+ "gpt-4-1106-preview",
+ "gpt-4-vision-preview",
"gpt-4",
"gpt-4-0314",
"gpt-4-0613",
@@ -517,6 +523,8 @@ def create(
model: Union[
str,
Literal[
+ "gpt-4-1106-preview",
+ "gpt-4-vision-preview",
"gpt-4",
"gpt-4-0314",
"gpt-4-0613",
@@ -602,6 +610,8 @@ async def create(
model: Union[
str,
Literal[
+ "gpt-4-1106-preview",
+ "gpt-4-vision-preview",
"gpt-4",
"gpt-4-0314",
"gpt-4-0613",
@@ -760,6 +770,8 @@ async def create(
model: Union[
str,
Literal[
+ "gpt-4-1106-preview",
+ "gpt-4-vision-preview",
"gpt-4",
"gpt-4-0314",
"gpt-4-0613",
@@ -918,6 +930,8 @@ async def create(
model: Union[
str,
Literal[
+ "gpt-4-1106-preview",
+ "gpt-4-vision-preview",
"gpt-4",
"gpt-4-0314",
"gpt-4-0613",
@@ -1076,6 +1090,8 @@ async def create(
model: Union[
str,
Literal[
+ "gpt-4-1106-preview",
+ "gpt-4-vision-preview",
"gpt-4",
"gpt-4-0314",
"gpt-4-0613",
diff --git a/src/openai/types/beta/__init__.py b/src/openai/types/beta/__init__.py
index 8b834f286d..c03d823b8c 100644
--- a/src/openai/types/beta/__init__.py
+++ b/src/openai/types/beta/__init__.py
@@ -5,7 +5,7 @@
from .thread import Thread as Thread
from .assistant import Assistant as Assistant
from .thread_deleted import ThreadDeleted as ThreadDeleted
-from .asssitant_deleted import AsssitantDeleted as AsssitantDeleted
+from .assistant_deleted import AssistantDeleted as AssistantDeleted
from .thread_create_params import ThreadCreateParams as ThreadCreateParams
from .thread_update_params import ThreadUpdateParams as ThreadUpdateParams
from .assistant_list_params import AssistantListParams as AssistantListParams
diff --git a/src/openai/types/beta/asssitant_deleted.py b/src/openai/types/beta/assistant_deleted.py
similarity index 75%
rename from src/openai/types/beta/asssitant_deleted.py
rename to src/openai/types/beta/assistant_deleted.py
index 258210e7fe..23802caaf6 100644
--- a/src/openai/types/beta/asssitant_deleted.py
+++ b/src/openai/types/beta/assistant_deleted.py
@@ -4,10 +4,10 @@
from ..._models import BaseModel
-__all__ = ["AsssitantDeleted"]
+__all__ = ["AssistantDeleted"]
-class AsssitantDeleted(BaseModel):
+class AssistantDeleted(BaseModel):
id: str
deleted: bool
diff --git a/src/openai/types/chat/chat_completion_chunk.py b/src/openai/types/chat/chat_completion_chunk.py
index bbc46a37bb..568f530280 100644
--- a/src/openai/types/chat/chat_completion_chunk.py
+++ b/src/openai/types/chat/chat_completion_chunk.py
@@ -109,3 +109,10 @@ class ChatCompletionChunk(BaseModel):
object: Literal["chat.completion.chunk"]
"""The object type, which is always `chat.completion.chunk`."""
+
+ system_fingerprint: Optional[str] = None
+ """This fingerprint represents the backend configuration that the model runs with.
+
+ Can be used in conjunction with the `seed` request parameter to understand when
+ backend changes have been made that might impact determinism.
+ """
diff --git a/src/openai/types/chat/chat_completion_content_part_image_param.py b/src/openai/types/chat/chat_completion_content_part_image_param.py
index 2051786562..eb9bd52689 100644
--- a/src/openai/types/chat/chat_completion_content_part_image_param.py
+++ b/src/openai/types/chat/chat_completion_content_part_image_param.py
@@ -8,12 +8,12 @@
class ImageURL(TypedDict, total=False):
+ url: Required[str]
+ """Either a URL of the image or the base64 encoded image data."""
+
detail: Literal["auto", "low", "high"]
"""Specifies the detail level of the image."""
- url: str
- """Either a URL of the image or the base64 encoded image data."""
-
class ChatCompletionContentPartImageParam(TypedDict, total=False):
image_url: Required[ImageURL]
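
With `url` now marked `Required`, a vision-style request must always include it inside the nested `image_url` object, while `detail` stays optional. A hedged sketch of how such a content part is typically passed (the image URL and prompt are placeholders, and the `"type"` discriminator keys come from the surrounding param types rather than this hunk):

```python
from openai import OpenAI

client = OpenAI()

completion = client.chat.completions.create(
    model="gpt-4-vision-preview",
    messages=[
        {
            "role": "user",
            "content": [
                {"type": "text", "text": "What is in this image?"},
                {
                    "type": "image_url",
                    # `url` is required; `detail` remains optional ("auto", "low", "high").
                    "image_url": {"url": "https://example.com/image.png", "detail": "auto"},
                },
            ],
        }
    ],
)
print(completion.choices[0].message.content)
```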
diff --git a/src/openai/types/chat/completion_create_params.py b/src/openai/types/chat/completion_create_params.py
index 44b1abe576..b310761077 100644
--- a/src/openai/types/chat/completion_create_params.py
+++ b/src/openai/types/chat/completion_create_params.py
@@ -35,6 +35,8 @@ class CompletionCreateParamsBase(TypedDict, total=False):
Union[
str,
Literal[
+ "gpt-4-1106-preview",
+ "gpt-4-vision-preview",
"gpt-4",
"gpt-4-0314",
"gpt-4-0613",
diff --git a/tests/api_resources/beta/test_assistants.py b/tests/api_resources/beta/test_assistants.py
index 5bbad1d7dd..82e975b46d 100644
--- a/tests/api_resources/beta/test_assistants.py
+++ b/tests/api_resources/beta/test_assistants.py
@@ -10,7 +10,7 @@
from tests.utils import assert_matches_type
from openai._client import OpenAI, AsyncOpenAI
from openai.pagination import SyncCursorPage, AsyncCursorPage
-from openai.types.beta import Assistant, AsssitantDeleted
+from openai.types.beta import Assistant, AssistantDeleted
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
api_key = "My API Key"
@@ -123,7 +123,7 @@ def test_method_delete(self, client: OpenAI) -> None:
assistant = client.beta.assistants.delete(
"string",
)
- assert_matches_type(AsssitantDeleted, assistant, path=["response"])
+ assert_matches_type(AssistantDeleted, assistant, path=["response"])
@parametrize
def test_raw_response_delete(self, client: OpenAI) -> None:
@@ -132,7 +132,7 @@ def test_raw_response_delete(self, client: OpenAI) -> None:
)
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
assistant = response.parse()
- assert_matches_type(AsssitantDeleted, assistant, path=["response"])
+ assert_matches_type(AssistantDeleted, assistant, path=["response"])
class TestAsyncAssistants:
@@ -242,7 +242,7 @@ async def test_method_delete(self, client: AsyncOpenAI) -> None:
assistant = await client.beta.assistants.delete(
"string",
)
- assert_matches_type(AsssitantDeleted, assistant, path=["response"])
+ assert_matches_type(AssistantDeleted, assistant, path=["response"])
@parametrize
async def test_raw_response_delete(self, client: AsyncOpenAI) -> None:
@@ -251,4 +251,4 @@ async def test_raw_response_delete(self, client: AsyncOpenAI) -> None:
)
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
assistant = response.parse()
- assert_matches_type(AsssitantDeleted, assistant, path=["response"])
+ assert_matches_type(AssistantDeleted, assistant, path=["response"])
diff --git a/tests/test_extract_files.py b/tests/test_extract_files.py
index 554487da42..0f6fb04d7d 100644
--- a/tests/test_extract_files.py
+++ b/tests/test_extract_files.py
@@ -54,7 +54,7 @@ def test_multiple_files() -> None:
[],
],
],
- ids=["dict expecting array", "arraye expecting dict", "unknown keys"],
+ ids=["dict expecting array", "array expecting dict", "unknown keys"],
)
def test_ignores_incorrect_paths(
query: dict[str, object],