fix(api): add missing parallel_tool_calls arguments
RobertCraigie authored and stainless-app[bot] committed Jun 20, 2024
1 parent e005a84 commit a120381
Showing 2 changed files with 71 additions and 0 deletions.
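For illustration, the newly forwarded parameter can now be passed through the polling helper; previously these helper methods did not accept it. This is a minimal sketch assuming the beta Assistants/Threads API and an existing assistant; the assistant ID and message are placeholders, not part of this commit.

from openai import OpenAI

client = OpenAI()

# Poll a run to completion; parallel_tool_calls is now forwarded to the
# underlying create_and_run request.
run = client.beta.threads.create_and_run_poll(
    assistant_id="asst_123",  # placeholder ID
    thread={
        "messages": [
            {"role": "user", "content": "What's the weather in Paris and Tokyo?"}
        ]
    },
    parallel_tool_calls=False,  # disable parallel function calling
)
print(run.status)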
12 changes: 12 additions & 0 deletions src/openai/resources/beta/threads/threads.py
@@ -828,6 +828,7 @@ def create_and_run_poll(
None,
]
| NotGiven = NOT_GIVEN,
parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
temperature: Optional[float] | NotGiven = NOT_GIVEN,
thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN,
@@ -856,6 +857,7 @@ def create_and_run_poll(
max_prompt_tokens=max_prompt_tokens,
metadata=metadata,
model=model,
parallel_tool_calls=parallel_tool_calls,
response_format=response_format,
temperature=temperature,
stream=False,
@@ -908,6 +910,7 @@ def create_and_run_stream(
None,
]
| NotGiven = NOT_GIVEN,
parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
temperature: Optional[float] | NotGiven = NOT_GIVEN,
thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN,
@@ -962,6 +965,7 @@ def create_and_run_stream(
None,
]
| NotGiven = NOT_GIVEN,
parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
temperature: Optional[float] | NotGiven = NOT_GIVEN,
thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN,
@@ -1016,6 +1020,7 @@ def create_and_run_stream(
None,
]
| NotGiven = NOT_GIVEN,
parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
temperature: Optional[float] | NotGiven = NOT_GIVEN,
thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN,
@@ -1050,6 +1055,7 @@ def create_and_run_stream(
"max_prompt_tokens": max_prompt_tokens,
"metadata": metadata,
"model": model,
"parallel_tool_calls": parallel_tool_calls,
"response_format": response_format,
"temperature": temperature,
"tool_choice": tool_choice,
@@ -1838,6 +1844,7 @@ async def create_and_run_poll(
None,
]
| NotGiven = NOT_GIVEN,
parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
temperature: Optional[float] | NotGiven = NOT_GIVEN,
thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN,
@@ -1866,6 +1873,7 @@ async def create_and_run_poll(
max_prompt_tokens=max_prompt_tokens,
metadata=metadata,
model=model,
parallel_tool_calls=parallel_tool_calls,
response_format=response_format,
temperature=temperature,
stream=False,
@@ -1920,6 +1928,7 @@ def create_and_run_stream(
None,
]
| NotGiven = NOT_GIVEN,
parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
temperature: Optional[float] | NotGiven = NOT_GIVEN,
thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN,
@@ -1974,6 +1983,7 @@ def create_and_run_stream(
None,
]
| NotGiven = NOT_GIVEN,
parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
temperature: Optional[float] | NotGiven = NOT_GIVEN,
thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN,
@@ -2028,6 +2038,7 @@ def create_and_run_stream(
None,
]
| NotGiven = NOT_GIVEN,
parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
temperature: Optional[float] | NotGiven = NOT_GIVEN,
thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN,
@@ -2064,6 +2075,7 @@ def create_and_run_stream(
"max_prompt_tokens": max_prompt_tokens,
"metadata": metadata,
"model": model,
"parallel_tool_calls": parallel_tool_calls,
"response_format": response_format,
"temperature": temperature,
"tool_choice": tool_choice,
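The async and streaming helpers gain the same argument. A minimal sketch of the streaming side, again with placeholder values and assuming the SDK's usual stream-manager pattern:

from openai import OpenAI

client = OpenAI()

with client.beta.threads.create_and_run_stream(
    assistant_id="asst_123",  # placeholder ID
    thread={"messages": [{"role": "user", "content": "Compare the weather in two cities."}]},
    parallel_tool_calls=False,  # forwarded by this change
) as stream:
    stream.until_done()  # consume events until the run finishes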
59 changes: 59 additions & 0 deletions tests/lib/test_assistants.py
@@ -0,0 +1,59 @@
from __future__ import annotations

import inspect
from typing import Any, Callable

import pytest

from openai import OpenAI, AsyncOpenAI


def assert_signatures_in_sync(
    source_func: Callable[..., Any],
    check_func: Callable[..., Any],
    *,
    exclude_params: set[str] = set(),
) -> None:
    check_sig = inspect.signature(check_func)
    source_sig = inspect.signature(source_func)

    errors: list[str] = []

    for name, generated_param in source_sig.parameters.items():
        if name in exclude_params:
            continue

        custom_param = check_sig.parameters.get(name)
        if not custom_param:
            errors.append(f"the `{name}` param is missing")
            continue

        if custom_param.annotation != generated_param.annotation:
            errors.append(
                f"types for the `{name}` param do not match; generated={repr(generated_param.annotation)} custom={repr(custom_param.annotation)}"
            )
            continue

    if errors:
        raise AssertionError(f"{len(errors)} errors encountered when comparing signatures:\n\n" + "\n\n".join(errors))


@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"])
def test_create_and_run_poll_method_definition_in_sync(sync: bool, client: OpenAI, async_client: AsyncOpenAI) -> None:
    checking_client = client if sync else async_client

    assert_signatures_in_sync(
        checking_client.beta.threads.create_and_run,
        checking_client.beta.threads.create_and_run_poll,
        exclude_params={"stream"},
    )


@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"])
def test_create_and_run_stream_method_definition_in_sync(sync: bool, client: OpenAI, async_client: AsyncOpenAI) -> None:
    checking_client = client if sync else async_client

    assert_signatures_in_sync(
        checking_client.beta.threads.create_and_run,
        checking_client.beta.threads.create_and_run_stream,
        exclude_params={"stream"},
    )
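For context, the check above relies only on inspect.signature: any parameter that the generated create_and_run method accepts but a wrapper helper does not is reported, which is how a gap like the missing parallel_tool_calls argument shows up. A standalone sketch with hypothetical toy functions (not part of this commit):

import inspect

def generated(a: int, b: str = "x", *, stream: bool = False) -> None: ...

def wrapper(a: int, *, stream: bool = False) -> None: ...  # `b` is missing

# Any parameter present on `generated` but absent from `wrapper` is flagged.
missing = [
    name
    for name in inspect.signature(generated).parameters
    if name not in inspect.signature(wrapper).parameters
]
print(missing)  # ['b']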
