# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from __future__ import annotations

import os
from typing import Any, cast

import pytest

from openai import OpenAI, AsyncOpenAI
from tests.utils import assert_matches_type
from openai.types.responses import InputTokenCountResponse

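# Tests talk to the server at TEST_API_BASE_URL, defaulting to a locally running mock API on port 4010.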
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")


class TestInputTokens:
    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])

    @parametrize
    def test_method_count(self, client: OpenAI) -> None:
        input_token = client.responses.input_tokens.count()
        assert_matches_type(InputTokenCountResponse, input_token, path=["response"])

    @parametrize
    def test_method_count_with_all_params(self, client: OpenAI) -> None:
        input_token = client.responses.input_tokens.count(
            conversation="string",
            input="string",
            instructions="instructions",
            model="model",
            parallel_tool_calls=True,
            previous_response_id="resp_123",
            reasoning={
                "effort": "none",
                "generate_summary": "auto",
                "summary": "auto",
            },
            text={
                "format": {"type": "text"},
                "verbosity": "low",
            },
            tool_choice="none",
            tools=[
                {
                    "name": "name",
                    "parameters": {"foo": "bar"},
                    "strict": True,
                    "type": "function",
                    "description": "description",
                }
            ],
            truncation="auto",
        )
        assert_matches_type(InputTokenCountResponse, input_token, path=["response"])

    @parametrize
    def test_raw_response_count(self, client: OpenAI) -> None:
        response = client.responses.input_tokens.with_raw_response.count()

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        input_token = response.parse()
        assert_matches_type(InputTokenCountResponse, input_token, path=["response"])

    @parametrize
    def test_streaming_response_count(self, client: OpenAI) -> None:
        with client.responses.input_tokens.with_streaming_response.count() as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            input_token = response.parse()
            assert_matches_type(InputTokenCountResponse, input_token, path=["response"])

        assert cast(Any, response.is_closed) is True
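

# The async tests mirror the sync ones above and additionally run against an aiohttp-backed HTTP client
# (the extra "aiohttp" parametrization id).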
class TestAsyncInputTokens:
    parametrize = pytest.mark.parametrize(
        "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
    )

    @parametrize
    async def test_method_count(self, async_client: AsyncOpenAI) -> None:
        input_token = await async_client.responses.input_tokens.count()
        assert_matches_type(InputTokenCountResponse, input_token, path=["response"])

    @parametrize
    async def test_method_count_with_all_params(self, async_client: AsyncOpenAI) -> None:
        input_token = await async_client.responses.input_tokens.count(
            conversation="string",
            input="string",
            instructions="instructions",
            model="model",
            parallel_tool_calls=True,
            previous_response_id="resp_123",
            reasoning={
                "effort": "none",
                "generate_summary": "auto",
                "summary": "auto",
            },
            text={
                "format": {"type": "text"},
                "verbosity": "low",
            },
            tool_choice="none",
            tools=[
                {
                    "name": "name",
                    "parameters": {"foo": "bar"},
                    "strict": True,
                    "type": "function",
                    "description": "description",
                }
            ],
            truncation="auto",
        )
        assert_matches_type(InputTokenCountResponse, input_token, path=["response"])

    @parametrize
    async def test_raw_response_count(self, async_client: AsyncOpenAI) -> None:
        response = await async_client.responses.input_tokens.with_raw_response.count()

        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
        input_token = response.parse()
        assert_matches_type(InputTokenCountResponse, input_token, path=["response"])

    @parametrize
    async def test_streaming_response_count(self, async_client: AsyncOpenAI) -> None:
        async with async_client.responses.input_tokens.with_streaming_response.count() as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"

            input_token = await response.parse()
            assert_matches_type(InputTokenCountResponse, input_token, path=["response"])

        assert cast(Any, response.is_closed) is True