File: gpt-oss-tools-stream.py

package info (click to toggle)
ollama-python 0.6.1-1
  • links: PTS, VCS
  • area: main
  • in suites: forky, sid
  • size: 428 kB
  • sloc: python: 3,951; makefile: 5
file content (105 lines) | stat: -rw-r--r-- 3,037 bytes parent folder | download
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
# /// script
# requires-python = ">=3.11"
# dependencies = [
#     "gpt-oss",
#     "ollama",
#     "rich",
# ]
# ///
import random
from typing import Iterator

from rich import print

from ollama import Client
from ollama._types import ChatResponse


def get_weather(city: str) -> str:
  """
  Get the current temperature for a city (simulated).

  Args:
      city (str): The name of the city

  Returns:
      str: The current temperature as a human-readable string
  """
  # Draw a pseudo-random temperature between -10 and 34 °C inclusive.
  temp = random.choice(range(-10, 35))
  return f'The temperature in {city} is {temp}°C'


def get_weather_conditions(city: str) -> str:
  """
  Get the weather conditions for a city (simulated).

  Args:
      city (str): The name of the city

  Returns:
      str: One of a fixed set of condition labels
  """
  # NOTE: `city` is accepted for the tool schema but does not affect the result.
  return random.choice(['sunny', 'cloudy', 'rainy', 'snowy', 'foggy'])


# Map tool names (as reported by the model) to the local callables.
available_tools = {'get_weather': get_weather, 'get_weather_conditions': get_weather_conditions}

messages = [{'role': 'user', 'content': 'What is the weather like in London? What are the conditions in Toronto?'}]

client = Client(
  # Ollama Turbo
  # host="https://ollama.com", headers={'Authorization': (os.getenv('OLLAMA_API_KEY'))}
)

model = 'gpt-oss:20b'
# gpt-oss can call tools while "thinking"
# a loop is needed to call the tools and get the results
final = True
while True:
  response_stream: Iterator[ChatResponse] = client.chat(model=model, messages=messages, tools=[get_weather, get_weather_conditions], stream=True)
  tool_calls = []
  thinking = ''
  content = ''

  for chunk in response_stream:
    if chunk.message.tool_calls:
      tool_calls.extend(chunk.message.tool_calls)

    if chunk.message.content:
      # Print the "Final result" banner once, the first time a content chunk
      # arrives with `thinking` set to None (an empty-string `thinking` does
      # not trigger it — presumably deliberate; TODO confirm intent).
      if not (chunk.message.thinking or chunk.message.thinking == '') and final:
        print('\n\n' + '=' * 10)
        print('Final result: ')
        final = False
      # BUG FIX: accumulate the streamed content. Previously `content` was
      # never updated, so the assistant message appended below always had an
      # empty 'content' field and the model's answer was lost from history.
      content += chunk.message.content
      print(chunk.message.content, end='', flush=True)

    if chunk.message.thinking:
      # accumulate thinking
      thinking += chunk.message.thinking
      print(chunk.message.thinking, end='', flush=True)

  # Record the assistant turn (thinking, content, tool calls) so the next
  # round of the loop sends the full conversation back to the model.
  if thinking != '' or content != '' or len(tool_calls) > 0:
    messages.append({'role': 'assistant', 'thinking': thinking, 'content': content, 'tool_calls': tool_calls})

  print()

  if tool_calls:
    # Execute each requested tool and feed its result back as a 'tool' message.
    for tool_call in tool_calls:
      function_to_call = available_tools.get(tool_call.function.name)
      if function_to_call:
        print('\nCalling tool:', tool_call.function.name, 'with arguments: ', tool_call.function.arguments)
        result = function_to_call(**tool_call.function.arguments)
        print('Tool result: ', result + '\n')

        result_message = {'role': 'tool', 'content': result, 'tool_name': tool_call.function.name}
        messages.append(result_message)
      else:
        # Unknown tool: report the failure to the model instead of crashing.
        print(f'Tool {tool_call.function.name} not found')
        messages.append({'role': 'tool', 'content': f'Tool {tool_call.function.name} not found', 'tool_name': tool_call.function.name})

  else:
    # no more tool calls, we can stop the loop
    break