File: chat-logprobs.py

Package: ollama-python 0.6.1-1 (area: main; suites: forky, sid)
from typing import Iterable

import ollama


def print_logprobs(logprobs: Iterable[dict], label: str) -> None:
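  """Print each token with its log probability, plus any alternative candidate tokens."""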
  print(f'\n{label}:')
  for entry in logprobs:
    token = entry.get('token', '')
    logprob = entry.get('logprob')
    # Guard against a missing logprob so the formatted print does not raise a TypeError.
    logprob_str = f'{logprob:.3f}' if logprob is not None else 'n/a'
    print(f'  token={token!r:<12} logprob={logprob_str}')
    for alt in entry.get('top_logprobs', []):
      if alt['token'] != token:
        print(f'    alt -> {alt["token"]!r:<12} ({alt["logprob"]:.3f})')


messages = [
  {
    'role': 'user',
    'content': 'hi! be concise.',
  },
]

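# Request token log probabilities, including the top 3 alternatives per position.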
response = ollama.chat(
  model='gemma3',
  messages=messages,
  logprobs=True,
  top_logprobs=3,
)
print('Chat response:', response['message']['content'])
print_logprobs(response.get('logprobs', []), 'chat logprobs')