Skip to content

Commit

Permalink
Test Ollama via OpenaiChatModel (#281)
Browse files Browse the repository at this point in the history
* Add test_openai_chat_model_complete_ollama

* Add async test

* Add openai_ollama marker to pyproject and vcr markers

* Add vcr cassettes for ollama tests

* Update version in uv.lock
  • Loading branch information
jackmpcollins authored Nov 29, 2024
1 parent c039a63 commit 142d46f
Show file tree
Hide file tree
Showing 10 changed files with 581 additions and 2 deletions.
3 changes: 2 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -78,10 +78,11 @@ asyncio_mode = "auto"
markers = [
"anthropic: Tests that query the Anthropic API. Requires the ANTHROPIC_API_KEY environment variable to be set.",
"litellm_anthropic: Tests that query the Anthropic API via litellm. Requires the ANTHROPIC_API_KEY environment variable to be set.",
"litellm_ollama: Tests that query Ollama. Requires ollama to be installed and running on localhost:11434.",
"litellm_ollama: Tests that query Ollama via litellm. Requires ollama to be installed and running on localhost:11434.",
"litellm_openai: Tests that query the OpenAI API via litellm. Requires the OPENAI_API_KEY environment variable to be set.",
"mistral: Tests that query the Mistral API (via openai). Requires the MISTRAL_API_KEY environment variable to be set.",
"openai: Tests that query the OpenAI API. Requires the OPENAI_API_KEY environment variable to be set.",
"openai_ollama: Tests that query Ollama via the OpenAI-compatible API (OpenaiChatModel). Requires ollama to be installed and running on localhost:11434.",
]

[tool.ruff]
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
interactions:
- request:
body: '{"messages": [{"role": "user", "content": "Return True."}], "model": "llama3.1",
"parallel_tool_calls": false, "stream": true, "stream_options": {"include_usage":
true}, "tool_choice": {"type": "function", "function": {"name": "return_bool"}},
"tools": [{"type": "function", "function": {"name": "return_bool", "parameters":
{"properties": {"value": {"title": "Value", "type": "boolean"}}, "required":
["value"], "type": "object"}}}]}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '434'
content-type:
- application/json
host:
- localhost:11434
user-agent:
- AsyncOpenAI/Python 1.54.4
x-stainless-arch:
- arm64
x-stainless-async:
- async:asyncio
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.54.4
x-stainless-retry-count:
- '0'
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.10.15
method: POST
uri: http://localhost:11434/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-573","object":"chat.completion.chunk","created":1732915688,"model":"llama3.1","system_fingerprint":"fp_ollama","choices":[{"index":0,"delta":{"role":"assistant","content":"","tool_calls":[{"id":"call_wqha42ih","type":"function","function":{"name":"return_bool","arguments":"{\"value\":true}"}}]},"finish_reason":null}]}
data: {"id":"chatcmpl-573","object":"chat.completion.chunk","created":1732915688,"model":"llama3.1","system_fingerprint":"fp_ollama","choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":"stop"}]}
data: [DONE]
'
headers:
Content-Type:
- text/event-stream
Date:
- Fri, 29 Nov 2024 21:28:08 GMT
Transfer-Encoding:
- chunked
status:
code: 200
message: OK
version: 1
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
interactions:
- request:
body: '{"messages": [{"role": "user", "content": "Return [1, 2, 3, 4, 5]"}], "model":
"llama3.1", "parallel_tool_calls": false, "stream": true, "stream_options":
{"include_usage": true}, "tool_choice": {"type": "function", "function": {"name":
"return_list_of_int"}}, "tools": [{"type": "function", "function": {"name":
"return_list_of_int", "parameters": {"properties": {"value": {"items": {"type":
"integer"}, "title": "Value", "type": "array"}}, "required": ["value"], "type":
"object"}}}]}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '486'
content-type:
- application/json
host:
- localhost:11434
user-agent:
- AsyncOpenAI/Python 1.54.4
x-stainless-arch:
- arm64
x-stainless-async:
- async:asyncio
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.54.4
x-stainless-retry-count:
- '0'
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.10.15
method: POST
uri: http://localhost:11434/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-877","object":"chat.completion.chunk","created":1732915691,"model":"llama3.1","system_fingerprint":"fp_ollama","choices":[{"index":0,"delta":{"role":"assistant","content":"","tool_calls":[{"id":"call_kichi5ua","type":"function","function":{"name":"return_list_of_int","arguments":"{\"value\":\"[1,
2, 3, 4, 5]\"}"}}]},"finish_reason":null}]}
data: {"id":"chatcmpl-877","object":"chat.completion.chunk","created":1732915691,"model":"llama3.1","system_fingerprint":"fp_ollama","choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":"stop"}]}
data: [DONE]
'
headers:
Content-Type:
- text/event-stream
Date:
- Fri, 29 Nov 2024 21:28:11 GMT
Transfer-Encoding:
- chunked
status:
code: 200
message: OK
version: 1
Original file line number Diff line number Diff line change
@@ -0,0 +1,129 @@
interactions:
- request:
body: '{"messages": [{"role": "user", "content": "Say hello!"}], "model": "llama3.1",
"stream": true, "stream_options": {"include_usage": true}}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '137'
content-type:
- application/json
host:
- localhost:11434
user-agent:
- AsyncOpenAI/Python 1.54.4
x-stainless-arch:
- arm64
x-stainless-async:
- async:asyncio
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.54.4
x-stainless-retry-count:
- '0'
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.10.15
method: POST
uri: http://localhost:11434/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-402","object":"chat.completion.chunk","created":1732915683,"model":"llama3.1","system_fingerprint":"fp_ollama","choices":[{"index":0,"delta":{"role":"assistant","content":"Hello"},"finish_reason":null}]}
data: {"id":"chatcmpl-402","object":"chat.completion.chunk","created":1732915684,"model":"llama3.1","system_fingerprint":"fp_ollama","choices":[{"index":0,"delta":{"role":"assistant","content":"!"},"finish_reason":null}]}
data: {"id":"chatcmpl-402","object":"chat.completion.chunk","created":1732915684,"model":"llama3.1","system_fingerprint":"fp_ollama","choices":[{"index":0,"delta":{"role":"assistant","content":"
How"},"finish_reason":null}]}
data: {"id":"chatcmpl-402","object":"chat.completion.chunk","created":1732915684,"model":"llama3.1","system_fingerprint":"fp_ollama","choices":[{"index":0,"delta":{"role":"assistant","content":"
are"},"finish_reason":null}]}
data: {"id":"chatcmpl-402","object":"chat.completion.chunk","created":1732915684,"model":"llama3.1","system_fingerprint":"fp_ollama","choices":[{"index":0,"delta":{"role":"assistant","content":"
you"},"finish_reason":null}]}
data: {"id":"chatcmpl-402","object":"chat.completion.chunk","created":1732915684,"model":"llama3.1","system_fingerprint":"fp_ollama","choices":[{"index":0,"delta":{"role":"assistant","content":"
doing"},"finish_reason":null}]}
data: {"id":"chatcmpl-402","object":"chat.completion.chunk","created":1732915684,"model":"llama3.1","system_fingerprint":"fp_ollama","choices":[{"index":0,"delta":{"role":"assistant","content":"
today"},"finish_reason":null}]}
data: {"id":"chatcmpl-402","object":"chat.completion.chunk","created":1732915684,"model":"llama3.1","system_fingerprint":"fp_ollama","choices":[{"index":0,"delta":{"role":"assistant","content":"?"},"finish_reason":null}]}
data: {"id":"chatcmpl-402","object":"chat.completion.chunk","created":1732915684,"model":"llama3.1","system_fingerprint":"fp_ollama","choices":[{"index":0,"delta":{"role":"assistant","content":"
Is"},"finish_reason":null}]}
data: {"id":"chatcmpl-402","object":"chat.completion.chunk","created":1732915684,"model":"llama3.1","system_fingerprint":"fp_ollama","choices":[{"index":0,"delta":{"role":"assistant","content":"
there"},"finish_reason":null}]}
data: {"id":"chatcmpl-402","object":"chat.completion.chunk","created":1732915684,"model":"llama3.1","system_fingerprint":"fp_ollama","choices":[{"index":0,"delta":{"role":"assistant","content":"
anything"},"finish_reason":null}]}
data: {"id":"chatcmpl-402","object":"chat.completion.chunk","created":1732915684,"model":"llama3.1","system_fingerprint":"fp_ollama","choices":[{"index":0,"delta":{"role":"assistant","content":"
I"},"finish_reason":null}]}
data: {"id":"chatcmpl-402","object":"chat.completion.chunk","created":1732915684,"model":"llama3.1","system_fingerprint":"fp_ollama","choices":[{"index":0,"delta":{"role":"assistant","content":"
can"},"finish_reason":null}]}
data: {"id":"chatcmpl-402","object":"chat.completion.chunk","created":1732915685,"model":"llama3.1","system_fingerprint":"fp_ollama","choices":[{"index":0,"delta":{"role":"assistant","content":"
help"},"finish_reason":null}]}
data: {"id":"chatcmpl-402","object":"chat.completion.chunk","created":1732915685,"model":"llama3.1","system_fingerprint":"fp_ollama","choices":[{"index":0,"delta":{"role":"assistant","content":"
with"},"finish_reason":null}]}
data: {"id":"chatcmpl-402","object":"chat.completion.chunk","created":1732915685,"model":"llama3.1","system_fingerprint":"fp_ollama","choices":[{"index":0,"delta":{"role":"assistant","content":"
or"},"finish_reason":null}]}
data: {"id":"chatcmpl-402","object":"chat.completion.chunk","created":1732915685,"model":"llama3.1","system_fingerprint":"fp_ollama","choices":[{"index":0,"delta":{"role":"assistant","content":"
chat"},"finish_reason":null}]}
data: {"id":"chatcmpl-402","object":"chat.completion.chunk","created":1732915685,"model":"llama3.1","system_fingerprint":"fp_ollama","choices":[{"index":0,"delta":{"role":"assistant","content":"
about"},"finish_reason":null}]}
data: {"id":"chatcmpl-402","object":"chat.completion.chunk","created":1732915685,"model":"llama3.1","system_fingerprint":"fp_ollama","choices":[{"index":0,"delta":{"role":"assistant","content":"?"},"finish_reason":null}]}
data: {"id":"chatcmpl-402","object":"chat.completion.chunk","created":1732915685,"model":"llama3.1","system_fingerprint":"fp_ollama","choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":"stop"}]}
data: [DONE]
'
headers:
Content-Type:
- text/event-stream
Date:
- Fri, 29 Nov 2024 21:28:03 GMT
Transfer-Encoding:
- chunked
status:
code: 200
message: OK
version: 1
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
interactions:
- request:
body: '{"messages": [{"role": "user", "content": "Return True."}], "model": "llama3.1",
"parallel_tool_calls": false, "stream": true, "stream_options": {"include_usage":
true}, "tool_choice": {"type": "function", "function": {"name": "return_bool"}},
"tools": [{"type": "function", "function": {"name": "return_bool", "parameters":
{"properties": {"value": {"title": "Value", "type": "boolean"}}, "required":
["value"], "type": "object"}}}]}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '434'
content-type:
- application/json
host:
- localhost:11434
user-agent:
- OpenAI/Python 1.54.4
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.54.4
x-stainless-retry-count:
- '0'
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.10.15
method: POST
uri: http://localhost:11434/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-341","object":"chat.completion.chunk","created":1732915676,"model":"llama3.1","system_fingerprint":"fp_ollama","choices":[{"index":0,"delta":{"role":"assistant","content":"","tool_calls":[{"id":"call_dxlghlch","type":"function","function":{"name":"return_bool","arguments":"{\"value\":true}"}}]},"finish_reason":null}]}
data: {"id":"chatcmpl-341","object":"chat.completion.chunk","created":1732915676,"model":"llama3.1","system_fingerprint":"fp_ollama","choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":"stop"}]}
data: [DONE]
'
headers:
Content-Type:
- text/event-stream
Date:
- Fri, 29 Nov 2024 21:27:56 GMT
Transfer-Encoding:
- chunked
status:
code: 200
message: OK
version: 1
Loading

0 comments on commit 142d46f

Please sign in to comment.