Python: Add OpenTelemetry to Python SK #6914

Merged
Merged 41 commits on Jul 25, 2024

Changes shown below are from 2 of the 41 commits.

Commits (41), all authored by glahaye:
22204aa - loggin (Jun 23, 2024)
1156b46 - Merge main (Jun 23, 2024)
e5ed784 - Merge branch 'main' into python_telemetry (Jun 24, 2024)
a2c5fd7 - Update python/semantic_kernel/connectors/ai/open_ai/services/open_ai_… (Jun 24, 2024)
2848051 - Addressed PR issues (Jun 27, 2024)
fe86266 - Merge main (Jun 27, 2024)
e9dd311 - Merge main (Jun 27, 2024)
4f3783d - Update poetry info after merge (Jun 27, 2024)
e71fab2 - Address PR issues (Jun 27, 2024)
6bf6406 - Fix lint warning (Jun 28, 2024)
00269b9 - Fix false positive security issue (Jun 28, 2024)
1fac03f - completion telemetry now in decorator (Jul 8, 2024)
cc13f52 - Use classvar (Jul 8, 2024)
6437b04 - Merge main (Jul 8, 2024)
c6aa839 - Proper merge of poetry lock (Jul 8, 2024)
8cc7681 - Fix precommit qual issues (Jul 8, 2024)
f875eb3 - Address PR issues (Jul 9, 2024)
157ff32 - Address PR comments + add trace_text_completion (Jul 10, 2024)
7ac6a2d - Merge main (Jul 10, 2024)
1d94e81 - Adjust poetry lock (Jul 10, 2024)
386b121 - Fix mypy warnings (Jul 10, 2024)
c903017 - Sync poetry.lock (Jul 11, 2024)
22b6ba0 - Merge main (Jul 15, 2024)
5c0c031 - Merge branch 'main' into python_telemetry (Jul 15, 2024)
dd25760 - Fix ruff warning after merge (Jul 15, 2024)
d95c8d2 - Use kwargs for prompt, chat_history and settings (Jul 15, 2024)
76739cb - Merge branch 'main' into python_telemetry (Jul 16, 2024)
e0a2471 - Merge main (Jul 17, 2024)
a04bbd6 - Merge branch 'python_telemetry' of https://github.com/glahaye/semanti… (Jul 17, 2024)
17c2c72 - Fix poetry.lock after merge from main (Jul 17, 2024)
1f163f1 - Add unit tests (Jul 23, 2024)
6710051 - Merge main (Jul 23, 2024)
d73ead7 - Adapt poetry.lock after merge (Jul 23, 2024)
066e370 - Merge branch 'main' into python_telemetry (Jul 24, 2024)
6ae2ef6 - Address PR comments + enable async unit tests (Jul 24, 2024)
3e8d5e9 - Merge branch 'python_telemetry' of https://github.com/glahaye/semanti… (Jul 24, 2024)
c001002 - Fix unit tests (Jul 24, 2024)
1a73614 - Overriding .env file in unit tests (Jul 24, 2024)
dabe4c2 - Fix unit tests (Jul 25, 2024)
32e03bb - Merge main (Jul 25, 2024)
f2b0f5d - Fix poetry.lock after merge (Jul 25, 2024)
96 changes: 48 additions & 48 deletions python/poetry.lock


2 changes: 2 additions & 0 deletions python/pyproject.toml
@@ -24,6 +24,8 @@ grpcio = [
openai = ">=1.0"
regex = ">=2023.6.3,<2025.0.0"
openapi_core = ">=0.18,<0.20"
opentelemetry-api = "^1.25.0"
opentelemetry-sdk = "^1.25.0"
prance = "^23.6.21.0"
pydantic = "^2"
pydantic-settings = "^2.2.1"
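The two new dependencies pull in the OpenTelemetry API and SDK. As a point of reference (not part of this change), a minimal sketch of wiring up the SDK so that spans emitted through the API become visible locally could look like the following; the ConsoleSpanExporter is only for inspection, and a real deployment would configure its own exporter:

from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import ConsoleSpanExporter, SimpleSpanProcessor

# Register a tracer provider that prints finished spans to stdout.
provider = TracerProvider()
provider.add_span_processor(SimpleSpanProcessor(ConsoleSpanExporter()))
trace.set_tracer_provider(provider)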
@@ -1,6 +1,7 @@
# Copyright (c) Microsoft. All rights reserved.

import asyncio
import json
import logging
from collections.abc import AsyncGenerator
from copy import copy
@@ -11,6 +12,7 @@
from openai.types.chat.chat_completion import ChatCompletion, Choice
from openai.types.chat.chat_completion_chunk import ChatCompletionChunk
from openai.types.chat.chat_completion_chunk import Choice as ChunkChoice
from opentelemetry import trace

from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase
from semantic_kernel.connectors.ai.function_call_behavior import (
@@ -44,11 +46,16 @@
from semantic_kernel.filters.filter_types import FilterTypes
from semantic_kernel.filters.kernel_filters_extension import _rebuild_auto_function_invocation_context
from semantic_kernel.functions.function_result import FunctionResult
from semantic_kernel.utils import model_diagnostics

if TYPE_CHECKING:
from semantic_kernel.functions.kernel_arguments import KernelArguments
from semantic_kernel.kernel import Kernel


MODEL_PROVIDER_NAME = 'openai'


logger: logging.Logger = logging.getLogger(__name__)


@@ -269,9 +276,25 @@ def _chat_message_content_to_dict(self, message: "ChatMessageContent") -> dict[s

async def _send_chat_request(self, settings: OpenAIChatPromptExecutionSettings) -> list["ChatMessageContent"]:
"""Send the chat request."""
span = model_diagnostics.start_completion_activity(settings.ai_model_id, MODEL_PROVIDER_NAME,
self._settings_messages_to_prompt(settings.messages),
settings)

response = await self._send_request(request_settings=settings)
response_metadata = self._get_metadata_from_chat_response(response)
return [self._create_chat_message_content(response, choice, response_metadata) for choice in response.choices]

chat_message_contents = [self._create_chat_message_content(response, choice, response_metadata)
for choice in response.choices]

if span is not None:
finish_reasons: list[str] = []
for choice in response.choices:
finish_reasons.append(choice.finish_reason)
with trace.use_span(span, end_on_exit=True):
model_diagnostics.set_completion_response(span, chat_message_contents, finish_reasons, response.id,
response.usage.prompt_tokens,
response.usage.completion_tokens)
return chat_message_contents
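The span handling above follows the usual OpenTelemetry shape: start a span before the request is sent, then attach response details and end the span once the response arrives. Below is a minimal, self-contained sketch of that pattern, independent of the model_diagnostics helpers introduced in this PR; the traced_chat_request function, the send_request callable, and the attribute keys are illustrative only:

from opentelemetry import trace

tracer = trace.get_tracer(__name__)

async def traced_chat_request(send_request, settings):
    # Open a span covering the whole completion round trip.
    span = tracer.start_span("chat.completions")
    # use_span makes the span current and guarantees it is ended, even on error.
    with trace.use_span(span, end_on_exit=True):
        response = await send_request(settings)
        # Record response details as span attributes (keys are illustrative).
        span.set_attribute("response.id", response.id)
        span.set_attribute("usage.prompt_tokens", response.usage.prompt_tokens)
        span.set_attribute("usage.completion_tokens", response.usage.completion_tokens)
    return response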

async def _send_chat_stream_request(
self, settings: OpenAIChatPromptExecutionSettings
@@ -291,9 +314,18 @@ async def _send_chat_stream_request(
# endregion
# region content creation

def _settings_messages_to_prompt(self, messages: list[dict[str, Any]]) -> str:
entries: list[dict[str, str]] = []
for message in messages:
entries.append({
"role": str(message.get("role", "unknown")),
"content": str(message.get("content", "unknown"))
})
return json.dumps(entries)
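For illustration, the helper above serializes the role/content pairs into a single JSON string; given a hypothetical messages list, the result would be:

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello"},
]
# _settings_messages_to_prompt(messages) returns:
# '[{"role": "system", "content": "You are a helpful assistant."}, {"role": "user", "content": "Hello"}]'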

def _create_chat_message_content(
self, response: ChatCompletion, choice: Choice, response_metadata: dict[str, Any]
) -> "ChatMessageContent":
) -> ChatMessageContent:
"""Create a chat message content object from a choice."""
metadata = self._get_metadata_from_chat_choice(choice)
metadata.update(response_metadata)