
Commit

update link
lievan committed Jul 22, 2024
1 parent a95614c commit 5d691cb
Showing 2 changed files with 15 additions and 25 deletions.
14 changes: 7 additions & 7 deletions ddtrace/llmobs/_llmobs.py
@@ -119,7 +119,7 @@ def enable(
         _tracer: Optional[ddtrace.Tracer] = None,
     ) -> None:
         """
-        Enable LLM Observability tracing.
+        Enable LLMObs tracing.
         :param str ml_app: The name of your ml application.
         :param bool integrations_enabled: Set to `true` to enable LLM integrations.
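For context, a minimal sketch of how the `enable` entry point documented in this hunk is typically called at application startup. The `ml_app` and `integrations_enabled` parameters come from the docstring above; the application name is a placeholder, and the public `ddtrace.llmobs` import path is assumed from this file's location.

from ddtrace.llmobs import LLMObs

# Enable LLMObs tracing once, before any traced LLM calls run.
LLMObs.enable(
    ml_app="my-ml-app",          # placeholder name for the ml application
    integrations_enabled=True,   # auto-instrument supported LLM integrations
)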
@@ -289,7 +289,7 @@ def llm(
         session_id: Optional[str] = None,
         ml_app: Optional[str] = None,
     ) -> Span:
-        print("[✧ LLM Observability] LLM ✨: {} running ...".format(name))
+        print("[✧ LLMObs] LLM ✨: {} running ...".format(name), flush=True)
         """
         Trace an invocation call to an LLM where inputs and outputs are represented as text.
@@ -327,7 +327,7 @@ def tool(cls, name: Optional[str] = None, session_id: Optional[str] = None, ml_a
         :returns: The Span object representing the traced operation.
         """
-        print("[✧ LLM Observability] Tool 🔧: {} running ...".format(name))
+        print("[✧ LLMObs] Tool 🔧: {} running ...".format(name), flush=True)
         if cls.enabled is False:
             log.warning(SPAN_START_WHILE_DISABLED_WARNING)
         return cls._instance._start_span("tool", name=name, session_id=session_id, ml_app=ml_app)
@@ -344,14 +344,14 @@ def task(cls, name: Optional[str] = None, session_id: Optional[str] = None, ml_a
         :returns: The Span object representing the traced operation.
         """
-        print("[✧ LLM Observability] Task 📌: {} running...".format(name))
+        print("[✧ LLMObs] Task 📌: {} running...".format(name), flush=True)
         if cls.enabled is False:
             log.warning(SPAN_START_WHILE_DISABLED_WARNING)
         return cls._instance._start_span("task", name=name, session_id=session_id, ml_app=ml_app)

     @classmethod
     def agent(cls, name: Optional[str] = None, session_id: Optional[str] = None, ml_app: Optional[str] = None) -> Span:
-        print("[✧ LLM Observability] Agent 🤖: {} running ...".format(name))
+        print("[✧ LLMObs] Agent 🤖: {} running ...".format(name), flush=True)
         """
         Trace a dynamic workflow in which an embedded language model (agent) decides what sequence of actions to take.
@@ -370,7 +370,7 @@ def agent(cls, name: Optional[str] = None, session_id: Optional[str] = None, ml_
     def workflow(
         cls, name: Optional[str] = None, session_id: Optional[str] = None, ml_app: Optional[str] = None
     ) -> Span:
-        print("[✧ LLM Observability] Workflow 🔗: {} running ...".format(name))
+        print("[✧ LLMObs] Workflow 🔗: {} running ...".format(name), flush=True)
         """
         Trace a predefined or static sequence of operations.
@@ -428,7 +428,7 @@ def embedding(
     def retrieval(
         cls, name: Optional[str] = None, session_id: Optional[str] = None, ml_app: Optional[str] = None
     ) -> Span:
-        print("[✧ LLM Observability] Retrieval 🔎: {} running ...".format(name))
+        print("[✧ LLMObs] Retrieval 🔎: {} running ...".format(name), flush=True)
         """
         Trace a vector search operation involving a list of documents being returned from an external knowledge base.
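All of the hunks above touch the span-kind helpers (`llm`, `tool`, `task`, `agent`, `workflow`, `retrieval`), each of which returns a Span. Below is a usage sketch, assuming LLMObs has already been enabled and that the returned Span can be used as a context manager; the workflow name and the two helper functions are hypothetical stand-ins, not taken from this diff.

from ddtrace.llmobs import LLMObs

def search_knowledge_base(query):   # hypothetical stand-in for a retrieval step
    return ["doc-1", "doc-2"]

def summarize(docs):                # hypothetical stand-in for a task step
    return "summary of {} documents".format(len(docs))

# Each helper starts a span of the corresponding kind; with this commit the
# "[✧ LLMObs] ... running ..." lines are printed immediately (flush=True).
with LLMObs.workflow(name="answer-question"):
    with LLMObs.retrieval(name="search-docs"):
        docs = search_knowledge_base("how do I enable tracing?")
    with LLMObs.task(name="summarize-docs"):
        summary = summarize(docs)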
26 changes: 8 additions & 18 deletions ddtrace/llmobs/_trace_processor.py
@@ -105,26 +105,16 @@ def _llmobs_span_event(self, span: Span) -> Dict[str, Any]:
         parent_id = str(_get_llmobs_parent_id(span) or "undefined")
         span._meta.pop(PARENT_ID_KEY, None)

-        name = _get_span_name(span)
-        if span_kind == "llm":
-            print("[✧ LLM Observability] LLM ✨: {} finished in {} seconds!".format(name, span.duration))
-        elif span_kind == "workflow":
-            print("[✧ LLM Observability] Workflow 🔗: {} finished in {} seconds!".format(name, span.duration))
-        elif span_kind == "agent":
-            print("[✧ LLM Observability] Agent 🤖: {} finished in {} seconds!".format(name, span.duration))
-            url = """
-            View your agent run:
-            https://app.datadoghq.com/llm/traces?query=%40event_type%3Aspan%20%40parent_id%3Aundefined%20%40trace_id%3A{}%20&agg_m=count&agg_m_source=base&agg_t=count&fromUser=false&llmPanels=%5B%7B%22t%22%3A%22sampleDetailPanel%22%2C%22rEID%22%3A%22AgAAAZDMT2fSc-LOggAAAAAAAAAYAAAAAEFaRE1UMS1vQUFBMl9fZXBadnc3QUFBQQAAACQAAAAAMDE5MGNjNGYtODc3MC00YmY0LTg5NGItZmFiNTY1NDk1ZjE0%22%7D%5D&sidepanelTab=trace&viz=stream
+        if parent_id == "undefined":
+            url = """[✧ LLMObs] Trace with root span name "{span_name}" finished in {span_duration} seconds 🎉!
+            View your trace at:
+            https://dd.datad0g.com/llm/traces?query=%40ml_app%3Aai-chat
             """.format(
-                span.trace_id
+                span_name=span.name,
+                span_duration=span.duration,
             )
-            print(url)
-        elif span_kind == "tool":
-            print("[✧ LLM Observability] Tool 🔧: {} finished in {} seconds!".format(name, span.duration))
-        elif span_kind == "task":
-            print("[✧ LLM Observability] Task 📌: {} finished in {} seconds!".format(name, span.duration))
-        elif span_kind == "retrieval":
-            print("[✧ LLM Observability] Retrieval 🔎: {} finished in {} seconds!".format(name, span.duration))
+            print(url, flush=True)

         return {
             "trace_id": "{:x}".format(span.trace_id),
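Read as plain code, the changed block in `_llmobs_span_event` likely ends up as follows after this commit. This is assembled from the context and added lines above; the indentation and the enclosing method body are assumptions, not shown in the diff.

parent_id = str(_get_llmobs_parent_id(span) or "undefined")
span._meta.pop(PARENT_ID_KEY, None)

# Only the root span (no LLMObs parent) prints a link to the finished trace.
if parent_id == "undefined":
    url = """[✧ LLMObs] Trace with root span name "{span_name}" finished in {span_duration} seconds 🎉!
    View your trace at:
    https://dd.datad0g.com/llm/traces?query=%40ml_app%3Aai-chat
    """.format(
        span_name=span.name,
        span_duration=span.duration,
    )
    print(url, flush=True)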
