Skip to main content
在某些情况下,你可能希望在追踪函数中访问当前运行(span)。这对于从当前运行中提取 UUID、标签或其他信息非常有用。你可以通过调用 Python 或 TypeScript SDK 中的 get_current_run_tree/getCurrentRunTree 函数来访问当前运行。有关 RunTree 对象可用属性的完整列表,请参阅此参考文档。
from langsmith import traceable
from langsmith.run_helpers import get_current_run_tree
from openai import Client

    openai = Client()

    @traceable
    def format_prompt(subject):
        run = get_current_run_tree()
        print(f"format_prompt Run Id: {run.id}")
        print(f"format_prompt Trace Id: {run.trace_id}")
        print(f"format_prompt Parent Run Id: {run.parent_run.id}")
        return [
            {
                "role": "system",
                "content": "You are a helpful assistant.",
            },
            {
                "role": "user",
                "content": f"What's a good name for a store that sells {subject}?"
            }
        ]

    @traceable(run_type="llm")
    def invoke_llm(messages):
        run = get_current_run_tree()
        print(f"invoke_llm Run Id: {run.id}")
        print(f"invoke_llm Trace Id: {run.trace_id}")
        print(f"invoke_llm Parent Run Id: {run.parent_run.id}")
        return openai.chat.completions.create(
            messages=messages, model="gpt-4.1-mini", temperature=0
        )

    @traceable
    def parse_output(response):
        run = get_current_run_tree()
        print(f"parse_output Run Id: {run.id}")
        print(f"parse_output Trace Id: {run.trace_id}")
        print(f"parse_output Parent Run Id: {run.parent_run.id}")
        return response.choices[0].message.content

    @traceable
    def run_pipeline():
        run = get_current_run_tree()
        print(f"run_pipeline Run Id: {run.id}")
        print(f"run_pipeline Trace Id: {run.trace_id}")
        messages = format_prompt("colorful socks")
        response = invoke_llm(messages)
        return parse_output(response)

# Execute the full traced pipeline (format prompt -> call LLM -> parse output).
run_pipeline()