SDK

Tool Tracking

Function calling and tool executions are automatically tracked. No decorators or manual instrumentation are required beyond initialization.

Tracked Information:

  • Tool name

  • Arguments passed

  • Return value

  • Execution time

OpenAI Function Calling

import nora
from openai import OpenAI
import json

# Initialize Nora tracking and the OpenAI client once at module load;
# all subsequent API calls are traced automatically.
nora_client = nora.init(api_key="your-nora-api-key")
client = OpenAI(api_key="your-openai-key")

def get_weather(location, unit="celsius"):
    """Stub weather lookup used by the function-calling example.

    Always reports 22 degrees and sunny, echoing back the requested
    location and unit.
    """
    report = {"location": location, "temperature": 22}
    report["unit"] = unit
    report["condition"] = "sunny"
    return report

# JSON-schema description of get_weather, advertised to the model so it
# can decide when (and with what arguments) to call the tool.
_weather_parameters = {
    "type": "object",
    "properties": {
        "location": {
            "type": "string",
            "description": "The city name, e.g. San Francisco",
        },
        "unit": {
            "type": "string",
            "enum": ["celsius", "fahrenheit"],
        },
    },
    "required": ["location"],
}

tools = [
    {
        "type": "function",
        "function": {
            "name": "get_weather",
            "description": "Get the current weather in a given location",
            "parameters": _weather_parameters,
        },
    }
]

@nora_client.trace_group(name="weather_query")
def ask_weather(question):
    """Ask the model a weather question, resolving at most one tool call.

    Args:
        question: Natural-language question from the user.

    Returns:
        The model's final text answer. If the model requested the
        get_weather tool, the tool is executed locally and its result is
        fed back for a follow-up completion.

    Raises:
        ValueError: If the model requests a tool this example does not
            implement.
    """
    response = client.responses.create(
        model="gpt-5",
        input=[
            {
                "role": "user",
                "content": question
            }
        ],
        tools=tools,
        tool_choice="auto"
    )

    # Responses API output items are objects read via attribute access
    # (not dicts), and tool calls carry type "function_call".
    tool_calls = [
        item for item in response.output
        if getattr(item, "type", None) == "function_call"
    ]
    if not tool_calls:
        return response.output_text

    tool_call = tool_calls[0]
    function_args = json.loads(tool_call.arguments)

    # Dispatch explicitly; the original referenced function_response
    # unbound (NameError) when the tool name was unrecognized.
    if tool_call.name == "get_weather":
        function_response = get_weather(**function_args)
    else:
        raise ValueError(f"Unknown tool requested: {tool_call.name}")

    # Return the tool result via a function_call_output item keyed by
    # call_id, alongside the original function_call item, per the
    # Responses API tool-calling contract.
    followup = client.responses.create(
        model="gpt-5",
        input=[
            {
                "role": "user",
                "content": question
            },
            tool_call,
            {
                "type": "function_call_output",
                "call_id": tool_call.call_id,
                "output": json.dumps(function_response)
            }
        ]
    )

    return followup.output_text

# Run the traced example end to end; the whole call is grouped under
# the "weather_query" trace.
result = ask_weather("What's the weather like in Tokyo?")
print(result)

Tool Chain Tracking

import nora
from openai import OpenAI

# Initialize Nora tracking and the OpenAI client for the tool-chain example.
nora_client = nora.init(api_key="your-nora-api-key")
client = OpenAI(api_key="your-openai-key")

@nora_client.trace(span_type="tool", name="SearchDatabase")
def search_database(query):
    """Return canned search hits; stands in for a real database query."""
    return [f"Result {index}" for index in range(1, 4)]

@nora_client.trace(span_type="tool", name="FilterResults")
def filter_results(results, criteria):
    """Keep only results whose text contains the criteria substring."""
    kept = []
    for entry in results:
        if criteria in entry:
            kept.append(entry)
    return kept

@nora_client.trace(span_type="tool", name="RankResults")
def rank_results(results):
    """Return the results ordered descending (lexicographically)."""
    ordered = list(results)
    ordered.sort(reverse=True)
    return ordered

@nora_client.trace_group(name="search_pipeline")
def search_with_tools(user_query):
    """Run the search -> filter -> rank tool chain, then have the model
    summarize the ranked results for the user's query.
    """
    ranked = rank_results(filter_results(search_database(user_query), "Result"))

    system_message = {
        "role": "system",
        "content": f"Search results: {ranked}"
    }
    user_message = {
        "role": "user",
        "content": f"Summarize these results for: {user_query}"
    }

    summary = client.responses.create(
        model="gpt-5",
        input=[system_message, user_message]
    )
    return summary.output_text

# Run the pipeline; each tool call and the final model call are traced
# under the "search_pipeline" group.
result = search_with_tools("Find information about AI")
print(result)

Was this page helpful?