Skip to main content

Function calling

Some of our models are compatible with the OpenAI function calling API, so you can use the OpenAI SDK to access their function calling capability.

Currently supported models include:

  • inf-chat-int-v1

Example

import json
import os

from openai import OpenAI

# OpenAI-compatible client pointed at the infly endpoint; the API key is
# read from the INF_OPENAPI_API_KEY environment variable, not hard-coded.
client = OpenAI(
    api_key=os.getenv("INF_OPENAPI_API_KEY"),
    base_url="https://api.infly.cn/v1",
)

def get_weather(params: dict):
    """Mock weather lookup used by the example.

    Args:
        params: Parsed tool-call arguments. Expects "location" and,
            optionally, "unit" ("c" or "f"). "unit" defaults to "c" so a
            model response that omits it does not raise KeyError — this
            matches the streaming example's defensive lookup.

    Returns:
        A dict with the location, a fixed weather state, and a fake
        temperature in the requested unit.
    """
    return {
        "location": params["location"],
        "weather_state": "晴",
        # 25 °C and 77 °F are the same fake temperature.
        "temperature": 25 if params.get("unit", "c") == "c" else 77,
    }


# JSON-schema description of get_weather's parameters, in the OpenAI
# function-calling tool format.
_weather_parameters = {
    "type": "object",
    "properties": {
        "location": {"type": "string"},
        "unit": {"type": "string", "enum": ["c", "f"]},
    },
    "required": ["location", "unit"],
    "additionalProperties": False,
}

tools = [
    {
        "type": "function",
        "function": {
            "name": "get_weather",
            "parameters": _weather_parameters,
        },
    }
]


# Model that supports OpenAI-compatible function calling.
model="inf-chat-int-v1"

# First model call: the model is expected to answer with a tool call
# rather than plain text, since the question needs live weather data.
completion = client.chat.completions.create(
model=model,
messages=[{"role": "user", "content": "上海市徐汇滨江今天天气怎么样?"}],
tools=tools,
)

print(completion.choices[0].message.tool_calls)

# will print:
#
# [ChatCompletionMessageToolCall(id='chatcmpl-tool-55d74479b50b4d5da957b25a4c50ccc2', function=Function(arguments='{"location": "\\u4e0a\\u6d77\\u5f90\\u6c47\\u6ee8\\u6c5f", "unit": "c"}', name='get_weather'), type='function')]
#


# Extract the arguments of the tool call, run the tool locally, and send
# the result back to the model for a final natural-language answer.
if completion.choices[0].message.tool_calls:
    tool_call = completion.choices[0].message.tool_calls[0]
    tool_name = tool_call.function.name
    # The arguments arrive as a JSON string: parse with json.loads.
    # Never eval() model output — it would execute arbitrary code.
    tool_args = json.loads(tool_call.function.arguments)

    # Look up the local function by name; .get avoids a bare KeyError so
    # the explicit error below is actually reachable.
    tool = globals().get(tool_name)

    if tool is None:
        raise ValueError(f"Tool {tool_name} not found")

    result = tool(tool_args)

    # Send the result back to the model. Per the OpenAI tool-calling
    # protocol, the assistant message that issued the tool call is echoed
    # back and the tool message carries the matching tool_call_id.
    follow_up_completion = client.chat.completions.create(
        model=model,
        messages=[
            {"role": "user", "content": "上海市徐汇滨江今天天气怎么样?"},
            completion.choices[0].message,
            {
                "role": "tool",
                "tool_call_id": tool_call.id,
                "content": str(result),
            },
        ],
    )

    print(follow_up_completion.choices[0].message.content)

# will print:
# 今天上海市徐汇滨江天气晴,温度25摄氏度

Streaming

import os
from openai import OpenAI
import json

# Client for the OpenAI-compatible infly endpoint; the API key comes from
# the INF_OPENAPI_API_KEY environment variable.
client = OpenAI(
    api_key=os.getenv("INF_OPENAPI_API_KEY"),
    base_url="https://api.infly.cn/v1",
)


# Model used throughout the streaming example.
model = "inf-chat-int-v1"


def get_weather(params: dict):
    """Mock weather lookup used by the streaming example.

    Expects "location" in *params*; "unit" ("c" or "f") is optional and
    defaults to "c". Returns a dict with the location, a fixed weather
    state, and a fake temperature in the requested unit.
    """
    unit = params.get("unit", "c")
    return {
        "location": params["location"],
        "weather_state": "晴",
        "temperature": 25 if unit == "c" else 77,
    }


# Tool list in the OpenAI function-calling format: a single get_weather
# function taking a required location and a required unit ("c" or "f").
tools = [
    {
        "type": "function",
        "function": {
            "name": "get_weather",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {"type": "string"},
                    "unit": {"type": "string", "enum": ["c", "f"]},
                },
                "required": ["location", "unit"],
                "additionalProperties": False,
            },
        },
    }
]

# Streaming request: chunks arrive incrementally as deltas. With
# include_usage set, a final chunk carrying usage stats (and no choices)
# is appended to the stream.
completion = client.chat.completions.create(
model=model,
messages=[{"role": "user", "content": "上海徐汇滨江今天天气怎么样?"}],
tools=tools,
stream=True,
stream_options={"include_usage": True},
)

# Accumulators for the tool call being streamed.
function_arguments = ""
function_name = ""
is_collecting_function_args = False

tool_responses = []

tool_call_content = ""

for part in completion:
    # The final usage chunk has no choices — skip it.
    if not part.choices:
        continue
    delta = part.choices[0].delta
    finish_reason = part.choices[0].finish_reason

    # Accumulate assistant text content. delta is a pydantic model, so
    # test the attribute directly — `'content' in delta` does not check
    # field presence and never matched.
    if delta.content:
        tool_call_content += delta.content

    if delta.tool_calls:
        is_collecting_function_args = True
        tool_call = delta.tool_calls[0]

        if not tool_call.function:
            continue

        if tool_call.function.name:
            function_name = tool_call.function.name
            print(f"Function name: '{function_name}'")

        # Arguments stream in as JSON fragments; concatenate them until
        # the call is complete.
        if tool_call.function.arguments:
            function_arguments += tool_call.function.arguments
            print(f"Arguments: {function_arguments}")

    # Only once finish_reason is "tool_calls" is function_arguments a
    # complete, valid JSON document.
    if finish_reason == "tool_calls" and is_collecting_function_args:
        print(f"Function call '{function_name}' is complete.")
        args = json.loads(function_arguments)
        print("Complete function arguments:")
        print(json.dumps(args, indent=2))

        # Look up the local function by name; .get keeps the explicit
        # error below reachable.
        tool = globals().get(function_name)

        if tool is None:
            raise ValueError(f"Tool {function_name} not found")

        # Reuse the already-parsed args — never eval() model output.
        result = tool(args)

        tool_responses.append(json.dumps(result))

        # Reset for the next potential function call
        function_arguments = ""
        function_name = ""
        is_collecting_function_args = False

# Wrap each serialized tool result in a "tool" role message.
tool_response_msgs = [{"role": "tool", "content": response} for response in tool_responses]

# Send the result back to the model
# NOTE(review): the standard OpenAI protocol expects the assistant message
# to carry the tool_calls and each tool message to carry a matching
# tool_call_id; this provider appears to accept the simpler form shown in
# the sample output below — confirm before relying on it elsewhere.
follow_up_completion = client.chat.completions.create(
model=model,
messages=[
{"role": "user", "content": "上海徐汇滨江今天天气怎么样?"},
{
"role": "assistant",
"content": tool_call_content,
},
*tool_response_msgs
],
)

print(follow_up_completion.choices[0].message.content)

The above code will print the following output:

Function name: 'get_weather'
Arguments: {"location": "
Arguments: {"location": "上海
Arguments: {"location": "上海徐
Arguments: {"location": "上海徐汇
Arguments: {"location": "上海徐汇滨江
Arguments: {"location": "上海徐汇滨江", "unit": "
Arguments: {"location": "上海徐汇滨江", "unit": "c
Arguments: {"location": "上海徐汇滨江", "unit": "c"}
Function call 'get_weather' is complete.
Complete function arguments:
{
"location": "上海徐汇滨江",
"unit": "c"
}
上海徐汇滨江今天天气晴朗,温度为25摄氏度。