Structured Output
Some of our models are compatible with OpenAI structured output, so you can use the OpenAI SDK to request structured responses from them.
Currently supported models:
- inf-chat-int-v1
Example
Input Python class, output Python object
from pydantic import BaseModel
from openai import OpenAI
import os

client = OpenAI(
    base_url="https://api.infly.cn/v1",
    api_key=os.getenv("INF_OPENAPI_API_KEY"),
)

class Weather(BaseModel):
    location: str
    weather_state: str
    temperature: int

completion = client.beta.chat.completions.parse(
    model="inf-chat-int-v1",
    messages=[
        {"role": "system", "content": "Extract the weather from the following text"},
        {"role": "user", "content": "The weather in Xuhui, Shanghai is sunny today, with a temperature of 25 degrees"},
    ],
    response_format=Weather,
)

weather = completion.choices[0].message.parsed
print(weather)
# This will print:
# Weather(location='Xuhui, Shanghai', weather_state='sunny', temperature=25)
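Since parsed is an ordinary Pydantic model instance, you can use it like any other Python object. A minimal sketch continuing the example above (the printed values assume the model returned the output shown):

print(weather.location)           # 'Xuhui, Shanghai'
print(weather.temperature)        # 25
print(weather.model_dump_json())  # '{"location":"Xuhui, Shanghai","weather_state":"sunny","temperature":25}'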
Input JSON schema, output JSON
JSON Schema is a powerful standard for defining and validating JSON data structures.
import os
from openai import OpenAI
from pydantic import BaseModel

client = OpenAI(
    base_url="https://api.infly.cn/v1",
    api_key=os.getenv("INF_OPENAPI_API_KEY"),
)

# Define the schema by hand ...
weather_json_schema = {
    "type": "object",
    "properties": {
        "location": {"type": "string"},
        "weather_state": {"type": "string"},
        "temperature": {"type": "integer"},
    },
    "required": ["location", "weather_state", "temperature"],
    "additionalProperties": False,
}

# ... or generate an equivalent schema from a Pydantic model
class Weather(BaseModel):
    location: str
    weather_state: str
    temperature: int

weather_json_schema = Weather.model_json_schema()

completion = client.chat.completions.create(
    model="inf-chat-int-v1",
    messages=[
        {"role": "system", "content": "Extract the weather from the following text"},
        {"role": "user", "content": "It is sunny in Xuhui Riverside, Shanghai today, with a temperature of 25 degrees"},
    ],
    response_format={
        "type": "json_schema",
        "json_schema": {
            "name": "Weather",
            "schema": weather_json_schema,
        },
    },
)

print(completion.choices[0].message.content)
# Will print:
# {"location": "Xuhui Riverside, Shanghai", "weather_state": "sunny", "temperature": 25}
Stream Mode
import os
from openai import OpenAI
from pydantic import BaseModel

client = OpenAI(
    base_url="https://api.infly.cn/v1",
    api_key=os.getenv("INF_OPENAPI_API_KEY"),
)

class Weather(BaseModel):
    location: str
    weather_state: str
    temperature: int

completion = client.chat.completions.create(
    model="inf-chat-int-v1",
    stream=True,
    messages=[
        {"role": "system", "content": "Extract the weather from the following text"},
        {"role": "user", "content": "It is sunny in Xuhui Riverside, Shanghai today, with a temperature of 25 degrees"},
    ],
    response_format={
        "type": "json_schema",
        "json_schema": {
            "name": "Weather",
            "schema": Weather.model_json_schema(),
        },
    },
)

# Accumulate the streamed JSON fragments from each delta
final_json = ""
for chunk in completion:
    if chunk.choices:
        final_json += chunk.choices[0].delta.content or ""

print(final_json)
# Will print:
# {"location": "Xuhui Riverside, Shanghai", "weather_state": "sunny", "temperature": 25}