Franck Dernoncourt

Reputation: 83387

How can I use structured outputs with Azure OpenAI via the openai Python library?

I want to use structured output with Azure OpenAI.

I tried the following code, based on the code given in https://openai.com/index/introducing-structured-outputs-in-the-api/:

from pydantic import BaseModel
from openai import AzureOpenAI

class Step(BaseModel):
    explanation: str
    output: str


class MathResponse(BaseModel):
    steps: list[Step]
    final_answer: str


client = AzureOpenAI(api_key='[redacted]',
                     api_version='2024-05-01-preview',
                     azure_endpoint='[redacted]')

completion = client.beta.chat.completions.parse(
    model="gpt-4omini-2024-07-18-name",
    messages=[
        {"role": "system", "content": "You are a helpful math tutor."},
        {"role": "user", "content": "solve 8x + 31 = 2"},
    ],
    response_format=MathResponse,
)

message = completion.choices[0].message
if message.parsed:
    print(message.parsed.steps)
    print(message.parsed.final_answer)
else:
    print(message.refusal)

I get the error:

openai.BadRequestError: Error code: 400:
{
    "error": {
        "message": "Invalid parameter: response_format must be one of json_object, text.",
        "type": "invalid_request_error",
        "param": "response_format",
        "code": "None"
    }
}

How can I fix it?

I ran pip install -U openai; I am using openai==1.40.1 and Python 3.11.
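A quick sanity check to confirm which version is actually being imported:

import openai

# Print the installed openai package version; it should be 1.40.1 or later.
print(openai.__version__)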


I also tried https://cookbook.openai.com/examples/structured_outputs_intro using Azure + GPT-4o mini (2024-07-18); it didn't work either, same error message:

from openai import AzureOpenAI

# Replace these variables with your Azure OpenAI endpoint and API key
endpoint = "https://<your-resource-name>.openai.azure.com"
api_key = "<your-api-key>"
deployment_name = "<your-deployment-name>" # Replace with your deployment name
MODEL = deployment_name

# API endpoint for the completion request
api_url = f"{endpoint}/openai/deployments/{deployment_name}/chat/completions?api-version=2024-06-01"


client = AzureOpenAI(api_key=api_key,
                     api_version='2024-07-01-preview',
                     azure_endpoint=endpoint)

math_tutor_prompt = '''
    You are a helpful math tutor. You will be provided with a math problem,
    and your goal will be to output a step by step solution, along with a final answer.
    For each step, just provide the output as an equation use the explanation field to detail the reasoning.
'''

def get_math_solution(question):
    response = client.chat.completions.create(
        model=MODEL,
        messages=[
            {
                "role": "system",
                "content": math_tutor_prompt
            },
            {
                "role": "user",
                "content": question
            }
        ],
        response_format={
            "type": "json_schema",
            "json_schema": {
                "name": "math_reasoning",
                "schema": {
                    "type": "object",
                    "properties": {
                        "steps": {
                            "type": "array",
                            "items": {
                                "type": "object",
                                "properties": {
                                    "explanation": {"type": "string"},
                                    "output": {"type": "string"}
                                },
                                "required": ["explanation", "output"],
                                "additionalProperties": False
                            }
                        },
                        "final_answer": {"type": "string"}
                    },
                    "required": ["steps", "final_answer"],
                    "additionalProperties": False
                },
                "strict": True
            }
        },
    )

    return response.choices[0].message


# Testing with an example question
question = "how can I solve 8x + 7 = -23"

result = get_math_solution(question)

print(result.content)

Upvotes: 0

Views: 1121

Answers (2)

Franck Dernoncourt

Reputation: 83387

Using gpt-4o-2024-08-06, which was finally deployed on Azure today (2024-09-03), made it work. Code example from learn.microsoft.com:

from pydantic import BaseModel
from openai import AzureOpenAI

endpoint = "https://your-azure-openai-endpoint.com"
api_key = "your-azure-openai-key"
deployment_name = 'deployment name' # Replace with your gpt-4o 2024-08-06 deployment name

client = AzureOpenAI(api_key=api_key,
                     api_version='2024-08-01-preview',
                     azure_endpoint=endpoint)

class CalendarEvent(BaseModel):
    name: str
    date: str
    participants: list[str]

completion = client.beta.chat.completions.parse(
    model=deployment_name, # replace with the model deployment name of your gpt-4o 2024-08-06 deployment
    messages=[
        {"role": "system", "content": "Extract the event information."},
        {"role": "user", "content": "Alice and Bob are going to a science fair on Friday."},
    ],
    response_format=CalendarEvent,
)

event = completion.choices[0].message.parsed

print(event)
print(completion.model_dump_json(indent=2))

Output:

name='Science Fair' date='Friday' participants=['Alice', 'Bob']
{
  "id": "chatcmpl-A3XDRVolXpjeAAQIGddswI990weid",
  "choices": [
    {
      "finish_reason": "stop",
      "index": 0,
      "logprobs": null,
      "message": {
        "content": "{\"name\":\"Science Fair\",\"date\":\"Friday\",\"participants\":[\"Alice\",\"Bob\"]}",
        "refusal": null,
        "role": "assistant",
        "function_call": null,
        "tool_calls": [],
        "parsed": {
          "name": "Science Fair",
          "date": "Friday",
          "participants": [
            "Alice",
            "Bob"
          ]
        }
      },
      "content_filter_results": {
        "hate": {
          "filtered": false,
          "severity": "safe"
        },
        "self_harm": {
          "filtered": false,
          "severity": "safe"
        },
        "sexual": {
          "filtered": false,
          "severity": "safe"
        },
        "violence": {
          "filtered": false,
          "severity": "safe"
        }
      }
    }
  ],
  "created": 1725406029,
  "model": "gpt-4o-2024-08-06",
  "object": "chat.completion",
  "service_tier": null,
  "system_fingerprint": "fp_b2ffeb31ff",
  "usage": {
    "completion_tokens": 17,
    "prompt_tokens": 32,
    "total_tokens": 49
  },
  "prompt_filter_results": [
    {
      "prompt_index": 0,
      "content_filter_results": {
        "hate": {
          "filtered": false,
          "severity": "safe"
        },
        "self_harm": {
          "filtered": false,
          "severity": "safe"
        },
        "sexual": {
          "filtered": false,
          "severity": "safe"
        },
        "violence": {
          "filtered": false,
          "severity": "safe"
        }
      }
    }
  ]
}

Tested with Python 3.11.7 and openai==1.43.0.
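
If you also want to handle the refusal case, as in the snippet from the question, here is a minimal sketch reusing the same client, deployment_name, and CalendarEvent as above:

completion = client.beta.chat.completions.parse(
    model=deployment_name,
    messages=[
        {"role": "system", "content": "Extract the event information."},
        {"role": "user", "content": "Alice and Bob are going to a science fair on Friday."},
    ],
    response_format=CalendarEvent,
)

message = completion.choices[0].message
if message.parsed:
    # The response was parsed into the CalendarEvent schema.
    print(message.parsed)
else:
    # The model declined to answer; print the refusal message instead.
    print(message.refusal)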

Upvotes: 0

YL95

Reputation: 41

It's not available on Azure yet, but if you want to see what it's like, you can try this:

It works for all models that support function calling.

It's not an ideal solution; I am just going to wait for the actual structured outputs to be available.

import os
import json
from openai import AzureOpenAI
from pprint import pprint

# Initialize the Azure OpenAI client
client = AzureOpenAI(
    azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"), 
    api_key=os.getenv("AZURE_OPENAI_API_KEY"),  
    api_version=os.getenv("OpenAI_API_VERSION")
)
deployment_name = os.getenv("AZURE_OPENAI_DEPLOYMENT_ID")

# Initial user message
messages = [
    {"role": "system", "content": "You are a helpful math tutor. that solve math questions step by step."},
    {"role": "user", "content": "solve 8x + 31 = 2"}
]

# Define a function tool whose parameters mirror the desired output JSON schema
tools = [
    {
        "type": "function",
        "function": {
            "name": "math_response",
            "description": "help solve math questions step by step following defined format.",
            "parameters": {
                "type": "object",
                "properties": {
                    "steps": {
                        "type": "array",
                        "items": {
                            "type": "object",
                            "properties": {
                                "explanation": {"type": "string"},
                                "output": {"type": "string"}
                            },
                            "required": ["explanation", "output"]
                        }
                    },
                    "final_answer": {"type": "string"}
                },
                "required": ["steps", "final_answer"], 
            }
        }
    }
]

# First API call: Ask the model to use the function
response = client.chat.completions.create(
    model=deployment_name,
    messages=messages,
    tools=tools,
    # tool_choice="none",
)

# Process the model's response
response_message = response.choices[0].message

# Handle function calls: parse and pretty-print the structured arguments
if response_message.tool_calls:
    for tool_call in response_message.tool_calls:
        function_args = json.loads(tool_call.function.arguments)
        pprint(function_args)

Here is the output:

{'steps': [{'explanation': 'Start with the equation 8x + 31 = 2.',
   'output': '8x + 31 = 2'},
  {'explanation': 'Subtract 31 from both sides to isolate the term with x.',
   'output': '8x = 2 - 31'},
  {'explanation': 'Simplify the right side: 2 - 31 = -29.',
   'output': '8x = -29'},
  {'explanation': 'Divide both sides by 8 to solve for x.',
   'output': 'x = -29/8'},
  {'explanation': 'Simplifying -29/8 gives the final answer.',
   'output': 'x = -3.625'}],
 'final_answer': 'x = -3.625'}
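
If the model sometimes replies in plain text instead of calling the function, you can force the tool call. A minimal sketch, reusing the tools and messages defined above (assuming your deployment supports tool_choice):

# Force the model to call the math_response function rather than answering in free text.
forced_response = client.chat.completions.create(
    model=deployment_name,
    messages=messages,
    tools=tools,
    tool_choice={"type": "function", "function": {"name": "math_response"}},
)

forced_args = json.loads(forced_response.choices[0].message.tool_calls[0].function.arguments)
pprint(forced_args)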

Upvotes: 1
