Description
When I use an output_schema with a DeepSeek model,
it raises a BadRequestError.
Here is part of my code:
import os

from dotenv import load_dotenv
from google.adk.agents import LlmAgent
from google.adk.models.lite_llm import LiteLlm
from pydantic import BaseModel, Field

load_dotenv()

deepseek_model = LiteLlm(
    model="deepseek/deepseek-chat",
    api_key=os.getenv("DEEPSEEK_API_KEY"),
)

class usr_message(BaseModel):
    goal: str = Field(description="The research topic or goal being described")
    preferences: str = Field(description="The user's preferences for the generated content")
    attributes: str = Field(description="Basic attributes the research must satisfy")
    constraints: str = Field(description="The user's requirements and guidelines for the generated content")
    source_hypothesis: str = Field(description="Initial hypotheses the user may propose")

user_agent = LlmAgent(
    name="user_agent",
    model=deepseek_model,
    instruction=user_agent_prompt,  # prompt string defined elsewhere in my project
    output_schema=usr_message,
    output_key="usr_message",
)

root_agent = user_agent
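As a possible workaround (my assumption, not a confirmed fix): drop output_schema so ADK never sends the schema-based response_format, ask for JSON explicitly in the prompt, and validate the agent's text output against the same Pydantic model afterwards. parse_usr_message below is a hypothetical helper, not ADK API:

user_agent = LlmAgent(
    name="user_agent",
    model=deepseek_model,
    # user_agent_prompt must now explicitly ask for a JSON object with
    # the usr_message fields, since no schema is enforced server-side.
    instruction=user_agent_prompt,
    output_key="usr_message",
)

def parse_usr_message(raw_text: str) -> usr_message:
    # Hypothetical helper: validate the model's free-form JSON output
    # against the same schema, client-side.
    return usr_message.model_validate_json(raw_text)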
Here is the error output:
2025-07-09 11:43:48,521 - WARNING - llm_agent.py:462 - Invalid config for agent user_agent: output_schema cannot co-exist with agent transfer configurations. Setting disallow_transfer_to_parent=True, disallow_transfer_to_peers=True
2025-07-09 11:43:48,523 - INFO - agent_loader.py:95 - Found root_agent in deepseek.agent
11:43:48 - LiteLLM:INFO: utils.py:3064 -
LiteLLM completion() model= deepseek-chat; provider = deepseek
2025-07-09 11:43:48,580 - INFO - utils.py:3064 -
LiteLLM completion() model= deepseek-chat; provider = deepseek
2025-07-09 11:43:48,776 - INFO - _client.py:1740 - HTTP Request: POST https://api.deepseek.com/beta/chat/completions "HTTP/1.1 422 Unprocessable Entity"
Give Feedback / Get Help: https://github.com/BerriAI/litellm/issues/new
LiteLLM.Info: If you need to debug this error, use `litellm._turn_on_debug()'.
2025-07-09 11:43:48,879 - ERROR - fast_api.py:820 - Error in event_generator: litellm.BadRequestError: DeepseekException - Failed to deserialize the JSON body into the target type: response_format: This response_format type is unavailable now at line 1 column 2311
Traceback (most recent call last):
File "C:\conda\envs\adk\Lib\site-packages\litellm\llms\custom_httpx\llm_http_handler.py", line 99, in _make_common_async_call
response = await async_httpx_client.post(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\conda\envs\adk\Lib\site-packages\litellm\litellm_core_utils\logging_utils.py", line 135, in async_wrapper
result = await func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\conda\envs\adk\Lib\site-packages\litellm\llms\custom_httpx\http_handler.py", line 276, in post
raise e
File "C:\conda\envs\adk\Lib\site-packages\litellm\llms\custom_httpx\http_handler.py", line 232, in post
response.raise_for_status()
File "C:\conda\envs\adk\Lib\site-packages\httpx_models.py", line 829, in raise_for_status
raise HTTPStatusError(message, request=request, response=self)
httpx.HTTPStatusError: Client error '422 Unprocessable Entity' for url 'https://api.deepseek.com/beta/chat/completions'
For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/422
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\conda\envs\adk\Lib\site-packages\litellm\main.py", line 524, in acompletion
response = await init_response
^^^^^^^^^^^^^^^^^^^
File "C:\conda\envs\adk\Lib\site-packages\litellm\llms\custom_httpx\llm_http_handler.py", line 226, in async_completion
response = await self._make_common_async_call(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\conda\envs\adk\Lib\site-packages\litellm\llms\custom_httpx\llm_http_handler.py", line 124, in _make_common_async_call
raise self._handle_error(e=e, provider_config=provider_config)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\conda\envs\adk\Lib\site-packages\litellm\llms\custom_httpx\llm_http_handler.py", line 2058, in _handle_error
raise provider_config.get_error_class(
litellm.llms.openai.common_utils.OpenAIError: Failed to deserialize the JSON body into the target type: response_format: This response_format type is unavailable now at line 1 column 2311
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\conda\envs\adk\Lib\site-packages\google\adk\cli\fast_api.py", line 809, in event_generator
async for event in runner.run_async(
File "C:\conda\envs\adk\Lib\site-packages\google\adk\runners.py", line 203, in run_async
async for event in invocation_context.agent.run_async(invocation_context):
File "C:\conda\envs\adk\Lib\site-packages\google\adk\agents\base_agent.py", line 147, in run_async
async for event in self._run_async_impl(ctx):
File "C:\conda\envs\adk\Lib\site-packages\google\adk\agents\llm_agent.py", line 275, in _run_async_impl
async for event in self._llm_flow.run_async(ctx):
File "C:\conda\envs\adk\Lib\site-packages\google\adk\flows\llm_flows\base_llm_flow.py", line 282, in run_async
async for event in self._run_one_step_async(invocation_context):
File "C:\conda\envs\adk\Lib\site-packages\google\adk\flows\llm_flows\base_llm_flow.py", line 314, in _run_one_step_async
async for llm_response in self._call_llm_async(
File "C:\conda\envs\adk\Lib\site-packages\google\adk\flows\llm_flows\base_llm_flow.py", line 539, in _call_llm_async
async for llm_response in llm.generate_content_async(
File "C:\conda\envs\adk\Lib\site-packages\google\adk\models\lite_llm.py", line 778, in generate_content_async
response = await self.llm_client.acompletion(**completion_args)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\conda\envs\adk\Lib\site-packages\google\adk\models\lite_llm.py", line 101, in acompletion
return await acompletion(
^^^^^^^^^^^^^^^^^^
File "C:\conda\envs\adk\Lib\site-packages\litellm\utils.py", line 1494, in wrapper_async
raise e
File "C:\conda\envs\adk\Lib\site-packages\litellm\utils.py", line 1355, in wrapper_async
result = await original_function(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\conda\envs\adk\Lib\site-packages\litellm\main.py", line 543, in acompletion
raise exception_type(
^^^^^^^^^^^^^^^
File "C:\conda\envs\adk\Lib\site-packages\litellm\litellm_core_utils\exception_mapping_utils.py", line 2270, in exception_type
raise e
File "C:\conda\envs\adk\Lib\site-packages\litellm\litellm_core_utils\exception_mapping_utils.py", line 459, in exception_type
raise BadRequestError(
litellm.exceptions.BadRequestError: litellm.BadRequestError: DeepseekException - Failed to deserialize the JSON body into the target type: response_format: This response_format type is unavailable now at line 1 column 2311
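For what it's worth, the error seems reproducible with LiteLLM alone, without ADK. The sketch below is my assumption about the root cause: the json_schema-style response_format that ADK builds from output_schema is rejected by DeepSeek's beta endpoint, while DeepSeek's documented {"type": "json_object"} mode goes through:

import os
from litellm import completion

messages = [{"role": "user", "content": "Reply with a JSON object containing a 'goal' field."}]

# Schema-based structured output: expected to fail with the same
# 422 / BadRequestError shown in the traceback above.
try:
    completion(
        model="deepseek/deepseek-chat",
        api_key=os.getenv("DEEPSEEK_API_KEY"),
        messages=messages,
        response_format={
            "type": "json_schema",
            "json_schema": {
                "name": "usr_message",
                "schema": {"type": "object", "properties": {"goal": {"type": "string"}}},
            },
        },
    )
except Exception as e:
    print(type(e).__name__, e)

# Plain JSON mode, which DeepSeek documents: expected to succeed.
resp = completion(
    model="deepseek/deepseek-chat",
    api_key=os.getenv("DEEPSEEK_API_KEY"),
    messages=messages,
    response_format={"type": "json_object"},
)
print(resp.choices[0].message.content)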