Hello,
After going through the similar topics on this problem i still remain puzzled. As the title suggest lite llm seems to be throwing this strange error. I call no tools on that specific task it just is supposed to aggregate results from multiple reports. Maybe I’m missing something.
Code for my crew :
from crewai import Agent, Crew, Process, Task, LLM
from crewai.project import CrewBase, agent, crew, task
from crewai_tools import FileReadTool
from .firefighter_outputs import *
from .tools.output_validation import OutputValidation
@CrewBase
class FirefighterCrew:
"""Firefighter Crew"""
agents_config = 'config/agents.yaml'
tasks_config = 'config/tasks.yaml'
def __init__(self, route_tool, inventory_fn):
"""Initialize the FirefighterCrew class."""
super().__init__()
self.route_tool = route_tool
self.inventory_fn = inventory_fn
self.llm = LLM(
model = "ollama/llama3.1:latest",
api_base = "http://localhost:11434",
api_key = "NA"
)
@agent
def firefighter_strategist(self) -> Agent:
return Agent(
config=self.agents_config['firefighter_strategist'],
allow_delegation=False,
llm=self.llm,
verbose=True
)
@agent
def equipment_resource_manager(self) -> Agent:
return Agent(
config=self.agents_config['equipment_resource_manager'],
allow_delegation=False,
llm=self.llm,
verbose=True
)
@agent
def unit_deployment_manager(self) -> Agent:
return Agent(
config=self.agents_config['unit_deployment_manager'],
allow_delegation=False,
llm=self.llm,
verbose=True
)
@task
def read_inventory(self) -> Task:
self.read_inventory_task = Task(
config=self.tasks_config['read_inventory'],
# output_pydantic=ResourceReport,
# async_execution=True,
tools=[FileReadTool(self.inventory_fn)],
# output_file='./reports/firefighter/initial_resource_report.json'
)
return self.read_inventory_task
@task
def create_resource_report(self) -> Task:
self.create_resource_report_task = Task(
config=self.tasks_config['create_resource_report'],
output_pydantic=ResourceReport,
# async_execution=True,
# tools=[OutputValidation(ResourceReport)],
context=[self.read_inventory_task],
output_file='./reports/firefighter/initial_resource_report.json',
expected_output=f"The output should adhere to the following schema: {ResourceReport.get_schema()}"
)
return self.create_resource_report_task
@task
def create_firefighting_strategy(self) -> Task:
self.create_firefighting_strategy_task = Task(
config=self.tasks_config['create_firefighting_strategy'],
output_pydantic=StrategyReport,
# async_execution=True
# tools=[OutputValidation(StrategyReport)],
context=[self.create_resource_report_task],
output_file='./reports/firefighter/firefighting_strategy.json',
expected_output=f"The output should adhere to the following schema: {StrategyReport.get_schema()}"
)
return self.create_firefighting_strategy_task
@task
def generate_unit_deployment_report(self) -> Task:
self.generate_unit_deployment_report_task = Task(
config=self.tasks_config['generate_unit_deployment_report'],
output_pydantic=UnitAllocationReport,
# async_execution=True,
context=[self.create_firefighting_strategy_task, self.create_resource_report_task],
tools=[self.route_tool],#, OutputValidation(UnitAllocationReport)],
output_file='./reports/firefighter/unit_deployment_report.json',
expected_output=f"The output should adhere to the following schema: {UnitAllocationReport.get_schema()}"
)
return self.generate_unit_deployment_report_task
@task
def update_resource_report(self) -> Task:
self.update_resource_report_task = Task(
config=self.tasks_config['update_resource_report'],
output_pydantic=UpdatedResourceReport,
# async_execution=True,
# tools=[OutputValidation(UpdatedResourceReport)],
context=[self.create_firefighting_strategy_task, self.generate_unit_deployment_report_task],
output_file='./reports/firefighter/updated_resource_report.json',
expected_output=f"The output should adhere to the following schema: {UpdatedResourceReport.get_schema()}"
)
return self.update_resource_report_task
@task
def combine_reports(self) -> Task:
return Task(
config=self.tasks_config['combine_reports'],
output_pydantic=FirefighterReports,
context=[self.create_firefighting_strategy_task, self.update_resource_report_task, self.generate_unit_deployment_report_task],
output_file='./reports/firefighter/full_final_report.json',
expected_output=f"The output should adhere to the following schema: {FirefighterReports.get_schema()}"
)
@crew
def crew(self) -> Crew:
"""Creates the Research Crew"""
return Crew(
agents=self.agents, # Automatically created by the @agent decorator
tasks=self.tasks, # Automatically created by the @task decorator
process=Process.sequential,
verbose=False,
# planning=True,
# planning_llm='ollama/llama3.1'
)
Error log:
Give Feedback / Get Help: https://github.com/BerriAI/litellm/issues/new
LiteLLM.Info: If you need to debug this error, use `litellm.set_verbose=True'.
Provider List: https://docs.litellm.ai/docs/providers
Give Feedback / Get Help: https://github.com/BerriAI/litellm/issues/new
LiteLLM.Info: If you need to debug this error, use `litellm.set_verbose=True'.
Provider List: https://docs.litellm.ai/docs/providers
Give Feedback / Get Help: https://github.com/BerriAI/litellm/issues/new
LiteLLM.Info: If you need to debug this error, use `litellm.set_verbose=True'.
Provider List: https://docs.litellm.ai/docs/providers
Give Feedback / Get Help: https://github.com/BerriAI/litellm/issues/new
LiteLLM.Info: If you need to debug this error, use `litellm.set_verbose=True'.
Provider List: https://docs.litellm.ai/docs/providers
Give Feedback / Get Help: https://github.com/BerriAI/litellm/issues/new
LiteLLM.Info: If you need to debug this error, use `litellm.set_verbose=True'.
Provider List: https://docs.litellm.ai/docs/providers
Give Feedback / Get Help: https://github.com/BerriAI/litellm/issues/new
LiteLLM.Info: If you need to debug this error, use `litellm.set_verbose=True'.
Provider List: https://docs.litellm.ai/docs/providers
Give Feedback / Get Help: https://github.com/BerriAI/litellm/issues/new
LiteLLM.Info: If you need to debug this error, use `litellm.set_verbose=True'.
Provider List: https://docs.litellm.ai/docs/providers
Give Feedback / Get Help: https://github.com/BerriAI/litellm/issues/new
LiteLLM.Info: If you need to debug this error, use `litellm.set_verbose=True'.
Provider List: https://docs.litellm.ai/docs/providers
Give Feedback / Get Help: https://github.com/BerriAI/litellm/issues/new
LiteLLM.Info: If you need to debug this error, use `litellm.set_verbose=True'.
Provider List: https://docs.litellm.ai/docs/providers
Failed to convert text into a pydantic model due to the following error: litellm.APIConnectionError: 'arguments'
Traceback (most recent call last):
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/crewai/utilities/converter.py", line 100, in convert_to_model
escaped_result = json.dumps(json.loads(result, strict=False))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/evaveli/.pyenv/versions/3.11.0/lib/python3.11/json/__init__.py", line 359, in loads
return cls(**kw).decode(s)
^^^^^^^^^^^^^^^^^^^
File "/Users/evaveli/.pyenv/versions/3.11.0/lib/python3.11/json/decoder.py", line 337, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/evaveli/.pyenv/versions/3.11.0/lib/python3.11/json/decoder.py", line 355, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/litellm/main.py", line 2735, in completion
response = base_llm_http_handler.completion(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/litellm/llms/custom_httpx/llm_http_handler.py", line 225, in completion
return provider_config.transform_response(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/litellm/llms/ollama/completion/transformation.py", line 264, in transform_response
"arguments": json.dumps(function_call["arguments"]),
~~~~~~~~~~~~~^^^^^^^^^^^^^
KeyError: 'arguments'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/instructor/retry.py", line 156, in retry_sync
response = func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/litellm/utils.py", line 987, in wrapper
raise e
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/litellm/utils.py", line 868, in wrapper
result = original_function(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/litellm/main.py", line 3012, in completion
raise exception_type(
^^^^^^^^^^^^^^^
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/litellm/litellm_core_utils/exception_mapping_utils.py", line 2146, in exception_type
raise e
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/litellm/litellm_core_utils/exception_mapping_utils.py", line 2122, in exception_type
raise APIConnectionError(
litellm.exceptions.APIConnectionError: litellm.APIConnectionError: 'arguments'
Traceback (most recent call last):
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/crewai/utilities/converter.py", line 100, in convert_to_model
escaped_result = json.dumps(json.loads(result, strict=False))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/evaveli/.pyenv/versions/3.11.0/lib/python3.11/json/__init__.py", line 359, in loads
return cls(**kw).decode(s)
^^^^^^^^^^^^^^^^^^^
File "/Users/evaveli/.pyenv/versions/3.11.0/lib/python3.11/json/decoder.py", line 337, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/evaveli/.pyenv/versions/3.11.0/lib/python3.11/json/decoder.py", line 355, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/litellm/main.py", line 2735, in completion
response = base_llm_http_handler.completion(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/litellm/llms/custom_httpx/llm_http_handler.py", line 225, in completion
return provider_config.transform_response(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/litellm/llms/ollama/completion/transformation.py", line 264, in transform_response
"arguments": json.dumps(function_call["arguments"]),
~~~~~~~~~~~~~^^^^^^^^^^^^^
KeyError: 'arguments'
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/instructor/retry.py", line 151, in retry_sync
for attempt in max_retries:
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/tenacity/__init__.py", line 443, in __iter__
do = self.iter(retry_state=retry_state)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/tenacity/__init__.py", line 376, in iter
result = action(retry_state)
^^^^^^^^^^^^^^^^^^^
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/tenacity/__init__.py", line 419, in exc_check
raise retry_exc from fut.exception()
tenacity.RetryError: RetryError[<Future at 0x30065e8d0 state=finished raised APIConnectionError>]
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/crewai/utilities/converter.py", line 27, in to_pydantic
return self._create_instructor().to_pydantic()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/crewai/utilities/internal_instructor.py", line 45, in to_pydantic
model = self._client.chat.completions.create(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/instructor/client.py", line 176, in create
return self.create_fn(
^^^^^^^^^^^^^^^
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/instructor/patch.py", line 193, in new_create_sync
response = retry_sync(
^^^^^^^^^^^
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/instructor/retry.py", line 181, in retry_sync
raise InstructorRetryException(
instructor.exceptions.InstructorRetryException: litellm.APIConnectionError: 'arguments'
Traceback (most recent call last):
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/crewai/utilities/converter.py", line 100, in convert_to_model
escaped_result = json.dumps(json.loads(result, strict=False))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/evaveli/.pyenv/versions/3.11.0/lib/python3.11/json/__init__.py", line 359, in loads
return cls(**kw).decode(s)
^^^^^^^^^^^^^^^^^^^
File "/Users/evaveli/.pyenv/versions/3.11.0/lib/python3.11/json/decoder.py", line 337, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/evaveli/.pyenv/versions/3.11.0/lib/python3.11/json/decoder.py", line 355, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/litellm/main.py", line 2735, in completion
response = base_llm_http_handler.completion(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/litellm/llms/custom_httpx/llm_http_handler.py", line 225, in completion
return provider_config.transform_response(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/litellm/llms/ollama/completion/transformation.py", line 264, in transform_response
"arguments": json.dumps(function_call["arguments"]),
~~~~~~~~~~~~~^^^^^^^^^^^^^
KeyError: 'arguments'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/litellm/main.py", line 2735, in completion
response = base_llm_http_handler.completion(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/litellm/llms/custom_httpx/llm_http_handler.py", line 225, in completion
return provider_config.transform_response(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/litellm/llms/ollama/completion/transformation.py", line 264, in transform_response
"arguments": json.dumps(function_call["arguments"]),
~~~~~~~~~~~~~^^^^^^^^^^^^^
KeyError: 'arguments'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/instructor/retry.py", line 156, in retry_sync
response = func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/litellm/utils.py", line 987, in wrapper
raise e
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/litellm/utils.py", line 868, in wrapper
result = original_function(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/litellm/main.py", line 3012, in completion
raise exception_type(
^^^^^^^^^^^^^^^
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/litellm/litellm_core_utils/exception_mapping_utils.py", line 2146, in exception_type
raise e
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/litellm/litellm_core_utils/exception_mapping_utils.py", line 2122, in exception_type
raise APIConnectionError(
litellm.exceptions.APIConnectionError: litellm.APIConnectionError: 'arguments'
Traceback (most recent call last):
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/crewai/utilities/converter.py", line 100, in convert_to_model
escaped_result = json.dumps(json.loads(result, strict=False))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/evaveli/.pyenv/versions/3.11.0/lib/python3.11/json/__init__.py", line 359, in loads
return cls(**kw).decode(s)
^^^^^^^^^^^^^^^^^^^
File "/Users/evaveli/.pyenv/versions/3.11.0/lib/python3.11/json/decoder.py", line 337, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/evaveli/.pyenv/versions/3.11.0/lib/python3.11/json/decoder.py", line 355, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/litellm/main.py", line 2735, in completion
response = base_llm_http_handler.completion(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/litellm/llms/custom_httpx/llm_http_handler.py", line 225, in completion
return provider_config.transform_response(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/litellm/llms/ollama/completion/transformation.py", line 264, in transform_response
"arguments": json.dumps(function_call["arguments"]),
~~~~~~~~~~~~~^^^^^^^^^^^^^
KeyError: 'arguments'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/instructor/retry.py", line 156, in retry_sync
response = func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/litellm/utils.py", line 987, in wrapper
raise e
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/litellm/utils.py", line 868, in wrapper
result = original_function(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/evaveli/Desktop/MAI-URV-IMAS/emergency_response/.venv/lib/python3.11/site-packages/litellm/main.py", line 3012, in completion
raise exception_type(
^^^^^^^^^^^^^^
KeyError: 'arguments'
Using raw output instead.
I would be happy to provide any further code if needed.
Thank you.