I am creating a CrewAI agent, where I am using watsonx.ai and tools like SerperDevTool and ScrapeWebsiteTool, and I am facing an issue:
LLM Call Failed, Error: litellm.InternalServerError: watsonxException - {"errors":[{"code":"downstream_request_failed","message":"Downstream vllm request failed: Internal Server Error","more_info":"https://cloud.ibm.com/apidocs/watsonx-ai"}]}
result = self._run_sequential_process()
return self._execute_tasks(self.tasks)
task_output = task.execute_sync(
return self._execute_core(agent, context, tools)
raise e
result = agent.execute_task(
raise e
result = self.agent_executor.invoke(
raise e
formatted_answer = self._invoke_loop()
raise e
answer = self._get_llm_response()
raise e
answer = self.llm.call(
return self._handle_non_streaming_response(
response = litellm.completion(**params)
raise e
result = original_function(*args, **kwargs)
raise exception_type(
raise litellm.InternalServerError()