Hi @team,
This is high priority. I am running into this error:
raise exception_type(
  File "/Users/paarttipaa/ProjectTask/GithubProj/BCK_Log_SLC_Code_Explanation_Project/.agenticApproach/lib/python3.12/site-packages/litellm/main.py", line 905, in completion
    model, custom_llm_provider, dynamic_api_key, api_base = get_llm_provider(
                                                            ^^^^^^^^^^^^^^^^^
  File "/Users/paarttipaa/ProjectTask/GithubProj/BCK_Log_SLC_Code_Explanation_Project/.agenticApproach/lib/python3.12/site-packages/litellm/litellm_core_utils/get_llm_provider_logic.py", line 313, in get_llm_provider
    raise e
  File "/Users/paarttipaa/ProjectTask/GithubProj/BCK_Log_SLC_Code_Explanation_Project/.agenticApproach/lib/python3.12/site-packages/litellm/litellm_core_utils/get_llm_provider_logic.py", line 290, in get_llm_provider
    raise litellm.exceptions.BadRequestError(  # type: ignore
litellm.exceptions.BadRequestError: litellm.BadRequestError: LLM Provider NOT provided. Pass in the LLM provider you are trying to call. You passed model=WatsonxLLM
Params: {'model_id': 'mistralai/mistral-large', 'deployment_id': None, 'params': {'decoding_method': 'sample', 'max_new_tokens': 1000, 'temperature': 0.7, 'top_k': 50, 'top_p': 1, 'repetition_penalty': 1}, 'project_id': 'f7312b11-b2dc-4581-b321-11515293a1f1', 'space_id': None}
Pass model as E.g. For 'Huggingface' inference endpoints pass in completion(model='huggingface/starcoder',..)
Learn more: https://docs.litellm.ai/docs/providers
(.agenticApproach) paarttipaa@Paarttipaabhalajis-MacBook-Pro BCK_Log_SLC_Code_Explanation_Project %
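
From the error text, litellm seems to want a provider-prefixed model string (like the huggingface/starcoder example it prints), not a LangChain WatsonxLLM object. As a sanity check, here is a minimal sketch of the format as I understand it from the litellm provider docs; the watsonx/ prefix and the WATSONX_* environment variable names are my assumptions, not something I have verified:

import os
from litellm import completion

# Assumed environment variables for litellm's watsonx provider (unverified).
os.environ["WATSONX_URL"] = "https://us-south.ml.cloud.ibm.com"  # hypothetical region URL
os.environ["WATSONX_APIKEY"] = "<api-key>"
os.environ["WATSONX_PROJECT_ID"] = "<project-id>"

# "provider/model" string instead of a LangChain LLM object.
response = completion(
    model="watsonx/mistralai/mistral-large",
    messages=[{"role": "user", "content": "Hello"}],
)
print(response.choices[0].message.content)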
watsonx.py
from langchain_ibm import WatsonxLLM
import os
from dotenv import load_dotenv

class WatsonLLLM:
    def __init__(self):
        # Load environment variables
        load_dotenv()
        self.llm_mixtral_l = None  # Placeholder for the Mistral LLM
        self.modelConfig()  # Call the config method to initialize the LLMs

    def modelConfig(self):
        model_id_mistral_l = "mistralai/mistral-large"
        # model_id_llama_3 = "meta-llama/llama-3-405b-instruct"
        wml_url = os.getenv("WATSONX_URL")
        wml_api = os.getenv("WATSONX_APIKEY")
        wml_project_id = os.getenv("PROJECT_ID")

        # Parameters for Mistral
        parameters_mistral_l = {
            "decoding_method": "sample",
            "max_new_tokens": 1000,
            "temperature": 0.7,
            "top_k": 50,
            "top_p": 1,
            "repetition_penalty": 1,
        }

        # Create manager llm (Mistral)
        self.llm_mixtral_l = WatsonxLLM(
            model_id=model_id_mistral_l,
            url=wml_url,
            apikey=wml_api,
            params=parameters_mistral_l,
            project_id=wml_project_id,
        )
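
Since CrewAI routes model calls through litellm, I suspect the WatsonxLLM object above is what trips get_llm_provider. One alternative I am considering is CrewAI's own LLM wrapper with a provider-prefixed model string; this is a sketch assuming the LLM class that newer CrewAI releases expose, so the class and parameter names may not match my installed version:

# Hypothetical replacement for the WatsonxLLM object above, assuming
# crewai exposes an LLM wrapper that forwards to litellm (unverified).
from crewai import LLM

llm_mistral_l = LLM(
    model="watsonx/mistralai/mistral-large",  # litellm provider prefix + model id
    temperature=0.7,
    max_tokens=1000,
    top_p=1,
)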
Agent.py
from textwrap import dedent
from crewai import Agent
import watsonx

class backlog10Agents():
    def __init__(self):
        # Instantiate the Watson LLM wrapper class
        self.watson_llm = watsonx.WatsonLLLM()
        # Access the Mistral LLM
        self.mixtral_llm = self.watson_llm.llm_mixtral_l

    def fileRetriverAgent(self) -> Agent:
        # Creating the File Retriever Agent
        return Agent(
            role="File Data Retriever Agent",
            goal="Retrieve the list of Java files in a given directory, read their data one by one, and pass it as context to the other agents",
            backstory=(
                "You specialize in scanning directories to find files, retrieve their data, and return it."
            ),
            llm=self.mixtral_llm,
            allow_delegation=True,
            verbose=True,
            memory=False,
        )

    def Conditional_Matrix_generator(self) -> Agent:
        return Agent(
            role="Java Matrix Analyst",
            goal="Develop detailed conditional matrices for each Java method, aiding in the analysis of method flow and potential paths based on different conditions.",
            backstory=dedent(
                """You are a highly skilled software analyst with deep knowledge of Java programming.
                Your expertise lies in deconstructing complex Java methods and generating
                conditional matrices to represent logical flows. You have worked on numerous
                projects involving code analysis, helping developers optimize their
                conditional logic and understand the relationships between various code paths."""
            ),
            verbose=True,
            llm=self.mixtral_llm,
        )
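
For completeness, this is roughly how the agents get wired up; the Task below is a hypothetical stand-in for my real task definitions. The BadRequestError fires when the crew kicks off and litellm inspects the agent's llm:

# Hypothetical driver showing where the error surfaces.
from crewai import Crew, Task
from Agent import backlog10Agents

agents = backlog10Agents()
retriever = agents.fileRetriverAgent()

task = Task(
    description="List the Java files under ./src and return their contents.",  # stand-in task
    expected_output="File names with their source code.",
    agent=retriever,
)

crew = Crew(agents=[retriever], tasks=[task], verbose=True)
result = crew.kickoff()  # litellm.BadRequestError is raised here
print(result)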