I have used the following script to initialize the LLM using AWS Bedrock:
from llama_index.core.settings import Settings
from llama_index.llms.bedrock import Bedrock
from llama_index.embeddings.bedrock import BedrockEmbedding
from crewai import Agent, Crew, Process, Task, LLM

region_name = "us-east-1"
model = "bedrock/mistral.mixtral-8x7b-instruct-v0:1"
llm = LLM(Bedrock(model=model, region_name=region_name, temperature=0, max_tokens=4000, context_size=32000, timeout=240))
But I get the following error:
raise ValueError(f"Provider {provider_name} for model {model} is not supported")
ValueError: Provider bedrock/mistral for model bedrock/mistral.mixtral-8x7b-instruct-v0:1 is not supported
Can someone please help me debug this?
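From the traceback, my guess is that llama_index's Bedrock class derives the provider name from the part of the model string before the first dot, so the bedrock/ prefix (a LiteLLM naming convention) may be what it chokes on. A variant with the bare Bedrock model ID, which I have not fully verified yet:

# Unverified guess: pass the raw Bedrock model ID, without the
# "bedrock/" provider prefix, when constructing llama_index's Bedrock.
from llama_index.llms.bedrock import Bedrock

bedrock_llm = Bedrock(
    model="mistral.mixtral-8x7b-instruct-v0:1",  # no "bedrock/" prefix
    region_name="us-east-1",
    temperature=0,
    max_tokens=4000,
    context_size=32000,
    timeout=240,
)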
I even tried the following:
#from .crews.poem_crew.poem_crew import PoemCrew
from llama_index.core.settings import Settings
from llama_index.llms.bedrock import Bedrock
from llama_index.embeddings.bedrock import BedrockEmbedding
from crewai import Agent, Crew, Process, Task, LLM

region_name = "us-east-1"
model = "bedrock/mistral.mixtral-8x7b-instruct-v0:1"
#embed_model_name = "amazon.titan-embed-text-v2"
llm = LLM(model=model, region_name=region_name, temperature=0, max_tokens=4000, context_size=32000, timeout=240, verbose=True)
But this time I get a different error:
ERROR: LiteLLM call failed: litellm.APIConnectionError: BedrockException - {"Message":"User: arn:aws:sts::xxxxxxxxxxxxxxxx is not authorized to perform: bedrock:InvokeModel on resource: arn:aws:bedrock:us-west-2::foundation-model/mistral.mixtral-8x7b-instruct-v0:1 with an explicit deny in a service control policy"}
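What puzzles me here is that the denied resource ARN points at us-west-2 even though I passed region_name="us-east-1", so I suspect the region_name kwarg never reaches LiteLLM and it falls back to a default region. As a next step I plan to set the region through the standard AWS environment variables, which boto3 and LiteLLM do read; the explicit deny in the service control policy itself is something only our AWS admins can lift. A minimal sketch of that attempt, not verified end to end:

import os

# Export the region through the standard AWS environment variables
# before constructing the LLM, instead of relying on the region_name kwarg.
os.environ["AWS_DEFAULT_REGION"] = "us-east-1"   # read by boto3
os.environ["AWS_REGION_NAME"] = "us-east-1"      # read by LiteLLM

from crewai import LLM

llm = LLM(
    model="bedrock/mistral.mixtral-8x7b-instruct-v0:1",
    temperature=0,
    max_tokens=4000,
    timeout=240,
)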