import os

from anthropic import AnthropicBedrock
from langchain_aws.chat_models import ChatBedrockConverse
from langchain_aws.llms.bedrock import BedrockLLM

# Initialize the streaming Bedrock chat model.
# Credentials, region, provider, and model are all taken from the environment
# so deployments can switch configuration without code changes.
bedrock_llm = ChatBedrockConverse(
    aws_access_key_id=os.environ.get("AWS_ACCESS_KEY_ID"),
    aws_secret_access_key=os.environ.get("AWS_SECRET_ACCESS_KEY"),
    region_name=os.environ.get("AWS_DEFAULT_REGION", "eu-west-1"),
    provider=os.environ.get("PROVIDER", "mistral"),
    model_id=os.environ.get("MODEL_ID", "mistral.mistral-large-2402-v1:0"),  # or your preferred Bedrock model
    # float(), NOT eval(): eval() would execute arbitrary Python read from the
    # TEMPERATURE environment variable (code-injection risk). float() parses
    # the same numeric strings and raises ValueError on anything else.
    temperature=float(os.environ.get("TEMPERATURE", "0.7")),
)