diff --git a/Part 2/config.ini b/Part 2/config.ini
new file mode 100644
index 0000000000000000000000000000000000000000..235a33263ceb6809cb68a8b4e9fe0f00aea6d3d9
--- /dev/null
+++ b/Part 2/config.ini
@@ -0,0 +1,10 @@
+[aws]
+aws_access_key_id =
+aws_secret_access_key =
+region =
+
+[opensearch]
+endpoint =
+index_name =
+
+
diff --git a/Part 2/create_instance.py b/Part 2/create_instance.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0bdc80625e6304942a0d765907c2f590c61d4dfe 100644
--- a/Part 2/create_instance.py
+++ b/Part 2/create_instance.py
@@ -0,0 +1,43 @@
+import boto3
+import base64
+
+# Function to read the content of config.ini
+def get_config_content(filepath):
+    with open(filepath, 'r') as file:
+        return file.read()
+
+# Load the config content
+config_content = get_config_content('config.ini')
+
+
+ec2 = boto3.resource('ec2')
+
+
+
+# User data script that's executed when the instance starts
+script = f"""#!/bin/bash
+cat <<EOT > /home/ubuntu/chatbot-lab/Part\ 2/config.ini
+{config_content}
+EOT
+source /home/ubuntu/chatbotlab/bin/activate
+## Run the application
+cd /home/ubuntu/chatbot-lab/Part\ 2
+streamlit run main.py
+"""
+
+encoded_script = base64.b64encode(script.encode()).decode('utf-8')
+
+# Create a new EC2 instance
+instance = ec2.create_instances(
+    ImageId='ami-03a1012f7ddc87219',
+    MinCount=1,
+    MaxCount=1,
+    InstanceType='t2.micro',
+    KeyName='group-14-key-pair',
+    SecurityGroupIds=['sg-06f3ca7153db92958'],
+    UserData=encoded_script
+)
+
+print("Instance created with ID:", instance[0].id)
+
+
diff --git a/Part 2/main.py b/Part 2/main.py
index 3448e4a4d5be5019f80a7a82f3dbd306d99d5450..d6722eaff15f91d3fbc682686b9767f70d9f78f0 100644
--- a/Part 2/main.py
+++ b/Part 2/main.py
@@ -1,17 +1,31 @@
 import boto3
 import streamlit as st
-## Bedrock
-from langchain.llms.bedrock import Bedrock
-## prompt and chain
-from langchain.chains import RetrievalQA
+
+
+
 from langchain_community.embeddings import BedrockEmbeddings
 from langchain_community.chat_models import BedrockChat
 from opensearchpy import OpenSearch, RequestsHttpConnection, AWSV4SignerAuth
-from langchain import PromptTemplate
-import argparse
+from langchain_core.prompts import PromptTemplate
+import configparser
+
+def load_config():
+    config = configparser.ConfigParser()
+    config.read('config.ini')
+    return config
+
+config = load_config()
+
+aws_access_key_id = config.get('aws', 'aws_access_key_id')
+aws_secret_access_key = config.get('aws', 'aws_secret_access_key')
+region = config.get('aws', 'region')
+endpoint = config.get('opensearch', 'endpoint')
+index_name = config.get('opensearch', 'index_name')
 
 # Embeddings Client
-bedrock_client = boto3.client(service_name="bedrock-runtime")
+bedrock_client = boto3.client(service_name="bedrock-runtime",
+                              aws_access_key_id=aws_access_key_id,
+                              aws_secret_access_key=aws_secret_access_key, region_name=region)
 
 # configuring streamlit page settings
 st.set_page_config(
@@ -29,7 +43,12 @@ st.title("Chat with your lecture")
 
 # OpenSearch Client
 def ospensearch_client(endpoint):
-    awsauth = AWSV4SignerAuth(boto3.Session().get_credentials(), 'us-east-1', 'aoss')
+    session = boto3.Session(
+        aws_access_key_id=aws_access_key_id,
+        aws_secret_access_key=aws_secret_access_key,
+        region_name=region
+    )
+    awsauth = AWSV4SignerAuth(session.get_credentials(), region, 'aoss')
     client = OpenSearch(
         hosts=[{'host': endpoint, 'port': 443}],
         http_auth=awsauth,
@@ -81,12 +100,12 @@ def prepare_prompt(question, context):
     return prompt_formatted_str
 
 def generate_answer(prompt):
-    model = BedrockChat(model_id="anthropic.claude-v2", model_kwargs={"temperature": 0.1})
+    model = BedrockChat(model_id="anthropic.claude-v2", model_kwargs={"temperature": 0.1}, client=bedrock_client)
     answer = model.invoke(prompt)
     return answer
 
-def main(endpoint, index_name):
+def main():
 
     oss_client= ospensearch_client(endpoint)
 
 
@@ -125,11 +144,6 @@ def main(endpoint, index_name):
             st.markdown(message["content"])
 
 if __name__== "__main__":
-    # Argument parsing
-    parser = argparse.ArgumentParser(description='Configure endpoint and index name for the lecture chat application.')
-    parser.add_argument('--endpoint', type=str, help='The endpoint for the OpenSearch service.')
-    parser.add_argument('--index_name', type=str, help='The index name for storing embeddings.')
-    args = parser.parse_args()
-    main(args.endpoint, args.index_name)
+    main()
 
 
diff --git a/Part 2/run_app.py b/Part 2/run_app.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000