Jul 18, 2024
All dependencies are listed in requirements.txt for easy library management. Start by loading the OpenAI LLM; the temperature parameter controls how random or creative the model's responses are, and 0.6 is a middle ground between deterministic and creative output.

from langchain.llms import OpenAI
import os

openai_key = os.environ['OPENAI_API_KEY']
llm = OpenAI(api_key=openai_key, temperature=0.6)
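For reference, a minimal requirements.txt covering the libraries used in this walkthrough could simply list the package names (versions are left unpinned here, since the original does not specify them):

langchain
openai
streamlit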
PromptTemplate lets you parameterize a prompt with input variables and fill them in at call time:

from langchain.prompts import PromptTemplate

prompt = PromptTemplate(
    input_variables=['country'],
    template='Tell me the capital of {country}'
)

# Fill in the variable and send the formatted prompt to the LLM
formatted_prompt = prompt.format(country='India')
print(llm.predict(formatted_prompt))
Instead of formatting prompts by hand, use LLMChain to seamlessly combine the LLM and the prompt:

from langchain.chains import LLMChain

chain = LLMChain(llm=llm, prompt=prompt)
response = chain.run('India')  # fills the {country} variable in the template
print(response)
For multi-step workflows, use SequentialChain to handle multiple steps or transformations, feeding the output of one chain into the next:

from langchain.chains import SequentialChain

combined_chain = SequentialChain(
    chains=[capital_chain, famous_chain],
    ...
)
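The snippet above elides the sub-chain definitions. One possible sketch, reusing llm and the imports from earlier and assuming two LLMChains named capital_chain and famous_chain whose prompt wording and output keys ('capital', 'places') are illustrative choices rather than taken from the original:

# First step: country -> capital
capital_prompt = PromptTemplate(
    input_variables=['country'],
    template='Tell me the capital of {country}'
)
capital_chain = LLMChain(llm=llm, prompt=capital_prompt, output_key='capital')

# Second step: capital -> famous places
famous_prompt = PromptTemplate(
    input_variables=['capital'],
    template='Suggest some famous places to visit in {capital}'
)
famous_chain = LLMChain(llm=llm, prompt=famous_prompt, output_key='places')

combined_chain = SequentialChain(
    chains=[capital_chain, famous_chain],
    input_variables=['country'],
    output_variables=['capital', 'places']
)

# Returns a dict containing both intermediate and final outputs
print(combined_chain({'country': 'India'}))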
For conversation modeling, switch to ChatOpenAI, which works with system, human, and AI messages instead of plain strings:

from langchain.chat_models import ChatOpenAI
from langchain.schema import HumanMessage, SystemMessage, AIMessage

chat_llm = ChatOpenAI(api_key=openai_key)
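A minimal sketch of a chat call with a system and a human message (the message text below is an assumption, not from the original):

messages = [
    SystemMessage(content='You are a helpful assistant that answers concisely.'),
    HumanMessage(content='What is the capital of India?')
]
ai_response = chat_llm(messages)  # returns an AIMessage
print(ai_response.content)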
To post-process the model's raw text, subclass BaseOutputParser and attach it to the prompt:

from langchain.schema import BaseOutputParser

class CustomOutputParser(BaseOutputParser):
    def parse(self, text):
        # Split the comma-separated answer into a list of strings
        return text.split(',')

prompt = PromptTemplate(
    input_variables=['text'],
    template='Provide comma-separated values for: {text}',
    output_parser=CustomOutputParser()
)
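One way to apply the parser is to run the formatted prompt through the LLM and parse the raw text afterwards; a small sketch (the input value 'five programming languages' is only illustrative):

raw_output = llm.predict(prompt.format(text='five programming languages'))
print(CustomOutputParser().parse(raw_output))  # e.g. a Python list of strings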
Finally, put the pieces together in a simple Streamlit front end:

# app.py
import streamlit as st
from langchain import OpenAI
from langchain.prompts import PromptTemplate

prompt = PromptTemplate(
    input_variables=['question'],
    template='Answer the question: {question}'
)
llm = OpenAI(api_key='your-openai-key')

st.title('Q&A Chatbot')
question = st.text_input('Ask a question')

if st.button('Submit'):
    # Format the question into the prompt and display the model's answer
    response = llm.predict(prompt.format(question=question))
    st.write(response)
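Launch the app locally with Streamlit's standard run command:

streamlit run app.py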