In [ ]:
# !pip install trulens trulens-apps-langchain trulens-providers-litellm litellm==1.11.1 langchain==0.0.351
Setup
Import from LangChain and TruLens
In [ ]:
# Imports main tools:
# Imports from langchain to build app. You may need to install langchain first
# with the following:
# !pip install langchain>=0.0.170
from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate
from langchain.prompts.chat import ChatPromptTemplate
from langchain.prompts.chat import HumanMessagePromptTemplate
from trulens.core import Feedback
from trulens.core import TruSession
from trulens.apps.langchain import TruChain

session = TruSession()
session.reset_database()
Let's first test a direct call to Ollama
In [ ]:
from langchain.llms import Ollama
ollama = Ollama(base_url="http://localhost:11434", model="llama2")
print(ollama("why is the sky blue"))
Create Simple LLM Application
This example uses the LangChain framework and Ollama.
In [ ]:
full_prompt = HumanMessagePromptTemplate(
    prompt=PromptTemplate(
        template="Provide a helpful response with relevant background information for the following: {prompt}",
        input_variables=["prompt"],
    )
)

chat_prompt_template = ChatPromptTemplate.from_messages([full_prompt])

chain = LLMChain(llm=ollama, prompt=chat_prompt_template, verbose=True)
Send your first request
In [ ]:
prompt_input = "What is a good name for a store that sells colorful socks?"
In [ ]:
llm_response = chain(prompt_input)
display(llm_response)
Initialize Feedback Function(s)
In [ ]:
# Initialize LiteLLM-based feedback function collection class:
import litellm
from trulens.providers.litellm import LiteLLM

litellm.set_verbose = False

ollama_provider = LiteLLM(
    model_engine="ollama/llama2", api_base="http://localhost:11434"
)

# Define a relevance function using LiteLLM
relevance = Feedback(
    ollama_provider.relevance_with_cot_reasons
).on_input_output()
# By default this will check relevance on the main app input and main app
# output.
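If you want to target something other than the main input and output, TruLens selectors can be passed to the feedback explicitly. A minimal sketch, assuming the Select helper from trulens.core (the variable name relevance_explicit is just for illustration; this is equivalent to on_input_output() above):

from trulens.core import Select

# Explicitly select the record's main input and output for the feedback.
relevance_explicit = Feedback(
    ollama_provider.relevance_with_cot_reasons
).on(Select.RecordInput).on(Select.RecordOutput)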
In [ ]:
ollama_provider.relevance_with_cot_reasons(
    "What is a good name for a store that sells colorful socks?",
    "Great question! Naming a store that sells colorful socks can be a fun and creative process. Here are some suggestions to consider: SoleMates: This name plays on the idea of socks being your soul mate or partner in crime for the day. It is catchy and easy to remember, and it conveys the idea that the store offers a wide variety of sock styles and colors.",
)
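The *_with_cot_reasons feedback methods return both a score and the chain-of-thought reasoning. A sketch of unpacking the result (variable names here are just for illustration):

# Unpack the (score, reasons) tuple returned by the feedback method.
score, reasons = ollama_provider.relevance_with_cot_reasons(
    "What is a good name for a store that sells colorful socks?",
    "SoleMates: a catchy name that conveys a wide variety of sock styles and colors.",
)
print(score)    # relevance score in [0, 1]
print(reasons)  # dictionary containing the model's reasoning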
Instrument chain for logging with TruLens
In [ ]:
tru_recorder = TruChain(
    chain, app_name="Chain1_ChatApplication", feedbacks=[relevance]
)
In [ ]:
with tru_recorder as recording:
    llm_response = chain(prompt_input)

display(llm_response)
In [ ]:
session.get_records_and_feedback()[0]
Explore in a Dashboard
In [ ]:
from trulens.dashboard import run_dashboard
run_dashboard(session) # open a local streamlit app to explore
# stop_dashboard(session) # stop if needed
Or view results directly in your notebook
In [ ]:
session.get_records_and_feedback()[0]
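An aggregate view per app version is also available in the notebook; a sketch assuming TruSession's get_leaderboard method:

# Summarize feedback scores across all recorded app versions.
session.get_leaderboard(app_ids=[])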