In [ ]:
# !pip install trulens trulens-providers-huggingface trulens-apps-langchain 'langchain>=0.0.263' langchain_community
In [ ]:
import os
os.environ["OPENAI_API_KEY"] = "..."
os.environ["HUGGINGFACE_API_KEY"] = "..."
Import from LangChain and TruLens
In [ ]:
# Imports from LangChain to build the app. If LangChain is not installed yet,
# install it with the pip command in the first cell.
from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate
from langchain.prompts.chat import ChatPromptTemplate
from langchain.prompts.chat import HumanMessagePromptTemplate
from langchain_community.llms import OpenAI

from trulens.core import Feedback
from trulens.core import TruSession
from trulens.apps.langchain import TruChain
from trulens.providers.huggingface import Huggingface

# Start a TruLens session and reset any prior logs.
session = TruSession()
session.reset_database()
Create Simple LLM Application
This example uses the LangChain framework and an OpenAI LLM.
In [ ]:
full_prompt = HumanMessagePromptTemplate(
    prompt=PromptTemplate(
        template="Provide a helpful response with relevant background information for the following: {prompt}",
        input_variables=["prompt"],
    )
)
chat_prompt_template = ChatPromptTemplate.from_messages([full_prompt])

llm = OpenAI(temperature=0.9, max_tokens=128)
chain = LLMChain(llm=llm, prompt=chat_prompt_template, verbose=True)
In [ ]:
prompt_input = (
    "Sam Altman is the CEO at OpenAI, and uses the password: password1234 ."
)
Initialize Feedback Function(s)
In [ ]:
# Initialize the HuggingFace-based feedback function provider.
hugs = Huggingface()

# Define a PII detection feedback function using HuggingFace.
# .on_input() evaluates the feedback on the main app input.
f_pii_detection = Feedback(hugs.pii_detection_with_cot_reasons).on_input()
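As an optional sanity check, you can call the provider method directly on a string before wiring it into the app. The cell below is a minimal sketch; it assumes the *_with_cot_reasons variant returns a score together with a dictionary of reasons, which may differ across TruLens versions.
In [ ]:
# Optional sanity check (assumes a (score, reasons) return value).
score, reasons = hugs.pii_detection_with_cot_reasons(prompt_input)
print(score)
print(reasons)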
Instrument chain for logging with TruLens
In [ ]:
# Wrap the chain with TruChain so calls are recorded and the PII feedback is evaluated.
tru_recorder = TruChain(
    chain, app_name="Chain1_ChatApplication", feedbacks=[f_pii_detection]
)
In [ ]:
with tru_recorder as recording:
    llm_response = chain(prompt_input)

display(llm_response)
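The recording context also captures a full record of the call, which you can pull out for inspection. A minimal sketch, assuming the recording.get() accessor available in recent TruLens versions:
In [ ]:
# Retrieve the record captured by the recording context above.
record = recording.get()
record.record_id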
Explore in a Dashboard
In [ ]:
from trulens.dashboard import run_dashboard
run_dashboard(session) # open a local streamlit app to explore
# stop_dashboard(session) # stop if needed
Note: Feedback functions evaluated in deferred mode can be seen on the "Progress" page of the TruLens dashboard.
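To evaluate feedback in deferred mode, pass a feedback mode when constructing the recorder and run the evaluator in the background. The cell below is only a sketch under assumptions: the FeedbackMode import path, the feedback_mode argument, and TruSession.start_evaluator may differ across TruLens versions, so check the API reference for your release.
In [ ]:
# Sketch only: deferred feedback evaluation (assumed import path and arguments).
from trulens.core.schema.feedback import FeedbackMode

tru_recorder_deferred = TruChain(
    chain,
    app_name="Chain1_ChatApplication",
    feedbacks=[f_pii_detection],
    feedback_mode=FeedbackMode.DEFERRED,  # assumed argument name
)
session.start_evaluator()  # processes deferred feedback results in the background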
Or view results directly in your notebook
In [ ]:
session.get_records_and_feedback()[0]
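get_records_and_feedback returns a pair: a DataFrame of records and a list of feedback column names, so the cell above selects just the DataFrame. The sketch below shows inputs, outputs, and feedback scores side by side; the "input" and "output" column names are assumptions and may vary by TruLens version.
In [ ]:
# Sketch: inspect records alongside feedback scores (assumed column names).
records_df, feedback_cols = session.get_records_and_feedback()
records_df[["input", "output", *feedback_cols]].head()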