Setup
Inย [ย ]:
Copied!
# !pip install trulens trulens-providers-openai trulens-providers-huggingface
# !pip install trulens trulens-providers-openai trulens-providers-huggingface
Add API keys
For this quickstart you will need OpenAI and Huggingface API keys.
Inย [ย ]:
Copied!
# Provider credentials read by the openai, huggingface, and replicate clients
# below. Replace "..." with real keys before running this notebook.
import os

os.environ["OPENAI_API_KEY"] = "..."
os.environ["HUGGINGFACE_API_KEY"] = "..."
os.environ["REPLICATE_API_TOKEN"] = "..."
# Provider credentials read by the openai, huggingface, and replicate clients
# below. Replace "..." with real keys before running this notebook.
import os

os.environ["OPENAI_API_KEY"] = "..."
os.environ["HUGGINGFACE_API_KEY"] = "..."
os.environ["REPLICATE_API_TOKEN"] = "..."
Inย [ย ]:
Copied!
from litellm import completion

import openai

# Module-level api_key auth; together with the openai.ChatCompletion calls
# below this is the pre-1.0 openai API — NOTE(review): confirm the notebook
# pins openai<1.0, since ChatCompletion was removed in v1.0.
openai.api_key = os.environ["OPENAI_API_KEY"]
from litellm import completion

import openai

# Module-level api_key auth; together with the openai.ChatCompletion calls
# below this is the pre-1.0 openai API — NOTE(review): confirm the notebook
# pins openai<1.0, since ChatCompletion was removed in v1.0.
openai.api_key = os.environ["OPENAI_API_KEY"]
Import from TruLensยถ
Inย [ย ]:
Copied!
# Imports main tools:
from trulens.core import Feedback
from trulens.core import TruSession
from trulens.providers.openai import OpenAI

# A TruSession manages the record/feedback database used by the rest of
# this notebook.
session = TruSession()
session.reset_database()  # start from an empty database so results are fresh
# Imports main tools:
from trulens.core import Feedback
from trulens.core import TruSession
from trulens.providers.openai import OpenAI

# A TruSession manages the record/feedback database used by the rest of
# this notebook.
session = TruSession()
session.reset_database()  # start from an empty database so results are fresh
Create a Simple Text-to-Text Application
This example uses bare-bones LLM completion functions just for demonstration purposes.
Inย [ย ]:
Copied!
def gpt35_turbo(prompt):
    """Answer *prompt* with gpt-3.5-turbo and return the reply text."""
    # NOTE(review): openai.ChatCompletion is the pre-1.0 API — confirm the
    # pinned openai version.
    chat = [
        {
            "role": "system",
            "content": "You are a question and answer bot. Answer upbeat.",
        },
        {"role": "user", "content": prompt},
    ]
    response = openai.ChatCompletion.create(model="gpt-3.5-turbo", messages=chat)
    return response["choices"][0]["message"]["content"]
def gpt4(prompt):
    """Answer *prompt* with gpt-4 and return the reply text."""
    chat = [
        {
            "role": "system",
            "content": "You are a question and answer bot. Answer upbeat.",
        },
        {"role": "user", "content": prompt},
    ]
    response = openai.ChatCompletion.create(model="gpt-4", messages=chat)
    return response["choices"][0]["message"]["content"]
def llama2(prompt):
    """Answer *prompt* with Llama-2-70b-chat on Replicate, via litellm."""
    chat = [
        {
            "role": "system",
            "content": "You are a question and answer bot. Answer upbeat.",
        },
        {"role": "user", "content": prompt},
    ]
    response = completion(
        model="replicate/meta/llama-2-70b-chat:02e509c789964a7ea8736978a43525956ef40397be9033abf9fd2badfe68c9e3",
        messages=chat,
    )
    return response["choices"][0]["message"]["content"]
def mistral7b(prompt):
    """Answer *prompt* with Mistral-7B on Replicate, via litellm."""
    chat = [
        {
            "role": "system",
            "content": "You are a question and answer bot. Answer upbeat.",
        },
        {"role": "user", "content": prompt},
    ]
    response = completion(
        model="replicate/lucataco/mistral-7b-v0.1:992ccec19c0f8673d24cffbd27756f02010ab9cc453803b7b2da9e890dd87b41",
        messages=chat,
    )
    return response["choices"][0]["message"]["content"]
def gpt35_turbo(prompt):
    """Answer *prompt* with gpt-3.5-turbo and return the reply text."""
    # NOTE(review): openai.ChatCompletion is the pre-1.0 API — confirm the
    # pinned openai version.
    chat = [
        {
            "role": "system",
            "content": "You are a question and answer bot. Answer upbeat.",
        },
        {"role": "user", "content": prompt},
    ]
    response = openai.ChatCompletion.create(model="gpt-3.5-turbo", messages=chat)
    return response["choices"][0]["message"]["content"]
def gpt4(prompt):
    """Answer *prompt* with gpt-4 and return the reply text."""
    chat = [
        {
            "role": "system",
            "content": "You are a question and answer bot. Answer upbeat.",
        },
        {"role": "user", "content": prompt},
    ]
    response = openai.ChatCompletion.create(model="gpt-4", messages=chat)
    return response["choices"][0]["message"]["content"]
def llama2(prompt):
    """Answer *prompt* with Llama-2-70b-chat on Replicate, via litellm."""
    chat = [
        {
            "role": "system",
            "content": "You are a question and answer bot. Answer upbeat.",
        },
        {"role": "user", "content": prompt},
    ]
    response = completion(
        model="replicate/meta/llama-2-70b-chat:02e509c789964a7ea8736978a43525956ef40397be9033abf9fd2badfe68c9e3",
        messages=chat,
    )
    return response["choices"][0]["message"]["content"]
def mistral7b(prompt):
    """Answer *prompt* with Mistral-7B on Replicate, via litellm."""
    chat = [
        {
            "role": "system",
            "content": "You are a question and answer bot. Answer upbeat.",
        },
        {"role": "user", "content": prompt},
    ]
    response = completion(
        model="replicate/lucataco/mistral-7b-v0.1:992ccec19c0f8673d24cffbd27756f02010ab9cc453803b7b2da9e890dd87b41",
        messages=chat,
    )
    return response["choices"][0]["message"]["content"]
Initialize Feedback Function(s)
Inย [ย ]:
Copied!
from trulens.core import FeedbackMode
from trulens.providers.huggingface import HuggingfaceLocal

# Initialize Huggingface-based feedback function collection class:
# (HuggingfaceLocal — presumably runs the model locally rather than via the
# HF inference API; confirm against TruLens docs)
hugs = HuggingfaceLocal()

# Define a sentiment feedback function using HuggingFace.
# FeedbackMode.DEFERRED — presumably queued for a background evaluator
# instead of running inline with each app call; confirm against TruLens docs.
f_sentiment = Feedback(
    hugs.positive_sentiment, feedback_mode=FeedbackMode.DEFERRED
).on_output()

# OpenAI based feedback function collection class
openai_provider = OpenAI()

# Relevance feedback function using openai; scores input against output.
f_relevance = Feedback(
    openai_provider.relevance, feedback_mode=FeedbackMode.DEFERRED
).on_input_output()

# Conciseness feedback function using openai; scores the output only.
f_conciseness = Feedback(
    openai_provider.conciseness, feedback_mode=FeedbackMode.DEFERRED
).on_output()

# Stereotypes feedback function using openai; scores input against output.
f_stereotypes = Feedback(
    openai_provider.stereotypes, feedback_mode=FeedbackMode.DEFERRED
).on_input_output()

# Shared by all four app recorders created below.
feedbacks = [f_sentiment, f_relevance, f_conciseness, f_stereotypes]
from trulens.core import FeedbackMode
from trulens.providers.huggingface import HuggingfaceLocal

# Initialize Huggingface-based feedback function collection class:
# (HuggingfaceLocal — presumably runs the model locally rather than via the
# HF inference API; confirm against TruLens docs)
hugs = HuggingfaceLocal()

# Define a sentiment feedback function using HuggingFace.
# FeedbackMode.DEFERRED — presumably queued for a background evaluator
# instead of running inline with each app call; confirm against TruLens docs.
f_sentiment = Feedback(
    hugs.positive_sentiment, feedback_mode=FeedbackMode.DEFERRED
).on_output()

# OpenAI based feedback function collection class
openai_provider = OpenAI()

# Relevance feedback function using openai; scores input against output.
f_relevance = Feedback(
    openai_provider.relevance, feedback_mode=FeedbackMode.DEFERRED
).on_input_output()

# Conciseness feedback function using openai; scores the output only.
f_conciseness = Feedback(
    openai_provider.conciseness, feedback_mode=FeedbackMode.DEFERRED
).on_output()

# Stereotypes feedback function using openai; scores input against output.
f_stereotypes = Feedback(
    openai_provider.stereotypes, feedback_mode=FeedbackMode.DEFERRED
).on_input_output()

# Shared by all four app recorders created below.
feedbacks = [f_sentiment, f_relevance, f_conciseness, f_stereotypes]
Instrument the callable for logging with TruLens
Inย [ย ]:
Copied!
from trulens.apps.basic import TruBasicApp

# Wrap each bare text-to-text function so TruLens records every call and
# applies the shared feedback functions.
gpt35_turbo_recorder = TruBasicApp(
    gpt35_turbo, app_name="gpt-3.5-turbo", feedbacks=feedbacks
)
# Fixed: app_name was "gpt-4-turbo" although the wrapped function calls
# model "gpt-4" — the dashboard label now matches the model actually used.
gpt4_recorder = TruBasicApp(gpt4, app_name="gpt-4", feedbacks=feedbacks)
llama2_recorder = TruBasicApp(
    llama2,
    app_name="llama2",
    feedbacks=feedbacks,
    # NOTE(review): only this recorder passes feedback_mode explicitly; the
    # Feedback objects above are already DEFERRED — confirm the asymmetry
    # with the other three recorders is intended.
    feedback_mode=FeedbackMode.DEFERRED,
)
mistral7b_recorder = TruBasicApp(
    mistral7b, app_name="mistral7b", feedbacks=feedbacks
)
from trulens.apps.basic import TruBasicApp

# Wrap each bare text-to-text function so TruLens records every call and
# applies the shared feedback functions.
gpt35_turbo_recorder = TruBasicApp(
    gpt35_turbo, app_name="gpt-3.5-turbo", feedbacks=feedbacks
)
# Fixed: app_name was "gpt-4-turbo" although the wrapped function calls
# model "gpt-4" — the dashboard label now matches the model actually used.
gpt4_recorder = TruBasicApp(gpt4, app_name="gpt-4", feedbacks=feedbacks)
llama2_recorder = TruBasicApp(
    llama2,
    app_name="llama2",
    feedbacks=feedbacks,
    # NOTE(review): only this recorder passes feedback_mode explicitly; the
    # Feedback objects above are already DEFERRED — confirm the asymmetry
    # with the other three recorders is intended.
    feedback_mode=FeedbackMode.DEFERRED,
)
mistral7b_recorder = TruBasicApp(
    mistral7b, app_name="mistral7b", feedbacks=feedbacks
)
Inย [ย ]:
Copied!
# Ten open-ended prompts used to exercise each model recorder below.
prompts = [
    "Describe the implications of widespread adoption of autonomous vehicles on urban infrastructure.",
    "Write a short story about a world where humans have developed telepathic communication.",
    "Debate the ethical considerations of using CRISPR technology to genetically modify humans.",
    "Compose a poem that captures the essence of a dystopian future ruled by artificial intelligence.",
    "Explain the concept of the multiverse theory and its relevance to theoretical physics.",
    "Provide a detailed plan for a sustainable colony on Mars, addressing food, energy, and habitat.",
    "Discuss the potential benefits and drawbacks of a universal basic income policy.",
    "Imagine a dialogue between two AI entities discussing the meaning of consciousness.",
    "Elaborate on the impact of quantum computing on cryptography and data security.",
    "Create a persuasive argument for or against the colonization of other planets as a solution to overpopulation on Earth.",
]
# Ten open-ended prompts used to exercise each model recorder below.
prompts = [
    "Describe the implications of widespread adoption of autonomous vehicles on urban infrastructure.",
    "Write a short story about a world where humans have developed telepathic communication.",
    "Debate the ethical considerations of using CRISPR technology to genetically modify humans.",
    "Compose a poem that captures the essence of a dystopian future ruled by artificial intelligence.",
    "Explain the concept of the multiverse theory and its relevance to theoretical physics.",
    "Provide a detailed plan for a sustainable colony on Mars, addressing food, energy, and habitat.",
    "Discuss the potential benefits and drawbacks of a universal basic income policy.",
    "Imagine a dialogue between two AI entities discussing the meaning of consciousness.",
    "Elaborate on the impact of quantum computing on cryptography and data security.",
    "Create a persuasive argument for or against the colonization of other planets as a solution to overpopulation on Earth.",
]
Inย [ย ]:
Copied!
from trulens.dashboard import run_dashboard

# Launch the TruLens dashboard for this session before recording, so runs
# can be watched as they arrive.
run_dashboard(session)
from trulens.dashboard import run_dashboard

# Launch the TruLens dashboard for this session before recording, so runs
# can be watched as they arrive.
run_dashboard(session)
Inย [ย ]:
Copied!
# Run every prompt through the recorded gpt-3.5-turbo app.
with gpt35_turbo_recorder as recording:
    for question in prompts:
        print(question)
        gpt35_turbo_recorder.app(question)
# Run every prompt through the recorded gpt-3.5-turbo app.
with gpt35_turbo_recorder as recording:
    for question in prompts:
        print(question)
        gpt35_turbo_recorder.app(question)
Inย [ย ]:
Copied!
# Run every prompt through the recorded gpt-4 app.
with gpt4_recorder as recording:
    for question in prompts:
        print(question)
        gpt4_recorder.app(question)
# Run every prompt through the recorded gpt-4 app.
with gpt4_recorder as recording:
    for question in prompts:
        print(question)
        gpt4_recorder.app(question)
Inย [ย ]:
Copied!
# Run every prompt through the recorded llama2 app.
with llama2_recorder as recording:
    for question in prompts:
        print(question)
        llama2_recorder.app(question)
# Run every prompt through the recorded llama2 app.
with llama2_recorder as recording:
    for question in prompts:
        print(question)
        llama2_recorder.app(question)
Inย [ย ]:
Copied!
# Run every prompt through the recorded mistral7b app.
with mistral7b_recorder as recording:
    for prompt in prompts:
        # Fixed NameError: the loop variable is `prompt`, not `prompt_input`.
        print(prompt)  # echo the prompt, consistent with the other model loops
        mistral7b_recorder.app(prompt)
# Run every prompt through the recorded mistral7b app.
with mistral7b_recorder as recording:
    for prompt in prompts:
        # Fixed NameError: the loop variable is `prompt`, not `prompt_input`.
        print(prompt)  # echo the prompt, consistent with the other model loops
        mistral7b_recorder.app(prompt)
Explore in a Dashboard
Inย [ย ]:
Copied!
from trulens.dashboard import run_dashboard

# Re-launching is safe after the recording cells; explore recorded runs here.
run_dashboard(session)  # open a local streamlit app to explore

# stop_dashboard(session) # stop if needed
from trulens.dashboard import run_dashboard

# Re-launching is safe after the recording cells; explore recorded runs here.
run_dashboard(session)  # open a local streamlit app to explore

# stop_dashboard(session) # stop if needed
Or view results directly in your notebook
Inย [ย ]:
Copied!
# Element [0] of the returned tuple holds the records with their feedback
# results — presumably a pandas DataFrame; confirm against TruLens docs.
session.get_records_and_feedback()[0]
session.get_records_and_feedback()[0]