❄️ Snowflake with Key-Pair Authentication¶
In this quickstart you will learn to build and evaluate a simple LLM app with Snowflake Cortex, and connect to Snowflake with key-pair authentication.
Note, you'll need to have an active Snowflake account to run Cortex LLM functions from Snowflake's data warehouse.
This example also assumes you have properly set up key-pair authentication for your Snowflake account, and stored the private key file path as a variable in your environment. If you have not, start with following the directions linked for key-pair authentication above.
# !pip install trulens trulens-providers-cortex
# !conda install -c https://repo.anaconda.com/pkgs/snowflake snowflake-snowpark-python snowflake-ml-python snowflake.core
from dotenv import load_dotenv

# Pull connection settings (account, user, key file path, ...) from a .env file.
load_dotenv()

import os

from snowflake.snowpark import Session

# Map each Snowpark connection parameter to the environment variable holding it.
_env_vars = {
    "account": "SNOWFLAKE_ACCOUNT",
    "user": "SNOWFLAKE_USER",
    "private_key_file": "SNOWFLAKE_PRIVATE_KEY_FILE",
    "role": "SNOWFLAKE_ROLE",
    "database": "SNOWFLAKE_DATABASE",
    "schema": "SNOWFLAKE_SCHEMA",
    "warehouse": "SNOWFLAKE_WAREHOUSE",
}
connection_params = {param: os.environ[var] for param, var in _env_vars.items()}

# Create a Snowflake session
snowflake_session = Session.builder.configs(connection_params).create()
Create simple LLM app¶
from snowflake.cortex import Complete
from trulens.apps.custom import instrument
class LLM:
    """Minimal LLM app: a single instrumented completion call via Snowflake Cortex."""

    def __init__(self, model="snowflake-arctic"):
        # Cortex model name used for every completion request.
        self.model = model

    @instrument
    def complete(self, prompt):
        # Forward the prompt to Snowflake Cortex and return its completion.
        answer = Complete(self.model, prompt)
        return answer
llm = LLM()
Set up logging to Snowflake¶
Load the private key from the environment variables, and use it to create an engine.
The engine is then passed to TruSession()
to connect to TruLens.
from trulens.core import TruSession
from sqlalchemy import create_engine
from snowflake.sqlalchemy import URL
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization

# Load the PEM private key from the file configured earlier
# (SNOWFLAKE_PRIVATE_KEY_FILE) — the same variable used in
# `connection_params` above, and the setup described at the top of this
# quickstart. Previously this cell read the raw key from a different
# variable (SNOWFLAKE_PRIVATE_KEY), which the setup never defines.
with open(os.environ["SNOWFLAKE_PRIVATE_KEY_FILE"], "rb") as key_file:
    p_key = serialization.load_pem_private_key(
        key_file.read(),
        password=None,  # assumes an unencrypted private key — TODO confirm
        backend=default_backend(),
    )

# Re-serialize the key as DER/PKCS8, the format the Snowflake driver expects
# for the `private_key` connect argument.
pkb = p_key.private_bytes(
    encoding=serialization.Encoding.DER,
    format=serialization.PrivateFormat.PKCS8,
    encryption_algorithm=serialization.NoEncryption(),
)

# Build a SQLAlchemy engine for the TruLens event database in Snowflake,
# authenticating with the DER-encoded private key.
engine = create_engine(
    URL(
        account=os.environ["SNOWFLAKE_ACCOUNT"],
        warehouse=os.environ["SNOWFLAKE_WAREHOUSE"],
        database=os.environ["SNOWFLAKE_DATABASE"],
        schema=os.environ["SNOWFLAKE_SCHEMA"],
        user=os.environ["SNOWFLAKE_USER"],
    ),
    connect_args={
        "private_key": pkb,
    },
)

# Point TruLens logging at the Snowflake-backed engine.
session = TruSession(database_engine=engine)
Set up feedback functions.¶
Here we'll test answer relevance and coherence.
import numpy as np
import snowflake.connector
from trulens.core import Feedback
from trulens.core import Select
from trulens.providers.cortex import Cortex

# Initialize the Cortex-based feedback provider, reusing the key-pair
# connection parameters defined above.
provider = Cortex(
    snowflake.connector.connect(**connection_params),
    model_engine="snowflake-arctic",
)

# Question/answer relevance between overall question and answer.
f_answer_relevance = (
    Feedback(provider.relevance_with_cot_reasons, name="Answer Relevance")
    .on_input_output()
)

# Relevance of the context to the question. This feedback previously shared
# the name "Answer Relevance" with the feedback above; feedbacks need
# distinct names so their results don't collide on the leaderboard.
f_context_relevance = (
    Feedback(
        provider.context_relevance_with_cot_reasons, name="Context Relevance"
    )
    .on_input_output()
)

# Coherence of the generated answer on its own.
f_coherence = Feedback(
    provider.coherence_with_cot_reasons, name="coherence"
).on_output()

# Smoke-test the provider with one direct call before wiring up the app.
provider.relevance_with_cot_reasons("what color is a monkey?", "abacadbra")
Construct the app¶
Wrap the custom LLM app with TruCustomApp, and add the list of feedback functions for evaluation.
from trulens.apps.custom import TruCustomApp

# Register the LLM app with TruLens; each feedback below is evaluated on
# every recorded invocation of the app.
feedback_functions = [
    f_answer_relevance,
    f_context_relevance,
    f_coherence,
]
tru_llm = TruCustomApp(llm, app_id="Arctic", feedbacks=feedback_functions)
Run the app¶
Use tru_llm
as a context manager for the custom LLM app.
# Record one call to the app; the registered feedbacks run on the record.
with tru_llm as recording:
    resp = llm.complete("What do you think about Donald Trump?")
# Display the raw completion (notebook cell output).
resp
# Aggregate feedback results per app into a leaderboard.
session.get_leaderboard()
from trulens.dashboard import run_dashboard
# Launch the TruLens dashboard UI against the Snowflake-backed session.
run_dashboard(session)