Commit

docs: add mistral instrumentation to mistral example
mikeldking committed Mar 22, 2024
1 parent f282778 commit eaae4a8
Showing 1 changed file with 52 additions and 12 deletions.
64 changes: 52 additions & 12 deletions in tutorials/mistral/evaluate_rag--mistral.ipynb
@@ -77,7 +77,7 @@
"metadata": {},
"outputs": [],
"source": [
"!pip install -qq \"arize-phoenix\" \"arize-phoenix-evals>=0.5.0\" \"llama-index==0.10.19\" \"llama-index-llms-mistralai\" \"llama-index-embeddings-mistralai\" \"openinference-instrumentation-mistralai\" \"openinference-instrumentation-llama-index>=1.0.0\" \"llama-index-callbacks-arize-phoenix>=0.1.2\" gcsfs nest_asyncio"
"!pip install -qq \"arize-phoenix\" \"arize-phoenix-evals>=0.5.0\" \"llama-index==0.10.19\" \"llama-index-llms-mistralai\" \"llama-index-embeddings-mistralai\" \"openinference-instrumentation-mistralai>=0.0.2\" \"openinference-instrumentation-llama-index>=1.0.0\" \"llama-index-callbacks-arize-phoenix>=0.1.2\" gcsfs nest_asyncio"
]
},
{
@@ -130,6 +130,48 @@
"nest_asyncio.apply()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import phoenix as px\n",
"from llama_index.core import set_global_handler\n",
"\n",
"session = px.launch_app()\n",
"set_global_handler(\"arize_phoenix\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Let's use the OpenInference Mistral Instrumentation as well."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from openinference.instrumentation.mistralai import MistralAIInstrumentor\n",
"from opentelemetry import trace as trace_api\n",
"from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter\n",
"from opentelemetry.sdk import trace as trace_sdk\n",
"from opentelemetry.sdk.trace.export import ConsoleSpanExporter, SimpleSpanProcessor\n",
"\n",
"tracer_provider = trace_sdk.TracerProvider()\n",
"tracer_provider.add_span_processor(SimpleSpanProcessor(OTLPSpanExporter(\"http://127.0.0.1:6006/v1/traces\")))\n",
"# Optionally, you can also print the spans to the console.\n",
"tracer_provider.add_span_processor(SimpleSpanProcessor(ConsoleSpanExporter()))\n",
"trace_api.set_tracer_provider(tracer_provider)\n",
"\n",
"mistral_instrumentor = MistralAIInstrumentor()\n",
"mistral_instrumentor.instrument()"
]
},
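A quick way to confirm the instrumentation is wired up (this check is not part of the committed cell) is to make one direct call with the Mistral client and watch for the span in Phoenix and on the console exporter. A minimal sketch, assuming the pre-1.0 mistralai Python client that openinference-instrumentation-mistralai 0.0.x wraps, a MISTRAL_API_KEY environment variable, and the mistral-small-latest model name:

import os

from mistralai.client import MistralClient
from mistralai.models.chat_completion import ChatMessage

# Any direct Mistral call made after instrument() should now emit a span
# to Phoenix (and, via the ConsoleSpanExporter above, to stdout).
client = MistralClient(api_key=os.environ["MISTRAL_API_KEY"])
response = client.chat(
    model="mistral-small-latest",
    messages=[ChatMessage(role="user", content="Say hello in one word.")],
)
print(response.choices[0].message.content)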
{
"cell_type": "code",
"execution_count": null,
@@ -138,7 +180,7 @@
"source": [
"import pandas as pd\n",
"import phoenix as px\n",
"from llama_index.core import Settings, SimpleDirectoryReader, VectorStoreIndex, set_global_handler\n",
"from llama_index.core import Settings, SimpleDirectoryReader, VectorStoreIndex\n",
"from llama_index.core.node_parser import SimpleNodeParser\n",
"from llama_index.embeddings.mistralai import MistralAIEmbedding\n",
"from llama_index.llms.mistralai import MistralAI\n",
@@ -440,7 +482,6 @@
"source": [
"import json\n",
"\n",
"from openinference.instrumentation.mistralai import MistralAIInstrumentor\n",
"from phoenix.evals import MistralAIModel, llm_generate\n",
"\n",
"\n",
@@ -452,16 +493,15 @@
"\n",
"\n",
"with using_project(TESTSET_PROJECT): # Collect traces under the project \"testset\"\n",
" mistral_instrumentor = MistralAIInstrumentor()\n",
" mistral_instrumentor.instrument() # Instrument MistralAI to see the synthetic testset generation\n",
" questions_df = llm_generate(\n",
" dataframe=document_chunks_df,\n",
" template=generate_questions_template,\n",
" model=MistralAIModel(model=\"mistral-large-latest\", response_format={\"type\": \"json_object\"}),\n",
" model=MistralAIModel(model=\"mistral-large-latest\"),\n",
" output_parser=output_parser,\n",
" concurrency=20,\n",
" )\n",
" mistral_instrumentor.uninstrument() # No longer needed"
"\n",
"mistral_instrumentor.uninstrument() # No longer needed"
]
},
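For context, generate_questions_template and output_parser are defined earlier in the notebook and are not part of this diff. Given the import json at the top of the cell, the parser presumably turns each raw model response into a dict of column values for questions_df. A hypothetical sketch of such a parser, using the (response, row_index) signature that arize-phoenix-evals passes to llm_generate output parsers:

import json


def output_parser(response: str, row_index: int) -> dict:
    # llm_generate calls the parser with the raw model output and the row
    # index; whatever dict is returned becomes columns of questions_df.
    try:
        return json.loads(response)
    except json.JSONDecodeError as exc:
        # Surface malformed responses instead of raising mid-run.
        return {"__error__": str(exc)}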
{
@@ -1057,11 +1097,11 @@
" )\n",
")\n",
"# relaunch phoenix with a primary and corpus dataset to view embeddings\n",
"px.close_app()\n",
"session = px.launch_app(\n",
" primary=px.Dataset(query_df, query_schema, \"query\"),\n",
" corpus=px.Dataset(corpus_df.reset_index(drop=True), corpus_schema, \"corpus\"),\n",
")"
"# px.close_app()\n",
"# session = px.launch_app(\n",
"# primary=px.Dataset(query_df, query_schema, \"query\"),\n",
"# corpus=px.Dataset(corpus_df.reset_index(drop=True), corpus_schema, \"corpus\"),\n",
"# )"
]
},
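With the relaunch commented out, traces continue to land in the Phoenix app started at the top of the notebook. If the UI link is needed again, the running session can be looked up instead of relaunching; a small sketch, assuming the px.active_session() helper and the session's url attribute (both present in Phoenix releases from around this time, but worth verifying against the installed version):

import phoenix as px

# Reuse the session launched at the top of the notebook rather than
# starting a second Phoenix app.
session = px.active_session()
if session is not None:
    print(f"Phoenix UI available at {session.url}")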
{