
Commit 3752339
typo
phact committed Aug 28, 2024
1 parent: 2c54f7a
Showing 16 changed files with 16 additions and 16 deletions.
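All 16 hunks below make the same one-line fix: the Gemini model identifier was written as a bare expression instead of a string literal. Unquoted, Python parses gemini/gemini-1.5-flash as the arithmetic expression gemini / gemini - 1.5 - flash, which fails at runtime with NameError: name 'gemini' is not defined. Quoting it makes it a plain string that is passed through to the client untouched. A minimal before/after sketch, reusing print_chat_completion from client/examples/chat_completion.py:

# Broken: valid syntax, but evaluated as gemini / gemini - 1.5 - flash,
# so it raises NameError before any request is made.
# model = gemini/gemini-1.5-flash

# Fixed: the identifier is a string literal handed straight to the API.
model = "gemini/gemini-1.5-flash"
print_chat_completion(model)
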
2 changes: 1 addition & 1 deletion client/examples/chat_completion.py
@@ -51,5 +51,5 @@ def print_chat_completion(model):
model="anthropic.claude-v2"
print_chat_completion(model)

-model=gemini/gemini-1.5-flash
+model="gemini/gemini-1.5-flash"
print_chat_completion(model)
2 changes: 1 addition & 1 deletion client/examples/run.py
@@ -90,6 +90,6 @@ def test_run_with_assistant(assistant, client):
gemini_assistant = client.beta.assistants.create(
name="Gemini Animal Tutor",
instructions=instructions,
-model=gemini/gemini-1.5-flash,
+model="gemini/gemini-1.5-flash",
)
test_run_with_assistant(gemini_assistant, client)
2 changes: 1 addition & 1 deletion client/examples/run_retreival.py
@@ -108,7 +108,7 @@ def run_with_assistant(assistant, client):
)
run_with_assistant(claude_assistant, client)

-model = gemini/gemini-1.5-flash
+model = "gemini/gemini-1.5-flash"
name = f"{model} Math Tutor"

gemini_assistant = client.beta.assistants.create(
2 changes: 1 addition & 1 deletion client/tests/astra-assistants/test_chat_completion.py
@@ -98,5 +98,5 @@ def test_chat_completion_claude(patched_openai_client):

@pytest.mark.skip(reason="Tool choice not supporeted / working consistently")
def test_chat_completion_gemini_pro(patched_openai_client):
-model=gemini/gemini-1.5-flash
+model="gemini/gemini-1.5-flash"
print_chat_completion(model, patched_openai_client)
2 changes: 1 addition & 1 deletion client/tests/astra-assistants/test_function_calling_v2.py
@@ -38,7 +38,7 @@ def test_function_calling_claude(patched_openai_client):

@pytest.mark.skip(reason="gemini does not consistently work with function calling, skip")
def test_function_calling_gemini(patched_openai_client):
-model=gemini/gemini-1.5-flash
+model="gemini/gemini-1.5-flash"
function_calling(model, patched_openai_client)

@pytest.mark.skip(reason="llama does not consistently work with function calling, skip")
2 changes: 1 addition & 1 deletion client/tests/astra-assistants/test_run_retreival_v2.py
@@ -160,7 +160,7 @@ def test_run_claude(patched_openai_client):
run_with_assistant(claude_assistant, patched_openai_client, file1_path, embedding_model1)

def test_run_gemini(patched_openai_client):
-model = gemini/gemini-1.5-flash
+model = "gemini/gemini-1.5-flash"
name = f"{model} Math Tutor"

gemini_assistant = patched_openai_client.beta.assistants.create(
2 changes: 1 addition & 1 deletion client/tests/astra-assistants/test_run_v2.py
@@ -89,6 +89,6 @@ def test_run_gemini(patched_openai_client):
gemini_assistant = patched_openai_client.beta.assistants.create(
name="Gemini Animal Tutor",
instructions=instructions,
-model=gemini/gemini-1.5-flash,
+model="gemini/gemini-1.5-flash",
)
run_with_assistant(gemini_assistant, patched_openai_client)
@@ -36,7 +36,7 @@ def test_function_calling_claude(patched_openai_client):

@pytest.mark.skip(reason="litellm does not use the latest gemini tool support yet and gemini refuses without it, skip")
def test_function_calling_gemini(patched_openai_client):
-model=gemini/gemini-1.5-flash
+model="gemini/gemini-1.5-flash"
function_calling(model, patched_openai_client)

def test_function_calling_groq_llama3(patched_openai_client):
@@ -38,7 +38,7 @@ def test_function_calling_claude(streaming_assistants_openai_client):

@pytest.mark.skip(reason="gemini does not consistently work with function calling, skip")
def test_function_calling_gemini(streaming_assistants_openai_client):
-model=gemini/gemini-1.5-flash
+model="gemini/gemini-1.5-flash"
function_calling(model, streaming_assistants_openai_client)

@pytest.mark.skip(reason="llama does not consistently work with function calling, skip")
2 changes: 1 addition & 1 deletion client/tests/streaming-assistants/test_run_retreival_v1.py
@@ -109,7 +109,7 @@ def test_run_claude(streaming_assistants_openai_client):

@pytest.mark.skip(reason="flaky")
def test_run_gemini(streaming_assistants_openai_client):
-model = gemini/gemini-1.5-flash
+model = "gemini/gemini-1.5-flash"
name = f"{model} Math Tutor"

gemini_assistant = streaming_assistants_openai_client.beta.assistants.create(
2 changes: 1 addition & 1 deletion client/tests/streaming-assistants/test_run_v1.py
@@ -104,6 +104,6 @@ def test_run_gemini(streaming_assistants_openai_client):
gemini_assistant = streaming_assistants_openai_client.beta.assistants.create(
name="Gemini Animal Tutor",
instructions=instructions,
-model=gemini/gemini-1.5-flash,
+model="gemini/gemini-1.5-flash",
)
run_with_assistant(gemini_assistant, streaming_assistants_openai_client)
@@ -37,7 +37,7 @@ def test_function_calling_claude(streaming_assistants_openai_client):

@pytest.mark.skip(reason="litellm does not use the latest gemini tool support yet and gemini refuses without it, skip")
def test_function_calling_gemini(streaming_assistants_openai_client):
-model=gemini/gemini-1.5-flash
+model="gemini/gemini-1.5-flash"
function_calling(model, streaming_assistants_openai_client)

def test_function_calling_groq_llama3(streaming_assistants_openai_client):
@@ -127,7 +127,7 @@ def test_run_claude(streaming_assistants_openai_client):

@pytest.mark.skip(reason="flaky")
def test_run_gemini(streaming_assistants_openai_client):
-model = gemini/gemini-1.5-flash
+model = "gemini/gemini-1.5-flash"
name = f"{model} Math Tutor"

gemini_assistant = streaming_assistants_openai_client.beta.assistants.create(
2 changes: 1 addition & 1 deletion client/tests/streaming-assistants/test_streaming_run_v1.py
@@ -101,6 +101,6 @@ def test_run_gemini(streaming_assistants_openai_client):
gemini_assistant = streaming_assistants_openai_client.beta.assistants.create(
name="Gemini Animal Tutor",
instructions=instructions,
-model=gemini/gemini-1.5-flash,
+model="gemini/gemini-1.5-flash",
)
run_with_assistant(gemini_assistant, streaming_assistants_openai_client)
2 changes: 1 addition & 1 deletion examples/python/retrieval/basic.py
@@ -120,7 +120,7 @@ def run_with_assistant(assistant, client):
)
run_with_assistant(claude_assistant, client)

-model = gemini/gemini-1.5-flash
+model = "gemini/gemini-1.5-flash"
name = f"{model} Math Tutor"

gemini_assistant = client.beta.assistants.create(
2 changes: 1 addition & 1 deletion examples/python/streaming_retrieval/basic.py
@@ -127,7 +127,7 @@ def on_run_step_done(self, run_step) -> None:
)
run_with_assistant(claude_assistant, client)

-model = gemini/gemini-1.5-flash
+model = "gemini/gemini-1.5-flash"
name = f"{model} Math Tutor"

gemini_assistant = client.beta.assistants.create(

0 comments on commit 3752339
