Update app.py
app.py
CHANGED
@@ -2,6 +2,7 @@ import os, io, sys, subprocess, shutil
 from dotenv import load_dotenv
 from openai import OpenAI
 from google import genai
+from google.genai import types
 import gradio as gr
 from datetime import datetime
 from placeholder_python_code import pi_1, pi_2
@@ -67,7 +68,7 @@ def write_output(cpp: str):
     with open(f"/tmp/optimized-{current_time}.cpp", "w") as f:
         f.write(code)
 
-def convert_and_optimize_code_with_openai(python):
+def convert_and_optimize_code_with_openai(python: str):
     stream = openai_client.chat.completions.create(
         model=OPENAI_MODEL,
         messages=messages_for_python(python),
@@ -81,12 +82,15 @@ def convert_and_optimize_code_with_openai(python):
 
         yield fragment
 
-def convert_and_optimize_code_with_gemini(python):
+def convert_and_optimize_code_with_gemini(python: str):
     user_prompt = user_prompt_for_python(python)
 
     stream = gemini_client.models.generate_content_stream(
         model=GEMINI_MODEL,
-        contents=user_prompt
+        contents=user_prompt,
+        config=types.GenerateContentConfig(
+            system_instruction=system_message_for_python()
+        ),
     )
 
     for chunk in stream:
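For context, a minimal self-contained sketch of the pattern the Gemini hunk adopts: the system prompt is passed to the google-genai SDK through types.GenerateContentConfig(system_instruction=...) on a streaming call, instead of being folded into the user prompt. The client setup, model string, and function name below are illustrative placeholders, not code taken from app.py.

import os
from google import genai
from google.genai import types

# Assumes GOOGLE_API_KEY is set in the environment.
gemini_client = genai.Client(api_key=os.environ["GOOGLE_API_KEY"])

def stream_with_system_instruction(user_prompt: str, system_prompt: str):
    # Streaming generation; the system prompt travels in the config object,
    # mirroring the change to convert_and_optimize_code_with_gemini above.
    stream = gemini_client.models.generate_content_stream(
        model="gemini-2.0-flash",  # placeholder; the app uses GEMINI_MODEL
        contents=user_prompt,
        config=types.GenerateContentConfig(system_instruction=system_prompt),
    )
    for chunk in stream:
        # Each chunk carries a text fragment that the caller can yield or accumulate.
        yield chunk.text

Keeping the system instruction in the config rather than in contents matches how the OpenAI path already separates the system and user messages via messages_for_python.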