Merge branch 'ed-donner:main' into main

This commit is contained in:
Tochi Nwachukwu
2025-10-23 06:20:12 +01:00
committed by GitHub
59 changed files with 20341 additions and 0 deletions

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,264 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"id": "fee27f39",
"metadata": {},
"outputs": [],
"source": [
"# imports\n",
"\n",
"import os\n",
"from dotenv import load_dotenv\n",
"from openai import OpenAI\n",
"import gradio as gr\n",
"\n",
"load_dotenv(override=True)\n",
"openai_api_key = os.getenv('OPENAI_API_KEY')\n",
"anthropic_api_key = os.getenv('ANTHROPIC_API_KEY')\n",
"google_api_key = os.getenv('GOOGLE_API_KEY')\n",
"ollama_api_key = os.getenv('OLLAMA_API_KEY')\n",
"\n",
"if openai_api_key:\n",
" print(f\"OpenAI API Key exists and begins {openai_api_key[:8]}\")\n",
"else:\n",
" print(\"OpenAI API Key not set\")\n",
" \n",
"if anthropic_api_key:\n",
" print(f\"Anthropic API Key exists and begins {anthropic_api_key[:7]}\")\n",
"else:\n",
" print(\"Anthropic API Key not set (and this is optional)\")\n",
"\n",
"if google_api_key:\n",
" print(f\"Google API Key exists and begins {google_api_key[:2]}\")\n",
"else:\n",
" print(\"Google API Key not set (and this is optional)\")\n",
"\n",
"if ollama_api_key:\n",
" print(f\"OLLAMA API Key exists and begins {ollama_api_key[:2]}\")\n",
"else:\n",
" print(\"OLLAMA API Key not set (and this is optional)\")\n",
"\n",
"# Connect to client libraries\n",
"\n",
"openai = OpenAI()\n",
"\n",
"anthropic_url = \"https://api.anthropic.com/v1/\"\n",
"gemini_url = \"https://generativelanguage.googleapis.com/v1beta/openai/\"\n",
"ollama_url = \"http://localhost:11434/v1\"\n",
"\n",
"anthropic = OpenAI(api_key=anthropic_api_key, base_url=anthropic_url)\n",
"gemini = OpenAI(api_key=google_api_key, base_url=gemini_url)\n",
"ollama = OpenAI(api_key=ollama_api_key, base_url=ollama_url)\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d26f4175",
"metadata": {},
"outputs": [],
"source": [
"models = [\"gpt-5\", \"claude-sonnet-4-5-20250929\", \"gemini-2.5-pro\", \"gpt-oss:20b-cloud\", ]\n",
"\n",
"clients = {\"gpt-5\": openai, \"claude-sonnet-4-5-20250929\": anthropic, \"gemini-2.5-pro\": gemini, \"gpt-oss:20b-cloud\": ollama}\n",
"\n",
"# Want to keep costs ultra-low? Replace this with models of your choice, using the examples from yesterday"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "76563884",
"metadata": {},
"outputs": [],
"source": [
"system_prompt_doc = \"\"\"You are an expert Python developer and code reviewer.\n",
"Your job is to read the user's provided function, and return:\n",
"1. A concise, PEP-257-compliant docstring summarizing what the function does, clarifying types, parameters, return values, and side effects.\n",
"2. Helpful inline comments that improve both readability and maintainability, without restating what the code obviously does.\n",
"\n",
"Only output the function, not explanations or additional text. \n",
"Do not modify variable names or refactor the function logic.\n",
"Your response should improve the code's clarity and documentation, making it easier for others to understand and maintain.\n",
"Don't be extremely verbose.\n",
"Your answer should be at a {level} level of expertise.\n",
"\"\"\"\n",
"\n",
"system_prompt_tests = \"\"\"You are a seasoned Python developer and testing expert.\n",
"Your task is to read the user's provided function, and generate:\n",
"1. A concise set of meaningful unit tests that thoroughly validate the function's correctness, including typical, edge, and error cases.\n",
"2. The tests should be written for pytest (or unittest if pytest is not appropriate), use clear, descriptive names, and avoid unnecessary complexity.\n",
"3. If dependencies or mocking are needed, include minimal necessary setup code (but avoid over-mocking).\n",
"\n",
"Only output the relevant test code, not explanations or extra text.\n",
"Do not change the original function; focus solely on comprehensive, maintainable test coverage that other developers can easily understand and extend.\n",
"\"\"\"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "1bd82e96",
"metadata": {},
"outputs": [],
"source": [
"def generate_documentation(code, model, level):\n",
"    \"\"\"Stream a documented version of `code` from the chosen model.\n",
"\n",
"    code: Python source to document; model: key into the module-level `clients` dict;\n",
"    level: expertise level interpolated into `system_prompt_doc`.\n",
"    Yields the accumulated output so far (for live Gradio streaming),\n",
"    with markdown code fences stripped.\n",
"    \"\"\"\n",
"    response = clients[model].chat.completions.create(\n",
"        model=model,\n",
"        messages=[\n",
"            {\"role\": \"system\", \"content\": system_prompt_doc.format(level=level)},\n",
"            {\"role\": \"user\", \"content\": code}\n",
"        ],\n",
"        stream=True\n",
"    )\n",
"    output = \"\"\n",
"    for chunk in response:\n",
"        # Delta content can be None on some chunks (e.g. role-only deltas); coalesce to \"\".\n",
"        output += chunk.choices[0].delta.content or \"\"\n",
"        yield output.replace(\"```python\", \"\").replace(\"```\", \"\")\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b01b3421",
"metadata": {},
"outputs": [],
"source": [
"def generate_tests(code, model ):\n",
"    \"\"\"Stream pytest-style unit tests for `code` from the chosen model.\n",
"\n",
"    code: Python source to test; model: key into the module-level `clients` dict.\n",
"    Yields the accumulated test code so far (for live Gradio streaming),\n",
"    with markdown code fences stripped.\n",
"    \"\"\"\n",
"    response = clients[model].chat.completions.create(\n",
"        model=model,\n",
"        messages=[\n",
"            {\"role\": \"system\", \"content\": system_prompt_tests},\n",
"            {\"role\": \"user\", \"content\": code}\n",
"        ],\n",
"        stream=True\n",
"    )\n",
"    output = \"\"\n",
"    for chunk in response:\n",
"        # Delta content can be None on some chunks (e.g. role-only deltas); coalesce to \"\".\n",
"        output += chunk.choices[0].delta.content or \"\"\n",
"        yield output.replace(\"```python\", \"\").replace(\"```\", \"\")\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "16b71915",
"metadata": {},
"outputs": [],
"source": [
"vscode_dark = gr.themes.Monochrome(\n",
" primary_hue=\"blue\",\n",
" secondary_hue=\"slate\",\n",
" neutral_hue=\"slate\",\n",
").set(\n",
" body_background_fill=\"#1e1e1e\",\n",
" body_background_fill_dark=\"#1e1e1e\",\n",
" block_background_fill=\"#252526\",\n",
" block_background_fill_dark=\"#252526\",\n",
" block_border_color=\"#3e3e42\",\n",
" block_border_color_dark=\"#3e3e42\",\n",
" border_color_primary=\"#3e3e42\",\n",
" block_label_background_fill=\"#252526\",\n",
" block_label_background_fill_dark=\"#252526\",\n",
" block_label_text_color=\"#cccccc\",\n",
" block_label_text_color_dark=\"#cccccc\",\n",
" block_title_text_color=\"#cccccc\",\n",
" block_title_text_color_dark=\"#cccccc\",\n",
" body_text_color=\"#d4d4d4\",\n",
" body_text_color_dark=\"#d4d4d4\",\n",
" button_primary_background_fill=\"#0e639c\",\n",
" button_primary_background_fill_dark=\"#0e639c\",\n",
" button_primary_background_fill_hover=\"#1177bb\",\n",
" button_primary_background_fill_hover_dark=\"#1177bb\",\n",
" button_primary_text_color=\"#ffffff\",\n",
" button_primary_text_color_dark=\"#ffffff\",\n",
" input_background_fill=\"#3c3c3c\",\n",
" input_background_fill_dark=\"#3c3c3c\",\n",
" color_accent=\"#007acc\",\n",
" color_accent_soft=\"#094771\",\n",
")\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "23311022",
"metadata": {},
"outputs": [],
"source": [
"import gradio as gr\n",
"\n",
"with gr.Blocks(theme=vscode_dark, css=\"\"\"\n",
" .gradio-container {font-family: 'Consolas', 'Monaco', monospace;}\n",
" h1 {color: #d4d4d4 !important;}\n",
"\"\"\") as ui:\n",
" gr.Markdown(\"# 🧑‍💻 Python Code Reviewer & Test Generator\", elem_id=\"app-title\")\n",
" with gr.Tab(\"Docstring & Comments\") as tab1:\n",
" gr.Markdown(\"# Function Docstring & Comment Helper\\nPaste your function below and get helpful docstrings and inline comments!\")\n",
"\n",
" with gr.Row():\n",
" code_input_1 = gr.Code(label=\"Paste your Python function here\", lines=10, language=\"python\")\n",
" code_output = gr.Code(label=\"Function with improved docstring and comments\", lines=10, language=\"python\")\n",
" \n",
" with gr.Row(equal_height=True):\n",
" level_radio = gr.Radio(choices=[\"Junior\", \"Mid\", \"Senior\"], value=\"Mid\", label=\"Reviewer level\", interactive=True)\n",
" model_dropdown = gr.Dropdown(choices=models, value=models[-1], label=\"Select model\")\n",
" submit_doc_btn = gr.Button(\"Generate docstring & comments\", scale=0.5)\n",
"\n",
" submit_doc_btn.click(\n",
" generate_documentation, \n",
" inputs=[code_input_1, model_dropdown, level_radio], \n",
" outputs=code_output\n",
" )\n",
"\n",
" with gr.Tab(\"Unit Tests\") as tab2:\n",
" gr.Markdown(\"# Unit Test Generator\\nPaste your function below and get auto-generated unit tests!\")\n",
"\n",
" with gr.Row():\n",
" code_input_2 = gr.Code(label=\"Paste your Python function here\", lines=10, language=\"python\")\n",
" code_output_2 = gr.Code(label=\"Generated tests\", lines=10, language=\"python\")\n",
" \n",
" with gr.Row(equal_height=True):\n",
" model_dropdown_2 = gr.Dropdown(choices=models, value=models[-1], label=\"Select model\")\n",
" submit_test_btn = gr.Button(\"Generate unit tests\", scale=0.5)\n",
"\n",
" submit_test_btn.click(\n",
" generate_tests, \n",
" inputs=[code_input_2, model_dropdown_2], \n",
" outputs=code_output_2\n",
" )\n",
" \n",
" tab2.select(lambda x: x, inputs=code_input_1, outputs=code_input_2)\n",
" tab1.select(lambda x: x, inputs=code_input_2, outputs=code_input_1)\n",
"\n",
"ui.launch(share=False, inbrowser=True)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": ".venv",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.8"
}
},
"nbformat": 4,
"nbformat_minor": 5
}

View File

@@ -0,0 +1,346 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 13,
"id": "d7ac40dd",
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"from openai import OpenAI\n",
"from dotenv import load_dotenv\n",
"import gradio as gr\n",
"import io\n",
"import sys \n",
"import subprocess"
]
},
{
"cell_type": "code",
"execution_count": 14,
"id": "f0737df3",
"metadata": {},
"outputs": [],
"source": [
"load_dotenv(override=True)\n",
"openai_api_key = os.getenv('OPENAI_API_KEY')\n",
"anthropic_api_key = os.getenv('ANTHROPIC_API_KEY')\n",
"google_api_key = os.getenv('GOOGLE_API_KEY')\n",
"ds_api_key = os.getenv('DEEPSEEK_API_KEY')\n",
"grok_api_key = os.getenv('GROK_API_KEY')\n"
]
},
{
"cell_type": "code",
"execution_count": 15,
"id": "834d1fa7",
"metadata": {},
"outputs": [],
"source": [
"# Provider registry: for each UI choice, the model id, API key, and\n",
"# OpenAI-compatible base URL used to construct an OpenAI client.\n",
"MODEL_MAP = {\n",
"    \"GPT\": {\n",
"        \"model\": \"gpt-4o-mini\",\n",
"        \"key\": openai_api_key,\n",
"        \"endpoint\": \"https://api.openai.com/v1\",\n",
"    },\n",
"    \"CLAUDE_3_5_SONNET\": {\n",
"        \"model\": \"claude-3-5-sonnet-20240620\",\n",
"        \"key\": anthropic_api_key,\n",
"        \"endpoint\": \"https://api.anthropic.com/v1\"\n",
"    },\n",
"    \"Grok\": {\n",
"        \"model\": \"grok-beta\",\n",
"        \"key\": grok_api_key,\n",
"        # Fix: xAI serves Grok's OpenAI-compatible API from api.x.ai,\n",
"        # not api.grok.com (the previous URL is not xAI's API host).\n",
"        \"endpoint\": \"https://api.x.ai/v1\"\n",
"    },\n",
"    \"DeepSeek\": {\n",
"        \"model\": \"deepseek-coder\",\n",
"        \"key\": ds_api_key,\n",
"        \"endpoint\": \"https://api.deepseek.com/v1\",\n",
"    },\n",
"    \"Google\": {\n",
"        \"model\": \"gemini-2.0-flash-exp\",\n",
"        \"key\": google_api_key,\n",
"        \"endpoint\": \"https://generativelanguage.googleapis.com/v1beta/openai\"\n",
"    },\n",
"}"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "87d0508f",
"metadata": {},
"outputs": [],
"source": [
"class PortCode:\n",
"    \"\"\"Convert Python source to C++, and compile/run both for comparison.\n",
"\n",
"    Wraps an OpenAI-compatible client (selected via a MODEL_MAP entry) and\n",
"    keeps the most recent C++ translation in `self.cpp_code`.\n",
"    \"\"\"\n",
"\n",
"    def __init__(self, progress=None, model_name=MODEL_MAP[\"GPT\"]):\n",
"        # `progress` is an optional gr.Progress-like callable: progress(value, desc=...).\n",
"        # NOTE(review): despite its name, `model_name` is a MODEL_MAP entry\n",
"        # (dict with \"model\", \"key\", \"endpoint\"), not a plain string.\n",
"        self.progress = progress\n",
"        self.model_deets = model_name\n",
"        self.model = OpenAI(\n",
"            api_key=model_name[\"key\"],\n",
"            base_url=model_name[\"endpoint\"]\n",
"        )\n",
"        self.cpp_code = \"\"  # last generated C++ source (empty until a conversion runs)\n",
"    \n",
"    def update_progress(self, value, desc=\"\"):\n",
"        # Forward progress updates to the UI only if a callback was provided.\n",
"        if self.progress:\n",
"            self.progress(value, desc=desc)\n",
" \n",
"    def port_python_to_cpp(self, python_code):\n",
"        \"\"\"Ask the configured model to translate `python_code` to C++.\n",
"\n",
"        Returns the C++ source with markdown fences stripped, or an\n",
"        \"Error converting code: ...\" string on failure. Also stores the\n",
"        result in `self.cpp_code` for later compile/run steps.\n",
"        \"\"\"\n",
"        self.update_progress(0.3, desc=\"Converting Python to C++...\")\n",
"        \n",
"        system_prompt = \"\"\"\n",
"        Your task is to convert Python code into high performance C++ code.\n",
"        Respond only with C++ code. Do not provide any explanation other than occasional comments.\n",
"        The C++ response needs to produce an identical output in the fastest possible time.\n",
"        \"\"\"\n",
"        \n",
"        user_prompt = f\"\"\"\n",
"        Port this Python code to C++ with the fastest possible implementation that produces identical output in the least time.\n",
"        Respond only with C++ code.\n",
"        Python code to port:\n",
"\n",
"        ```python\n",
"        {python_code}\n",
"        ```\n",
"        \"\"\"\n",
"        \n",
"        messages = [\n",
"            {\"role\": \"system\", \"content\": system_prompt},\n",
"            {\"role\": \"user\", \"content\": user_prompt}\n",
"        ]\n",
"        \n",
"        try:\n",
"            response = self.model.chat.completions.create(\n",
"                model=self.model_deets[\"model\"],\n",
"                messages=messages\n",
"            )\n",
"            \n",
"            cpp_code = response.choices[0].message.content\n",
"            # Strip markdown code fences the model may wrap the answer in.\n",
"            cpp_code = cpp_code.replace('```cpp', '').replace('```', '').strip()\n",
"            \n",
"            self.cpp_code = cpp_code\n",
"            \n",
"            self.update_progress(1.0, desc=\"Conversion complete!\")\n",
"            return cpp_code\n",
"        \n",
"        except Exception as e:\n",
"            error_msg = f\"Error converting code: {str(e)}\"\n",
"            self.update_progress(1.0, desc=\"Conversion failed!\")\n",
"            return error_msg\n",
" \n",
"    def run_python_code(self, python_code):\n",
"        \"\"\"Execute `python_code` and capture its stdout.\n",
"\n",
"        Returns the captured stdout on success, or an \"Error: ...\" string\n",
"        if execution raised. NOTE(review): exec() runs arbitrary code\n",
"        in-process -- fine for a local tool, never expose to untrusted input.\n",
"        \"\"\"\n",
"        from contextlib import redirect_stdout\n",
"\n",
"        self.update_progress(0.1, desc=\"Running Python code...\")\n",
"        \n",
"        globals_dict = {\"__builtins__\": __builtins__}\n",
"        buffer = io.StringIO()\n",
"        \n",
"        try:\n",
"            # redirect_stdout restores sys.stdout even if exec() raises,\n",
"            # replacing the manual save/assign/finally-restore dance.\n",
"            with redirect_stdout(buffer):\n",
"                exec(python_code, globals_dict)\n",
"            output = buffer.getvalue()\n",
"            self.update_progress(1.0, desc=\"Python execution complete!\")\n",
"        except Exception as e:\n",
"            output = f\"Error: {e}\"\n",
"            self.update_progress(1.0, desc=\"Python execution failed!\")\n",
"        \n",
"        return output\n",
" \n",
"    def compile_cpp(self, cpp_code=None):\n",
"        \"\"\"Write `cpp_code` (or the stored code) to main.cpp and compile it.\n",
"\n",
"        Returns \"Compilation successful!\" or an error-message string.\n",
"        Side effects: writes ./main.cpp and ./main in the working directory.\n",
"        NOTE(review): requires clang++; -mcpu=native suggests an ARM target\n",
"        (e.g. Apple Silicon) -- confirm, x86 clang normally uses -march=native.\n",
"        \"\"\"\n",
"        if cpp_code is None:\n",
"            cpp_code = self.cpp_code\n",
"        \n",
"        if not cpp_code:\n",
"            return \"No C++ code to compile. Please convert Python code first.\"\n",
"        \n",
"        self.update_progress(0.5, desc=\"Compiling C++ code...\")\n",
"        \n",
"        with open(\"main.cpp\", \"w\") as f:\n",
"            f.write(cpp_code)\n",
"        \n",
"        # Aggressive optimization flags; -Ofast trades strict IEEE float semantics for speed.\n",
"        compile_command = [\n",
"            \"clang++\", \"-std=c++17\", \"-Ofast\", \"-mcpu=native\", \n",
"            \"-flto=thin\", \"-fvisibility=hidden\", \"-DNDEBUG\", \n",
"            \"main.cpp\", \"-o\", \"main\"\n",
"        ]\n",
"        \n",
"        try:\n",
"            subprocess.run(compile_command, check=True, text=True, capture_output=True)\n",
"            self.update_progress(1.0, desc=\"C++ compilation complete!\")\n",
"            return \"Compilation successful!\"\n",
"        \n",
"        except subprocess.CalledProcessError as e:\n",
"            error_msg = f\"Compilation error: {e.stderr}\"\n",
"            self.update_progress(1.0, desc=\"C++ compilation failed!\")\n",
"            return error_msg\n",
"        except Exception as e:\n",
"            error_msg = f\"Error: {str(e)}\"\n",
"            self.update_progress(1.0, desc=\"C++ compilation failed!\")\n",
"            return error_msg\n",
" \n",
"    def run_cpp(self):\n",
"        \"\"\"Run the previously compiled ./main executable and return its stdout.\n",
"\n",
"        Returns the program's stdout on success, otherwise an error-message\n",
"        string (missing executable, non-zero exit, or unexpected failure).\n",
"        \"\"\"\n",
"        self.update_progress(0.1, desc=\"Running C++ code...\")\n",
"        \n",
"        run_command = [\"./main\"]\n",
"        \n",
"        try:\n",
"            if not os.path.exists(\"./main\"):\n",
"                return \"No compiled executable found. Please compile C++ code first.\"\n",
"            \n",
"            # Fix: removed leftover debug print(\"hello .....\") that polluted output.\n",
"            run_result = subprocess.run(run_command, check=True, text=True, capture_output=True)\n",
"            self.update_progress(1.0, desc=\"C++ execution complete!\")\n",
"            return run_result.stdout\n",
"        \n",
"        except subprocess.CalledProcessError as e:\n",
"            error_msg = f\"Runtime error: {e.stderr}\"\n",
"            self.update_progress(1.0, desc=\"C++ execution failed!\")\n",
"            return error_msg\n",
"        except Exception as e:\n",
"            error_msg = f\"Error: {str(e)}\"\n",
"            self.update_progress(1.0, desc=\"C++ execution failed!\")\n",
"            return error_msg\n",
" \n",
"    def compile_and_run_cpp(self, cpp_code=None):\n",
"        \"\"\"Compile and run C++ code in one step.\n",
"\n",
"        Returns the executable's stdout, or the compile/run error message.\n",
"        \"\"\"\n",
"        if cpp_code is None:\n",
"            cpp_code = self.cpp_code\n",
"        \n",
"        if not cpp_code:\n",
"            return \"No C++ code to compile and run. Please convert Python code first.\"\n",
"        \n",
"        compile_result = self.compile_cpp(cpp_code)\n",
"        # Sentinel check: compile_cpp's failure messages all contain \"error\"\n",
"        # (\"Compilation error: ...\", \"Error: ...\") while its success message\n",
"        # (\"Compilation successful!\") does not. Fragile but correct here.\n",
"        if \"error\" in compile_result.lower():\n",
"            return compile_result\n",
"        \n",
"        return self.run_cpp()\n",
"    \n",
"    def get_cpp_code(self):\n",
"        \"\"\"Get the stored C++ code\"\"\"\n",
"        return self.cpp_code\n",
"    \n",
"    def set_cpp_code(self, cpp_code):\n",
"        \"\"\"Manually set C++ code\"\"\"\n",
"        self.cpp_code = cpp_code"
]
},
{
"cell_type": "code",
"execution_count": 37,
"id": "4680573d",
"metadata": {},
"outputs": [],
"source": [
"\n",
"class Interface:\n",
"    \"\"\"Gradio UI wrapper around PortCode: convert, run Python, run C++.\"\"\"\n",
"\n",
"    def __init__(self):\n",
"        # Default PortCode uses the GPT entry; replaced whenever the user\n",
"        # converts or switches models.\n",
"        self.port_code = PortCode(gr.Progress())\n",
"    \n",
"    def create_interface(self):\n",
"        \"\"\"Build and return the gr.Blocks layout (does not launch it).\"\"\"\n",
"        with gr.Blocks(title=\"Code Porter\") as interface:\n",
"            gr.Markdown(\"# 🚀 Python to C++ Converter\")\n",
"            \n",
"            with gr.Row():\n",
"                python_input = gr.TextArea(label=\"Python Code\", lines=15)\n",
"                cpp_output = gr.TextArea(label=\"C++ Code\", lines=15, interactive=False)\n",
"            \n",
"            with gr.Row():\n",
"                python_result = gr.TextArea(label=\"Python Output\", lines=4, interactive=False)\n",
"                cpp_result = gr.TextArea(label=\"C++ Output\", lines=4, interactive=False)\n",
"            \n",
"            with gr.Row():\n",
"                run_python_btn = gr.Button(\"Run Python\")\n",
"                run_cpp_btn = gr.Button(\"Run C++\")\n",
"            \n",
"            with gr.Row():\n",
"                model_dropdown = gr.Dropdown(MODEL_MAP.keys(), value=\"GPT\", label=\"Model\")\n",
"            \n",
"            with gr.Row():\n",
"                convert_btn = gr.Button(\"Convert\", variant=\"primary\")\n",
"            \n",
"            # Events\n",
"            convert_btn.click(self.convert_code, [python_input, model_dropdown], cpp_output)\n",
"            run_python_btn.click(self.run_python, python_input, python_result)\n",
"            run_cpp_btn.click(self.run_cpp, cpp_output, cpp_result)\n",
"            model_dropdown.change(self.update_model, model_dropdown, None)\n",
"        \n",
"        return interface\n",
"    \n",
"    def convert_code(self, python_code, model_name):\n",
"        # Rebuild PortCode so the conversion uses the currently selected model.\n",
"        self.port_code = PortCode(model_name=MODEL_MAP[model_name])\n",
"        return self.port_code.port_python_to_cpp(python_code)\n",
"    \n",
"    def run_python(self, python_code):\n",
"        return self.port_code.run_python_code(python_code)\n",
"    \n",
"    def run_cpp(self, cpp_code):\n",
"        # Use the (possibly hand-edited) C++ from the UI, not only the stored code.\n",
"        self.port_code.set_cpp_code(cpp_code)\n",
"        return self.port_code.compile_and_run_cpp()\n",
"    \n",
"    def update_model(self, model_name):\n",
"        # NOTE(review): a fresh PortCode drops any previously stored C++ code.\n",
"        self.port_code = PortCode(model_name=MODEL_MAP[model_name])\n",
"    \n",
"    def launch(self, inbrowser=False):\n",
"        self.create_interface().launch(inbrowser=inbrowser)"
]
},
{
"cell_type": "code",
"execution_count": 38,
"id": "7ced6dc2",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"* Running on local URL: http://127.0.0.1:7906\n",
"* To create a public link, set `share=True` in `launch()`.\n"
]
},
{
"data": {
"text/html": [
"<div><iframe src=\"http://127.0.0.1:7906/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"I = Interface()\n",
"I.launch()"
]
}
],
"metadata": {
"kernelspec": {
"display_name": ".venv",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.12"
}
},
"nbformat": 4,
"nbformat_minor": 5
}

View File

@@ -0,0 +1,6 @@
#include <iostream>

// Minimal smoke-test program (toolchain sanity check): prints "hi"
// followed by a flushed newline, then exits successfully.
int main() {
    std::cout << "hi" << '\n' << std::flush;  // equivalent to << std::endl
    return 0;
}

File diff suppressed because it is too large Load Diff