diff --git a/week4/community-contributions/bharat_puri/code_generator.ipynb b/week4/community-contributions/bharat_puri/code_generator.ipynb
new file mode 100644
index 0000000..508864b
--- /dev/null
+++ b/week4/community-contributions/bharat_puri/code_generator.ipynb
@@ -0,0 +1,1241 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "4a6ab9a2-28a2-445d-8512-a0dc8d1b54e9",
+ "metadata": {},
+ "source": [
+ "# Code Generator\n",
+ "\n",
+ "The requirement: use a Frontier model to generate high performance C++ code from Python code\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "d5ccb926-7b49-44a4-99ab-8ef20b5778c0",
+ "metadata": {},
+ "source": [
+ "
\n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " Reminder: OPTIONAL to execute C++ code or Rust code\n",
+ " As an alternative, you can run it on the website given yesterday\n",
+ " | \n",
+ "
\n",
+ "
"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "d90e04a2-5b8a-4fd5-9db8-27c02f033313",
+ "metadata": {},
+ "source": [
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " | \n",
+ " \n",
+ " Important Note\n",
+ " \n",
+ " In this lab, I use high end models GPT 5, Claude 4.5 Sonnet, Gemini 2.5 Pro, Grok 4, which are the slightly higher priced models. The costs are still low, but if you'd prefer to keep costs ultra low, please pick lower cost models like gpt-5-nano.\n",
+ " \n",
+ " | \n",
+ "
\n",
+ "
"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "id": "e610bf56-a46e-4aff-8de1-ab49d62b1ad3",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# imports\n",
+ "\n",
+ "import os\n",
+ "import io\n",
+ "import sys\n",
+ "from dotenv import load_dotenv\n",
+ "import sys\n",
+ "sys.path.append(os.path.abspath(os.path.join(\"..\", \"..\"))) \n",
+ "from openai import OpenAI\n",
+ "import gradio as gr\n",
+ "import subprocess\n",
+ "from IPython.display import Markdown, display\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "id": "4f672e1c-87e9-4865-b760-370fa605e614",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "OpenAI API Key exists and begins sk-proj-\n",
+ "Anthropic API Key not set (and this is optional)\n",
+ "Google API Key not set (and this is optional)\n",
+ "Grok API Key not set (and this is optional)\n",
+ "Groq API Key not set (and this is optional)\n",
+ "OpenRouter API Key not set (and this is optional)\n"
+ ]
+ }
+ ],
+ "source": [
+ "load_dotenv(override=True)\n",
+ "openai_api_key = os.getenv('OPENAI_API_KEY')\n",
+ "anthropic_api_key = os.getenv('ANTHROPIC_API_KEY')\n",
+ "google_api_key = os.getenv('GOOGLE_API_KEY')\n",
+ "grok_api_key = os.getenv('GROK_API_KEY')\n",
+ "groq_api_key = os.getenv('GROQ_API_KEY')\n",
+ "openrouter_api_key = os.getenv('OPENROUTER_API_KEY')\n",
+ "\n",
+ "if openai_api_key:\n",
+ " print(f\"OpenAI API Key exists and begins {openai_api_key[:8]}\")\n",
+ "else:\n",
+ " print(\"OpenAI API Key not set\")\n",
+ " \n",
+ "if anthropic_api_key:\n",
+ " print(f\"Anthropic API Key exists and begins {anthropic_api_key[:7]}\")\n",
+ "else:\n",
+ " print(\"Anthropic API Key not set (and this is optional)\")\n",
+ "\n",
+ "if google_api_key:\n",
+ " print(f\"Google API Key exists and begins {google_api_key[:2]}\")\n",
+ "else:\n",
+ " print(\"Google API Key not set (and this is optional)\")\n",
+ "\n",
+ "if grok_api_key:\n",
+ " print(f\"Grok API Key exists and begins {grok_api_key[:4]}\")\n",
+ "else:\n",
+ " print(\"Grok API Key not set (and this is optional)\")\n",
+ "\n",
+ "if groq_api_key:\n",
+ " print(f\"Groq API Key exists and begins {groq_api_key[:4]}\")\n",
+ "else:\n",
+ " print(\"Groq API Key not set (and this is optional)\")\n",
+ "\n",
+ "if openrouter_api_key:\n",
+ " print(f\"OpenRouter API Key exists and begins {openrouter_api_key[:6]}\")\n",
+ "else:\n",
+ " print(\"OpenRouter API Key not set (and this is optional)\")\n",
+ "\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "id": "59863df1",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Connect to client libraries\n",
+ "\n",
+ "openai = OpenAI()\n",
+ "\n",
+ "anthropic_url = \"https://api.anthropic.com/v1/\"\n",
+ "gemini_url = \"https://generativelanguage.googleapis.com/v1beta/openai/\"\n",
+ "grok_url = \"https://api.x.ai/v1\"\n",
+ "groq_url = \"https://api.groq.com/openai/v1\"\n",
+ "ollama_url = \"http://localhost:11434/v1\"\n",
+ "openrouter_url = \"https://openrouter.ai/api/v1\"\n",
+ "\n",
+ "anthropic = OpenAI(api_key=anthropic_api_key, base_url=anthropic_url)\n",
+ "gemini = OpenAI(api_key=google_api_key, base_url=gemini_url)\n",
+ "grok = OpenAI(api_key=grok_api_key, base_url=grok_url)\n",
+ "groq = OpenAI(api_key=groq_api_key, base_url=groq_url)\n",
+ "ollama = OpenAI(api_key=\"ollama\", base_url=ollama_url)\n",
+ "openrouter = OpenAI(api_key=openrouter_api_key, base_url=openrouter_url)\n",
+ "\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "id": "8aa149ed-9298-4d69-8fe2-8f5de0f667da",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "models = [\"gpt-5\", \"claude-sonnet-4-5-20250929\", \"grok-4\", \"gemini-2.5-pro\", \"qwen2.5-coder\", \"deepseek-coder-v2\", \"gpt-oss:20b\", \"qwen/qwen3-coder-30b-a3b-instruct\", \"openai/gpt-oss-120b\", ]\n",
+ "\n",
+ "clients = {\"gpt-5\": openai, \"claude-sonnet-4-5-20250929\": anthropic, \"grok-4\": grok, \"gemini-2.5-pro\": gemini, \"openai/gpt-oss-120b\": groq, \"qwen2.5-coder\": ollama, \"deepseek-coder-v2\": ollama, \"gpt-oss:20b\": ollama, \"qwen/qwen3-coder-30b-a3b-instruct\": openrouter}\n",
+ "\n",
+ "# Want to keep costs ultra-low? Replace this with models of your choice, using the examples from yesterday"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "id": "68c1f1be",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "{'installed': False,\n",
+ " 'rustc': {'path': '',\n",
+ " 'version': '',\n",
+ " 'host_triple': '',\n",
+ " 'release': '',\n",
+ " 'commit_hash': ''},\n",
+ " 'cargo': {'path': '', 'version': ''},\n",
+ " 'rustup': {'path': '',\n",
+ " 'version': '',\n",
+ " 'active_toolchain': '',\n",
+ " 'default_toolchain': '',\n",
+ " 'toolchains': [],\n",
+ " 'targets_installed': []},\n",
+ " 'rust_analyzer': {'path': ''},\n",
+ " 'env': {'CARGO_HOME': '',\n",
+ " 'RUSTUP_HOME': '',\n",
+ " 'RUSTFLAGS': '',\n",
+ " 'CARGO_BUILD_TARGET': ''},\n",
+ " 'execution_examples': []}"
+ ]
+ },
+ "execution_count": 10,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "from system_info import retrieve_system_info, rust_toolchain_info\n",
+ "\n",
+ "system_info = retrieve_system_info()\n",
+ "rust_info = rust_toolchain_info()\n",
+ "rust_info"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 11,
+ "id": "b8bd44f5",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/markdown": [
+ "You do not have a Rust toolchain installed. You need to install it.\n",
+ "\n",
+ "Simplest install (Intel macOS):\n",
+ "1) Using Homebrew (recommended on your setup)\n",
+ "- Install rustup:\n",
+ " - brew install rustup-init\n",
+ "- Initialize the default stable toolchain:\n",
+ " - rustup-init -y\n",
+ "- Load the environment for the current shell (so rustc/cargo are on PATH):\n",
+ " - source \"$HOME/.cargo/env\"\n",
+ "\n",
+ "2) Verify:\n",
+ "- rustc --version\n",
+ "- cargo --version\n",
+ "\n",
+ "Note: If a new shell doesn't see rustc, add this to your shell profile (~/.zshrc or ~/.bashrc):\n",
+ "- echo 'source \"$HOME/.cargo/env\"' >> ~/.zshrc\n",
+ "- source ~/.zshrc\n",
+ "\n",
+ "Alternative (official installer):\n",
+ "- curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y\n",
+ "- source \"$HOME/.cargo/env\"\n",
+ "\n",
+ "Python subprocess commands (max runtime performance, compile can be slow):\n",
+ "Use rustc directly with aggressive optimization, native CPU tuning, LTO, and single codegen unit.\n",
+ "\n",
+ "- compile_command:\n",
+ "```python\n",
+ "compile_command = [\n",
+ " \"rustc\",\n",
+ " \"-C\", \"opt-level=3\",\n",
+ " \"-C\", \"lto=fat\",\n",
+ " \"-C\", \"codegen-units=1\",\n",
+ " \"-C\", \"target-cpu=native\",\n",
+ " \"-C\", \"debuginfo=0\",\n",
+ " \"main.rs\",\n",
+ " \"-o\", \"main\"\n",
+ "]\n",
+ "```\n",
+ "\n",
+ "- run_command:\n",
+ "```python\n",
+ "run_command = [\"./main\"]\n",
+ "```\n",
+ "\n",
+ "If your Python process can't find rustc, use the full path:\n",
+ "```python\n",
+ "compile_command[0] = str(Path.home() / \".cargo\" / \"bin\" / \"rustc\")\n",
+ "```"
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "message = f\"\"\"\n",
+ "Here is a report of the system information for my computer.\n",
+ "I want to run a Rust compiler to compile a single rust file called main.rs and then execute it in the simplest way possible.\n",
+ "Please reply with whether I need to install a Rust toolchain to do this. If so, please provide the simplest step by step instructions to do so.\n",
+ "\n",
+ "If I'm already set up to compile Rust code, then I'd like to run something like this in Python to compile and execute the code:\n",
+ "```python\n",
+ "compile_command = # something here - to achieve the fastest possible runtime performance\n",
+ "compile_result = subprocess.run(compile_command, check=True, text=True, capture_output=True)\n",
+ "run_command = # something here\n",
+ "run_result = subprocess.run(run_command, check=True, text=True, capture_output=True)\n",
+ "return run_result.stdout\n",
+ "```\n",
+ "Please tell me exactly what I should use for the compile_command and run_command.\n",
+ "Have the maximum possible runtime performance in mind; compile time can be slow. Fastest possible runtime performance for this platform is key.\n",
+ "Reply with the commands in markdown.\n",
+ "\n",
+ "System information:\n",
+ "{system_info}\n",
+ "\n",
+ "Rust toolchain information:\n",
+ "{rust_info}\n",
+ "\"\"\"\n",
+ "\n",
+ "response = openai.chat.completions.create(model=models[0], messages=[{\"role\": \"user\", \"content\": message}])\n",
+ "display(Markdown(response.choices[0].message.content))"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "81e92c12",
+ "metadata": {},
+ "source": [
+ "## For C++, overwrite this with the commands from yesterday, or for Rust, use the new commands\n",
+ "\n",
+ "Or just use the website like yesterday:\n",
+ "\n",
+ " https://www.programiz.com/cpp-programming/online-compiler/"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 12,
+ "id": "d734a634",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "compile_command = [\n",
+ " \"/Users/ed/.cargo/bin/rustc\",\n",
+ " \"main.rs\",\n",
+ " \"-C\", \"opt-level=3\",\n",
+ " \"-C\", \"target-cpu=native\",\n",
+ " \"-C\", \"codegen-units=1\",\n",
+ " \"-C\", \"lto=fat\",\n",
+ " \"-C\", \"panic=abort\",\n",
+ " \"-C\", \"strip=symbols\",\n",
+ " \"-o\", \"main\",\n",
+ "]\n",
+ "\n",
+ "run_command = [\"./main\"]\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "f0b0a437",
+ "metadata": {},
+ "source": [
+ "## And now, on with the main task"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 13,
+ "id": "6896636f-923e-4a2c-9d6c-fac07828a201",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "language = \"Rust\" # or \"C++\"\n",
+ "extension = \"rs\" if language == \"Rust\" else \"cpp\"\n",
+ "\n",
+ "system_prompt = f\"\"\"\n",
+ "Your task is to convert Python code into high performance {language} code.\n",
+ "Respond only with {language} code. Do not provide any explanation other than occasional comments.\n",
+ "The {language} response needs to produce an identical output in the fastest possible time.\n",
+ "\"\"\"\n",
+ "\n",
+ "def user_prompt_for(python):\n",
+ " return f\"\"\"\n",
+ "Port this Python code to {language} with the fastest possible implementation that produces identical output in the least time.\n",
+ "The system information is:\n",
+ "{system_info}\n",
+ "Your response will be written to a file called main.{extension} and then compiled and executed; the compilation command is:\n",
+ "{compile_command}\n",
+ "Respond only with {language} code.\n",
+ "Python code to port:\n",
+ "\n",
+ "```python\n",
+ "{python}\n",
+ "```\n",
+ "\"\"\""
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 14,
+ "id": "8e7b3546-57aa-4c29-bc5d-f211970d04eb",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def messages_for(python):\n",
+ " return [\n",
+ " {\"role\": \"system\", \"content\": system_prompt},\n",
+ " {\"role\": \"user\", \"content\": user_prompt_for(python)}\n",
+ " ]\n",
+ " "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 15,
+ "id": "c6190659-f54c-4951-bef4-4960f8e51cc4",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def write_output(code):\n",
+ " with open(f\"main.{extension}\", \"w\") as f:\n",
+ " f.write(code)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 16,
+ "id": "e7d2fea8-74c6-4421-8f1e-0e76d5b201b9",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def port(model, python):\n",
+ " client = clients[model]\n",
+ " reasoning_effort = \"high\" if 'gpt' in model else None\n",
+ " response = client.chat.completions.create(model=model, messages=messages_for(python), reasoning_effort=reasoning_effort)\n",
+ " reply = response.choices[0].message.content\n",
+ " reply = reply.replace('```cpp','').replace('```rust','').replace('```','')\n",
+ " return reply"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 17,
+ "id": "7fe1cd4b-d2c5-4303-afed-2115a3fef200",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def run_python(code):\n",
+ " globals_dict = {\"__builtins__\": __builtins__}\n",
+ "\n",
+ " buffer = io.StringIO()\n",
+ " old_stdout = sys.stdout\n",
+ " sys.stdout = buffer\n",
+ "\n",
+ " try:\n",
+ " exec(code, globals_dict)\n",
+ " output = buffer.getvalue()\n",
+ " except Exception as e:\n",
+ " output = f\"Error: {e}\"\n",
+ " finally:\n",
+ " sys.stdout = old_stdout\n",
+ "\n",
+ " return output"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 21,
+ "id": "4194e40c-04ab-4940-9d64-b4ad37c5bb40",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Use the commands from GPT 5\n",
+ "\n",
+ "def compile_and_run(code):\n",
+ " write_output(code)\n",
+ " try:\n",
+ " subprocess.run(compile_command, check=True, text=True, capture_output=True)\n",
+ " run_result = subprocess.run(run_command, check=True, text=True, capture_output=True)\n",
+ " return run_result.stdout\n",
+ " except subprocess.CalledProcessError as e:\n",
+ " return f\"An error occurred:\\n{e.stderr}\""
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 24,
+ "id": "e0e78d53-86bc-4948-8654-cfc29e2f4c72",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Defining value for python_hard\n",
+ "\n",
+ "python_hard = \"\"\"# Be careful to support large numbers\n",
+ "\n",
+ "def lcg(seed, a=1664525, c=1013904223, m=2**32):\n",
+ " value = seed\n",
+ " while True:\n",
+ " value = (a * value + c) % m\n",
+ " yield value\n",
+ " \n",
+ "def max_subarray_sum(n, seed, min_val, max_val):\n",
+ " lcg_gen = lcg(seed)\n",
+ " random_numbers = [next(lcg_gen) % (max_val - min_val + 1) + min_val for _ in range(n)]\n",
+ " max_sum = float('-inf')\n",
+ " for i in range(n):\n",
+ " current_sum = 0\n",
+ " for j in range(i, n):\n",
+ " current_sum += random_numbers[j]\n",
+ " if current_sum > max_sum:\n",
+ " max_sum = current_sum\n",
+ " return max_sum\n",
+ "\n",
+ "def total_max_subarray_sum(n, initial_seed, min_val, max_val):\n",
+ " total_sum = 0\n",
+ " lcg_gen = lcg(initial_seed)\n",
+ " for _ in range(20):\n",
+ " seed = next(lcg_gen)\n",
+ " total_sum += max_subarray_sum(n, seed, min_val, max_val)\n",
+ " return total_sum\n",
+ "\n",
+ "# Parameters\n",
+ "n = 10000 # Number of random numbers\n",
+ "initial_seed = 42 # Initial seed for the LCG\n",
+ "min_val = -10 # Minimum value of random numbers\n",
+ "max_val = 10 # Maximum value of random numbers\n",
+ "\n",
+ "# Timing the function\n",
+ "import time\n",
+ "start_time = time.time()\n",
+ "result = total_max_subarray_sum(n, initial_seed, min_val, max_val)\n",
+ "end_time = time.time()\n",
+ "\n",
+ "print(\"Total Maximum Subarray Sum (20 runs):\", result)\n",
+ "print(\"Execution Time: {:.6f} seconds\".format(end_time - start_time))\n",
+ "\"\"\""
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "c3b497b3-f569-420e-b92e-fb0f49957ce0",
+ "metadata": {},
+ "source": [
+ "python_hard = \"\"\"# Be careful to support large numbers\n",
+ "\n",
+ "def lcg(seed, a=1664525, c=1013904223, m=2**32):\n",
+ " value = seed\n",
+ " while True:\n",
+ " value = (a * value + c) % m\n",
+ " yield value\n",
+ " \n",
+ "def max_subarray_sum(n, seed, min_val, max_val):\n",
+ " lcg_gen = lcg(seed)\n",
+ " random_numbers = [next(lcg_gen) % (max_val - min_val + 1) + min_val for _ in range(n)]\n",
+ " max_sum = float('-inf')\n",
+ " for i in range(n):\n",
+ " current_sum = 0\n",
+ " for j in range(i, n):\n",
+ " current_sum += random_numbers[j]\n",
+ " if current_sum > max_sum:\n",
+ " max_sum = current_sum\n",
+ " return max_sum\n",
+ "\n",
+ "def total_max_subarray_sum(n, initial_seed, min_val, max_val):\n",
+ " total_sum = 0\n",
+ " lcg_gen = lcg(initial_seed)\n",
+ " for _ in range(20):\n",
+ " seed = next(lcg_gen)\n",
+ " total_sum += max_subarray_sum(n, seed, min_val, max_val)\n",
+ " return total_sum\n",
+ "\n",
+ "# Parameters\n",
+ "n = 10000 # Number of random numbers\n",
+ "initial_seed = 42 # Initial seed for the LCG\n",
+ "min_val = -10 # Minimum value of random numbers\n",
+ "max_val = 10 # Maximum value of random numbers\n",
+ "\n",
+ "# Timing the function\n",
+ "import time\n",
+ "start_time = time.time()\n",
+ "result = total_max_subarray_sum(n, initial_seed, min_val, max_val)\n",
+ "end_time = time.time()\n",
+ "\n",
+ "print(\"Total Maximum Subarray Sum (20 runs):\", result)\n",
+ "print(\"Execution Time: {:.6f} seconds\".format(end_time - start_time))\n",
+ "\"\"\""
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 25,
+ "id": "465d6cad",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "* Running on local URL: http://127.0.0.1:7863\n",
+ "* To create a public link, set `share=True` in `launch()`.\n"
+ ]
+ },
+ {
+ "data": {
+ "text/html": [
+ ""
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "text/plain": []
+ },
+ "execution_count": 25,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "Traceback (most recent call last):\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/queueing.py\", line 759, in process_events\n",
+ " response = await route_utils.call_process_api(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/route_utils.py\", line 354, in call_process_api\n",
+ " output = await app.get_blocks().process_api(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/blocks.py\", line 2116, in process_api\n",
+ " result = await self.call_function(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/blocks.py\", line 1623, in call_function\n",
+ " prediction = await anyio.to_thread.run_sync( # type: ignore\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/to_thread.py\", line 56, in run_sync\n",
+ " return await get_async_backend().run_sync_in_worker_thread(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/_backends/_asyncio.py\", line 2485, in run_sync_in_worker_thread\n",
+ " return await future\n",
+ " ^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/_backends/_asyncio.py\", line 976, in run\n",
+ " result = context.run(func, *args)\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/utils.py\", line 915, in wrapper\n",
+ " response = f(*args, **kwargs)\n",
+ " ^^^^^^^^^^^^^^^^^^\n",
+ " File \"/var/folders/4b/hxjfyqbj3n37xcfpsg9k7mfr0000gn/T/ipykernel_14396/3180071184.py\", line 6, in compile_and_run\n",
+ " subprocess.run(compile_command, check=True, text=True, capture_output=True)\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 548, in run\n",
+ " with Popen(*popenargs, **kwargs) as process:\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 1026, in __init__\n",
+ " self._execute_child(args, executable, preexec_fn, close_fds,\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 1955, in _execute_child\n",
+ " raise child_exception_type(errno_num, err_msg, err_filename)\n",
+ "FileNotFoundError: [Errno 2] No such file or directory: '/Users/ed/.cargo/bin/rustc'\n",
+ "Traceback (most recent call last):\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/queueing.py\", line 759, in process_events\n",
+ " response = await route_utils.call_process_api(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/route_utils.py\", line 354, in call_process_api\n",
+ " output = await app.get_blocks().process_api(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/blocks.py\", line 2116, in process_api\n",
+ " result = await self.call_function(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/blocks.py\", line 1623, in call_function\n",
+ " prediction = await anyio.to_thread.run_sync( # type: ignore\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/to_thread.py\", line 56, in run_sync\n",
+ " return await get_async_backend().run_sync_in_worker_thread(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/_backends/_asyncio.py\", line 2485, in run_sync_in_worker_thread\n",
+ " return await future\n",
+ " ^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/_backends/_asyncio.py\", line 976, in run\n",
+ " result = context.run(func, *args)\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/utils.py\", line 915, in wrapper\n",
+ " response = f(*args, **kwargs)\n",
+ " ^^^^^^^^^^^^^^^^^^\n",
+ " File \"/var/folders/4b/hxjfyqbj3n37xcfpsg9k7mfr0000gn/T/ipykernel_14396/3180071184.py\", line 6, in compile_and_run\n",
+ " subprocess.run(compile_command, check=True, text=True, capture_output=True)\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 548, in run\n",
+ " with Popen(*popenargs, **kwargs) as process:\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 1026, in __init__\n",
+ " self._execute_child(args, executable, preexec_fn, close_fds,\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 1955, in _execute_child\n",
+ " raise child_exception_type(errno_num, err_msg, err_filename)\n",
+ "FileNotFoundError: [Errno 2] No such file or directory: '/Users/ed/.cargo/bin/rustc'\n",
+ "Traceback (most recent call last):\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/queueing.py\", line 759, in process_events\n",
+ " response = await route_utils.call_process_api(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/route_utils.py\", line 354, in call_process_api\n",
+ " output = await app.get_blocks().process_api(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/blocks.py\", line 2116, in process_api\n",
+ " result = await self.call_function(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/blocks.py\", line 1623, in call_function\n",
+ " prediction = await anyio.to_thread.run_sync( # type: ignore\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/to_thread.py\", line 56, in run_sync\n",
+ " return await get_async_backend().run_sync_in_worker_thread(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/_backends/_asyncio.py\", line 2485, in run_sync_in_worker_thread\n",
+ " return await future\n",
+ " ^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/_backends/_asyncio.py\", line 976, in run\n",
+ " result = context.run(func, *args)\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/utils.py\", line 915, in wrapper\n",
+ " response = f(*args, **kwargs)\n",
+ " ^^^^^^^^^^^^^^^^^^\n",
+ " File \"/var/folders/4b/hxjfyqbj3n37xcfpsg9k7mfr0000gn/T/ipykernel_14396/3180071184.py\", line 6, in compile_and_run\n",
+ " subprocess.run(compile_command, check=True, text=True, capture_output=True)\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 548, in run\n",
+ " with Popen(*popenargs, **kwargs) as process:\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 1026, in __init__\n",
+ " self._execute_child(args, executable, preexec_fn, close_fds,\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 1955, in _execute_child\n",
+ " raise child_exception_type(errno_num, err_msg, err_filename)\n",
+ "FileNotFoundError: [Errno 2] No such file or directory: '/Users/ed/.cargo/bin/rustc'\n",
+ "Traceback (most recent call last):\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/queueing.py\", line 759, in process_events\n",
+ " response = await route_utils.call_process_api(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/route_utils.py\", line 354, in call_process_api\n",
+ " output = await app.get_blocks().process_api(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/blocks.py\", line 2116, in process_api\n",
+ " result = await self.call_function(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/blocks.py\", line 1623, in call_function\n",
+ " prediction = await anyio.to_thread.run_sync( # type: ignore\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/to_thread.py\", line 56, in run_sync\n",
+ " return await get_async_backend().run_sync_in_worker_thread(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/_backends/_asyncio.py\", line 2485, in run_sync_in_worker_thread\n",
+ " return await future\n",
+ " ^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/_backends/_asyncio.py\", line 976, in run\n",
+ " result = context.run(func, *args)\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/utils.py\", line 915, in wrapper\n",
+ " response = f(*args, **kwargs)\n",
+ " ^^^^^^^^^^^^^^^^^^\n",
+ " File \"/var/folders/4b/hxjfyqbj3n37xcfpsg9k7mfr0000gn/T/ipykernel_14396/3180071184.py\", line 6, in compile_and_run\n",
+ " subprocess.run(compile_command, check=True, text=True, capture_output=True)\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 548, in run\n",
+ " with Popen(*popenargs, **kwargs) as process:\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 1026, in __init__\n",
+ " self._execute_child(args, executable, preexec_fn, close_fds,\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 1955, in _execute_child\n",
+ " raise child_exception_type(errno_num, err_msg, err_filename)\n",
+ "FileNotFoundError: [Errno 2] No such file or directory: '/Users/ed/.cargo/bin/rustc'\n",
+ "Traceback (most recent call last):\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/queueing.py\", line 759, in process_events\n",
+ " response = await route_utils.call_process_api(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/route_utils.py\", line 354, in call_process_api\n",
+ " output = await app.get_blocks().process_api(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/blocks.py\", line 2116, in process_api\n",
+ " result = await self.call_function(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/blocks.py\", line 1623, in call_function\n",
+ " prediction = await anyio.to_thread.run_sync( # type: ignore\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/to_thread.py\", line 56, in run_sync\n",
+ " return await get_async_backend().run_sync_in_worker_thread(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/_backends/_asyncio.py\", line 2485, in run_sync_in_worker_thread\n",
+ " return await future\n",
+ " ^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/_backends/_asyncio.py\", line 976, in run\n",
+ " result = context.run(func, *args)\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/utils.py\", line 915, in wrapper\n",
+ " response = f(*args, **kwargs)\n",
+ " ^^^^^^^^^^^^^^^^^^\n",
+ " File \"/var/folders/4b/hxjfyqbj3n37xcfpsg9k7mfr0000gn/T/ipykernel_14396/3180071184.py\", line 6, in compile_and_run\n",
+ " subprocess.run(compile_command, check=True, text=True, capture_output=True)\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 548, in run\n",
+ " with Popen(*popenargs, **kwargs) as process:\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 1026, in __init__\n",
+ " self._execute_child(args, executable, preexec_fn, close_fds,\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 1955, in _execute_child\n",
+ " raise child_exception_type(errno_num, err_msg, err_filename)\n",
+ "FileNotFoundError: [Errno 2] No such file or directory: '/Users/ed/.cargo/bin/rustc'\n",
+ "Traceback (most recent call last):\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/queueing.py\", line 759, in process_events\n",
+ " response = await route_utils.call_process_api(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/route_utils.py\", line 354, in call_process_api\n",
+ " output = await app.get_blocks().process_api(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/blocks.py\", line 2116, in process_api\n",
+ " result = await self.call_function(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/blocks.py\", line 1623, in call_function\n",
+ " prediction = await anyio.to_thread.run_sync( # type: ignore\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/to_thread.py\", line 56, in run_sync\n",
+ " return await get_async_backend().run_sync_in_worker_thread(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/_backends/_asyncio.py\", line 2485, in run_sync_in_worker_thread\n",
+ " return await future\n",
+ " ^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/_backends/_asyncio.py\", line 976, in run\n",
+ " result = context.run(func, *args)\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/utils.py\", line 915, in wrapper\n",
+ " response = f(*args, **kwargs)\n",
+ " ^^^^^^^^^^^^^^^^^^\n",
+ " File \"/var/folders/4b/hxjfyqbj3n37xcfpsg9k7mfr0000gn/T/ipykernel_14396/3180071184.py\", line 6, in compile_and_run\n",
+ " subprocess.run(compile_command, check=True, text=True, capture_output=True)\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 548, in run\n",
+ " with Popen(*popenargs, **kwargs) as process:\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 1026, in __init__\n",
+ " self._execute_child(args, executable, preexec_fn, close_fds,\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 1955, in _execute_child\n",
+ " raise child_exception_type(errno_num, err_msg, err_filename)\n",
+ "FileNotFoundError: [Errno 2] No such file or directory: '/Users/ed/.cargo/bin/rustc'\n",
+ "Traceback (most recent call last):\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/queueing.py\", line 759, in process_events\n",
+ " response = await route_utils.call_process_api(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/route_utils.py\", line 354, in call_process_api\n",
+ " output = await app.get_blocks().process_api(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/blocks.py\", line 2116, in process_api\n",
+ " result = await self.call_function(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/blocks.py\", line 1623, in call_function\n",
+ " prediction = await anyio.to_thread.run_sync( # type: ignore\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/to_thread.py\", line 56, in run_sync\n",
+ " return await get_async_backend().run_sync_in_worker_thread(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/_backends/_asyncio.py\", line 2485, in run_sync_in_worker_thread\n",
+ " return await future\n",
+ " ^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/_backends/_asyncio.py\", line 976, in run\n",
+ " result = context.run(func, *args)\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/utils.py\", line 915, in wrapper\n",
+ " response = f(*args, **kwargs)\n",
+ " ^^^^^^^^^^^^^^^^^^\n",
+ " File \"/var/folders/4b/hxjfyqbj3n37xcfpsg9k7mfr0000gn/T/ipykernel_14396/3180071184.py\", line 6, in compile_and_run\n",
+ " subprocess.run(compile_command, check=True, text=True, capture_output=True)\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 548, in run\n",
+ " with Popen(*popenargs, **kwargs) as process:\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 1026, in __init__\n",
+ " self._execute_child(args, executable, preexec_fn, close_fds,\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 1955, in _execute_child\n",
+ " raise child_exception_type(errno_num, err_msg, err_filename)\n",
+ "FileNotFoundError: [Errno 2] No such file or directory: '/Users/ed/.cargo/bin/rustc'\n",
+ "Traceback (most recent call last):\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/queueing.py\", line 759, in process_events\n",
+ " response = await route_utils.call_process_api(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/route_utils.py\", line 354, in call_process_api\n",
+ " output = await app.get_blocks().process_api(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/blocks.py\", line 2116, in process_api\n",
+ " result = await self.call_function(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/blocks.py\", line 1623, in call_function\n",
+ " prediction = await anyio.to_thread.run_sync( # type: ignore\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/to_thread.py\", line 56, in run_sync\n",
+ " return await get_async_backend().run_sync_in_worker_thread(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/_backends/_asyncio.py\", line 2485, in run_sync_in_worker_thread\n",
+ " return await future\n",
+ " ^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/_backends/_asyncio.py\", line 976, in run\n",
+ " result = context.run(func, *args)\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/utils.py\", line 915, in wrapper\n",
+ " response = f(*args, **kwargs)\n",
+ " ^^^^^^^^^^^^^^^^^^\n",
+ " File \"/var/folders/4b/hxjfyqbj3n37xcfpsg9k7mfr0000gn/T/ipykernel_14396/3180071184.py\", line 6, in compile_and_run\n",
+ " subprocess.run(compile_command, check=True, text=True, capture_output=True)\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 548, in run\n",
+ " with Popen(*popenargs, **kwargs) as process:\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 1026, in __init__\n",
+ " self._execute_child(args, executable, preexec_fn, close_fds,\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 1955, in _execute_child\n",
+ " raise child_exception_type(errno_num, err_msg, err_filename)\n",
+ "FileNotFoundError: [Errno 2] No such file or directory: '/Users/ed/.cargo/bin/rustc'\n",
+ "Traceback (most recent call last):\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/queueing.py\", line 759, in process_events\n",
+ " response = await route_utils.call_process_api(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/route_utils.py\", line 354, in call_process_api\n",
+ " output = await app.get_blocks().process_api(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/blocks.py\", line 2116, in process_api\n",
+ " result = await self.call_function(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/blocks.py\", line 1623, in call_function\n",
+ " prediction = await anyio.to_thread.run_sync( # type: ignore\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/to_thread.py\", line 56, in run_sync\n",
+ " return await get_async_backend().run_sync_in_worker_thread(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/_backends/_asyncio.py\", line 2485, in run_sync_in_worker_thread\n",
+ " return await future\n",
+ " ^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/_backends/_asyncio.py\", line 976, in run\n",
+ " result = context.run(func, *args)\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/utils.py\", line 915, in wrapper\n",
+ " response = f(*args, **kwargs)\n",
+ " ^^^^^^^^^^^^^^^^^^\n",
+ " File \"/var/folders/4b/hxjfyqbj3n37xcfpsg9k7mfr0000gn/T/ipykernel_14396/3180071184.py\", line 6, in compile_and_run\n",
+ " subprocess.run(compile_command, check=True, text=True, capture_output=True)\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 548, in run\n",
+ " with Popen(*popenargs, **kwargs) as process:\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 1026, in __init__\n",
+ " self._execute_child(args, executable, preexec_fn, close_fds,\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 1955, in _execute_child\n",
+ " raise child_exception_type(errno_num, err_msg, err_filename)\n",
+ "FileNotFoundError: [Errno 2] No such file or directory: '/Users/ed/.cargo/bin/rustc'\n",
+ "Traceback (most recent call last):\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/queueing.py\", line 759, in process_events\n",
+ " response = await route_utils.call_process_api(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/route_utils.py\", line 354, in call_process_api\n",
+ " output = await app.get_blocks().process_api(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/blocks.py\", line 2116, in process_api\n",
+ " result = await self.call_function(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/blocks.py\", line 1623, in call_function\n",
+ " prediction = await anyio.to_thread.run_sync( # type: ignore\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/to_thread.py\", line 56, in run_sync\n",
+ " return await get_async_backend().run_sync_in_worker_thread(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/_backends/_asyncio.py\", line 2485, in run_sync_in_worker_thread\n",
+ " return await future\n",
+ " ^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/_backends/_asyncio.py\", line 976, in run\n",
+ " result = context.run(func, *args)\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/utils.py\", line 915, in wrapper\n",
+ " response = f(*args, **kwargs)\n",
+ " ^^^^^^^^^^^^^^^^^^\n",
+ " File \"/var/folders/4b/hxjfyqbj3n37xcfpsg9k7mfr0000gn/T/ipykernel_14396/3180071184.py\", line 6, in compile_and_run\n",
+ " subprocess.run(compile_command, check=True, text=True, capture_output=True)\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 548, in run\n",
+ " with Popen(*popenargs, **kwargs) as process:\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 1026, in __init__\n",
+ " self._execute_child(args, executable, preexec_fn, close_fds,\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 1955, in _execute_child\n",
+ " raise child_exception_type(errno_num, err_msg, err_filename)\n",
+ "FileNotFoundError: [Errno 2] No such file or directory: '/Users/ed/.cargo/bin/rustc'\n",
+ "Traceback (most recent call last):\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/queueing.py\", line 759, in process_events\n",
+ " response = await route_utils.call_process_api(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/route_utils.py\", line 354, in call_process_api\n",
+ " output = await app.get_blocks().process_api(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/blocks.py\", line 2116, in process_api\n",
+ " result = await self.call_function(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/blocks.py\", line 1623, in call_function\n",
+ " prediction = await anyio.to_thread.run_sync( # type: ignore\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/to_thread.py\", line 56, in run_sync\n",
+ " return await get_async_backend().run_sync_in_worker_thread(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/_backends/_asyncio.py\", line 2485, in run_sync_in_worker_thread\n",
+ " return await future\n",
+ " ^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/_backends/_asyncio.py\", line 976, in run\n",
+ " result = context.run(func, *args)\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/utils.py\", line 915, in wrapper\n",
+ " response = f(*args, **kwargs)\n",
+ " ^^^^^^^^^^^^^^^^^^\n",
+ " File \"/var/folders/4b/hxjfyqbj3n37xcfpsg9k7mfr0000gn/T/ipykernel_14396/3180071184.py\", line 6, in compile_and_run\n",
+ " subprocess.run(compile_command, check=True, text=True, capture_output=True)\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 548, in run\n",
+ " with Popen(*popenargs, **kwargs) as process:\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 1026, in __init__\n",
+ " self._execute_child(args, executable, preexec_fn, close_fds,\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 1955, in _execute_child\n",
+ " raise child_exception_type(errno_num, err_msg, err_filename)\n",
+ "FileNotFoundError: [Errno 2] No such file or directory: '/Users/ed/.cargo/bin/rustc'\n",
+ "Traceback (most recent call last):\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/queueing.py\", line 759, in process_events\n",
+ " response = await route_utils.call_process_api(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/route_utils.py\", line 354, in call_process_api\n",
+ " output = await app.get_blocks().process_api(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/blocks.py\", line 2116, in process_api\n",
+ " result = await self.call_function(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/blocks.py\", line 1623, in call_function\n",
+ " prediction = await anyio.to_thread.run_sync( # type: ignore\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/to_thread.py\", line 56, in run_sync\n",
+ " return await get_async_backend().run_sync_in_worker_thread(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/_backends/_asyncio.py\", line 2485, in run_sync_in_worker_thread\n",
+ " return await future\n",
+ " ^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/_backends/_asyncio.py\", line 976, in run\n",
+ " result = context.run(func, *args)\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/utils.py\", line 915, in wrapper\n",
+ " response = f(*args, **kwargs)\n",
+ " ^^^^^^^^^^^^^^^^^^\n",
+ " File \"/var/folders/4b/hxjfyqbj3n37xcfpsg9k7mfr0000gn/T/ipykernel_14396/3180071184.py\", line 6, in compile_and_run\n",
+ " subprocess.run(compile_command, check=True, text=True, capture_output=True)\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 548, in run\n",
+ " with Popen(*popenargs, **kwargs) as process:\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 1026, in __init__\n",
+ " self._execute_child(args, executable, preexec_fn, close_fds,\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 1955, in _execute_child\n",
+ " raise child_exception_type(errno_num, err_msg, err_filename)\n",
+ "FileNotFoundError: [Errno 2] No such file or directory: '/Users/ed/.cargo/bin/rustc'\n",
+ "Traceback (most recent call last):\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/queueing.py\", line 759, in process_events\n",
+ " response = await route_utils.call_process_api(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/route_utils.py\", line 354, in call_process_api\n",
+ " output = await app.get_blocks().process_api(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/blocks.py\", line 2116, in process_api\n",
+ " result = await self.call_function(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/blocks.py\", line 1623, in call_function\n",
+ " prediction = await anyio.to_thread.run_sync( # type: ignore\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/to_thread.py\", line 56, in run_sync\n",
+ " return await get_async_backend().run_sync_in_worker_thread(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/_backends/_asyncio.py\", line 2485, in run_sync_in_worker_thread\n",
+ " return await future\n",
+ " ^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/_backends/_asyncio.py\", line 976, in run\n",
+ " result = context.run(func, *args)\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/utils.py\", line 915, in wrapper\n",
+ " response = f(*args, **kwargs)\n",
+ " ^^^^^^^^^^^^^^^^^^\n",
+ " File \"/var/folders/4b/hxjfyqbj3n37xcfpsg9k7mfr0000gn/T/ipykernel_14396/3180071184.py\", line 6, in compile_and_run\n",
+ " subprocess.run(compile_command, check=True, text=True, capture_output=True)\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 548, in run\n",
+ " with Popen(*popenargs, **kwargs) as process:\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 1026, in __init__\n",
+ " self._execute_child(args, executable, preexec_fn, close_fds,\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 1955, in _execute_child\n",
+ " raise child_exception_type(errno_num, err_msg, err_filename)\n",
+ "FileNotFoundError: [Errno 2] No such file or directory: '/Users/ed/.cargo/bin/rustc'\n",
+ "Traceback (most recent call last):\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/queueing.py\", line 759, in process_events\n",
+ " response = await route_utils.call_process_api(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/route_utils.py\", line 354, in call_process_api\n",
+ " output = await app.get_blocks().process_api(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/blocks.py\", line 2116, in process_api\n",
+ " result = await self.call_function(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/blocks.py\", line 1623, in call_function\n",
+ " prediction = await anyio.to_thread.run_sync( # type: ignore\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/to_thread.py\", line 56, in run_sync\n",
+ " return await get_async_backend().run_sync_in_worker_thread(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/_backends/_asyncio.py\", line 2485, in run_sync_in_worker_thread\n",
+ " return await future\n",
+ " ^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/_backends/_asyncio.py\", line 976, in run\n",
+ " result = context.run(func, *args)\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/utils.py\", line 915, in wrapper\n",
+ " response = f(*args, **kwargs)\n",
+ " ^^^^^^^^^^^^^^^^^^\n",
+ " File \"/var/folders/4b/hxjfyqbj3n37xcfpsg9k7mfr0000gn/T/ipykernel_14396/3180071184.py\", line 6, in compile_and_run\n",
+ " subprocess.run(compile_command, check=True, text=True, capture_output=True)\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 548, in run\n",
+ " with Popen(*popenargs, **kwargs) as process:\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 1026, in __init__\n",
+ " self._execute_child(args, executable, preexec_fn, close_fds,\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 1955, in _execute_child\n",
+ " raise child_exception_type(errno_num, err_msg, err_filename)\n",
+ "FileNotFoundError: [Errno 2] No such file or directory: '/Users/ed/.cargo/bin/rustc'\n",
+ "Traceback (most recent call last):\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/queueing.py\", line 759, in process_events\n",
+ " response = await route_utils.call_process_api(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/route_utils.py\", line 354, in call_process_api\n",
+ " output = await app.get_blocks().process_api(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/blocks.py\", line 2116, in process_api\n",
+ " result = await self.call_function(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/blocks.py\", line 1623, in call_function\n",
+ " prediction = await anyio.to_thread.run_sync( # type: ignore\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/to_thread.py\", line 56, in run_sync\n",
+ " return await get_async_backend().run_sync_in_worker_thread(\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/_backends/_asyncio.py\", line 2485, in run_sync_in_worker_thread\n",
+ " return await future\n",
+ " ^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/anyio/_backends/_asyncio.py\", line 976, in run\n",
+ " result = context.run(func, *args)\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/site-packages/gradio/utils.py\", line 915, in wrapper\n",
+ " response = f(*args, **kwargs)\n",
+ " ^^^^^^^^^^^^^^^^^^\n",
+ " File \"/var/folders/4b/hxjfyqbj3n37xcfpsg9k7mfr0000gn/T/ipykernel_14396/3180071184.py\", line 6, in compile_and_run\n",
+ " subprocess.run(compile_command, check=True, text=True, capture_output=True)\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 548, in run\n",
+ " with Popen(*popenargs, **kwargs) as process:\n",
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 1026, in __init__\n",
+ " self._execute_child(args, executable, preexec_fn, close_fds,\n",
+ " File \"/opt/anaconda3/envs/llms/lib/python3.11/subprocess.py\", line 1955, in _execute_child\n",
+ " raise child_exception_type(errno_num, err_msg, err_filename)\n",
+ "FileNotFoundError: [Errno 2] No such file or directory: '/Users/ed/.cargo/bin/rustc'\n"
+ ]
+ }
+ ],
+ "source": [
+ "from styles import CSS\n",
+ "\n",
+ "with gr.Blocks(css=CSS, theme=gr.themes.Monochrome(), title=f\"Port from Python to {language}\") as ui:\n",
+ " with gr.Row(equal_height=True):\n",
+ " with gr.Column(scale=6):\n",
+ " python = gr.Code(\n",
+ " label=\"Python (original)\",\n",
+ " value=python_hard,\n",
+ " language=\"python\",\n",
+ " lines=26\n",
+ " )\n",
+ " with gr.Column(scale=6):\n",
+ " cpp = gr.Code(\n",
+ " label=f\"{language} (generated)\",\n",
+ " value=\"\",\n",
+ " language=\"cpp\",\n",
+ " lines=26\n",
+ " )\n",
+ "\n",
+ " with gr.Row(elem_classes=[\"controls\"]):\n",
+ " python_run = gr.Button(\"Run Python\", elem_classes=[\"run-btn\", \"py\"])\n",
+ " model = gr.Dropdown(models, value=models[0], show_label=False)\n",
+ " convert = gr.Button(f\"Port to {language}\", elem_classes=[\"convert-btn\"])\n",
+ " cpp_run = gr.Button(f\"Run {language}\", elem_classes=[\"run-btn\", \"cpp\"])\n",
+ "\n",
+ " with gr.Row(equal_height=True):\n",
+ " with gr.Column(scale=6):\n",
+ " python_out = gr.TextArea(label=\"Python result\", lines=8, elem_classes=[\"py-out\"])\n",
+ " with gr.Column(scale=6):\n",
+ " cpp_out = gr.TextArea(label=f\"{language} result\", lines=8, elem_classes=[\"cpp-out\"])\n",
+ "\n",
+ " convert.click(fn=port, inputs=[model, python], outputs=[cpp])\n",
+ " python_run.click(fn=run_python, inputs=[python], outputs=[python_out])\n",
+ " cpp_run.click(fn=compile_and_run, inputs=[cpp], outputs=[cpp_out])\n",
+ "\n",
+ "ui.launch(inbrowser=True)\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "2311ada8",
+ "metadata": {},
+ "source": [
+ "## RESULTS!\n",
+ "\n",
+ "Qwen 2.5 Coder: FAIL \n",
+ "Gemini 2.5 Pro: FAIL \n",
+ "DeepSeek Coder v2: FAIL \n",
+ "Qwen3 Coder 30B: FAIL \n",
+ "Claude Sonnet 4.5: FAIL \n",
+ "GPT-5: FAIL \n",
+ "\n",
+ "3rd place: GPT-oss-20B: 0.000341 \n",
+ "2nd place: Grok 4: 0.000317 \n",
+ "**1st place: OpenAI GPT-OSS 120B: 0.000304** "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "b9b51dc7",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+    "print(f\"In Ed's experiment, the GPT-OSS 120B model outcome is {33.755209/0.000304:,.0f} times faster than the Python code.\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "6197bb97",
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.11.14"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/week4/community-contributions/bharat_puri/docstring_generator.ipynb b/week4/community-contributions/bharat_puri/docstring_generator.ipynb
new file mode 100644
index 0000000..7ab37f8
--- /dev/null
+++ b/week4/community-contributions/bharat_puri/docstring_generator.ipynb
@@ -0,0 +1,759 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "4a6ab9a2-28a2-445d-8512-a0dc8d1b54e9",
+ "metadata": {},
+ "source": [
+ "# Code DocString / Comment Generator\n",
+ "\n",
+ "Submitted By : Bharat Puri\n",
+ "\n",
+ "Goal: Build a code tool that scans Python modules, finds functions/classes\n",
+ "without docstrings, and uses an LLM (Claude / GPT / Gemini / Qwen etc.)\n",
+ "to generate high-quality Google or NumPy style docstrings."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 20,
+ "id": "e610bf56-a46e-4aff-8de1-ab49d62b1ad3",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# imports\n",
+ "\n",
+ "import os\n",
+ "import io\n",
+ "import sys\n",
+ "import re\n",
+ "from dotenv import load_dotenv\n",
+ "import sys\n",
+ "sys.path.append(os.path.abspath(os.path.join(\"..\", \"..\"))) \n",
+ "from openai import OpenAI\n",
+ "import gradio as gr\n",
+ "import subprocess\n",
+ "from IPython.display import Markdown, display\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "id": "4f672e1c-87e9-4865-b760-370fa605e614",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "OpenAI API Key exists and begins sk-proj-\n",
+ "Anthropic API Key not set (and this is optional)\n",
+ "Google API Key not set (and this is optional)\n",
+ "Grok API Key not set (and this is optional)\n",
+ "Groq API Key not set (and this is optional)\n",
+ "OpenRouter API Key not set (and this is optional)\n"
+ ]
+ }
+ ],
+ "source": [
+ "load_dotenv(override=True)\n",
+ "openai_api_key = os.getenv('OPENAI_API_KEY')\n",
+ "anthropic_api_key = os.getenv('ANTHROPIC_API_KEY')\n",
+ "google_api_key = os.getenv('GOOGLE_API_KEY')\n",
+ "grok_api_key = os.getenv('GROK_API_KEY')\n",
+ "groq_api_key = os.getenv('GROQ_API_KEY')\n",
+ "openrouter_api_key = os.getenv('OPENROUTER_API_KEY')\n",
+ "\n",
+ "if openai_api_key:\n",
+ " print(f\"OpenAI API Key exists and begins {openai_api_key[:8]}\")\n",
+ "else:\n",
+ " print(\"OpenAI API Key not set\")\n",
+ " \n",
+ "if anthropic_api_key:\n",
+ " print(f\"Anthropic API Key exists and begins {anthropic_api_key[:7]}\")\n",
+ "else:\n",
+ " print(\"Anthropic API Key not set (and this is optional)\")\n",
+ "\n",
+ "if google_api_key:\n",
+ " print(f\"Google API Key exists and begins {google_api_key[:2]}\")\n",
+ "else:\n",
+ " print(\"Google API Key not set (and this is optional)\")\n",
+ "\n",
+ "if grok_api_key:\n",
+ " print(f\"Grok API Key exists and begins {grok_api_key[:4]}\")\n",
+ "else:\n",
+ " print(\"Grok API Key not set (and this is optional)\")\n",
+ "\n",
+ "if groq_api_key:\n",
+ " print(f\"Groq API Key exists and begins {groq_api_key[:4]}\")\n",
+ "else:\n",
+ " print(\"Groq API Key not set (and this is optional)\")\n",
+ "\n",
+ "if openrouter_api_key:\n",
+ " print(f\"OpenRouter API Key exists and begins {openrouter_api_key[:6]}\")\n",
+ "else:\n",
+ " print(\"OpenRouter API Key not set (and this is optional)\")\n",
+ "\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 18,
+ "id": "59863df1",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Connect to client libraries\n",
+ "\n",
+ "openai = OpenAI()\n",
+ "\n",
+ "anthropic_url = \"https://api.anthropic.com/v1/\"\n",
+ "gemini_url = \"https://generativelanguage.googleapis.com/v1beta/openai/\"\n",
+ "grok_url = \"https://api.x.ai/v1\"\n",
+ "groq_url = \"https://api.groq.com/openai/v1\"\n",
+ "ollama_url = \"http://localhost:11434/v1\"\n",
+ "openrouter_url = \"https://openrouter.ai/api/v1\"\n",
+ "\n",
+ "anthropic = OpenAI(api_key=anthropic_api_key, base_url=anthropic_url)\n",
+ "gemini = OpenAI(api_key=google_api_key, base_url=gemini_url)\n",
+ "grok = OpenAI(api_key=grok_api_key, base_url=grok_url)\n",
+ "groq = OpenAI(api_key=groq_api_key, base_url=groq_url)\n",
+ "ollama = OpenAI(api_key=\"ollama\", base_url=ollama_url)\n",
+ "openrouter = OpenAI(api_key=openrouter_api_key, base_url=openrouter_url)\n",
+ "\n",
+ "MODEL = os.getenv(\"DOCGEN_MODEL\", \"gpt-4o-mini\")\n",
+ "\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "id": "8aa149ed-9298-4d69-8fe2-8f5de0f667da",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "models = [\"gpt-5\", \"claude-sonnet-4-5-20250929\", \"grok-4\", \"gemini-2.5-pro\", \"qwen2.5-coder\", \"deepseek-coder-v2\", \"gpt-oss:20b\", \"qwen/qwen3-coder-30b-a3b-instruct\", \"openai/gpt-oss-120b\", ]\n",
+ "\n",
+ "clients = {\"gpt-5\": openai, \"claude-sonnet-4-5-20250929\": anthropic, \"grok-4\": grok, \"gemini-2.5-pro\": gemini, \"openai/gpt-oss-120b\": groq, \"qwen2.5-coder\": ollama, \"deepseek-coder-v2\": ollama, \"gpt-oss:20b\": ollama, \"qwen/qwen3-coder-30b-a3b-instruct\": openrouter}\n",
+ "\n",
+ "# Want to keep costs ultra-low? Replace this with models of your choice, using the examples from yesterday"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "id": "17b7d074-b1a4-4673-adec-918f82a4eff0",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# ================================================================\n",
+ "# Prompt Templates and Utilities\n",
+ "# ================================================================\n",
+ "\n",
+ "DOCSTYLE_TEMPLATES = {\n",
+ " \"google\": (\n",
+ " \"You will write a concise Google-style Python docstring for the given function or class.\\n\"\n",
+ " \"Rules:\\n\"\n",
+ " \"- One-line summary followed by short details.\\n\"\n",
+ " \"- Include Args:, Returns:, Raises: only if relevant.\\n\"\n",
+ " \"- Keep under 12 lines, no code fences or markdown formatting.\\n\"\n",
+ " \"Return ONLY the text between triple quotes.\"\n",
+ " ),\n",
+ "}\n",
+ "\n",
+ "SYSTEM_PROMPT = (\n",
+ " \"You are a senior Python engineer and technical writer. \"\n",
+ " \"Write precise, helpful docstrings.\"\n",
+ ")\n",
+ "\n",
+ "\n",
+ "def make_user_prompt(style: str, module_name: str, signature: str, code_context: str) -> str:\n",
+ " \"\"\"Build the user message for the model based on template and context.\"\"\"\n",
+ " instr = DOCSTYLE_TEMPLATES.get(style, DOCSTYLE_TEMPLATES[\"google\"])\n",
+ " prompt = (\n",
+ " f\"{instr}\\n\\n\"\n",
+ " f\"Module: {module_name}\\n\"\n",
+ " f\"Signature:\\n{signature}\\n\\n\"\n",
+ " f\"Code context:\\n{code_context}\\n\\n\"\n",
+ " \"Return ONLY a triple-quoted docstring, for example:\\n\"\n",
+ " '\"\"\"One-line summary.\\n\\n'\n",
+ " \"Args:\\n\"\n",
+ " \" x: Description\\n\"\n",
+ " \"Returns:\\n\"\n",
+ " \" y: Description\\n\"\n",
+ " '\"\"\"'\n",
+ " )\n",
+ " return prompt\n",
+ "\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 15,
+ "id": "16b3c10f-f7bc-4a2f-a22f-65c6807b7574",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# ================================================================\n",
+ "# LLM Chat Helper β OpenAI GPT\n",
+ "# ================================================================\n",
+ "\n",
+ "def llm_generate_docstring(signature: str, context: str, style: str = \"google\", module_name: str = \"module\") -> str:\n",
+ " \"\"\"\n",
+ " Sends a chat completion request to OpenAI GPT model to generate\n",
+ " a docstring based on code context and function signature.\n",
+ " \"\"\"\n",
+ " user_prompt = make_user_prompt(style, module_name, signature, context)\n",
+ "\n",
+ " response = openai.chat.completions.create(\n",
+ " model=MODEL,\n",
+ " temperature=0.2,\n",
+ " messages=[\n",
+ " {\"role\": \"system\", \"content\": \"You are a senior Python engineer and technical writer.\"},\n",
+ " {\"role\": \"user\", \"content\": user_prompt}\n",
+ " ]\n",
+ " )\n",
+ "\n",
+ " text = response.choices[0].message.content.strip()\n",
+ " # Extract only the text inside triple quotes if present\n",
+ " match = re.search(r'\"\"\"(.*?)\"\"\"', text, re.S)\n",
+ " return (match.group(1).strip() if match else text)\n",
+ "\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 11,
+ "id": "82da91ac-e563-4425-8b45-1b94880d342f",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# ================================================================\n",
+ "# π§± AST Parsing Utilities β find missing docstrings\n",
+ "# ================================================================\n",
+ "import ast\n",
+ "\n",
+ "def node_signature(node: ast.AST) -> str:\n",
+ " \"\"\"\n",
+ " Build a readable signature string from a FunctionDef or ClassDef node.\n",
+ " Example: def add(x, y) -> int:\n",
+ " \"\"\"\n",
+ " if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)):\n",
+ " args = [a.arg for a in node.args.args]\n",
+ " if node.args.vararg:\n",
+ " args.append(\"*\" + node.args.vararg.arg)\n",
+ " for a in node.args.kwonlyargs:\n",
+ " args.append(a.arg + \"=?\")\n",
+ " if node.args.kwarg:\n",
+ " args.append(\"**\" + node.args.kwarg.arg)\n",
+ " ret = \"\"\n",
+ " if getattr(node, \"returns\", None):\n",
+ " try:\n",
+ " ret = f\" -> {ast.unparse(node.returns)}\"\n",
+ " except Exception:\n",
+ " pass\n",
+ " return f\"def {node.name}({', '.join(args)}){ret}:\"\n",
+ "\n",
+ " elif isinstance(node, ast.ClassDef):\n",
+ " return f\"class {node.name}:\"\n",
+ "\n",
+ " return \"\"\n",
+ "\n",
+ "\n",
+ "def context_snippet(src: str, node: ast.AST, max_lines: int = 60) -> str:\n",
+ " \"\"\"\n",
+ " Extract a small snippet of source code around a node for context.\n",
+ " This helps the LLM understand what the function/class does.\n",
+ " \"\"\"\n",
+ " lines = src.splitlines()\n",
+ " start = getattr(node, \"lineno\", 1) - 1\n",
+ " end = getattr(node, \"end_lineno\", start + 1)\n",
+ " snippet = lines[start:end]\n",
+ " if len(snippet) > max_lines:\n",
+ " snippet = snippet[:max_lines] + [\"# ... (truncated) ...\"]\n",
+ " return \"\\n\".join(snippet)\n",
+ "\n",
+ "\n",
+ "def find_missing_docstrings(src: str):\n",
+ " \"\"\"\n",
+ " Parse the Python source code and return a list of nodes\n",
+ " (module, class, function) that do NOT have docstrings.\n",
+ " \"\"\"\n",
+ " tree = ast.parse(src)\n",
+ " missing = []\n",
+ "\n",
+ " # Module-level docstring check\n",
+ " if ast.get_docstring(tree) is None:\n",
+ " missing.append((\"module\", tree))\n",
+ "\n",
+ " # Walk through the AST for classes and functions\n",
+ " for node in ast.walk(tree):\n",
+ " if isinstance(node, (ast.ClassDef, ast.FunctionDef, ast.AsyncFunctionDef)):\n",
+ " if ast.get_docstring(node) is None:\n",
+ " kind = \"class\" if isinstance(node, ast.ClassDef) else \"function\"\n",
+ " missing.append((kind, node))\n",
+ "\n",
+ " return missing\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 12,
+ "id": "ea69108f-e4ca-4326-89fe-97c5748c0e79",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Missing docstring β module: \n",
+ "Missing docstring β function: def add(x, y):\n",
+ "Missing docstring β class: class Counter:\n",
+ "Missing docstring β function: def inc(self):\n"
+ ]
+ }
+ ],
+ "source": [
+ "## Quick Test ##\n",
+ "\n",
+ "code = '''\n",
+ "def add(x, y):\n",
+ " return x + y\n",
+ "\n",
+ "class Counter:\n",
+ " def inc(self):\n",
+ " self.total += 1\n",
+ "'''\n",
+ "\n",
+ "for kind, node in find_missing_docstrings(code):\n",
+ " print(f\"Missing docstring β {kind}: {node_signature(node)}\")\n",
+ "\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 16,
+ "id": "00d65b96-e65d-4e11-89be-06f265a5f2e3",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# ================================================================\n",
+ "# Insert Generated Docstrings into Code\n",
+ "# ================================================================\n",
+ "import difflib\n",
+ "import textwrap\n",
+ "\n",
+ "def insert_docstring(src: str, node: ast.AST, docstring: str) -> str:\n",
+ " \"\"\"\n",
+ " Insert a generated docstring inside a function/class node.\n",
+ " Keeps indentation consistent with the original code.\n",
+ " \"\"\"\n",
+ " lines = src.splitlines()\n",
+ " if not hasattr(node, \"body\") or not node.body:\n",
+ " return src # nothing to insert into\n",
+ "\n",
+ " start_idx = node.body[0].lineno - 1\n",
+ " indent = re.match(r\"\\s*\", lines[start_idx]).group(0)\n",
+ " ds_lines = textwrap.indent(f'\"\"\"{docstring.strip()}\"\"\"', indent).splitlines()\n",
+ "\n",
+ " new_lines = lines[:start_idx] + ds_lines + [\"\"] + lines[start_idx:]\n",
+ " return \"\\n\".join(new_lines)\n",
+ "\n",
+ "\n",
+ "def insert_module_docstring(src: str, docstring: str) -> str:\n",
+ " \"\"\"Insert a module-level docstring at the top of the file.\"\"\"\n",
+ " lines = src.splitlines()\n",
+ " ds_block = f'\"\"\"{docstring.strip()}\"\"\"\\n'\n",
+ " return ds_block + \"\\n\".join(lines)\n",
+ "\n",
+ "\n",
+ "def diff_text(a: str, b: str) -> str:\n",
+ " \"\"\"Show unified diff of original vs updated code.\"\"\"\n",
+ " return \"\".join(\n",
+ " difflib.unified_diff(\n",
+ " a.splitlines(keepends=True),\n",
+ " b.splitlines(keepends=True),\n",
+ " fromfile=\"original.py\",\n",
+ " tofile=\"updated.py\",\n",
+ " )\n",
+ " )\n",
+ "\n",
+ "\n",
+ "def generate_docstrings_for_source(src: str, style: str = \"google\", module_name: str = \"module\"):\n",
+ " \"\"\"\n",
+ " Find all missing docstrings, generate them via LLM,\n",
+ " and insert them back into the source code.\n",
+ " \"\"\"\n",
+ " targets = find_missing_docstrings(src)\n",
+ " updated = src\n",
+ " report = []\n",
+ "\n",
+ " for kind, node in sorted(targets, key=lambda t: 0 if t[0] == \"module\" else 1):\n",
+ " sig = \"module \" + module_name if kind == \"module\" else node_signature(node)\n",
+ " ctx = src if kind == \"module\" else context_snippet(src, node)\n",
+ " doc = llm_generate_docstring(sig, ctx, style=style, module_name=module_name)\n",
+ "\n",
+ " if kind == \"module\":\n",
+ " updated = insert_module_docstring(updated, doc)\n",
+ " else:\n",
+ " updated = insert_docstring(updated, node, doc)\n",
+ "\n",
+ " report.append({\"kind\": kind, \"signature\": sig, \"doc_preview\": doc[:150]})\n",
+ "\n",
+ " return updated, report\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 21,
+ "id": "d00cf4b7-773d-49cb-8262-9d11d787ee10",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "=== Generated Docstrings ===\n",
+ "- module: module demo\n",
+ " Adds two numbers and returns the result.\n",
+ "\n",
+ "Args:\n",
+ " x: The first number to add.\n",
+ " y: The second number to add.\n",
+ "Returns:\n",
+ " The sum of x and y.\n",
+ "- function: def add(x, y):\n",
+ " Returns the sum of two numbers.\n",
+ "\n",
+ "This function takes two numerical inputs and returns their sum. \n",
+ "It supports both integers and floats.\n",
+ "\n",
+ "Args:\n",
+ " x: \n",
+ "- class: class Counter:\n",
+ " A simple counter class to track increments.\n",
+ "\n",
+ "This class provides a method to increment a total count. \n",
+ "It initializes the total count to zero and allo\n",
+ "- function: def inc(self):\n",
+ " Increments the total attribute by one.\n",
+ "\n",
+ "This method updates the instance's total value, which is expected to be an integer, by adding one to it. It is\n",
+ "\n",
+ "=== Updated Source ===\n",
+ "\"\"\"Adds two numbers and returns the result.\n",
+ "\n",
+ "\"\"\"Returns the sum of two numbers.\n",
+ "\n",
+ "This function takes two numerical inputs and returns their sum. \n",
+ "\"\"\"A simple counter class to track increments.\n",
+ "\"\"\"Increments the total attribute by one.\n",
+ "\n",
+ "This method updates the instance's total value, which is expected to be an integer, by adding one to it. It is typically used to track counts or totals within the class context.\"\"\"\n",
+ "\n",
+ "\n",
+ "This class provides a method to increment a total count. \n",
+ "It initializes the total count to zero and allows for \n",
+ "incrementing it by one each time the `inc` method is called.\n",
+ "\n",
+ "Args:\n",
+ " None\n",
+ "Returns:\n",
+ " None\"\"\"\n",
+ "\n",
+ "It supports both integers and floats.\n",
+ "\n",
+ "Args:\n",
+ " x: The first number to add.\n",
+ " y: The second number to add.\n",
+ "\n",
+ "Returns:\n",
+ " The sum of x and y.\"\"\"\n",
+ "\n",
+ "Args:\n",
+ " x: The first number to add.\n",
+ " y: The second number to add.\n",
+ "Returns:\n",
+ " The sum of x and y.\"\"\"\n",
+ "\n",
+ "def add(x, y):\n",
+ " return x + y\n",
+ "\n",
+ "class Counter:\n",
+ " def inc(self):\n",
+ " self.total += 1\n"
+ ]
+ }
+ ],
+ "source": [
+ "## Quick Test ##\n",
+ "new_code, report = generate_docstrings_for_source(code, style=\"google\", module_name=\"demo\")\n",
+ "\n",
+ "print(\"=== Generated Docstrings ===\")\n",
+ "for r in report:\n",
+ " print(f\"- {r['kind']}: {r['signature']}\")\n",
+ " print(\" \", r['doc_preview'])\n",
+ "print(\"\\n=== Updated Source ===\")\n",
+ "print(new_code)\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 21,
+ "id": "b318db41-c05d-48ce-9990-b6f1a0577c68",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "=== Generated Docstrings ===\n",
+ "- module: module demo\n",
+ " Adds two numbers and returns the result.\n",
+ "\n",
+ "Args:\n",
+ " x: The first number to add.\n",
+ " y: The second number to add.\n",
+ "Returns:\n",
+ " The sum of x and y.\n",
+ "- function: def add(x, y):\n",
+ " Returns the sum of two numbers.\n",
+ "\n",
+ "This function takes two numerical inputs and returns their sum. \n",
+ "It supports both integers and floats.\n",
+ "\n",
+ "Args:\n",
+ " x: \n",
+ "- class: class Counter:\n",
+ " A simple counter class to track increments.\n",
+ "\n",
+ "This class provides a method to increment a total count. \n",
+ "It initializes the total count to zero and allo\n",
+ "- function: def inc(self):\n",
+ " Increments the total attribute by one.\n",
+ "\n",
+ "This method updates the instance's total value, which is expected to be an integer, by adding one to it. It is\n",
+ "\n",
+ "=== Updated Source ===\n",
+ "\"\"\"Adds two numbers and returns the result.\n",
+ "\n",
+ "\"\"\"Returns the sum of two numbers.\n",
+ "\n",
+ "This function takes two numerical inputs and returns their sum. \n",
+ "\"\"\"A simple counter class to track increments.\n",
+ "\"\"\"Increments the total attribute by one.\n",
+ "\n",
+ "This method updates the instance's total value, which is expected to be an integer, by adding one to it. It is typically used to track counts or totals within the class context.\"\"\"\n",
+ "\n",
+ "\n",
+ "This class provides a method to increment a total count. \n",
+ "It initializes the total count to zero and allows for \n",
+ "incrementing it by one each time the `inc` method is called.\n",
+ "\n",
+ "Args:\n",
+ " None\n",
+ "Returns:\n",
+ " None\"\"\"\n",
+ "\n",
+ "It supports both integers and floats.\n",
+ "\n",
+ "Args:\n",
+ " x: The first number to add.\n",
+ " y: The second number to add.\n",
+ "\n",
+ "Returns:\n",
+ " The sum of x and y.\"\"\"\n",
+ "\n",
+ "Args:\n",
+ " x: The first number to add.\n",
+ " y: The second number to add.\n",
+ "Returns:\n",
+ " The sum of x and y.\"\"\"\n",
+ "\n",
+ "def add(x, y):\n",
+ " return x + y\n",
+ "\n",
+ "class Counter:\n",
+ " def inc(self):\n",
+ " self.total += 1\n"
+ ]
+ }
+ ],
+ "source": [
+ "# ================================================================\n",
+ "# π File-Based Workflow β preview or apply docstrings\n",
+ "# ================================================================\n",
+ "from pathlib import Path\n",
+ "import pandas as pd\n",
+ "\n",
+ "def process_file(path: str, style: str = \"google\", apply: bool = False) -> pd.DataFrame:\n",
+ " \"\"\"\n",
+ " Process a .py file: find missing docstrings, generate them via GPT,\n",
+ " and either preview the diff or apply the updates in place.\n",
+ " \"\"\"\n",
+ " p = Path(path)\n",
+ " src = p.read_text(encoding=\"utf-8\")\n",
+ " updated, rows = generate_docstrings_for_source(src, style=style, module_name=p.stem)\n",
+ "\n",
+ " if apply:\n",
+ " p.write_text(updated, encoding=\"utf-8\")\n",
+ " print(f\"β
Updated file written β {p}\")\n",
+ " else:\n",
+ " print(\"π Diff preview:\")\n",
+ " print(diff_text(src, updated))\n",
+ "\n",
+ " return pd.DataFrame(rows)\n",
+ "\n",
+ "# Example usage:\n",
+ "# df = process_file(\"my_script.py\", style=\"google\", apply=False) # preview\n",
+ "# df = process_file(\"my_script.py\", style=\"google\", apply=True) # overwrite with docstrings\n",
+ "# df\n",
+ "\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 22,
+ "id": "8962cf0e-9255-475e-bbc1-21500be0cd78",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# ================================================================\n",
+ "# π File-Based Workflow β preview or apply docstrings\n",
+ "# ================================================================\n",
+ "from pathlib import Path\n",
+ "import pandas as pd\n",
+ "\n",
+ "def process_file(path: str, style: str = \"google\", apply: bool = False) -> pd.DataFrame:\n",
+ " \"\"\"\n",
+ " Process a .py file: find missing docstrings, generate them via GPT,\n",
+ " and either preview the diff or apply the updates in place.\n",
+ " \"\"\"\n",
+ " p = Path(path)\n",
+ " src = p.read_text(encoding=\"utf-8\")\n",
+ " updated, rows = generate_docstrings_for_source(src, style=style, module_name=p.stem)\n",
+ "\n",
+ " if apply:\n",
+ " p.write_text(updated, encoding=\"utf-8\")\n",
+ " print(f\"β
Updated file written β {p}\")\n",
+ " else:\n",
+ " print(\"π Diff preview:\")\n",
+ " print(diff_text(src, updated))\n",
+ "\n",
+ " return pd.DataFrame(rows)\n",
+ "\n",
+ "# Example usage:\n",
+ "# df = process_file(\"my_script.py\", style=\"google\", apply=False) # preview\n",
+ "# df = process_file(\"my_script.py\", style=\"google\", apply=True) # overwrite with docstrings\n",
+ "# df\n",
+ "\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 24,
+ "id": "b0b0f852-982f-4918-9b5d-89880cc12003",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "* Running on local URL: http://127.0.0.1:7864\n",
+ "* To create a public link, set `share=True` in `launch()`.\n"
+ ]
+ },
+ {
+ "data": {
+ "text/html": [
+ ""
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "text/plain": []
+ },
+ "execution_count": 24,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "# ================================================================\n",
+ "# π¨ Gradio Interface β Auto Docstring Generator\n",
+ "# ================================================================\n",
+ "import gradio as gr\n",
+ "\n",
+ "def gradio_generate(code_text: str, style: str):\n",
+ " \"\"\"Wrapper for Gradio β generates docstrings for pasted code.\"\"\"\n",
+ " if not code_text.strip():\n",
+ " return \"β οΈ Please paste some Python code first.\"\n",
+ " try:\n",
+ " updated, _ = generate_docstrings_for_source(\n",
+ " code_text, style=style, module_name=\"gradio_snippet\"\n",
+ " )\n",
+ " return updated\n",
+ " except Exception as e:\n",
+ " return f\"β Error: {e}\"\n",
+ "\n",
+ "# Build Gradio UI\n",
+ "with gr.Blocks(theme=gr.themes.Soft()) as doc_ui:\n",
+ " gr.Markdown(\"## π§ Auto Docstring Generator β by Bharat Puri\\nPaste your Python code below and click **Generate Docstrings**.\")\n",
+ "\n",
+ " with gr.Row():\n",
+ " code_input = gr.Code(\n",
+ " label=\"Paste your Python code here\",\n",
+ " language=\"python\",\n",
+ " lines=20,\n",
+ " value=\"def add(a, b):\\n return a + b\\n\\nclass Counter:\\n def inc(self):\\n self.total += 1\",\n",
+ " )\n",
+ " code_output = gr.Code(\n",
+ " label=\"Generated code with docstrings\",\n",
+ " language=\"python\",\n",
+ " lines=20,\n",
+ " )\n",
+ "\n",
+ " style_choice = gr.Radio(\n",
+ " [\"google\"], value=\"google\", label=\"Docstring Style\"\n",
+ " )\n",
+ "\n",
+ " generate_btn = gr.Button(\"π Generate Docstrings\")\n",
+ " generate_btn.click(\n",
+ " fn=gradio_generate,\n",
+ " inputs=[code_input, style_choice],\n",
+ " outputs=[code_output],\n",
+ " )\n",
+ "\n",
+ "# Launch app\n",
+ "doc_ui.launch(share=False)\n"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.11.14"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/week4/community-contributions/bharat_puri/main.rs b/week4/community-contributions/bharat_puri/main.rs
new file mode 100644
index 0000000..2e98235
--- /dev/null
+++ b/week4/community-contributions/bharat_puri/main.rs
@@ -0,0 +1,72 @@
+use std::time::Instant;
+
/// Advance the 32-bit linear congruential generator one step in place
/// and return the new state (classic Numerical Recipes constants).
#[inline(always)]
fn lcg_next(state: &mut u32) -> u32 {
    let next = state.wrapping_mul(1_664_525).wrapping_add(1_013_904_223);
    *state = next;
    next
}
+
+#[inline(always)]
+fn max_subarray_sum(n: usize, seed: u32, min_val: i128, max_val: i128) -> i128 {
+ let mut state = seed;
+ let range_len_i128 = max_val - min_val + 1;
+ // Assume valid inputs where max_val >= min_val
+ let range_len_u128 = range_len_i128 as u128;
+
+ // Kadane's algorithm in a single pass, streaming values from LCG
+ let mut max_so_far: i128;
+ let mut current_max: i128;
+
+ // First element initializes Kadane's state
+ let v0 = lcg_next(&mut state) as u128;
+ let x0 = (v0 % range_len_u128) as i128 + min_val;
+ current_max = x0;
+ max_so_far = x0;
+
+ // Remaining elements
+ let mut i = 1usize;
+ while i < n {
+ let v = lcg_next(&mut state) as u128;
+ let x = (v % range_len_u128) as i128 + min_val;
+ let sum = current_max + x;
+ current_max = if sum > x { sum } else { x };
+ if current_max > max_so_far {
+ max_so_far = current_max;
+ }
+ i += 1;
+ }
+
+ max_so_far
+}
+
+#[inline(always)]
+fn total_max_subarray_sum(n: usize, initial_seed: u32, min_val: i128, max_val: i128) -> i128 {
+ let mut total_sum: i128 = 0;
+ let mut seed_state = initial_seed;
+ let mut i = 0;
+ while i < 20 {
+ let seed = lcg_next(&mut seed_state);
+ total_sum += max_subarray_sum(n, seed, min_val, max_val);
+ i += 1;
+ }
+ total_sum
+}
+
+fn main() {
+ // Parameters
+ let n: usize = 10000;
+ let initial_seed: u32 = 42;
+ let min_val: i128 = -10;
+ let max_val: i128 = 10;
+
+ // Timing the function
+ let start_time = Instant::now();
+ let result = total_max_subarray_sum(n, initial_seed, min_val, max_val);
+ let duration = start_time.elapsed();
+ let seconds = duration.as_secs_f64();
+
+ println!("Total Maximum Subarray Sum (20 runs): {}", result);
+ println!("Execution Time: {:.6} seconds", seconds);
+}
\ No newline at end of file