diff --git a/week4/community-contributions/max.solo23/convert_python_to_c++.ipynb b/week4/community-contributions/max.solo23/convert_python_to_c++.ipynb new file mode 100644 index 0000000..390f446 --- /dev/null +++ b/week4/community-contributions/max.solo23/convert_python_to_c++.ipynb @@ -0,0 +1,870 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "a389285f-5e8e-46ec-bcae-9b159ef7aa80", + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import io\n", + "\n", + "import sys\n", + "from dotenv import load_dotenv\n", + "from openai import OpenAI\n", + "import google.generativeai\n", + "import anthropic\n", + "from IPython.display import Markdown, display, update_display\n", + "import gradio as gr\n", + "import subprocess" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "850164b8-5bab-402f-9e80-d251930d9017", + "metadata": {}, + "outputs": [], + "source": [ + "load_dotenv()\n", + "os.environ['OPENAI_API_KEY'] = os.getenv('OPENAI_API_KEY', 'your-key-if-not-using-env')\n", + "os.environ['ANTHROPIC_API_KEY'] = os.getenv('ANTHROPIC_API_KEY', 'your-key-if-not-using-env')" + ] + }, + { + "cell_type": "code", + "execution_count": 56, + "id": "87650c13-e987-42a2-9089-23827bc81ffb", + "metadata": {}, + "outputs": [], + "source": [ + "openai = OpenAI()\n", + "claude = anthropic.Anthropic()\n", + "OPENAI_MODEL = \"gpt-5-nano\"\n", + "CLAUDE_MODEL = \"claude-3-5-sonnet-20240620\" # CLAUDE_MODEL = \"claude-3-haiku-20240307\"" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "9d359c39-4eab-478c-a208-08a38b799093", + "metadata": {}, + "outputs": [], + "source": [ + "system_message = \"You are an assistant that reimplements Python code in high-performance C++ for a Windows 10 machine. \"\n", + "system_message += \"Respond only with C++ code; use comments sparingly and do not provide any explanation other than occasional comments. \"\n", + "system_message += \"The C++ response needs to produce an identical output in the fastest possible time.\"" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "id": "1d8da092-6c5c-4c42-aed9-dc219fdd97bb", + "metadata": {}, + "outputs": [], + "source": [ + "def user_prompt_for(python):\n", + " user_prompt = \"Rewrite this Python code in C++ with the fastest possible implementation that produces identical output in the least time. Add a pause at the end of the code so it waits for a key press after execution. \"\n", + " user_prompt += \"Respond only with C++ code; do not explain your work other than a few comments. \"\n", + " user_prompt += \"Pay attention to number types to ensure no int overflows. 
Remember to #include all necessary C++ packages such as iomanip.\\n\\n\"\n", + " user_prompt += python\n", + " return user_prompt" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "be49c7dc-5c82-468e-b41e-7fca27fbb2e0", + "metadata": {}, + "outputs": [], + "source": [ + "def messages_for(python):\n", + " return [\n", + " {\"role\": \"system\", \"content\": system_message},\n", + " {\"role\": \"user\", \"content\": user_prompt_for(python)}\n", + " ]" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "afeded80-a4a0-4349-89c3-2e9370730e92", + "metadata": {}, + "outputs": [], + "source": [ + "def write_output(cpp):\n", + " code = cpp.replace(\"```cpp\",\"\").replace(\"```\",\"\")\n", + " with open(\"optimized.cpp\", \"w\") as f:\n", + " f.write(code)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "6a53a889-9146-4049-850d-9b44d7245b8b", + "metadata": {}, + "outputs": [], + "source": [ + "def optimize_gpt(python): \n", + " stream = openai.chat.completions.create(model=OPENAI_MODEL, messages=messages_for(python), stream=True)\n", + " reply = \"\"\n", + " for chunk in stream:\n", + " fragment = chunk.choices[0].delta.content or \"\"\n", + " reply += fragment\n", + " print(fragment, end='', flush=True)\n", + " write_output(reply)" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "3d31f2e4-2bcb-4d9a-bb73-6979784c9234", + "metadata": {}, + "outputs": [], + "source": [ + "def optimize_claude(python):\n", + " result = claude.messages.stream(\n", + " model=CLAUDE_MODEL,\n", + " max_tokens=2000,\n", + " system=system_message,\n", + " messages=[{\"role\": \"user\", \"content\": user_prompt_for(python)}],\n", + " )\n", + " reply = \"\"\n", + " with result as stream:\n", + " for text in stream.text_stream:\n", + " reply += text\n", + " print(text, end=\"\", flush=True)\n", + " write_output(reply)" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "bff0c7f1-6402-46a7-940d-5233e93d1089", + "metadata": {}, + "outputs": [], + "source": [ + "pi = \"\"\"\n", + "import time\n", + "\n", + "def calculate(iterations, param1, param2):\n", + " result = 1.0\n", + " for i in range(1, iterations+1):\n", + " j = i * param1 - param2\n", + " result -= (1/j)\n", + " j = i * param1 + param2\n", + " result += (1/j)\n", + " return result\n", + "\n", + "start_time = time.time()\n", + "result = calculate(100_000_000, 4, 1) * 4\n", + "end_time = time.time()\n", + "\n", + "print(f\"Result: {result:.12f}\")\n", + "print(f\"Execution Time: {(end_time - start_time):.6f} seconds\")\n", + "\"\"\"" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "b878f33e-9008-496d-b8f6-f844c22e6a04", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Result: 3.141592658589\n", + "Execution Time: 9.382045 seconds\n" + ] + } + ], + "source": [ + "exec(pi)" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "af8bc910-9136-4305-a1a8-a47fa0566505", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "#include \n", + "#include \n", + "#include \n", + "\n", + "int main() {\n", + " const long long iterations = 100000000LL;\n", + " const double param1 = 4.0;\n", + " const double param2 = 1.0;\n", + "\n", + " double result = 1.0;\n", + "\n", + " auto start = std::chrono::high_resolution_clock::now();\n", + " for (long long i = 1; i <= iterations; ++i) {\n", + " double j = i * param1 - param2;\n", + " result -= 1.0 / j;\n", + " j = i * 
param1 + param2;\n", + " result += 1.0 / j;\n", + " }\n", + " auto end = std::chrono::high_resolution_clock::now();\n", + "\n", + " double final_result = result * 4.0;\n", + "\n", + " std::cout.setf(std::ios::fixed);\n", + " std::cout << std::setprecision(12);\n", + " std::cout << \"Result: \" << final_result << \"\\n\";\n", + "\n", + " std::chrono::duration elapsed = end - start;\n", + " std::cout << std::setprecision(6);\n", + " std::cout << \"Execution Time: \" << elapsed.count() << \" seconds\" << std::endl;\n", + "\n", + " return 0;\n", + "}" + ] + } + ], + "source": [ + "optimize_gpt(pi)" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "8d37469c-ab26-452f-8efb-e1b65f842f90", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Result: 3.141592658589\n", + "Execution Time: 9.346793 seconds\n" + ] + } + ], + "source": [ + "exec(pi)" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "id": "1164d9cc-7ad8-4e5c-98bb-a186ad23f4d7", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\".\" non e riconosciuto come comando interno o esterno,\n", + " un programma eseguibile o un file batch.\n" + ] + } + ], + "source": [ + "!g++ -O2 -std=c++17 optimized.cpp -o optimized\n", + "!./optimized" + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "id": "d4b501d0-3707-4c07-95c7-7f6fe389859c", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "```cpp\n", + " \n", + "omanip>e lude \n", + "\n", + " calculate(long long int iterations, double param1, double param2) {\n", + " result = 1.0;\n", + " long int i = 1; i <= iterations; i++) {\n", + "double j = i * param1 - param2;\n", + " (1.0 / j);ult -=\n", + " * param1 + param2;\n", + " (1.0 / j);ult +=\n", + " }\n", + " return result;\n", + "}\n", + "\n", + " main() {\n", + " start_time = std::chrono::high_resolution_clock::now();\n", + " result = calculate(100000000, 4, 1) * 4;\n", + "d_time = std::chrono::high_resolution_clock::now();\n", + "\n", + " << std::fixed << std::setprecision(12) << \"Result: \" << result << std::endl;\n", + "cout << \"Execution Time: \" << std::chrono::duration_cast>(end_time - start_time).count() << \" seconds\" << std::endl;\n", + "\n", + "d::cout << \"Press any key to exit...\" << std::endl;\n", + "_getch();\n", + "; return 0\n", + "}\n", + "```" + ] + } + ], + "source": [ + "optimize_claude(pi)" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "id": "c81bf6fc-27b7-4f42-a2c9-42eff17b6e41", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\".\" non e riconosciuto come comando interno o esterno,\n", + " un programma eseguibile o un file batch.\n" + ] + } + ], + "source": [ + "!g++ -O2 -std=c++17 optimized.cpp -o optimized\n", + "!./optimized\n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": 32, + "id": "a5e8e6f5-50d4-4c37-8a10-dd07fbd24089", + "metadata": {}, + "outputs": [], + "source": [ + "python_hard = \"\"\"# Be careful to support large number sizes\n", + "\n", + "def lcg(seed, a=1664525, c=1013904223, m=2**32):\n", + " value = seed\n", + " while True:\n", + " value = (a * value + c) % m\n", + " yield value\n", + " \n", + "def max_subarray_sum(n, seed, min_val, max_val):\n", + " lcg_gen = lcg(seed)\n", + " random_numbers = [next(lcg_gen) % (max_val - min_val + 1) + min_val for _ in range(n)]\n", + " max_sum = float('-inf')\n", + " for i in range(n):\n", + " 
current_sum = 0\n", + " for j in range(i, n):\n", + " current_sum += random_numbers[j]\n", + " if current_sum > max_sum:\n", + " max_sum = current_sum\n", + " return max_sum\n", + "\n", + "def total_max_subarray_sum(n, initial_seed, min_val, max_val):\n", + " total_sum = 0\n", + " lcg_gen = lcg(initial_seed)\n", + " for _ in range(20):\n", + " seed = next(lcg_gen)\n", + " total_sum += max_subarray_sum(n, seed, min_val, max_val)\n", + " return total_sum\n", + "\n", + "# Parameters\n", + "n = 10000 # Number of random numbers\n", + "initial_seed = 42 # Initial seed for the LCG\n", + "min_val = -10 # Minimum value of random numbers\n", + "max_val = 10 # Maximum value of random numbers\n", + "\n", + "# Timing the function\n", + "import time\n", + "start_time = time.time()\n", + "result = total_max_subarray_sum(n, initial_seed, min_val, max_val)\n", + "end_time = time.time()\n", + "\n", + "print(\"Total Maximum Subarray Sum (20 runs):\", result)\n", + "print(\"Execution Time: {:.6f} seconds\".format(end_time - start_time))\n", + "\"\"\"" + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "id": "cd8a29a9-94fa-43f5-ae4d-517182cfd218", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Total Maximum Subarray Sum (20 runs): 10980\n", + "Execution Time: 34.608083 seconds\n" + ] + } + ], + "source": [ + "exec(python_hard)" + ] + }, + { + "cell_type": "code", + "execution_count": 57, + "id": "39c64322-ef0f-4d58-a54e-c31077ceadb9", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "#include \n", + "#include \n", + "#include \n", + "#include \n", + "#include \n", + "\n", + "static inline uint32_t lcg_next(uint32_t value) {\n", + " const uint64_t a = 1664525ULL;\n", + " const uint64_t c = 1013904223ULL;\n", + " const uint64_t m = 0x100000000ULL; // 2^32\n", + " uint64_t t = a * value + c;\n", + " t %= m;\n", + " return static_cast(t);\n", + "}\n", + "\n", + "static inline int64_t max_subarray_sum_with_seed(uint32_t seed, int n, int min_val, int max_val) {\n", + " int range = max_val - min_val + 1;\n", + " int64_t max_ending_here = 0;\n", + " int64_t max_so_far = std::numeric_limits::min();\n", + " uint32_t v = seed;\n", + " bool started = false;\n", + " for (int i = 0; i < n; ++i) {\n", + " v = lcg_next(v);\n", + " int x = static_cast(v % range) + min_val;\n", + " if (!started) {\n", + " max_ending_here = x;\n", + " max_so_far = x;\n", + " started = true;\n", + " } else {\n", + " max_ending_here = (max_ending_here > 0) ? 
max_ending_here + x : x;\n", + " if (max_ending_here > max_so_far) max_so_far = max_ending_here;\n", + " }\n", + " }\n", + " return max_so_far;\n", + "}\n", + "\n", + "int main() {\n", + " const int n = 10000;\n", + " const uint32_t initial_seed = 42;\n", + " const int min_val = -10;\n", + " const int max_val = 10;\n", + "\n", + " auto start = std::chrono::high_resolution_clock::now();\n", + "\n", + " uint32_t seed = initial_seed;\n", + " long long total = 0;\n", + " for (int t = 0; t < 20; ++t) {\n", + " seed = lcg_next(seed);\n", + " total += max_subarray_sum_with_seed(seed, n, min_val, max_val);\n", + " }\n", + "\n", + " auto end = std::chrono::high_resolution_clock::now();\n", + " std::chrono::duration diff = end - start;\n", + "\n", + " std::cout << \"Total Maximum Subarray Sum (20 runs): \" << total << \"\\n\";\n", + " std::cout << std::fixed << std::setprecision(6);\n", + " std::cout << \"Execution Time: \" << diff.count() << \" seconds\" << std::endl;\n", + "\n", + " std::cout << \"Press Enter to exit...\";\n", + " std::cin.ignore(std::numeric_limits::max(), '\\n');\n", + " std::cin.get();\n", + "\n", + " return 0;\n", + "}" + ] + } + ], + "source": [ + "optimize_gpt(python_hard)" + ] + }, + { + "cell_type": "code", + "execution_count": 58, + "id": "922f484e-4e16-4ca6-b80d-0736972e18f5", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\".\" non e riconosciuto come comando interno o esterno,\n", + " un programma eseguibile o un file batch.\n" + ] + } + ], + "source": [ + "!g++ -O2 -std=c++17 optimized.cpp -o optimized\n", + "!./optimized" + ] + }, + { + "cell_type": "code", + "execution_count": 47, + "id": "38009c18-7496-4d55-bb0a-bfcc7c6a430e", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "#include \n", + "#include \n", + "ono>lude \n", + ">include e clude e random_numbers(n);\n", + " (int i = 0; i < n; ++i) {\n", + "[i] = lcg.next() % (max_val - min_val + 1) + min_val;\n", + " }\n", + "\n", + "t max_sum = std::numeric_limits::min();\n", + "int64_t current_sum = 0;\n", + " = 0; i < n; ++i) {\n", + "_sum = std::max(current_sum + random_numbers[i], random_numbers[i]);\n", + " = std::max(max_sum, current_sum);\n", + "} \n", + "; return max_sum\n", + "}\n", + "\n", + "64_t total_max_subarray_sum(int n, uint64_t initial_seed, int min_val, int max_val) {\n", + " total_sum = 0;\n", + "CG lcg(initial_seed);\n", + " = 0; i < 20; ++i) {\n", + "uint64_t seed = lcg.next();\n", + "sum += max_subarray_sum(n, seed, min_val, max_val);\n", + " }\n", + "; return total_sum\n", + "}\n", + "\n", + " main() {\n", + "const int n = 10000;\n", + " uint64_t initial_seed = 42;\n", + " min_val = -10;\n", + " int max_val = 10;\n", + "\n", + "auto start_time = std::chrono::high_resolution_clock::now();\n", + "int64_t result = total_max_subarray_sum(n, initial_seed, min_val, max_val);\n", + " = std::chrono::high_resolution_clock::now();\n", + "\n", + " duration = std::chrono::duration_cast(end_time - start_time);\n", + "\n", + "< \"Total Maximum Subarray Sum (20 runs): \" << result << std::endl;\n", + " << \"Execution Time: \" << std::fixed << std::setprecision(6) << duration.count() / 1e6 << \" seconds\" << std::endl;\n", + "\n", + "d::cout << \"Press any key to continue...\";\n", + "etch();\n", + "\n", + " 0; return\n", + "}" + ] + } + ], + "source": [ + "optimize_claude(python_hard)" + ] + }, + { + "cell_type": "code", + "execution_count": 48, + "id": "9012f543-ab06-4d7c-bf5f-250f4a6c43bd", + "metadata": 
{}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\".\" non e riconosciuto come comando interno o esterno,\n", + " un programma eseguibile o un file batch.\n" + ] + } + ], + "source": [ + "!g++ -O2 -std=c++17 optimized.cpp -o optimized\n", + "!./optimized" + ] + }, + { + "cell_type": "code", + "execution_count": 59, + "id": "3a1e4027-3309-48d5-9387-a8b309a325bf", + "metadata": {}, + "outputs": [], + "source": [ + "def stream_gpt(python): \n", + " stream = openai.chat.completions.create(model=OPENAI_MODEL, messages=messages_for(python), stream=True)\n", + " reply = \"\"\n", + " for chunk in stream:\n", + " fragment = chunk.choices[0].delta.content or \"\"\n", + " reply += fragment\n", + " yield reply.replace('```cpp\\n','').replace('```','')" + ] + }, + { + "cell_type": "code", + "execution_count": 60, + "id": "bcfa7016-ce17-4a8d-aa43-9acdd884159e", + "metadata": {}, + "outputs": [], + "source": [ + "def stream_claude(python):\n", + " result = claude.messages.stream(\n", + " model=CLAUDE_MODEL,\n", + " max_tokens=2000,\n", + " system=system_message,\n", + " messages=[{\"role\": \"user\", \"content\": user_prompt_for(python)}],\n", + " )\n", + " reply = \"\"\n", + " with result as stream:\n", + " for text in stream.text_stream:\n", + " reply += text\n", + " yield reply.replace('```cpp\\n','').replace('```','')" + ] + }, + { + "cell_type": "code", + "execution_count": 61, + "id": "56790d11-d24d-40ab-8f1a-5283726b5764", + "metadata": {}, + "outputs": [], + "source": [ + "def optimize(python, model):\n", + " if model==\"GPT\":\n", + " result = stream_gpt(python)\n", + " elif model==\"Claude\":\n", + " result = stream_claude(python)\n", + " else:\n", + " raise ValueError(\"Unknown model\")\n", + " for stream_so_far in result:\n", + " yield stream_so_far " + ] + }, + { + "cell_type": "code", + "execution_count": 63, + "id": "6d7c83c0-e239-44df-b977-3fb6a2398b6a", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "* Running on local URL: http://127.0.0.1:7860\n", + "\n", + "To create a public link, set `share=True` in `launch()`.\n" + ] + }, + { + "data": { + "text/html": [ + "
" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/plain": [] + }, + "execution_count": 63, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "with gr.Blocks() as ui:\n", + " with gr.Row():\n", + " python = gr.Textbox(label=\"Python code: \", lines=10, value=python_hard)\n", + " cpp = gr.Textbox(label=\"C++ code: \", lines=10)\n", + " with gr.Row():\n", + " model = gr.Dropdown([\"GPT\", \"Claude\"], label=\"Select model\", value=\"GPT\")\n", + " convert = gr.Button(\"Convert code\")\n", + "\n", + " convert.click(optimize, inputs=[python, model], outputs=[cpp])\n", + "ui.launch(inbrowser=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 65, + "id": "d9156094-3e06-4c3a-9ede-3e6f9bf94de7", + "metadata": {}, + "outputs": [], + "source": [ + "def execute_python(code):\n", + " try:\n", + " output = io.StringIO()\n", + " sys.stdout = output\n", + " exec(code)\n", + " finally:\n", + " sys.stdout = sys.__stdout__\n", + " return output.getvalue()" + ] + }, + { + "cell_type": "code", + "execution_count": 64, + "id": "4da83af8-c8e1-474d-b954-d07957f55c37", + "metadata": {}, + "outputs": [], + "source": [ + "def execute_cpp(code):\n", + " write_output(code)\n", + " try:\n", + " compile_cmd = [\"g++\", \"-O2\", \"-std=c++17\", \"optimized.cpp\", \"-o\", \"optimized\"]\n", + " compile_result = subprocess.run(compile_cmd, check=True, text=True, capture_output=True)\n", + " run_cmd = [\"./optimized\"]\n", + " run_result = subprocess.run(run_cmd, check=True, text=True, capture_output=True)\n", + " return run_result.stdout\n", + " except subprocess.CalledProcessError as e:\n", + " return f\"An error occurred:\\n{e.stderr}\"" + ] + }, + { + "cell_type": "code", + "execution_count": 66, + "id": "55f3f532-aa10-4c91-9d22-e7463f9a646b", + "metadata": {}, + "outputs": [], + "source": [ + "# !g++ -O2 -std=c++17 optimized.cpp -o optimized\n", + "# !./optimized" + ] + }, + { + "cell_type": "code", + "execution_count": 67, + "id": "56f3ba77-a339-48f2-bd81-44cdd90c9458", + "metadata": {}, + "outputs": [], + "source": [ + "css = \"\"\"\n", + ".python {background-color: #306998;}\n", + ".cpp {background-color: #050;}\n", + "\"\"\"" + ] + }, + { + "cell_type": "code", + "execution_count": 68, + "id": "38c17a7a-45c5-4656-99ef-556f7d1a909b", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "* Running on local URL: http://127.0.0.1:7861\n", + "\n", + "To create a public link, set `share=True` in `launch()`.\n" + ] + }, + { + "data": { + "text/html": [ + "
" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/plain": [] + }, + "execution_count": 68, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "with gr.Blocks(css=css) as ui:\n", + " gr.Markdown(\"## Convert code from Python to C++\")\n", + " with gr.Row():\n", + " python = gr.Textbox(label=\"Python code:\", value=python_hard, lines=10)\n", + " cpp = gr.Textbox(label=\"C++ code:\", lines=10)\n", + " with gr.Row():\n", + " model = gr.Dropdown([\"GPT\", \"Claude\"], label=\"Select model\", value=\"GPT\")\n", + " with gr.Row():\n", + " convert = gr.Button(\"Convert code\")\n", + " with gr.Row():\n", + " python_run = gr.Button(\"Run Python\")\n", + " cpp_run = gr.Button(\"Run C++\")\n", + " with gr.Row():\n", + " python_out = gr.TextArea(label=\"Python result:\", elem_classes=[\"python\"])\n", + " cpp_out = gr.TextArea(label=\"C++ result:\", elem_classes=[\"cpp\"])\n", + "\n", + " convert.click(optimize, inputs=[python, model], outputs=[cpp])\n", + " python_run.click(execute_python, inputs=[python], outputs=[python_out])\n", + " cpp_run.click(execute_cpp, inputs=[cpp], outputs=[cpp_out])\n", + "\n", + "ui.launch(inbrowser=True)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.11" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/week4/community-contributions/max.solo23/optimized.cpp b/week4/community-contributions/max.solo23/optimized.cpp new file mode 100644 index 0000000..ed96ce5 --- /dev/null +++ b/week4/community-contributions/max.solo23/optimized.cpp @@ -0,0 +1,73 @@ +#include +#include +#include +#include +#include +#include + +class LCG { +private: + uint64_t value; + const uint64_t a = 1664525; + const uint64_t c = 1013904223; + const uint64_t m = 1ULL << 32; + +public: + LCG(uint64_t seed) : value(seed) {} + + uint64_t next() { + value = (a * value + c) % m; + return value; + } +}; + +int64_t max_subarray_sum(int n, uint64_t seed, int min_val, int max_val) { + LCG lcg(seed); + std::vector random_numbers(n); + for (int i = 0; i < n; ++i) { + random_numbers[i] = lcg.next() % (max_val - min_val + 1) + min_val; + } + + int64_t max_sum = std::numeric_limits::min(); + int64_t current_sum = 0; + int64_t min_sum = 0; + + for (int i = 0; i < n; ++i) { + current_sum += random_numbers[i]; + max_sum = std::max(max_sum, current_sum - min_sum); + min_sum = std::min(min_sum, current_sum); + } + + return max_sum; +} + +int64_t total_max_subarray_sum(int n, uint64_t initial_seed, int min_val, int max_val) { + int64_t total_sum = 0; + LCG lcg(initial_seed); + for (int i = 0; i < 20; ++i) { + uint64_t seed = lcg.next(); + total_sum += max_subarray_sum(n, seed, min_val, max_val); + } + return total_sum; +} + +int main() { + const int n = 10000; + const uint64_t initial_seed = 42; + const int min_val = -10; + const int max_val = 10; + + auto start_time = std::chrono::high_resolution_clock::now(); + int64_t result = total_max_subarray_sum(n, initial_seed, min_val, max_val); + auto end_time = std::chrono::high_resolution_clock::now(); + + auto duration = std::chrono::duration_cast(end_time - start_time); + + std::cout << "Total Maximum Subarray Sum (20 runs): " << result 
<< std::endl; + std::cout << "Execution Time: " << std::fixed << std::setprecision(6) << duration.count() / 1e6 << " seconds" << std::endl; + + std::cout << "Press Enter to exit..."; + std::cin.get(); + + return 0; +} \ No newline at end of file diff --git a/week4/community-contributions/max.solo23/optimized.exe b/week4/community-contributions/max.solo23/optimized.exe new file mode 100644 index 0000000..c4bda34 Binary files /dev/null and b/week4/community-contributions/max.solo23/optimized.exe differ