Removed outputs and added comments
@@ -2,7 +2,7 @@
  "cells": [
   {
    "cell_type": "code",
-   "execution_count": 1,
+   "execution_count": null,
    "id": "768629e6",
    "metadata": {},
    "outputs": [],
@@ -18,19 +18,10 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 2,
+   "execution_count": null,
    "id": "84a945dc",
    "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "OpenAI API Key exists and begins sk-proj-\n",
-      "Google API Key exists and begins AIzaSyCW\n"
-     ]
-    }
-   ],
+   "outputs": [],
    "source": [
     "# Load environment variables in a file called .env\n",
     "# Print the key prefixes to help with any debugging\n",
@@ -52,7 +43,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 3,
+   "execution_count": null,
    "id": "ad8ae0b6",
    "metadata": {},
    "outputs": [],
@@ -67,7 +58,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 13,
+   "execution_count": null,
    "id": "f66cf12f",
    "metadata": {},
    "outputs": [],
@@ -137,7 +128,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 21,
+   "execution_count": null,
    "id": "5aa66868",
    "metadata": {},
    "outputs": [],
@@ -165,7 +156,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 22,
+   "execution_count": null,
    "id": "51c9dadc",
    "metadata": {},
    "outputs": [],
@@ -175,148 +166,25 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 23,
+   "execution_count": null,
    "id": "548efb27",
    "metadata": {},
    "outputs": [
     {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "* Running on local URL: http://127.0.0.1:7863\n",
-      "* To create a public link, set `share=True` in `launch()`.\n"
-     ]
-    },
-    {
-     "data": {
-      "text/html": [
-       "<div><iframe src=\"http://127.0.0.1:7863/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.HTML object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/plain": []
-     },
-     "execution_count": 23,
-     "metadata": {},
-     "output_type": "execute_result"
-    },
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Journalist:\n",
-      "I would like to review the board game Santorini.\n",
-      "\n",
-      "Critique:\n",
-      "Sure, ask me anything about the board game Santorini.\n",
-      "\n",
-      "\n",
-      "\n",
-      "***Question 1***\n",
-      "\n",
-      "\n",
-      "Journalist:\n",
-      "\n",
-      "\n",
-      "To start, could you briefly describe the core gameplay and objective of Santorini?\n",
-      "\n",
-      "\n",
-      "Critique:\n",
-      "Santorini is an abstract strategy game at its core. Two players compete to be the first to get one of their two pawns to the third level of a tower. Players take turns moving one of their pawns, and then building a block on an adjacent space. The build can be on the same level, one level higher, or down a level. Certain spaces have a dome, which signifies the tower is complete, and no one can build or move to that space.\n",
-      "\n",
-      "The game's main draw is the use of God Powers, which give each player a unique ability that breaks the standard rules of the game. This adds a significant layer of asymmetry and strategic depth.\n",
-      "\n",
-      "\n",
-      "\n",
-      "\n",
-      "***Question 2***\n",
-      "\n",
-      "\n",
-      "Journalist:\n",
-      "None\n",
-      "\n"
-     ]
-    },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "Traceback (most recent call last):\n",
-      " File \"c:\\Users\\Milan Lazic\\projects\\llm_engineering\\venv\\Lib\\site-packages\\gradio\\queueing.py\", line 745, in process_events\n",
-      " response = await route_utils.call_process_api(\n",
-      " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
-      " ...<5 lines>...\n",
-      " )\n",
-      " ^\n",
-      " File \"c:\\Users\\Milan Lazic\\projects\\llm_engineering\\venv\\Lib\\site-packages\\gradio\\route_utils.py\", line 354, in call_process_api\n",
-      " output = await app.get_blocks().process_api(\n",
-      " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
-      " ...<11 lines>...\n",
-      " )\n",
-      " ^\n",
-      " File \"c:\\Users\\Milan Lazic\\projects\\llm_engineering\\venv\\Lib\\site-packages\\gradio\\blocks.py\", line 2116, in process_api\n",
-      " result = await self.call_function(\n",
-      " ^^^^^^^^^^^^^^^^^^^^^^^^^\n",
-      " ...<8 lines>...\n",
-      " )\n",
-      " ^\n",
-      " File \"c:\\Users\\Milan Lazic\\projects\\llm_engineering\\venv\\Lib\\site-packages\\gradio\\blocks.py\", line 1623, in call_function\n",
-      " prediction = await anyio.to_thread.run_sync( # type: ignore\n",
-      " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
-      " fn, *processed_input, limiter=self.limiter\n",
-      " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
-      " )\n",
-      " ^\n",
-      " File \"c:\\Users\\Milan Lazic\\projects\\llm_engineering\\venv\\Lib\\site-packages\\anyio\\to_thread.py\", line 56, in run_sync\n",
-      " return await get_async_backend().run_sync_in_worker_thread(\n",
-      " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
-      " func, args, abandon_on_cancel=abandon_on_cancel, limiter=limiter\n",
-      " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
-      " )\n",
-      " ^\n",
-      " File \"c:\\Users\\Milan Lazic\\projects\\llm_engineering\\venv\\Lib\\site-packages\\anyio\\_backends\\_asyncio.py\", line 2476, in run_sync_in_worker_thread\n",
-      " return await future\n",
-      " ^^^^^^^^^^^^\n",
-      " File \"c:\\Users\\Milan Lazic\\projects\\llm_engineering\\venv\\Lib\\site-packages\\anyio\\_backends\\_asyncio.py\", line 967, in run\n",
-      " result = context.run(func, *args)\n",
-      " File \"c:\\Users\\Milan Lazic\\projects\\llm_engineering\\venv\\Lib\\site-packages\\gradio\\utils.py\", line 915, in wrapper\n",
-      " response = f(*args, **kwargs)\n",
-      " File \"C:\\Users\\Milan Lazic\\AppData\\Local\\Temp\\ipykernel_29732\\641492170.py\", line 14, in run_boardgame_conversation\n",
-      " gemini_next = call_boardgame_critique(gpt_messages, gemini_messages)\n",
-      " File \"C:\\Users\\Milan Lazic\\AppData\\Local\\Temp\\ipykernel_29732\\2813548043.py\", line 7, in call_boardgame_critique\n",
-      " completion = gemini_via_openai_client.chat.completions.create(\n",
-      " model=gemini_model,\n",
-      " messages=messages\n",
-      " )\n",
-      " File \"c:\\Users\\Milan Lazic\\projects\\llm_engineering\\venv\\Lib\\site-packages\\openai\\_utils\\_utils.py\", line 286, in wrapper\n",
-      " return func(*args, **kwargs)\n",
-      " File \"c:\\Users\\Milan Lazic\\projects\\llm_engineering\\venv\\Lib\\site-packages\\openai\\resources\\chat\\completions\\completions.py\", line 1147, in create\n",
-      " return self._post(\n",
-      " ~~~~~~~~~~^\n",
-      " \"/chat/completions\",\n",
-      " ^^^^^^^^^^^^^^^^^^^^\n",
-      " ...<46 lines>...\n",
-      " stream_cls=Stream[ChatCompletionChunk],\n",
-      " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
-      " )\n",
-      " ^\n",
-      " File \"c:\\Users\\Milan Lazic\\projects\\llm_engineering\\venv\\Lib\\site-packages\\openai\\_base_client.py\", line 1259, in post\n",
-      " return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))\n",
-      " ~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
-      " File \"c:\\Users\\Milan Lazic\\projects\\llm_engineering\\venv\\Lib\\site-packages\\openai\\_base_client.py\", line 1047, in request\n",
-      " raise self._make_status_error_from_response(err.response) from None\n",
-      "openai.BadRequestError: Error code: 400 - [{'error': {'code': 400, 'message': 'Unable to submit request because it must include at least one parts field, which describes the prompt input. Learn more: https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/gemini', 'status': 'INVALID_ARGUMENT'}}]\n"
+     "ename": "NameError",
+     "evalue": "name 'gr' is not defined",
+     "output_type": "error",
+     "traceback": [
+      "\u001b[31m---------------------------------------------------------------------------\u001b[39m",
+      "\u001b[31mNameError\u001b[39m Traceback (most recent call last)",
+      "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[1]\u001b[39m\u001b[32m, line 1\u001b[39m\n\u001b[32m----> \u001b[39m\u001b[32m1\u001b[39m view = \u001b[43mgr\u001b[49m.Interface(\n\u001b[32m 2\u001b[39m fn=run_boardgame_conversation,\n\u001b[32m 3\u001b[39m inputs=[gr.Textbox(label=\u001b[33m\"\u001b[39m\u001b[33mInput the name of the board game:\u001b[39m\u001b[33m\"\u001b[39m)],\n\u001b[32m 4\u001b[39m outputs=[gr.Markdown(label=\u001b[33m\"\u001b[39m\u001b[33mConversation:\u001b[39m\u001b[33m\"\u001b[39m)],\n\u001b[32m 5\u001b[39m flagging_mode=\u001b[33m\"\u001b[39m\u001b[33mnever\u001b[39m\u001b[33m\"\u001b[39m\n\u001b[32m 6\u001b[39m )\n\u001b[32m 7\u001b[39m view.launch()\n",
+      "\u001b[31mNameError\u001b[39m: name 'gr' is not defined"
+     ]
     }
    ],
    "source": [
+    "# Create a Gradio interface for running boardgame conversations.\n",
+    "# The interface takes the board game name as input and displays the conversation as Markdown.\n",
     "view = gr.Interface(\n",
     " fn=run_boardgame_conversation,\n",
     " inputs=[gr.Textbox(label=\"Input the name of the board game:\")],\n",
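Note on the one output kept in cell `548efb27`: it is a `NameError: name 'gr' is not defined` traceback, which suggests the cell was run in a kernel where the Gradio import cell had not executed yet. Below is a minimal sketch of that cell with the import made explicit; it assumes `run_boardgame_conversation` is defined in an earlier cell of this notebook, and it is illustrative rather than a change proposed in this commit.

```python
# Sketch only: assumes run_boardgame_conversation is defined in an earlier cell.
import gradio as gr  # explicit import avoids the NameError captured in the retained output

view = gr.Interface(
    fn=run_boardgame_conversation,
    inputs=[gr.Textbox(label="Input the name of the board game:")],
    outputs=[gr.Markdown(label="Conversation:")],
    flagging_mode="never",  # Gradio 5.x parameter name; older releases used allow_flagging="never"
)
view.launch()
```

Separately, the stderr output removed in this commit shows the Gemini call failing with a 400 ("must include at least one parts field") after a turn comes back as `None`; skipping messages with empty content before `chat.completions.create` would likely avoid that, though where to add the check depends on how `call_boardgame_critique` assembles its message list.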