Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 17 additions & 0 deletions week2/community-contributions/SamuelAdebodun/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
# Technical Q&A (Week 2)

Web app that streams answers from **OpenAI (gpt-4o-mini)** or **Anthropic (Claude)**. Users pick a model in a dropdown, type a question, and see streamed Markdown in the UI. Built with Gradio; run cells in order and use the share link to open the app.

**Requirements:** `.env` with `OPENAI_API_KEY` and `ANTHROPIC_API_KEY`.

---

## Improvements over Week 1

| Week 1 | Week 2 |
|--------|--------|
| Notebook-only: run cells, hardcoded or `input()` question | **Gradio UI**: type any question, choose model, get streamed answer in the browser |
| Two separate flows (OpenAI cell, Ollama cell) | **Single flow**: one `answer()` + `MODEL_CHOICES`; switch provider via dropdown |
| OpenAI + Ollama (local) | **OpenAI + Anthropic** (both cloud; Anthropic via OpenAI-compatible API) |
| `update_display()` in Jupyter | **Streaming to Gradio** `gr.Markdown` with copy button |
| One question per run | **Reusable app**: submit many questions without re-running cells |
202 changes: 202 additions & 0 deletions week2/community-contributions/SamuelAdebodun/week2.ipynb
Original file line number Diff line number Diff line change
@@ -0,0 +1,202 @@
{
"cells": [
{
"cell_type": "code",
"metadata": {},
"source": [
"# Dependencies: env, OpenAI client, and Gradio for the web UI\n",
"import os\n",
"import json\n",
"import sqlite3\n",
"from dotenv import load_dotenv\n",
"from openai import OpenAI\n",
"import gradio as gr"
],
"execution_count": null,
"outputs": [],
"id": "74dc0ebc"
},
{
"cell_type": "code",
"metadata": {},
"source": [
"# Model IDs used for OpenAI and Anthropic API calls\n",
"MODEL_GPT = \"gpt-4o-mini\"\n",
"MODEL_CLAUDE = \"claude-sonnet-4-5-20250929\""
],
"execution_count": null,
"outputs": [],
"id": "8bd6d9c4"
},
{
"cell_type": "code",
"metadata": {},
"source": [
"load_dotenv(override=True)\n",
"openai_key = os.getenv('OPENAI_API_KEY')\n",
"anthropic_key = os.getenv('ANTHROPIC_API_KEY')\n",
"\n",
"if openai_key:\n",
" print(f\"OpenAI API Key exists and begins {openai_key[:8]}\")\n",
"else:\n",
" print(\"OpenAI API Key not set\")\n",
"\n",
"if anthropic_key:\n",
" print(f\"Anthropic API Key exists and begins {anthropic_key[:7]}\")\n",
"else:\n",
" print(\"Anthropic API Key not set\")\n"
],
"execution_count": null,
"outputs": [],
"id": "c72d0de1"
},
{
"cell_type": "code",
"metadata": {},
"source": [
"# Both providers are reached through the OpenAI SDK: Anthropic exposes an\n",
"# OpenAI-compatible endpoint, so only the api_key and base_url differ.\n",
"openai_client = OpenAI()\n",
"anthropic_client = OpenAI(\n",
"    api_key=anthropic_key,\n",
"    base_url=\"https://api.anthropic.com/v1\",\n",
")\n",
"print(\"clients are ready!!\")"
],
"execution_count": null,
"outputs": [],
"id": "6ad8c068"
},
{
"cell_type": "code",
"metadata": {},
"source": [
"# System prompt that defines the assistant's role for all model calls\n",
"system_prompt = \"\"\"You are a helpful technical tutor who answers questions about \n",
"Python code, software engineering, devops, LLMs and Cloud engineering .\"\"\""
],
"execution_count": null,
"outputs": [],
"id": "e6ceaba8"
},
{
"cell_type": "code",
"metadata": {},
"source": [
"# Streaming helpers. Both providers speak the OpenAI chat-completions protocol,\n",
"# so one private generator does the work and the public wrappers just bind the\n",
"# right client and model ID. Public signatures are unchanged.\n",
"def _stream_chat(client, model, messages):\n",
"    \"\"\"Yield text deltas from an OpenAI-compatible streaming chat completion.\n",
"\n",
"    client: an OpenAI SDK client; model: model ID string; messages: chat list.\n",
"    \"\"\"\n",
"    stream = client.chat.completions.create(\n",
"        model=model,\n",
"        messages=messages,\n",
"        stream=True,\n",
"    )\n",
"    for chunk in stream:\n",
"        # Some chunks (e.g. the final one) can carry no choices or an empty delta.\n",
"        if chunk.choices and chunk.choices[0].delta.content:\n",
"            yield chunk.choices[0].delta.content\n",
"\n",
"\n",
"def stream_openai(messages):\n",
"    \"\"\"Stream completion chunks from OpenAI.\"\"\"\n",
"    yield from _stream_chat(openai_client, MODEL_GPT, messages)\n",
"\n",
"\n",
"def stream_anthropic(messages):\n",
"    \"\"\"Stream completion chunks from Anthropic (via OpenAI-compatible API).\"\"\"\n",
"    yield from _stream_chat(anthropic_client, MODEL_CLAUDE, messages)"
],
"execution_count": null,
"outputs": [],
"id": "37a7313d"
},
{
"cell_type": "code",
"metadata": {},
"source": [
"# Single source of truth mapping display labels to streaming generators.\n",
"MODEL_CHOICES = [\n",
"    (\"OpenAI (gpt-4o-mini)\", stream_openai),\n",
"    (\"Anthropic (claude-sonnet-4-5-20250929)\", stream_anthropic),\n",
"]\n",
"\n",
"\n",
"def answer(question, model_choice):\n",
"    \"\"\"Stream a progressively growing answer for the chosen model label.\"\"\"\n",
"    text = (question or \"\").strip()\n",
"    if not text:\n",
"        yield \"Please enter a question.\"\n",
"        return\n",
"    messages = [\n",
"        {\"role\": \"system\", \"content\": system_prompt},\n",
"        {\"role\": \"user\", \"content\": text},\n",
"    ]\n",
"    # Unknown labels fall back to OpenAI streaming.\n",
"    stream_fn = dict(MODEL_CHOICES).get(model_choice, stream_openai)\n",
"    pieces = []\n",
"    for delta in stream_fn(messages):\n",
"        pieces.append(delta)\n",
"        # Yield the full accumulated text each time so Gradio re-renders the Markdown.\n",
"        yield \"\".join(pieces)"
],
"execution_count": null,
"outputs": [],
"id": "080503f0"
},
{
"cell_type": "code",
"metadata": {},
"source": [
"# Gradio UI: model dropdown, question textbox, streamed Markdown output.\n",
"# Dropdown choices are derived from MODEL_CHOICES so the UI and the dispatch\n",
"# table used by answer() cannot drift apart.\n",
"with gr.Blocks(title=\"Technical Q&A\") as demo:\n",
"    gr.Markdown(\"## Technical Q&A — Ask a question, pick a model, get a streamed answer\")\n",
"    model_labels = [label for label, _ in MODEL_CHOICES]\n",
"    model_dropdown = gr.Dropdown(\n",
"        choices=model_labels,\n",
"        value=model_labels[0],\n",
"        label=\"Model\",\n",
"    )\n",
"    question_in = gr.Textbox(\n",
"        placeholder=\"e.g. Explain: yield from {book.get('author') for book in books if book.get('author')}\",\n",
"        label=\"Question\",\n",
"        lines=3,\n",
"    )\n",
"\n",
"    gr.Markdown(\"### Answer\")\n",
"    answer_out = gr.Markdown(value=\"*Answer will appear here.*\", show_copy_button=True)\n",
"\n",
"    # Both pressing Enter in the textbox and clicking Submit trigger the same handler.\n",
"    question_in.submit(\n",
"        answer,\n",
"        inputs=[question_in, model_dropdown],\n",
"        outputs=answer_out,\n",
"    )\n",
"    gr.Button(\"Submit\").click(\n",
"        answer,\n",
"        inputs=[question_in, model_dropdown],\n",
"        outputs=answer_out,\n",
"    )\n",
"\n",
"# share=True publishes a temporary public link, useful when running in a notebook.\n",
"demo.launch(share=True)"
],
"execution_count": null,
"outputs": [],
"id": "839d56a5"
},
],
"metadata": {
"kernelspec": {
"display_name": ".venv",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.12"
}
},
"nbformat": 4,
"nbformat_minor": 5
}