diff --git a/examples/agents_sdk/app_assistant_voice_agents.ipynb b/examples/agents_sdk/app_assistant_voice_agents.ipynb
index cecbfd6..0f9b2f6 100644
--- a/examples/agents_sdk/app_assistant_voice_agents.ipynb
+++ b/examples/agents_sdk/app_assistant_voice_agents.ipynb
@@ -462,22 +462,71 @@
"cell_type": "markdown",
"metadata": {},
"source": [
- "Executing the above code, gives us the following responses which correctly provide the same functionality as the text-based workflow.\n",
- "\n",
- "\n",
- "\n",
- "\n",
- "\n",
- ""
+    "Executing the above code gives us the following responses, which correctly provide the same functionality as the text-based workflow."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ " \n",
+ " "
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ " \n",
+ " "
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ " \n",
+ " "
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "from IPython.display import display, Audio\n",
+ "display(Audio(\"voice_agents_audio/account_balance_response_base.mp3\"))\n",
+ "display(Audio(\"voice_agents_audio/product_info_response_base.mp3\"))\n",
+ "display(Audio(\"voice_agents_audio/trending_items_response_base.mp3\"))"
]
},
{
@@ -689,34 +738,122 @@
"cell_type": "markdown",
"metadata": {},
"source": [
- "Running the above code gives us the following responses which are much more naturally worded and engaging in the delivery.\n",
- "\n",
- "\n",
- "\n",
- "\n",
- "\n",
- "\n",
- "\n",
- "...And for something less subtle, we can switch to the `themed_character_assistant` instructions and receive the following responses:\n",
- "\n",
- "\n",
- "\n",
- "\n"
+    "Running the above code gives us the following responses, which are much more naturally worded and engaging in the delivery."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ " \n",
+ " "
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ " \n",
+ " "
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ " \n",
+ " "
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "display(Audio(\"voice_agents_audio/account_balance_response_opti.mp3\"))\n",
+ "display(Audio(\"voice_agents_audio/product_info_response_opti.mp3\"))\n",
+ "display(Audio(\"voice_agents_audio/trending_items_response_opti.mp3\"))"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "...And for something less subtle, we can switch to the `themed_character_assistant` instructions and receive the following responses:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ " \n",
+ " "
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ " \n",
+ " "
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "display(Audio(\"voice_agents_audio/product_info_character.wav\"))\n",
+ "display(Audio(\"voice_agents_audio/product_info_character_2.wav\"))"
]
},
{
diff --git a/registry.yaml b/registry.yaml
index 6fb429a..e07c14c 100644
--- a/registry.yaml
+++ b/registry.yaml
@@ -1840,15 +1840,15 @@
- title: Building a Voice Assistant with the Agents SDK
path: examples/agents_sdk/app_assistant_voice_agents.ipynb
- date: 2025-03-25
+ date: 2025-03-27
authors:
- rupert-openai
tags:
- - voice
+ - audio
- responses
- - functions
+ - speech
-- title: Multi-Language One-Way Translation with the Realtime API
+- title: Multi-Language One-Way Translation with the Realtime API
path: examples/voice_solutions/one_way_translation_using_realtime_api.mdx
date: 2025-03-24
authors: