From 90ef0f25e5615fa2bdd5982d6ce1162f4e3839c6 Mon Sep 17 00:00:00 2001
From: Matthew
Date: Tue, 20 Jun 2023 18:46:46 -0400
Subject: [PATCH] fixed typo (#523)

---
 examples/How_to_call_functions_for_knowledge_retrieval.ipynb | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/examples/How_to_call_functions_for_knowledge_retrieval.ipynb b/examples/How_to_call_functions_for_knowledge_retrieval.ipynb
index b7554c9..2383f13 100644
--- a/examples/How_to_call_functions_for_knowledge_retrieval.ipynb
+++ b/examples/How_to_call_functions_for_knowledge_retrieval.ipynb
@@ -172,6 +172,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "id": "f2e47962",
    "metadata": {},
@@ -338,7 +339,7 @@
     "\n",
     "\n",
     "def extract_chunk(content, template_prompt):\n",
-    "    \"\"\"This function applies a prompt to some input content. In this case it returns a summarize chunk of text\"\"\"\n",
+    "    \"\"\"This function applies a prompt to some input content. In this case it returns a summarized chunk of text\"\"\"\n",
     "    prompt = template_prompt + content\n",
     "    response = openai.ChatCompletion.create(\n",
     "        model=GPT_MODEL, messages=[{\"role\": \"user\", \"content\": prompt}], temperature=0\n",
@@ -480,6 +481,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "id": "dab07e98",
    "metadata": {},
@@ -656,6 +658,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "id": "dd3e7868",
    "metadata": {},