From 1285f54a8c034b30dbac8af83b550203c9d68240 Mon Sep 17 00:00:00 2001
From: LawrenceFulton <47426563+LawrenceFulton@users.noreply.github.com>
Date: Fri, 15 Dec 2023 11:38:27 +0100
Subject: [PATCH] Small doc fix (#1695)

---
 docs/getting_started/representation/llm.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/getting_started/representation/llm.md b/docs/getting_started/representation/llm.md
index d27bcdcb..3d9bc985 100644
--- a/docs/getting_started/representation/llm.md
+++ b/docs/getting_started/representation/llm.md
@@ -123,7 +123,7 @@ much better results with a `flan-T5` like model:
 from transformers import pipeline
 from bertopic.representation import TextGeneration
 
-prompt = "I have a topic described by the following keywords: [KEYWORDS]. Based on the previous keywords, what is this topic about?""
+prompt = "I have a topic described by the following keywords: [KEYWORDS]. Based on the previous keywords, what is this topic about?"
 
 # Create your representation model
 generator = pipeline('text2text-generation', model='google/flan-t5-base')
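
For context, a minimal sketch of how the corrected prompt is wired into BERTopic, following the docs section being fixed; the `TextGeneration(generator, prompt=...)` call, the `BERTopic(representation_model=...)` argument, and the `docs` variable are taken or assumed from that docs page and are not part of the patch itself.

# A minimal usage sketch, not part of the patch: it follows the docs page being
# fixed and assumes a list of documents `docs` is already available.
from transformers import pipeline
from bertopic import BERTopic
from bertopic.representation import TextGeneration

# The corrected prompt; BERTopic substitutes [KEYWORDS] with each topic's keywords.
prompt = "I have a topic described by the following keywords: [KEYWORDS]. Based on the previous keywords, what is this topic about?"

# Create the flan-T5 text2text-generation pipeline and wrap it as a representation model
generator = pipeline('text2text-generation', model='google/flan-t5-base')
representation_model = TextGeneration(generator, prompt=prompt)

# Use the representation model to generate topic labels during fitting
topic_model = BERTopic(representation_model=representation_model)
# topics, probs = topic_model.fit_transform(docs)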