From 3032badd918b81e8bb6bd7282fa1864c1f6b2f21 Mon Sep 17 00:00:00 2001
From: CZYCW
Date: Fri, 26 Jan 2024 10:04:12 +0800
Subject: [PATCH] fix variable type for top_p

---
 applications/Colossal-LLaMA-2/inference_example.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/applications/Colossal-LLaMA-2/inference_example.py b/applications/Colossal-LLaMA-2/inference_example.py
index 7fe2d92abd05..123290d45eab 100644
--- a/applications/Colossal-LLaMA-2/inference_example.py
+++ b/applications/Colossal-LLaMA-2/inference_example.py
@@ -51,7 +51,7 @@ def generate(args):
     parser.add_argument('--do_sample', type=bool, default=True, help="Set whether or not to use sampling")
     parser.add_argument('--temperature', type=float, default=0.3, help="Set temperature value")
     parser.add_argument('--top_k', type=int, default=50, help="Set top_k value for top-k-filtering")
-    parser.add_argument('--top_p', type=int, default=0.95, help="Set top_p value for generation")
+    parser.add_argument('--top_p', type=float, default=0.95, help="Set top_p value for generation")
     parser.add_argument('--input_txt', type=str, default="明月松间照,", help="The prompt input to the model")
     args = parser.parse_args()
     generate(args)
\ No newline at end of file
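
Background on why the type matters: argparse applies `type` only to values supplied on the
command line, not to a non-string default. With the old `type=int`, the 0.95 default still came
through as a float, but any explicitly passed nucleus-sampling value such as `--top_p 0.9` was
rejected with an "invalid int value" error. A minimal standalone sketch of that behavior, kept
separate from the patched script:

    import argparse

    # Old declaration: only appears to work because the default bypasses type conversion.
    old = argparse.ArgumentParser()
    old.add_argument('--top_p', type=int, default=0.95, help="Set top_p value for generation")
    print(old.parse_args([]).top_p)                    # 0.95 (default is not converted)
    # old.parse_args(['--top_p', '0.9'])               # error: argument --top_p: invalid int value: '0.9'

    # Fixed declaration from this patch: accepts fractional top_p values from the CLI.
    new = argparse.ArgumentParser()
    new.add_argument('--top_p', type=float, default=0.95, help="Set top_p value for generation")
    print(new.parse_args(['--top_p', '0.9']).top_p)    # 0.9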