From decaeeabf436bffe961601de622818c677c82716 Mon Sep 17 00:00:00 2001
From: rickard
Date: Fri, 29 Sep 2023 19:01:06 +0200
Subject: [PATCH] fix openai start zone generation

---
 tale/llm/llm_utils.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/tale/llm/llm_utils.py b/tale/llm/llm_utils.py
index a28e3c9b..212a4d7d 100644
--- a/tale/llm/llm_utils.py
+++ b/tale/llm/llm_utils.py
@@ -516,9 +516,11 @@ def generate_start_zone(self, location_desc: str, story_type: str, story_context
         if self.backend == 'kobold_cpp':
             request_body = self._kobold_generation_prompt(request_body)
             request_body['prompt'] = prompt
+            request_body['max_length'] = 750
         elif self.backend == 'openai':
             request_body['messages'][1]['content'] = prompt
-            request_body['max_length'] = 750
+            request_body['max_tokens'] = 750
+
         result = self.io_util.synchronous_request(request_body)
         try:
             json_result = json.loads(parse_utils.sanitize_json(result))
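
Note on the change: the fix comes down to using the token-limit key each backend expects. KoboldCpp's generate endpoint takes 'max_length', while the OpenAI chat completions API takes 'max_tokens', so the limit now goes into the kobold_cpp branch under its own name and the openai branch sends 'max_tokens' instead. Below is a minimal, self-contained sketch of that dispatch, not the patched method itself; build_request_body and the system-message text are illustrative assumptions.

# Sketch only: how the two backends name the token-limit parameter.
# build_request_body is a hypothetical helper, not part of tale/llm/llm_utils.py.
def build_request_body(backend: str, prompt: str, max_new_tokens: int = 750) -> dict:
    """Return a request body with the backend-appropriate token limit key."""
    if backend == 'kobold_cpp':
        # KoboldCpp-style generation request: raw prompt plus 'max_length'.
        return {
            'prompt': prompt,
            'max_length': max_new_tokens,
        }
    elif backend == 'openai':
        # OpenAI chat completions: chat 'messages' plus 'max_tokens';
        # 'max_length' is not a recognized parameter there, which is what the patch corrects.
        return {
            'messages': [
                {'role': 'system', 'content': 'You generate game zones.'},  # placeholder system prompt
                {'role': 'user', 'content': prompt},
            ],
            'max_tokens': max_new_tokens,
        }
    raise ValueError(f'Unknown backend: {backend}')

if __name__ == '__main__':
    # Example usage of the sketch.
    print(build_request_body('openai', 'Describe the starting zone.'))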