From 2802791a1a4735287be93c6cbf861329c840a1dc Mon Sep 17 00:00:00 2001 From: VitoFe Date: Fri, 11 Oct 2024 23:18:19 +0200 Subject: [PATCH] Update chatgpt.py feat: add support for customizable system prompts and API base URL in ChatGptTranslator - Added `system_prompt` parameter to allow specifying a custom system-level prompt for translations. - Introduced `base_url` parameter, enabling the use of alternative AI models compatible with OpenAI's API format (e.g., Ollama). --- deep_translator/chatgpt.py | 21 +++++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/deep_translator/chatgpt.py b/deep_translator/chatgpt.py index 2ab3777..a499ef5 100644 --- a/deep_translator/chatgpt.py +++ b/deep_translator/chatgpt.py @@ -20,6 +20,8 @@ def __init__( target: str = "english", api_key: Optional[str] = os.getenv(OPEN_AI_ENV_VAR, None), model: Optional[str] = "gpt-3.5-turbo", + base_url: Optional[str] = None, + system_prompt: Optional[str] = None, **kwargs, ): """ @@ -43,18 +45,25 @@ def translate(self, text: str, **kwargs) -> str: import openai openai.api_key = self.api_key + openai.base_url = self.base_url prompt = f"Translate the text below into {self.target}.\n" prompt += f'Text: "{text}"' + messages = [] + + if self.system_prompt: + messages.append({"role": "system", "content": self.system_prompt}) + + messages.append( + { + "role": "user", + "content": prompt, + } + ) response = openai.ChatCompletion.create( model=self.model, - messages=[ - { - "role": "user", - "content": prompt, - } - ], + messages=messages, ) return response.choices[0].message.content