diff --git a/composite_demo/src/client.py b/composite_demo/src/client.py
index 2ef2b45..4099973 100644
--- a/composite_demo/src/client.py
+++ b/composite_demo/src/client.py
@@ -37,10 +37,10 @@ class Client(Protocol):
 
 def process_input(history: list[dict], tools: list[dict], role_name_replace:dict=None) -> list[dict]:
     chat_history = []
-    if len(tools) > 0:
-        chat_history.append(
-            {"role": "system", "content": build_system_prompt(list(ALL_TOOLS), tools)}
-        )
+    #if len(tools) > 0:
+    chat_history.append(
+        {"role": "system", "content": build_system_prompt(list(ALL_TOOLS), tools)}
+    )
 
     for conversation in history:
         role = str(conversation.role).removeprefix("<|").removesuffix("|>")
diff --git a/composite_demo/src/clients/openai.py b/composite_demo/src/clients/openai.py
index 82f4c12..756e7a6 100644
--- a/composite_demo/src/clients/openai.py
+++ b/composite_demo/src/clients/openai.py
@@ -43,12 +43,12 @@ class APIClient(Client):
         history: list[Conversation],
         **parameters,
     ) -> Generator[tuple[str | dict, list[dict]]]:
-        chat_history = process_input(history, tools)
-        messages = process_input(history, '', role_name_replace=self.role_name_replace)
+        chat_history = process_input(history, '', role_name_replace=self.role_name_replace)
+        #messages = process_input(history, '', role_name_replace=self.role_name_replace)
         openai_tools = format_openai_tool(tools)
         response = self.client.chat.completions.create(
             model="glm-4",
-            messages=messages,
+            messages=chat_history,
             tools=openai_tools,
             stream=self.use_stream,
             max_tokens=parameters["max_new_tokens"],
diff --git a/composite_demo/src/conversation.py b/composite_demo/src/conversation.py
index 25132ce..fb887df 100644
--- a/composite_demo/src/conversation.py
+++ b/composite_demo/src/conversation.py
@@ -30,7 +30,8 @@ def build_system_prompt(
 ):
     value = SELFCOG_PROMPT
     value += "\n\n" + datetime.now().strftime(DATE_PROMPT)
-    value += "\n\n# 可用工具"
+    if enabled_tools or functions:
+        value += "\n\n# 可用工具"
     contents = []
     for tool in enabled_tools:
         contents.append(f"\n\n## {tool}\n\n{TOOL_SYSTEM_PROMPTS[tool]}")