From bee81e673c4a4c23ab5f2d76a9a569c6576e7fd6 Mon Sep 17 00:00:00 2001
From: wwewwt <wwewwt@163.com>
Date: Wed, 26 Jun 2024 11:21:33 +0800
Subject: [PATCH] Fix missing system_prompt in the composite_demo openai client
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

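The openai client built chat_history with the tool-aware system prompt but then
sent the separately built `messages` list (created without tools) to the API, so
the system prompt never reached the model. Send a single history that always
carries the system prompt, and only emit the "# 可用工具" section header in
build_system_prompt when tools or functions are actually enabled.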
---
 composite_demo/src/client.py         | 8 ++++----
 composite_demo/src/clients/openai.py | 6 +++---
 composite_demo/src/conversation.py   | 3 ++-
 3 files changed, 9 insertions(+), 8 deletions(-)

diff --git a/composite_demo/src/client.py b/composite_demo/src/client.py
index 2ef2b45..4099973 100644
--- a/composite_demo/src/client.py
+++ b/composite_demo/src/client.py
@@ -37,10 +37,10 @@ class Client(Protocol):
 
 def process_input(history: list[dict], tools: list[dict], role_name_replace:dict=None) -> list[dict]:
     chat_history = []
-    if len(tools) > 0:
-        chat_history.append(
-            {"role": "system", "content": build_system_prompt(list(ALL_TOOLS), tools)}
-        )
+    # Always include the system prompt, even when no tools are selected.
+    chat_history.append(
+        {"role": "system", "content": build_system_prompt(list(ALL_TOOLS), tools)}
+    )
 
     for conversation in history:
         role = str(conversation.role).removeprefix("<|").removesuffix("|>")
diff --git a/composite_demo/src/clients/openai.py b/composite_demo/src/clients/openai.py
index 82f4c12..756e7a6 100644
--- a/composite_demo/src/clients/openai.py
+++ b/composite_demo/src/clients/openai.py
@@ -43,12 +43,12 @@ class APIClient(Client):
         history: list[Conversation],
         **parameters,
     ) -> Generator[tuple[str | dict, list[dict]]]:
-        chat_history = process_input(history, tools)
-        messages = process_input(history, '', role_name_replace=self.role_name_replace)
+        chat_history = process_input(history, '', role_name_replace=self.role_name_replace)
+        # The combined chat_history (including the system prompt) is sent to the API below.
         openai_tools = format_openai_tool(tools)
         response = self.client.chat.completions.create(
             model="glm-4",
-            messages=messages,
+            messages=chat_history,
             tools=openai_tools,
             stream=self.use_stream,
             max_tokens=parameters["max_new_tokens"],
diff --git a/composite_demo/src/conversation.py b/composite_demo/src/conversation.py
index 25132ce..fb887df 100644
--- a/composite_demo/src/conversation.py
+++ b/composite_demo/src/conversation.py
@@ -30,7 +30,8 @@ def build_system_prompt(
 ):
     value = SELFCOG_PROMPT
     value += "\n\n" + datetime.now().strftime(DATE_PROMPT)
-    value += "\n\n# 可用工具"
+    if enabled_tools or functions:
+        value += "\n\n# 可用工具"
     contents = []
     for tool in enabled_tools:
         contents.append(f"\n\n## {tool}\n\n{TOOL_SYSTEM_PROMPTS[tool]}")