Merge pull request #242 from wwewwt/main

Fix missing system_prompt in the openai client of composite_demo
zR 2024-06-29 01:53:43 +08:00 committed by GitHub
commit aa6c942c19
3 changed files with 9 additions and 8 deletions


@@ -37,10 +37,10 @@ class Client(Protocol):
 def process_input(history: list[dict], tools: list[dict], role_name_replace:dict=None) -> list[dict]:
     chat_history = []
-    if len(tools) > 0:
-        chat_history.append(
-            {"role": "system", "content": build_system_prompt(list(ALL_TOOLS), tools)}
-        )
+    #if len(tools) > 0:
+    chat_history.append(
+        {"role": "system", "content": build_system_prompt(list(ALL_TOOLS), tools)}
+    )
     for conversation in history:
         role = str(conversation.role).removeprefix("<|").removesuffix("|>")
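
The effect of this hunk is that process_input now prepends the system message unconditionally instead of only when tools is non-empty. A minimal, standalone sketch of that behavior follows; build_system_prompt_stub and ALL_TOOLS_STUB are hypothetical stand-ins for the demo's own build_system_prompt and ALL_TOOLS, and the history handling is simplified.

ALL_TOOLS_STUB = ["simple_browser", "python", "cogview"]

def build_system_prompt_stub(enabled_tools: list, functions) -> str:
    # Stand-in: the real function renders the full tool/system prompt.
    return "You are GLM-4. Enabled tools: " + ", ".join(enabled_tools)

def process_input_sketch(history: list[dict], tools) -> list[dict]:
    chat_history = []
    # After the fix: always prepend a system message, even when `tools` is
    # empty -- which is exactly what the OpenAI-API client passes.
    chat_history.append(
        {"role": "system", "content": build_system_prompt_stub(ALL_TOOLS_STUB, tools)}
    )
    chat_history.extend(history)
    return chat_history

msgs = process_input_sketch([{"role": "user", "content": "hi"}], tools="")
assert msgs[0]["role"] == "system"   # system prompt present even with no tools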


@@ -43,12 +43,12 @@ class APIClient(Client):
         history: list[Conversation],
         **parameters,
     ) -> Generator[tuple[str | dict, list[dict]]]:
-        chat_history = process_input(history, tools)
-        messages = process_input(history, '', role_name_replace=self.role_name_replace)
+        chat_history = process_input(history, '', role_name_replace=self.role_name_replace)
+        #messages = process_input(history, '', role_name_replace=self.role_name_replace)
         openai_tools = format_openai_tool(tools)
         response = self.client.chat.completions.create(
             model="glm-4",
-            messages=messages,
+            messages=chat_history,
             tools=openai_tools,
             stream=self.use_stream,
             max_tokens=parameters["max_new_tokens"],
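
A standalone illustration of the net effect of this hunk, with hypothetical literal values: the list handed to chat.completions.create is now chat_history, whose first element is the system prompt built by process_input, while the old messages variable is commented out.

chat_history = [
    {"role": "system", "content": "<system prompt built from ALL_TOOLS>"},
    {"role": "user", "content": "What is 2 + 2?"},
]
request_kwargs = dict(
    model="glm-4",
    messages=chat_history,   # now includes the system prompt
    stream=False,            # stand-in for self.use_stream
    max_tokens=1024,         # stand-in for parameters["max_new_tokens"]
)
assert request_kwargs["messages"][0]["role"] == "system"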


@@ -30,7 +30,8 @@ def build_system_prompt(
 ):
     value = SELFCOG_PROMPT
     value += "\n\n" + datetime.now().strftime(DATE_PROMPT)
-    value += "\n\n# 可用工具"
+    if enabled_tools or functions:
+        value += "\n\n# 可用工具"
     contents = []
     for tool in enabled_tools:
         contents.append(f"\n\n## {tool}\n\n{TOOL_SYSTEM_PROMPTS[tool]}")
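
A minimal sketch of this last change; SELFCOG_PROMPT_STUB, DATE_PROMPT_STUB and TOOL_SYSTEM_PROMPTS_STUB are hypothetical stand-ins for the repo's constants, and the tool sections are concatenated directly rather than collected in a list. The point is that the "# 可用工具" (available tools) header is now emitted only when there actually are enabled tools or functions.

from datetime import datetime

SELFCOG_PROMPT_STUB = "You are GLM-4, a helpful assistant."
DATE_PROMPT_STUB = "Current date: %Y-%m-%d"
TOOL_SYSTEM_PROMPTS_STUB = {"python": "Run Python code in a sandboxed interpreter."}

def build_system_prompt_sketch(enabled_tools: list[str], functions: list[dict]) -> str:
    value = SELFCOG_PROMPT_STUB
    value += "\n\n" + datetime.now().strftime(DATE_PROMPT_STUB)
    if enabled_tools or functions:            # header only when a section follows
        value += "\n\n# 可用工具"
    for tool in enabled_tools:
        value += f"\n\n## {tool}\n\n{TOOL_SYSTEM_PROMPTS_STUB[tool]}"
    return value

print(build_system_prompt_sketch([], []))           # no tools: no tools header
print(build_system_prompt_sketch(["python"], []))   # tools: header plus entries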