"""
|
|||
|
This script creates a OpenAI Request demo for the glm-4-9b model, just Use OpenAI API to interact with the model.
|
|||
|
"""
|
|||
|
|
|||
|
from openai import OpenAI
# Base URL of the locally served OpenAI-compatible endpoint
# (assumed to be the GLM-4 demo server on localhost:8000 — confirm against deployment).
base_url = "http://127.0.0.1:8000/v1/"

# The client requires an api_key argument, but the local server does not
# validate it, hence the "EMPTY" placeholder.
client = OpenAI(api_key="EMPTY", base_url=base_url)
def function_chat():
    """Demonstrate a tool-calling (function calling) chat request.

    Sends a weather question together with a ``get_current_weather`` tool
    schema and lets the model decide whether to invoke the tool
    (``tool_choice="auto"``), then prints the assistant message content.

    Returns:
        None. Output is printed to stdout.

    Raises:
        openai.OpenAIError: propagated from the client on HTTP/transport
            failures (the v1 client raises rather than returning an
            error-status response object).
    """
    messages = [{"role": "user", "content": "What's the weather like in San Francisco, Tokyo, and Paris?"}]
    tools = [
        {
            "type": "function",
            "function": {
                "name": "get_current_weather",
                "description": "Get the current weather in a given location",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "location": {
                            "type": "string",
                            "description": "The city and state, e.g. San Francisco, CA",
                        },
                        "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
                    },
                    "required": ["location"],
                },
            },
        }
    ]

    # All Tools capability: image generation
    # messages = [{"role": "user", "content": "帮我画一张天空的画画吧"}]
    # tools = [{"type": "cogview"}]
    #
    # All Tools capability: web search
    # messages = [{"role": "user", "content": "今天黄金的价格"}]
    # tools = [{"type": "simple_browser"}]

    response = client.chat.completions.create(
        model="glm-4",
        messages=messages,
        tools=tools,
        tool_choice="auto",  # use "auto" to let the model choose the tool automatically
        # tool_choice={"type": "function", "function": {"name": "my_function"}},
    )
    # BUG FIX: the original `else` branch printed `response.status_code`,
    # but a ChatCompletion object has no such attribute — the OpenAI v1
    # client raises an exception on HTTP errors instead of returning an
    # error-status object, so that branch could only ever crash.
    if response:
        print(response.choices[0].message.content)
    else:
        print("Error: empty response from server")
def simple_chat(use_stream=False):
    """Demonstrate a plain (non-tool) chat completion.

    Args:
        use_stream: If True, request a streaming response and print each
            content delta as it arrives; otherwise print the complete
            assistant message once.

    Returns:
        None. Output is printed to stdout.

    Raises:
        openai.OpenAIError: propagated from the client on HTTP/transport
            failures.
    """
    messages = [
        {
            "role": "system",
            "content": "你是 GLM-4,请你热情回答用户的问题。",
        },
        {
            "role": "user",
            "content": "你好,请你用生动的话语给我讲一个小故事吧"
        },
    ]
    response = client.chat.completions.create(
        model="glm-4",
        messages=messages,
        stream=use_stream,
        max_tokens=1024,
        temperature=0.8,
        presence_penalty=1.1,
        top_p=0.8,
    )
    if response:
        if use_stream:
            for chunk in response:
                # BUG FIX: role-only and final chunks carry
                # delta.content == None; the original code printed the
                # literal string "None" for those. Skip them instead.
                delta = chunk.choices[0].delta.content
                if delta is not None:
                    print(delta)
        else:
            print(response.choices[0].message.content)
    else:
        # BUG FIX: a ChatCompletion has no `.status_code`; the client
        # raises on HTTP errors, so report an empty result generically.
        print("Error: empty response from server")
if __name__ == "__main__":
|
|||
|
simple_chat()
|
|||
|
function_chat()
|