
Commit 6bab5fe

Feat: assistant api (#423)

Authored by Zhikaiiii
Co-authored-by: skyline2006 <skyline2006@163.com>
Co-authored-by: Zhicheng Zhang <zzhang.purdue@gmail.com>

1 parent 2f50c6b · commit 6bab5fe

40 files changed: +1741 −457 lines

docker/docker-compose.yaml (+1 −1)

@@ -7,4 +7,4 @@ services:
       dockerfile: docker/tool_manager.dockerfile
     ports:
       - "31511:31511"
-    entrypoint: uvicorn tool_service.tool_manager.api:app --host 0.0.0.0 --port 31511
+    entrypoint: uvicorn modelscope_agent_servers.tool_manager_server.api:app --host 0.0.0.0 --port 31511

docker/tool_manager.dockerfile (+3 −3)

@@ -24,9 +24,9 @@ RUN mkdir -p assets
 RUN mkdir -p workspace

 # install dependency
-ENV PYTHONPATH $PYTHONPATH:/app/tool_service
+ENV PYTHONPATH $PYTHONPATH:/app/modelscope_agent_servers
 RUN pip install fastapi pydantic uvicorn docker sqlmodel

-COPY tool_service /app/tool_service
+COPY modelscope_agent_servers /app/modelscope_agent_servers

-#ENTRYPOINT exec uvicorn tool_service.tool_manager.api:app --host 0.0.0.0 --port 31511
+#ENTRYPOINT exec uvicorn modelscope_agent_servers.tool_manager_server.api:app --host 0.0.0.0 --port 31511

docker/tool_node.dockerfile (+2 −2)

@@ -33,11 +33,11 @@ RUN pip install --no-cache-dir -r requirements.txt
 RUN pip install fastapi uvicorn

 COPY modelscope_agent /app/modelscope_agent
-ENV PYTHONPATH $PYTHONPATH:/app/modelscope_agent:/app/tool_service
+ENV PYTHONPATH $PYTHONPATH:/app/modelscope_agent:/app/modelscope_agent_servers
 ENV BASE_TOOL_DIR /app/assets

 # install tool_node
-COPY tool_service /app/tool_service
+COPY modelscope_agent_servers /app/modelscope_agent_servers


 #ENTRYPOINT exec uvicorn tool_service.tool_node.api:app --host 0.0.0.0 --port $PORT

modelscope_agent/agent.py (+5 −5)

@@ -18,7 +18,7 @@ def __init__(self,
                  name: Optional[str] = None,
                  description: Optional[str] = None,
                  instruction: Union[str, dict] = None,
-                 use_api: bool = False,
+                 use_tool_api: bool = False,
                  **kwargs):
         """
         init tools/llm/instruction for one agent
@@ -34,7 +34,7 @@ def __init__(self,
             name: the name of agent
             description: the description of agent, which is used for multi_agent
             instruction: the system instruction of this agent
-            use_api: whether to use the tool service api, else to use the tool cls instance
+            use_tool_api: whether to use the tool service api, else to use the tool cls instance
             kwargs: other potential parameters
         """
         if isinstance(llm, Dict):
@@ -43,7 +43,7 @@ def __init__(self,
         else:
             self.llm = llm
         self.stream = True
-        self.use_api = use_api
+        self.use_tool_api = use_tool_api

         self.function_list = []
         self.function_map = {}
@@ -122,12 +122,12 @@ def _register_tool(self,
         tool_class_with_tenant = TOOL_REGISTRY[tool_name]

         # check if the tenant_id of tool instance or tool service are exists
-        # TODO: change from use_api=True to False, to get the tenant_id of the tool changes to
+        # TODO: change from use_tool_api=True to False, to get the tenant_id of the tool changes to
         if tenant_id in tool_class_with_tenant:
             return

         try:
-            if self.use_api:
+            if self.use_tool_api:
                 # get service proxy as tool instance, call method will call remote tool service
                 tool_instance = ToolServiceProxy(tool_name, tool_cfg,
                                                  tenant_id)
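
With the rename, opting into the remote tool service happens at agent construction time. A minimal sketch of that call site follows; the RolePlay subclass, the llm config keys and the 'image_gen' tool name are assumptions used for illustration, not part of this diff.

from modelscope_agent.agents.role_play import RolePlay

# Assumed llm config keys; adjust to your model provider.
llm_config = {'model': 'qwen-max', 'model_server': 'dashscope'}

bot = RolePlay(
    function_list=['image_gen'],  # hypothetical tool name
    llm=llm_config,
    use_tool_api=True)  # renamed from use_api: route tool calls through the tool manager service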

modelscope_agent/agents/role_play.py (+45 −9)

@@ -5,6 +5,7 @@
 from modelscope_agent import Agent
 from modelscope_agent.agent_env_util import AgentEnvMixin
 from modelscope_agent.llm.base import BaseChatModel
+from modelscope_agent.tools.base import BaseTool
 from modelscope_agent.utils.tokenization_utils import count_tokens
 from modelscope_agent.utils.utils import check_and_limit_input_length

@@ -107,6 +108,11 @@
     'en': '. you can use tools: [{tool_names}]',
 }

+SPECIAL_PREFIX_TEMPLATE_TOOL_FOR_CHAT = {
+    'zh': '。你必须使用工具中的一个或多个:[{tool_names}]',
+    'en': '. you must use one or more tools: [{tool_names}]',
+}
+
 SPECIAL_PREFIX_TEMPLATE_KNOWLEDGE = {
     'zh': '。请查看前面的知识库',
     'en': '. Please read the knowledge base at the beginning',
@@ -146,10 +152,26 @@ def _run(self,
              lang: str = 'zh',
              **kwargs):

-        self.tool_descs = '\n\n'.join(tool.function_plain_text
-                                      for tool in self.function_map.values())
-        self.tool_names = ','.join(tool.name
-                                   for tool in self.function_map.values())
+        chat_mode = kwargs.get('chat_mode', False)
+        tools = kwargs.get('tools', None)
+        tool_choice = kwargs.get('tool_choice', 'auto')
+
+        if tools is not None:
+            self.tool_descs = BaseTool.parser_function(tools)
+            tool_name_list = []
+            for tool in tools:
+                func_info = tool.get('function', {})
+                if func_info == {}:
+                    continue
+                if 'name' in func_info:
+                    tool_name_list.append(func_info['name'])
+            self.tool_names = ','.join(tool_name_list)
+        else:
+            self.tool_descs = '\n\n'.join(
+                tool.function_plain_text
+                for tool in self.function_map.values())
+            self.tool_names = ','.join(tool.name
+                                       for tool in self.function_map.values())

         self.system_prompt = ''
         self.query_prefix = ''
@@ -172,7 +194,7 @@ def _run(self,
                 'knowledge'] = SPECIAL_PREFIX_TEMPLATE_KNOWLEDGE[lang]

         # concat tools information
-        if self.function_map and not self.llm.support_function_calling():
+        if self.tool_descs and not self.llm.support_function_calling():
             self.system_prompt += TOOL_TEMPLATE[lang].format(
                 tool_descs=self.tool_descs, tool_names=self.tool_names)
             self.query_prefix_dict['tool'] = SPECIAL_PREFIX_TEMPLATE_TOOL[
@@ -215,10 +237,18 @@ def _run(self,
         messages.extend(history)

         # concat the new messages
-        messages.append({
-            'role': 'user',
-            'content': self.query_prefix + user_request
-        })
+        if chat_mode and tool_choice == 'required':
+            required_prefix = SPECIAL_PREFIX_TEMPLATE_TOOL_FOR_CHAT[
+                lang].format(tool_names=self.tool_names)
+            messages.append({
+                'role': 'user',
+                'content': required_prefix + user_request
+            })
+        else:
+            messages.append({
+                'role': 'user',
+                'content': self.query_prefix + user_request
+            })

         planning_prompt = ''
         if self.llm.support_raw_prompt() and hasattr(self.llm,
@@ -265,6 +295,12 @@ def _run(self,
         else:
             assert 'llm_result must be an instance of dict or str'

+        if chat_mode:
+            if use_tool and tool_choice != 'none':
+                return f'Action: {action}\nAction Input: {action_input}\nResult: {output}'
+            else:
+                return f'Result: {output}'
+
         # yield output
         if use_tool:
             if self.llm.support_function_calling():
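
The new kwargs give the role-play agent an OpenAI-style chat mode: callers can hand in a tools schema and a tool_choice policy instead of relying only on registered tool instances. Below is a hedged usage sketch, reusing the bot constructed in the earlier snippet; the weather tool schema is hypothetical, and how the return value is consumed (streamed chunks vs. the single formatted string produced in chat_mode) depends on the caller.

tools = [{
    'type': 'function',
    'function': {
        'name': 'get_current_weather',  # hypothetical tool schema
        'description': 'Get the current weather for a given city',
        'parameters': {
            'type': 'object',
            'properties': {'city': {'type': 'string'}},
            'required': ['city'],
        },
    },
}]

# tool_choice='required' prepends the "must use one or more tools" prefix;
# chat_mode=True makes _run return 'Action/Action Input/Result' text once a tool is used.
response = bot.run(
    'What is the weather in Beijing today?',
    tools=tools,
    tool_choice='required',
    chat_mode=True)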

modelscope_agent/constants.py (+1)

@@ -9,6 +9,7 @@
 AGENT_REGISTRY_NAME = 'agent_center'
 TASK_CENTER_NAME = 'task_center'
 DEFAULT_TOOL_MANAGER_SERVICE_URL = 'http://localhost:31511'
+DEFAULT_ASSISTANT_SERVICE_URL = 'http://localhost:31512'


 class ApiNames(Enum):
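
For local development the two servers are expected on neighbouring ports: the tool manager on 31511 (see the docker-compose change above) and the new assistant service on 31512. A tiny sketch of reading the default; the environment-variable override is an assumption, not something this diff defines.

import os

from modelscope_agent.constants import DEFAULT_ASSISTANT_SERVICE_URL

# Hypothetical override hook for non-local deployments.
assistant_url = os.getenv('ASSISTANT_SERVICE_URL', DEFAULT_ASSISTANT_SERVICE_URL)
print(assistant_url)  # http://localhost:31512 by default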

modelscope_agent/memory/memory_with_retrieval_knowledge.py (+5 −1)

@@ -29,8 +29,12 @@ def __init__(self,
             description=description)

         # allow vector storage to save knowledge
+        embedding = kwargs.get('embedding', None)
         self.store_knowledge = KnowledgeVector(
-            storage_path, name, use_cache=use_knowledge_cache)
+            storage_path,
+            name,
+            use_cache=use_knowledge_cache,
+            embedding=embedding)

     def _run(self,
              query: str = None,
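
The memory now forwards an optional embedding object from its kwargs into the underlying KnowledgeVector store. A hedged sketch of passing one in; the import path and the exact constructor signature are assumptions based on the parameters visible in this hunk.

from modelscope_agent.memory import MemoryWithRetrievalKnowledge  # import path assumed

# Placeholder: supply whatever embedding object KnowledgeVector accepts; None keeps the default.
my_embedding = None

memory = MemoryWithRetrievalKnowledge(
    storage_path='./knowledge_vector_cache',  # hypothetical path and name
    name='demo_knowledge',
    use_knowledge_cache=True,
    embedding=my_embedding)  # new: picked up via kwargs and handed to KnowledgeVector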
File renamed without changes.

modelscope_agent/rag/base.py (new file, +50)

@@ -0,0 +1,50 @@
+import os
+from typing import Any, Dict, List, Union
+
+from llama_index.core import SimpleDirectoryReader, VectorStoreIndex
+from llama_index.core.llama_pack.base import BaseLlamaPack
+from llama_index.core.readers.base import BaseReader
+
+
+class Knowledge(BaseLlamaPack):
+    """ rag pipeline.
+
+    Loads knowledge from different sources: a directory path (str), a list of file paths (list), or a mapping that assigns different retrieval strategies to different sources (dict).
+    Automatically select the best file reader given file extensions.
+
+    Args:
+        knowledge_source: path to a directory, or a list of file paths, or file paths keyed by retrieval strategy.
+        cache_dir: where to cache the indexed information.
+    """
+
+    def __init__(self,
+                 knowledge_source: Union[List, str, Dict],
+                 cache_dir: str = './run',
+                 **kwargs) -> None:
+
+        # extra_readers = self.get_extra_readers()
+        self.documents = []
+        if isinstance(knowledge_source, str):
+            if os.path.exists(knowledge_source):
+                self.documents.append(
+                    SimpleDirectoryReader(
+                        input_dir=knowledge_source,
+                        recursive=True).load_data())
+        else:
+            self.documents = SimpleDirectoryReader(
+                input_files=knowledge_source).load_data()
+
+    def get_extra_readers(self) -> Dict[str, BaseReader]:
+        return {}
+
+    def get_modules(self) -> Dict[str, Any]:
+        """Get modules for rewrite."""
+        return {
+            'node_parser': self.node_parser,
+            'recursive_retriever': self.recursive_retriever,
+            'query_engines': self.query_engines,
+            'reader': self.path_reader,
+        }
+
+    def run(self, query: str, **kwargs) -> str:
+        return self.query_engine.query(query, **kwargs)
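
As committed, the class only loads llama-index documents; the node parser, retriever and query engine referenced by get_modules and run are not assigned in this file, so they presumably come from elsewhere in the PR. The sketch below is therefore an assumption: it wires up the missing query engine by hand so the example is self-contained.

from llama_index.core import VectorStoreIndex

from modelscope_agent.rag.base import Knowledge

# Directory-path variant of knowledge_source: index every readable file under ./docs.
knowledge = Knowledge(knowledge_source='./docs', cache_dir='./run')

# Assumption: build a query engine ourselves, since the committed class does not set
# self.query_engine yet. llama-index also needs an embedding model configured
# (OpenAI by default; the DashscopeEmbedding below is one candidate).
docs = knowledge.documents[0] if knowledge.documents else []
knowledge.query_engine = VectorStoreIndex.from_documents(docs).as_query_engine()

print(knowledge.run('What does this project do?'))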

modelscope_agent/rag/emb/dashscope.py (new file, +89)

@@ -0,0 +1,89 @@
+import os
+from enum import Enum
+from http import HTTPStatus
+from typing import Any, List, Optional
+
+import dashscope
+from llama_index.legacy.bridge.pydantic import Field
+from llama_index.legacy.callbacks import CallbackManager
+from llama_index.legacy.core.embeddings.base import (DEFAULT_EMBED_BATCH_SIZE,
+                                                     BaseEmbedding)
+
+# Enums for validation and type safety
+DashscopeModelName = [
+    'text-embedding-v1',
+    'text-embedding-v2',
+]
+
+
+# Assuming BaseEmbedding is a Pydantic model and handles its own initializations
+class DashscopeEmbedding(BaseEmbedding):
+    """DashscopeEmbedding uses the dashscope API to generate embeddings for text."""
+
+    def __init__(
+        self,
+        model_name: str = 'text-embedding-v2',
+        embed_batch_size: int = DEFAULT_EMBED_BATCH_SIZE,
+        callback_manager: Optional[CallbackManager] = None,
+    ):
+        """
+        A class representation for generating embeddings using the dashscope API.
+
+        Args:
+            model_name (str): The name of the model to be used for generating embeddings. The class ensures that
+                this model is supported and that the input type provided is compatible with the model.
+        """
+
+        assert os.environ.get(
+            'DASHSCOPE_API_KEY',
+            None), 'DASHSCOPE_API_KEY should be set in environ.'
+
+        # Validate model_name and input_type
+        if model_name not in DashscopeModelName:
+            raise ValueError(f'model {model_name} is not supported.')
+
+        super().__init__(
+            model_name=model_name,
+            embed_batch_size=embed_batch_size,
+            callback_manager=callback_manager,
+        )
+
+    @classmethod
+    def class_name(cls) -> str:
+        return 'DashscopeEmbedding'
+
+    def _embed(self,
+               texts: List[str],
+               text_type='document') -> List[List[float]]:
+        """Embed sentences using dashscope."""
+        resp = dashscope.TextEmbedding.call(
+            input=texts,
+            model=self.model_name,
+            text_type=text_type,
+        )
+        if resp.status_code == HTTPStatus.OK:
+            res = resp.output['embeddings']
+        else:
+            raise ValueError(f'call dashscope api failed: {resp}')
+
+        return [list(map(float, e['embedding'])) for e in res]
+
+    def _get_query_embedding(self, query: str) -> List[float]:
+        """Get query embedding."""
+        return self._embed([query], text_type='query')[0]
+
+    async def _aget_query_embedding(self, query: str) -> List[float]:
+        """Get query embedding async."""
+        return self._get_query_embedding(query)
+
+    def _get_text_embedding(self, text: str) -> List[float]:
+        """Get text embedding."""
+        return self._embed([text], text_type='document')[0]
+
+    async def _aget_text_embedding(self, text: str) -> List[float]:
+        """Get text embedding async."""
+        return self._get_text_embedding(text)
+
+    def _get_text_embeddings(self, texts: List[str]) -> List[List[float]]:
+        """Get text embeddings."""
+        return self._embed(texts, text_type='document')
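
Because DashscopeEmbedding subclasses llama-index's BaseEmbedding, the usual public helpers (get_text_embedding, get_query_embedding) sit on top of the private _embed hooks defined above. A short sketch, assuming DASHSCOPE_API_KEY is set in the environment; the import path is inferred from the file location.

from modelscope_agent.rag.emb.dashscope import DashscopeEmbedding

emb = DashscopeEmbedding(model_name='text-embedding-v2')

# Documents and queries are embedded with different text_type hints,
# matching the _get_text_embedding / _get_query_embedding overrides.
doc_vec = emb.get_text_embedding('ModelScope-Agent is an agent framework.')
query_vec = emb.get_query_embedding('What is ModelScope-Agent?')
print(len(doc_vec), len(query_vec))  # embedding dimension of the chosen model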
