@@ -1,10 +1,13 @@
from __future__ import annotations

-import aiohttp
+import typing

import sqlalchemy

from . import entities, requester
from ...core import app
+from ...core import entities as core_entities
+from .. import entities as llm_entities
+from ..tools import entities as tools_entities
from ...discover import engine
from . import token
from ...entity.persistence import model as persistence_model
@@ -58,14 +61,6 @@ def __init__(self, ap: app.Application):
        self.llm_models = []
        self.requester_components = []
        self.requester_dict = {}
-
-    async def get_model_by_name(self, name: str) -> entities.LLMModelInfo:
-        """Get a model by its name.
-        """
-        for model in self.model_list:
-            if model.name == name:
-                return model
-        raise ValueError(f"Unable to determine info for model {name}; please configure it in the metadata")

    async def initialize(self):
        self.requester_components = self.ap.discover.get_components_by_kind('LLMAPIRequester')
@@ -92,9 +87,9 @@ async def load_model_from_db(self):

        # load models
        for llm_model in llm_models:
-            await self.load_model(llm_model)
+            await self.load_llm_model(llm_model)

-    async def load_model(self, model_info: persistence_model.LLMModel | sqlalchemy.Row[persistence_model.LLMModel] | dict):
+    async def load_llm_model(self, model_info: persistence_model.LLMModel | sqlalchemy.Row[persistence_model.LLMModel] | dict):
        """Load a model."""

        if isinstance(model_info, sqlalchemy.Row):
@@ -113,10 +108,24 @@ async def load_model(self, model_info: persistence_model.LLMModel | sqlalchemy.R
                config=model_info.requester_config
            )
        )
-        print(runtime_llm_model, runtime_llm_model.model_entity.name, "loaded")
        self.llm_models.append(runtime_llm_model)

-    async def remove_model(self, model_uuid: str):
+    async def get_model_by_name(self, name: str) -> entities.LLMModelInfo:  # deprecated
+        """Get a model by its name.
+        """
+        for model in self.model_list:
+            if model.name == name:
+                return model
+        raise ValueError(f"Unable to determine info for model {name}; please configure it in the metadata")
+
+    async def get_model_by_uuid(self, uuid: str) -> entities.LLMModelInfo:
+        """Get a model by its uuid."""
+        for model in self.llm_models:
+            if model.model_entity.uuid == uuid:
+                return model
+        raise ValueError(f"model {uuid} not found")
+
+    async def remove_llm_model(self, model_uuid: str):
        """Remove a model."""
        for model in self.llm_models:
            if model.model_entity.uuid == model_uuid:
@@ -136,3 +145,13 @@ def get_available_requester_info_by_name(self, name: str) -> dict | None:
            if component.metadata.name == name:
                return component.to_plain_dict()
        return None
+
+    async def invoke_llm(
+        self,
+        query: core_entities.Query,
+        model_uuid: str,
+        messages: list[llm_entities.Message],
+        funcs: list[tools_entities.LLMFunction] = None,
+    ) -> llm_entities.Message:
+        pass
+
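Note: the new `invoke_llm` entry point is intentionally left as a `pass` stub in this commit. Purely as a hypothetical illustration (not part of this diff), it might later resolve the loaded runtime model via `get_model_by_uuid` and delegate to that model's requester; the `runtime_model.requester` attribute and the `invoke_llm(...)` call signature below are assumptions, since the `LLMAPIRequester` interface is not shown here.

    async def invoke_llm(
        self,
        query: core_entities.Query,
        model_uuid: str,
        messages: list[llm_entities.Message],
        funcs: list[tools_entities.LLMFunction] = None,
    ) -> llm_entities.Message:
        # Resolve the loaded runtime model by uuid (raises ValueError if not loaded),
        # using get_model_by_uuid from this diff.
        runtime_model = await self.get_model_by_uuid(model_uuid)
        # Delegate to the model's requester. Assumed interface: the actual attribute
        # name and method signature depend on requester.RuntimeLLMModel / LLMAPIRequester,
        # which are outside this diff.
        return await runtime_model.requester.invoke_llm(
            query=query,
            model=runtime_model,
            messages=messages,
            funcs=funcs,
        )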