
Commit 32bd3e6

committed
debug chatbot service
1 parent e19e3bb commit 32bd3e6

9 files changed: 30 additions, 30 deletions


muagent/chat/agent_chat.py

Lines changed: 5 additions & 5 deletions
@@ -174,7 +174,7 @@ def chat_iterator(message: Message, local_memory: Memory, isDetailed=False):
     result["related_nodes"] = related_nodes

     # logger.debug(f"{result['figures'].keys()}, isDetailed: {isDetailed}")
-    message_str = step_content
+    message_str = final_content
     if self.stream:
         for token in message_str:
             result["answer"] = token
@@ -238,8 +238,8 @@ def achat(
         custom_phase_configs, custom_chain_configs, custom_role_configs)

     #
-    params = locals()
-    params.pop("self")
+    # params = locals()
+    # params.pop("self")
     # embed_config: EmbedConfig = EmbedConfig(**params)
     # llm_config: LLMConfig = LLMConfig(**params)

@@ -302,7 +302,7 @@ def chat_iterator(message: Message, local_memory: Memory, isDetailed=False):
     step_content = local_memory.to_str_messages(content_key='step_content', filter_roles=["human"])
     step_content = "\n\n".join([f"{v}" for parsed_output in local_memory.get_parserd_output_list() for k, v in parsed_output.items() if k not in ["Action Status", "human", "user"]])
     # logger.debug(f"{local_memory.get_parserd_output_list()}")
-    final_content = message.role_content
+    final_content = step_content or message.role_content
     result = {
         "answer": "",
         "db_docs": [str(doc) for doc in message.db_docs],
@@ -322,7 +322,7 @@ def chat_iterator(message: Message, local_memory: Memory, isDetailed=False):
     result["related_nodes"] = related_nodes

     # logger.debug(f"{result['figures'].keys()}, isDetailed: {isDetailed}")
-    message_str = step_content
+    message_str = final_content
     if self.stream:
         for token in message_str:
             result["answer"] = token
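Taken together, these hunks make the streamed reply prefer the aggregated step content and fall back to the raw `message.role_content` only when no steps were parsed. A minimal sketch of the fallback and the streaming loop (`step_content` and `role_content` are stand-ins for the real `Memory`/`Message` fields):

```python
# Sketch of the fallback-and-stream pattern this commit introduces.
step_content = ""                  # empty when no intermediate steps were parsed
role_content = "final answer"

# `or` returns the right operand when the left is falsy ("" or None),
# mirroring `final_content = step_content or message.role_content`.
final_content = step_content or role_content

result = {"answer": ""}
for token in final_content:        # iterating a str yields one character at a time
    result["answer"] = token       # each streamed chunk carries a single character
    print(result["answer"], end="")
```

Note that iterating a string streams character by character rather than model tokens, so each emitted chunk is one character of the final text.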

muagent/chat/base_chat.py

Lines changed: 6 additions & 4 deletions
@@ -3,7 +3,7 @@
 import asyncio, json, os
 from typing import List, AsyncIterable

-from langchain import LLMChain
+from langchain.chains.llm import LLMChain
 from langchain.callbacks import AsyncIteratorCallbackHandler
 from langchain.prompts.chat import ChatPromptTemplate

@@ -55,8 +55,8 @@ def chat(
         embed_config: EmbedConfig = Body({}, description="embedding_model config"),
         **kargs
         ):
-        # params = locals()
-        # params.pop("self", None)
+        # params = locals()
+        # params.pop("self", None)
         # llm_config: LLMConfig = LLMConfig(**params)
         # embed_config: EmbedConfig = EmbedConfig(**params)
         self.engine_name = engine_name if isinstance(engine_name, str) else engine_name.default
@@ -78,6 +78,7 @@ def _chat(self, query: str, history: List[History], llm_config: LLMConfig, embed
         def chat_iterator(query: str, history: List[History]):
             # model = getChatModel()
             model = getChatModelFromConfig(llm_config)
+            model = model.llm

             result, content = self.create_task(query, history, model, llm_config, embed_config, **kargs)
             logger.info('result={}'.format(result))
@@ -142,6 +143,7 @@ async def chat_iterator(query, history):
             callback = AsyncIteratorCallbackHandler()
             # model = getChatModel()
             model = getChatModelFromConfig(llm_config)
+            model = model.llm

             task, result = self.create_atask(query, history, model, llm_config, embed_config, callback)
             if self.stream:
@@ -166,7 +168,7 @@ def create_task(self, query: str, history: List[History], model, llm_config: LLM
         content = chain({"input": query})
         return {"answer": "", "docs": ""}, content

-    def create_atask(self, query, history, model, llm_config: LLMConfig, embed_config: EmbedConfig, callback: AsyncIteratorCallbackHandler):
+    def create_atask(self, query, history: List[History], model, llm_config: LLMConfig, embed_config: EmbedConfig, callback: AsyncIteratorCallbackHandler):
         chat_prompt = ChatPromptTemplate.from_messages(
             [i.to_msg_tuple() for i in history] + [("human", "{input}")]
         )
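The two `model = model.llm` insertions assume that `getChatModelFromConfig` can now hand back a `CustomLLMModel` wrapper (see the `openai_model.py` hunk below) that exposes the underlying LangChain model as `.llm`. A self-contained sketch of that unwrap, with stand-in classes:

```python
# Stand-ins only: CustomLLMModel mirrors the wrapper assumed by this commit,
# holding the real model on its `.llm` attribute.
class FakeLLM:
    def invoke(self, prompt: str) -> str:
        return f"echo: {prompt}"

class CustomLLMModel:
    def __init__(self, llm):
        self.llm = llm

model = CustomLLMModel(llm=FakeLLM())  # what getChatModelFromConfig may return
model = model.llm                      # the unwrap this commit inserts in both iterators
print(model.invoke("ping"))            # LLMChain then talks to the bare model
```

One caveat worth flagging: a plain `ChatOpenAI` has no `.llm` attribute, so this unconditional unwrap appears to work only when every return branch of `getChatModelFromConfig` yields the wrapper.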

muagent/chat/code_chat.py

Lines changed: 4 additions & 3 deletions
@@ -11,7 +11,7 @@
 from typing import List
 from fastapi.responses import StreamingResponse

-from langchain import LLMChain
+from langchain.chains.llm import LLMChain
 from langchain.callbacks import AsyncIteratorCallbackHandler
 from langchain.prompts.chat import ChatPromptTemplate

@@ -129,8 +129,8 @@ def chat(
         use_nh: bool =Body(True, description=""),
         **kargs
         ):
-        params = locals()
-        params.pop("self")
+        # params = locals()
+        # params.pop("self")
         # llm_config: LLMConfig = LLMConfig(**params)
         # embed_config: EmbedConfig = EmbedConfig(**params)
         self.engine_name = engine_name if isinstance(engine_name, str) else engine_name.default
@@ -151,6 +151,7 @@ def _chat(self, query: str, history: List[History], llm_config: LLMConfig, embed
         def chat_iterator(query: str, history: List[History]):
             # model = getChatModel()
             model = getChatModelFromConfig(llm_config)
+            model = model.llm

             result, content = self.create_task(query, history, model, llm_config, embed_config, local_graph_path, **kargs)
             # logger.info('result={}'.format(result))
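As in `base_chat.py` and `agent_chat.py`, the `params = locals()` capture is commented out rather than reworked. One plausible reason the pattern is brittle, sketched with hypothetical names (`LLMConfig(**params)` is the commented-out usage from the diff):

```python
# Why `params = locals()` is fragile in a handler that also accepts **kargs:
def chat(engine_name: str = "default", top_k: int = 5, **kargs):
    params = locals()          # {'engine_name': 'default', 'top_k': 5, 'kargs': {...}}
    params.pop("self", None)   # no-op here; needed when this is a bound method
    return params

print(chat(extra_flag=True))
# -> {'engine_name': 'default', 'top_k': 5, 'kargs': {'extra_flag': True}}
# A call like LLMConfig(**params) would then receive a literal `kargs` keyword,
# which a strict config class rejects.
```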

muagent/chat/knowledge_chat.py

Lines changed: 2 additions & 2 deletions
@@ -3,7 +3,7 @@
 from urllib.parse import urlencode
 from typing import List

-from langchain import LLMChain
+from langchain.chains.llm import LLMChain
 from langchain.callbacks import AsyncIteratorCallbackHandler
 from langchain.prompts.chat import ChatPromptTemplate

@@ -75,7 +75,7 @@ def _process(self, query: str, history: List[History], model, llm_config: LLMCon
         result = {"answer": "", "docs": source_documents}
         return chain, context, result

-    def create_task(self, query: str, history: List[History], model, llm_config: LLMConfig, embed_config: EmbedConfig, ):
+    def create_task(self, query: str, history: List[History], model, llm_config: LLMConfig, embed_config: EmbedConfig, **kargs):
         '''build the LLM generation task'''
         logger.debug(f"query: {query}, history: {history}")
         chain, context, result = self._process(query, history, model, llm_config, embed_config)
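Adding `**kargs` brings `KnowledgeChat.create_task` in line with siblings such as the code chat, whose `create_task` also receives `local_graph_path`, so one shared call site can invoke every implementation. A toy sketch of the pattern (class names are illustrative, not from the repo):

```python
# Toy sketch: **kargs lets one dispatcher call every create_task the same way.
class KnowledgeTask:
    def create_task(self, query: str, **kargs):            # extras absorbed and ignored
        return f"knowledge: {query}"

class CodeTask:
    def create_task(self, query: str, local_graph_path: str = "", **kargs):
        return f"code: {query} ({local_graph_path})"

for task in (KnowledgeTask(), CodeTask()):
    # the caller can always pass local_graph_path; KnowledgeTask simply drops it
    print(task.create_task("hello", local_graph_path="/tmp/graph"))
```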

muagent/chat/llm_chat.py

Lines changed: 2 additions & 2 deletions
@@ -1,7 +1,7 @@
 import asyncio
 from typing import List

-from langchain import LLMChain
+from langchain.chains.llm import LLMChain
 from langchain.callbacks import AsyncIteratorCallbackHandler
 from langchain.prompts.chat import ChatPromptTemplate

@@ -31,7 +31,7 @@ def create_task(self, query: str, history: List[History], model, llm_config: LLM
         content = chain({"input": query})
         return {"answer": "", "docs": ""}, content

-    def create_atask(self, query, history, model, llm_config: LLMConfig, embed_config: EmbedConfig, callback: AsyncIteratorCallbackHandler):
+    def create_atask(self, query, history: List[History], model, llm_config: LLMConfig, embed_config: EmbedConfig, callback: AsyncIteratorCallbackHandler):
         chat_prompt = ChatPromptTemplate.from_messages(
             [i.to_msg_tuple() for i in history] + [("human", "{input}")]
         )

muagent/chat/search_chat.py

Lines changed: 2 additions & 2 deletions
@@ -1,9 +1,9 @@
 import os, asyncio
 from typing import List, Optional, Dict

-from langchain import LLMChain
+from langchain.chains.llm import LLMChain
 from langchain.callbacks import AsyncIteratorCallbackHandler
-from langchain_community.utilities import BingSearchAPIWrapper, DuckDuckGoSearchAPIWrapper
+from langchain.utilities import BingSearchAPIWrapper, DuckDuckGoSearchAPIWrapper
 from langchain.prompts.chat import ChatPromptTemplate
 from langchain_community.docstore.document import Document
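This hunk moves the search wrappers back from `langchain_community.utilities` to `langchain.utilities`, an import path that only resolves on LangChain versions still shipping those re-exports (note that `Document` stays on the community path). When the installed version is uncertain, a guarded import is a common hedge; a sketch, not code from this repo:

```python
# Guarded import: try the community package first, fall back to the legacy path.
try:
    from langchain_community.utilities import BingSearchAPIWrapper, DuckDuckGoSearchAPIWrapper
except ImportError:
    from langchain.utilities import BingSearchAPIWrapper, DuckDuckGoSearchAPIWrapper
```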

muagent/connector/configs/prompts/qa_template_prompt.py

Lines changed: 6 additions & 8 deletions
@@ -5,20 +5,18 @@
 Based on the information provided, please answer the origin query concisely and professionally.
 Attention: Follow the input format and response output format

-#### Input Format
-
-**Origin Query:** the initial question or objective that the user wanted to achieve
-
-**Context:** the current status and history of the tasks to determine if Origin Query has been achieved.
-
-**DocInfos:**: the relevant doc information or code information, if this is empty, don't refer to this.
-
 #### Response Output Format
 **Action Status:** Set to 'Continued' or 'Stopped'.
 **Answer:** Response to the user's origin query based on Context and DocInfos. If DocInfos is empty, you can ignore it.
 If the answer cannot be derived from the given Context and DocInfos, please say 'The question cannot be answered based on the information provided' and do not add any fabricated elements to the answer.
 """

+# **Origin Query:** the initial question or objective that the user wanted to achieve
+
+# **Context:** the current status and history of the tasks to determine if Origin Query has been achieved.
+
+# **DocInfos:**: the relevant doc information or code information, if this is empty, don't refer to this.
+

 CODE_QA_PROMPT = """#### Agent Profile
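Moving the Input Format block out of the triple-quoted template and into `#` comments keeps the documentation in the file while shortening what the model actually receives. A compressed illustration (constant abbreviated from the diff):

```python
# Only text inside the triple quotes reaches the LLM.
QA_TEMPLATE_PROMPT = """#### Response Output Format
**Action Status:** Set to 'Continued' or 'Stopped'.
"""

# **Origin Query:** ...  <- now a comment: kept for reference,
# **Context:** ...          but no longer part of the rendered prompt
print(QA_TEMPLATE_PROMPT)  # prints only the Response Output Format section
```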

muagent/llm_models/openai_model.py

Lines changed: 2 additions & 3 deletions
@@ -47,7 +47,7 @@ def __init__(self, llm_config: LLMConfig, callBack: AsyncIteratorCallbackHandler
             VISIT_BIZ_LINE = os.environ.get("visit_biz_line")
             # unified encryption key provided by zdatafront
             aes_secret_key = os.environ.get("aes_secret_key")
-
+            # logger.debug(f"{VISIT_DOMAIN}, {VISIT_BIZ}, {VISIT_BIZ_LINE}, {aes_secret_key}")
             zdatafront_client = ZDataFrontClient(visit_domain=VISIT_DOMAIN, visit_biz=VISIT_BIZ, visit_biz_line=VISIT_BIZ_LINE, aes_secret_key=aes_secret_key)
             http_client = SyncProxyHttpClient(zdatafront_client=zdatafront_client, prefer_async=True)
         except Exception as e:
@@ -112,8 +112,7 @@ def __init__(self, llm_config: LLMConfig, callBack: AsyncIteratorCallbackHandler
         )


-def getChatModelFromConfig(llm_config: LLMConfig, callBack: AsyncIteratorCallbackHandler = None, ) -> Union[ChatOpenAI, LLM]:
-
+def getChatModelFromConfig(llm_config: LLMConfig, callBack: AsyncIteratorCallbackHandler = None, ) -> Union[ChatOpenAI, LLM, CustomLLMModel]:
     if llm_config and llm_config.llm and isinstance(llm_config.llm, LLM):
         return CustomLLMModel(llm=llm_config.llm)
     elif llm_config:
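Widening the return annotation to `Union[ChatOpenAI, LLM, CustomLLMModel]` documents the branch visible in this hunk: a user-supplied `LLM` gets wrapped in `CustomLLMModel`, which is why callers in `base_chat.py` and `code_chat.py` now unwrap with `model = model.llm`. A compressed sketch of that dispatch, with stand-in classes (the real `LLMConfig` and fallback branch have more fields than shown):

```python
from typing import Optional, Union

class LLM: ...                      # stand-in for langchain's LLM base class
class ChatOpenAI: ...               # stand-in for the default chat model

class CustomLLMModel:               # assumed thin wrapper, per this diff
    def __init__(self, llm: LLM):
        self.llm = llm

class LLMConfig:                    # only the field this branch touches
    def __init__(self, llm: Optional[LLM] = None):
        self.llm = llm

def getChatModelFromConfig(llm_config: LLMConfig) -> Union[ChatOpenAI, LLM, CustomLLMModel]:
    if llm_config and llm_config.llm and isinstance(llm_config.llm, LLM):
        return CustomLLMModel(llm=llm_config.llm)   # wrap the user-supplied model
    return ChatOpenAI()                             # otherwise build the default

print(type(getChatModelFromConfig(LLMConfig(llm=LLM()))).__name__)  # CustomLLMModel
```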

muagent/service/service_factory.py

Lines changed: 1 addition & 1 deletion
@@ -145,5 +145,5 @@ def get_kb_doc_details(kb_name: str, kb_root_path) -> List[Dict]:
     for i, v in enumerate(result.values()):
         v['No'] = i + 1
         data.append(v)
-
+
     return data