Skip to content

Commit

Permalink
up (#174)
Browse files — browse the repository at this point in the history
  • Loading branch information
yaojin3616 authored Dec 1, 2023
2 parents 9271a14 + 6c0e01b commit 325c675
Show file tree
Hide file tree
Showing 7 changed files with 40 additions and 17 deletions.
1 change: 1 addition & 0 deletions src/backend/bisheng/api/v1/callback.py
Original file line number Diff line number Diff line change
Expand Up @@ -114,6 +114,7 @@ async def on_text(self, text: str, **kwargs: Any) -> Any:
log = ChatResponse(message=text, intermediate_steps=kwargs['log'],
type=kwargs['type'], category=kwargs['category'])
await self.websocket.send_json(log.dict())
logger.debug(f'on_text text={text} kwargs={kwargs}')

async def on_agent_action(self, action: AgentAction, **kwargs: Any):
log = f'Thought: {action.log}'
Expand Down
2 changes: 2 additions & 0 deletions src/backend/bisheng/api/v1/endpoints.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,8 @@ def getn_env():
env['uns_support'] = uns_support
else:
env['uns_support'] = list(knowledge.filetype_load_map.keys())
if settings.settings.get_from_db('office_url'):
env['office_url'] = settings.settings.get_from_db('office_url')
return {'data': env}


Expand Down
2 changes: 1 addition & 1 deletion src/backend/bisheng/api/v1/knowledge.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,7 +116,7 @@ async def process_knowledge(*,
result = []
for path in file_path:
filepath, file_name = file_download(path)
md5_ = filepath.rsplit('/', 1)[1]
md5_ = filepath.rsplit('/', 1)[1].split('.')[0]
# 是否包含重复文件
repeat = session.exec(select(KnowledgeFile
).where(KnowledgeFile.md5 == md5_, KnowledgeFile.status == 2,
Expand Down
2 changes: 1 addition & 1 deletion src/backend/bisheng/cache/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -229,7 +229,7 @@ def save_download_file(file_byte, folder_name, filename):
def file_download(file_path: str):
"""download file and return path"""
if not os.path.isfile(file_path) and _is_valid_url(file_path):
r = requests.get(file_path)
r = requests.get(file_path, verify=False)

if r.status_code != 200:
raise ValueError(
Expand Down
14 changes: 11 additions & 3 deletions src/backend/bisheng/chat/handlers.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,19 +53,27 @@ async def process_report(self, session: ChatManager,
session.chat_history.add_message(client_id, chat_id, chat_message)

# process message
langchain_object = session.in_memory_cache.get(key)
chat_inputs = {'inputs': chat_inputs, 'is_begin': False}
result = await self.process_message(session, client_id, chat_id, chat_inputs, user_id)
# judge end type
start_resp = ChatResponse(type='start', user_id=user_id)
await session.send_json(client_id, chat_id, start_resp)

if langchain_object.stop_status():
start_resp.category = 'divider'
response = ChatResponse(message='主动退出', type='end',
category='divider', user_id=user_id)
await session.send_json(client_id, chat_id, response)

# build report
db_session = next(get_session())
template = db_session.exec(select(Report).where(
Report.flow_id == client_id).order_by(Report.id.desc())).first()
if not template:
logger.error('template not support')
return
start_resp = ChatResponse(type='start', user_id=user_id)
await session.send_json(client_id, chat_id, start_resp)

langchain_object = session.in_memory_cache.get(key)
template_muban = mino_client.get_share_link(template.object_name)
report_name = langchain_object.report_name
report_name = report_name if report_name.endswith('.docx') else f'{report_name}.docx'
Expand Down
20 changes: 8 additions & 12 deletions src/backend/bisheng/chat/manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -169,8 +169,9 @@ async def handle_websocket(self, client_id: str, chat_id: str, websocket: WebSoc
# autogen continue last session,
action, is_begin = 'autogen', False

start_resp = ChatResponse(type='begin', category='system', user_id=user_id)
step_resp = ChatResponse(type='end', category='system', user_id=user_id)
if is_begin:
start_resp = ChatResponse(type='begin', category='system', user_id=user_id)
await self.send_json(client_id, chat_id, start_resp)
start_resp.type = 'start'

Expand All @@ -183,8 +184,7 @@ async def handle_websocket(self, client_id: str, chat_id: str, websocket: WebSoc
self.set_cache(langchain_obj_key, None) # rebuild object
has_file = any(['InputFile' in nd.get('id') for nd in node_data])
if has_file:
step_resp = ChatResponse(intermediate_steps='File upload complete and begin to parse', # noqa
type='end', category='system', user_id=user_id)
step_resp.intermediate_steps = 'File upload complete and begin to parse'
await self.send_json(client_id, chat_id, start_resp)
await self.send_json(client_id, chat_id, step_resp, add=False)
await self.send_json(client_id, chat_id, start_resp)
Expand All @@ -199,10 +199,9 @@ async def handle_websocket(self, client_id: str, chat_id: str, websocket: WebSoc
user_id, gragh_data)
except Exception as e:
logger.exception(e)
step_resp = ChatResponse(intermediate_steps='Input data is parsed fail',
type='end', category='system', user_id=user_id)
step_resp.intermediate_steps = f'Input data is parsed fail. error={str(e)}'
if has_file:
step_resp.intermediate_steps = 'File is parsed fail'
step_resp.intermediate_steps = f'File is parsed fail. error={str(e)}'
await self.send_json(client_id, chat_id, step_resp)
start_resp.type = 'close'
await self.send_json(client_id, chat_id, start_resp)
Expand All @@ -216,23 +215,20 @@ async def handle_websocket(self, client_id: str, chat_id: str, websocket: WebSoc
langchain_obj = self.in_memory_cache.get(langchain_obj_key)
if isinstance(langchain_obj, Report):
action = 'report'
elif 'data' in payload['inputs']:
elif action != 'autogen' and 'data' in payload['inputs']:
action = 'auto_file' # has input data, default is file process
# default not set, for autogen set before

if has_file:
if not batch_question:
if action == 'auto_file':
# no question
step_resp = ChatResponse(type='end',
intermediate_steps='File parsing complete',
category='system', user_id=user_id)
step_resp.intermediate_steps = 'File parsing complete'
await self.send_json(client_id, chat_id, step_resp)
start_resp.type = 'close'
await self.send_json(client_id, chat_id, start_resp)
continue
step_resp = ChatResponse(intermediate_steps='File parsing complete. Analysis starting', # noqa
type='end', category='system', user_id=user_id)
step_resp.intermediate_steps = 'File parsing complete. Analysis starting'
await self.send_json(client_id, chat_id, step_resp, add=False)
if action == 'auto_file':
payload['inputs']['questions'] = [question for question in batch_question]
Expand Down
16 changes: 16 additions & 0 deletions src/bisheng-langchain/bisheng_langchain/input_output/output.py
Original file line number Diff line number Diff line change
Expand Up @@ -175,8 +175,12 @@ def _call(
) -> Dict[str, str]:
intermedia_steps = []
outputs = {}
self.stop_flag = False
if self.chains:
for i, chain in enumerate(self.chains):
if 'node_id' not in chain:
logger.info(f"report_skip_nonsence_chain chain={chain['object']}")
continue
if not isinstance(chain['object'], Chain):
raise TypeError(
f"{chain['object']} not be runnable Chain object"
Expand Down Expand Up @@ -217,8 +221,14 @@ async def _acall(
intermedia_steps = []
outputs = {}
await run_manager.on_text(text='', log='', type='end', category='processing') # end father start
self.stop_flag = False
if self.chains:
for i, chain in enumerate(self.chains):
if 'node_id' not in chain:
logger.info(f"report_skip_nonsence_chain chain={chain['object']}")
continue
if self.stop_flag:
break
if not isinstance(chain['object'], Chain):
raise TypeError(
f"{chain['object']} not be runnable Chain object"
Expand Down Expand Up @@ -251,3 +261,9 @@ async def _acall(
await run_manager.on_text(text='', log='', type='start', category='processing')
return {self.output_key: outputs, self.input_key: self.report_name,
'intermediate_steps': intermedia_steps}

def stop(self):
self.stop_flag = True

def stop_status(self):
return self.stop_flag

0 comments on commit 325c675

Please sign in to comment.