Update app.py
app.py CHANGED
@@ -7,7 +7,11 @@ from langchain_core.vectorstores import InMemoryVectorStore
 import os
 from langchain_core.chat_history import InMemoryChatMessageHistory, BaseChatMessageHistory
 import time
+
+from langgraph.errors import GraphRecursionError
+
 from graph import get_graph
+from langchain_core.runnables import RunnableConfig
 
 if 'read_file' not in st.session_state:
     st.session_state.read_file = False
@@ -77,7 +81,8 @@ if st.session_state.read_file:
             'next_node': None,
             'history': get_session_by_id('chat42').messages,
         },
-        stream_mode='messages'
+        stream_mode='messages',
+        config=RunnableConfig(recursion_limit=4)
     )
 
     get_session_by_id('chat42').add_message(HumanMessage(content=prompt))
@@ -93,20 +98,29 @@ if st.session_state.read_file:
     placeholders = {}
     prompt_message_placeholder = st.empty()
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    try:
+        for msg in get_message():
+            full_response += msg
+            if '<tool>' in full_response:
+                with tool_placeholder.status('Reading document...', expanded=True):
+                    if 'tool_message_placeholder' not in placeholders:
+                        placeholders['tool_message_placeholder'] = st.empty()
+                    placeholders['tool_message_placeholder'].write(full_response
+                        .replace('<tool>', '')
+                        .replace('</tool>', '')
+                        .replace('retriever', 'Retrieving document'))
+                    prompt_message_placeholder.empty()
+                if '</tool>' in full_response:
+                    full_response = ''
+                    continue
+            else:
+                prompt_message_placeholder.write(full_response)
+    except GraphRecursionError:
+        message = 'Não consegui responder a sua pergunta. 😥 Poderia me perguntar outra coisa?'
+        full_response = ''
+        for letter in message:
+            full_response += letter
+            time.sleep(0.015)
             prompt_message_placeholder.write(full_response)
+
     get_session_by_id('chat42').add_message(AIMessage(content=full_response))
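What the commit does: the call into the LangGraph graph now passes a RunnableConfig with recursion_limit=4, and the token-streaming loop is wrapped in try/except so that a GraphRecursionError becomes a friendly fallback answer instead of a stack trace. A minimal sketch of that pattern outside Streamlit, assuming a compiled LangGraph graph and an inputs dict (both hypothetical names, not taken from this repo):

    from langchain_core.runnables import RunnableConfig
    from langgraph.errors import GraphRecursionError

    def answer(graph, inputs):
        # stream_mode='messages' yields (message_chunk, metadata) pairs as tokens arrive
        full_response = ''
        try:
            for chunk, _metadata in graph.stream(
                inputs,
                stream_mode='messages',
                config=RunnableConfig(recursion_limit=4),  # give up after 4 graph super-steps
            ):
                full_response += chunk.content
        except GraphRecursionError:
            # raised by LangGraph once the graph exceeds recursion_limit
            full_response = 'Sorry, I could not answer that. Could you ask me something else?'
        return full_response

The limit counts graph super-steps (LangGraph's default is 25), so lowering it to 4 makes the agent fail fast rather than bounce between the retriever and the model.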
|