Hi @IntelligenzaArtificiale
Great work! Exactly what I was looking for.
I have used the Streamlit link in the README, as well as using the repo locally on my PC, but I get the following error:
`ChatError: Failed to parse response: {"type":"status","status":"started"}
Traceback (most recent call last):
File "C:\Users\myPC\PycharmProjects\AllForOneChatbot\venv\Lib\site-packages\hugchat\hugchat.py", line 263, in chat
obj = json.loads(res[1:-1])
^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\myPC\AppData\Local\Programs\Python\Python311\Lib\json\__init__.py", line 346, in loads
return _default_decoder.decode(s)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\myPC\AppData\Local\Programs\Python\Python311\Lib\json\decoder.py", line 340, in decode
raise JSONDecodeError("Extra data", s, end)
json.decoder.JSONDecodeError: Extra data: line 1 column 7 (char 6)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\Users\myPC\PycharmProjects\AllForOneChatbot\venv\Lib\site-packages\streamlit\runtime\scriptrunner\script_runner.py", line 552, in _run_script
exec(code, module.__dict__)
File "C:\Users\myPC\PycharmProjects\AllForOneChatbot\streamlit_app.py", line 970, in <module>
response = generate_response(input_text)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\myPC\PycharmProjects\AllForOneChatbot\streamlit_app.py", line 957, in generate_response
response = st.session_state['chatbot'].chat(final_prompt, temperature=temperature, top_p=top_p,
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\myPC\PycharmProjects\AllForOneChatbot\venv\Lib\site-packages\hugchat\hugchat.py", line 267, in chat
raise ChatError(f"Failed to parse response: {res}")
hugchat.exceptions.ChatError: Failed to parse response: {"type":"status","status":"started"}
2023-09-28 10:07:54.567 Uncaught app exception
Traceback (most recent call last):
File "C:\Users\myPC\PycharmProjects\AllForOneChatbot\venv\Lib\site-packages\hugchat\hugchat.py", line 263, in chat
obj = json.loads(res[1:-1])
^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\myPC\AppData\Local\Programs\Python\Python311\Lib\json\__init__.py", line 346, in loads
return _default_decoder.decode(s)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\myPC\AppData\Local\Programs\Python\Python311\Lib\json\decoder.py", line 340, in decode
raise JSONDecodeError("Extra data", s, end)
json.decoder.JSONDecodeError: Extra data: line 1 column 7 (char 6)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\Users\myPC\PycharmProjects\AllForOneChatbot\venv\Lib\site-packages\streamlit\runtime\scriptrunner\script_runner.py", line 552, in _run_script
exec(code, module.__dict__)
File "C:\Users\myPC\PycharmProjects\AllForOneChatbot\streamlit_app.py", line 970, in <module>
response = generate_response(input_text)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\myPC\PycharmProjects\AllForOneChatbot\streamlit_app.py", line 909, in generate_response
result = st.session_state['yt']({"query": prompt})
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\myPC\PycharmProjects\AllForOneChatbot\venv\Lib\site-packages\langchain\chains\base.py", line 166, in __call__
raise e
File "C:\Users\myPC\PycharmProjects\AllForOneChatbot\venv\Lib\site-packages\langchain\chains\base.py", line 160, in __call__
self._call(inputs, run_manager=run_manager)
File "C:\Users\myPC\PycharmProjects\AllForOneChatbot\venv\Lib\site-packages\langchain\chains\retrieval_qa\base.py", line 120, in _call
answer = self.combine_documents_chain.run(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\myPC\PycharmProjects\AllForOneChatbot\venv\Lib\site-packages\langchain\chains\base.py", line 293, in run
return self(kwargs, callbacks=callbacks, tags=tags)[_output_key]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\myPC\PycharmProjects\AllForOneChatbot\venv\Lib\site-packages\langchain\chains\base.py", line 166, in __call__
raise e
File "C:\Users\myPC\PycharmProjects\AllForOneChatbot\venv\Lib\site-packages\langchain\chains\base.py", line 160, in __call__
self._call(inputs, run_manager=run_manager)
File "C:\Users\myPC\PycharmProjects\AllForOneChatbot\venv\Lib\site-packages\langchain\chains\combine_documents\base.py", line 84, in _call
output, extra_return_dict = self.combine_docs(
^^^^^^^^^^^^^^^^^^
File "C:\Users\myPC\PycharmProjects\AllForOneChatbot\venv\Lib\site-packages\langchain\chains\combine_documents\stuff.py", line 87, in combine_docs
return self.llm_chain.predict(callbacks=callbacks, **inputs), {}
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\myPC\PycharmProjects\AllForOneChatbot\venv\Lib\site-packages\langchain\chains\llm.py", line 252, in predict
return self(kwargs, callbacks=callbacks)[self.output_key]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\myPC\PycharmProjects\AllForOneChatbot\venv\Lib\site-packages\langchain\chains\base.py", line 166, in __call__
raise e
File "C:\Users\myPC\PycharmProjects\AllForOneChatbot\venv\Lib\site-packages\langchain\chains\base.py", line 160, in __call__
self._call(inputs, run_manager=run_manager)
File "C:\Users\myPC\PycharmProjects\AllForOneChatbot\venv\Lib\site-packages\langchain\chains\llm.py", line 92, in _call
response = self.generate([inputs], run_manager=run_manager)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\myPC\PycharmProjects\AllForOneChatbot\venv\Lib\site-packages\langchain\chains\llm.py", line 102, in generate
return self.llm.generate_prompt(
^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\myPC\PycharmProjects\AllForOneChatbot\venv\Lib\site-packages\langchain\llms\base.py", line 141, in generate_prompt
return self.generate(prompt_strings, stop=stop, callbacks=callbacks, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\myPC\PycharmProjects\AllForOneChatbot\venv\Lib\site-packages\langchain\llms\base.py", line 227, in generate
output = self._generate_helper(
^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\myPC\PycharmProjects\AllForOneChatbot\venv\Lib\site-packages\langchain\llms\base.py", line 178, in _generate_helper
raise e
File "C:\Users\myPC\PycharmProjects\AllForOneChatbot\venv\Lib\site-packages\langchain\llms\base.py", line 165, in _generate_helper
self._generate(
File "C:\Users\myPC\PycharmProjects\AllForOneChatbot\venv\Lib\site-packages\langchain\llms\base.py", line 527, in _generate
else self._call(prompt, stop=stop, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\myPC\PycharmProjects\AllForOneChatbot\HuggingChatAPI.py", line 46, in _call
data = self.chatbot.chat(prompt, temperature=0.4, stream=False) # type: ignore
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\myPC\PycharmProjects\AllForOneChatbot\venv\Lib\site-packages\hugchat\hugchat.py", line 267, in chat
raise ChatError(f"Failed to parse response: {res}")
hugchat.exceptions.ChatError: Failed to parse response: {"type":"status","status":"started"}`
I have tried searching for the error, but there are no solutions online. I have a feeling it has to do with the Hugging Face API key, although I could be wrong.
Please can you advise me on what could be the issue?