Spaces:
Runtime error
Update app.py
app.py
CHANGED
@@ -4,64 +4,44 @@ from streamlit_extras.colored_header import colored_header
 from streamlit_extras.add_vertical_space import add_vertical_space
 from hugchat import hugchat
 
-st.set_page_config(page_title="
-
-# image = "0_AI4xFlr8mYASsylX.png" # Replace with the actual path to your image
-# st.image(image, caption="Robotic Llama", use_column_width=1)
+st.set_page_config(page_title="OpenAssistant-Chatbot-FTW-Open-Source")
 
 with st.sidebar:
     st.title('🤗💬 HugChat App')
     st.markdown('''
-    ##
-    This app is an LLM-powered chatbot built using:
+    ## 💡LLM-powered chatbot built using:
     - [Streamlit](<https://streamlit.io/>)
     - [HugChat](<https://github.com/Soulter/hugging-chat-api>)
    - [OpenAssistant/oasst-sft-6-llama-30b-xor](<https://huggingface.co/OpenAssistant/oasst-sft-6-llama-30b-xor>) LLM model
-
-    💡 Note: No API key required!
     ''')
     add_vertical_space(5)
-    st.write('Thanks Meta for LLAMA and hugging face- hugchat')
 
 if 'generated' not in st.session_state:
-    st.session_state['generated'] = ["
+    st.session_state['generated'] = ["Hi. Please enter a prompt or question with context on what you need."]
 if 'past' not in st.session_state:
-    st.session_state['past'] = ['Hi
-
+    st.session_state['past'] = ['Hi.']
 input_container = st.container()
 colored_header(label='', description='', color_name='blue-70')
 response_container = st.container()
 
-
-# User input
-## Function for taking user provided prompt as input
 def get_text():
     input_text = st.text_input("You: ", "", key="input")
     return input_text
-
-
-## Applying the user input box
+
 with input_container:
     user_input = get_text()
-
-
-# Response output
-## Function for taking user prompt as input followed by producing AI generated responses
+
 def generate_response(prompt):
     chatbot = hugchat.ChatBot()
     response = chatbot.chat(prompt)
     return response
-
-
-## Conditional display of AI generated responses as a function of user provided prompts
+
 with response_container:
     if user_input:
         response = generate_response(user_input)
         st.session_state.past.append(user_input)
         st.session_state.generated.append(response)
-
 if st.session_state['generated']:
     for i in range(len(st.session_state['generated'])):
         message(st.session_state['past'][i], is_user=True, key=str(i) + '_user')
-        message(st.session_state['generated'][i], key=str(i))
-
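Note that the hunk starts at line 4, so the first three lines of app.py are not shown. The hunk header confirms that from streamlit_extras.colored_header import colored_header sits immediately above, and the code also uses st and message, which presumably come from streamlit and streamlit_chat. A minimal sketch of the assumed top of the file (everything outside the hunk is an assumption, not part of this commit):

import streamlit as st                                         # provides the st.* calls used throughout
from streamlit_chat import message                             # provides message() used in the render loop
from streamlit_extras.colored_header import colored_header     # confirmed by the hunk header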
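The Space is currently marked "Runtime error". One possible cause, offered only as a hedged guess, is that generate_response() builds a fresh hugchat.ChatBot() with no credentials on every prompt; recent releases of hugging-chat-api expect Hugging Face login cookies, and rebuilding the client per message is also slow. A sketch of one way to address both, assuming the Login helper and the cookies= keyword exposed by recent hugchat versions (exact signatures vary by release) and assuming HF_EMAIL / HF_PASS are configured as Streamlit secrets:

import streamlit as st
from hugchat import hugchat
from hugchat.login import Login    # assumed: shipped with recent hugging-chat-api releases

@st.cache_resource                 # build the client once per process instead of once per prompt
def get_chatbot():
    sign = Login(st.secrets["HF_EMAIL"], st.secrets["HF_PASS"])   # assumed secret names
    cookies = sign.login()                                        # log in to huggingface.co, returns a cookie jar
    return hugchat.ChatBot(cookies=cookies.get_dict())

def generate_response(prompt):
    chatbot = get_chatbot()
    return str(chatbot.chat(prompt))   # str() covers versions where chat() returns a Message object

The rest of the app would stay as committed; only generate_response() and the imports differ in this sketch.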