eagle0504 committed on
Commit
fbf30c0
·
verified ·
1 Parent(s): ac6afd6

Update src/streamlit_app.py

Browse files
Files changed (1) hide show
  1. src/streamlit_app.py +83 -38
src/streamlit_app.py CHANGED
@@ -1,40 +1,85 @@
1
- import altair as alt
2
- import numpy as np
3
- import pandas as pd
4
  import streamlit as st
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5
 
6
- """
7
- # Welcome to Streamlit!
8
-
9
- Edit `/streamlit_app.py` to customize this app to your heart's desire :heart:.
10
- If you have any questions, checkout our [documentation](https://docs.streamlit.io) and [community
11
- forums](https://discuss.streamlit.io).
12
-
13
- In the meantime, below is an example of what you can do with just a few lines of code:
14
- """
15
-
16
- num_points = st.slider("Number of points in spiral", 1, 10000, 1100)
17
- num_turns = st.slider("Number of turns in spiral", 1, 300, 31)
18
-
19
- indices = np.linspace(0, 1, num_points)
20
- theta = 2 * np.pi * num_turns * indices
21
- radius = indices
22
-
23
- x = radius * np.cos(theta)
24
- y = radius * np.sin(theta)
25
-
26
- df = pd.DataFrame({
27
- "x": x,
28
- "y": y,
29
- "idx": indices,
30
- "rand": np.random.randn(num_points),
31
- })
32
-
33
- st.altair_chart(alt.Chart(df, height=700, width=700)
34
- .mark_point(filled=True)
35
- .encode(
36
- x=alt.X("x", axis=None),
37
- y=alt.Y("y", axis=None),
38
- color=alt.Color("idx", legend=None, scale=alt.Scale()),
39
- size=alt.Size("rand", legend=None, scale=alt.Scale(range=[1, 150])),
40
- ))
 
1
+ """Streamlit app for chatting with Hugging Face model using streaming inference."""
2
+
3
+ import os
4
  import streamlit as st
5
+ from huggingface_hub import InferenceClient
6
+
7
+ # Get token from environment variable set via Hugging Face Secrets
8
+ HF_TOKEN = os.environ["HF_TOKEN"] # Raises KeyError if not set
9
+
10
+ # Initialize Hugging Face Inference Client
11
+ client = InferenceClient(
12
+ provider="novita",
13
+ api_key=HF_TOKEN,
14
+ )
15
+
16
+
17
def init_session():
    """Ensure the chat-history keys exist in Streamlit session state."""
    defaults = {
        "messages": [{"role": "system", "content": "You are a helpful assistant."}],
        "user_input": "",
    }
    # Only seed keys that are absent, so reruns keep the accumulated history.
    for key, value in defaults.items():
        if key not in st.session_state:
            st.session_state[key] = value
25
+
26
+
27
def display_chat():
    """Render every stored user/assistant message; system messages are skipped."""
    for msg in st.session_state.messages:
        role = msg["role"]
        if role in ("user", "assistant"):
            with st.chat_message(role):
                st.markdown(msg["content"])
36
+
37
+
38
def stream_response(model_name: str, messages: list[dict]) -> str:
    """
    Stream the model's reply token-by-token into the chat UI.

    Args:
        model_name (str): Hugging Face model identifier to query.
        messages (list): Conversation so far, as role/content dicts.

    Returns:
        str: The fully accumulated assistant response.
    """
    full_reply = ""
    with st.chat_message("assistant"):
        placeholder = st.empty()
        stream = client.chat.completions.create(
            model=model_name,
            messages=messages,
            stream=True,
        )
        for chunk in stream:
            delta = chunk.choices[0].delta.content
            # Skip empty/None deltas; re-render the growing reply otherwise.
            if delta:
                full_reply += delta
                placeholder.markdown(full_reply)
    return full_reply
61
+
62
+
63
def main():
    """Entry point: configure the page, render history, and handle new input."""
    st.set_page_config(page_title="ERNIE Chat", page_icon="🧠", layout="centered")
    st.title("💬 ERNIE Chat - Hugging Face")

    init_session()
    display_chat()

    user_prompt = st.chat_input("Say something...")
    if user_prompt:
        # Append the user message to session state for future reruns.
        st.session_state.messages.append({"role": "user", "content": user_prompt})

        # Render it immediately: display_chat() already ran above, so without
        # this the prompt would not appear until the next Streamlit rerun.
        with st.chat_message("user"):
            st.markdown(user_prompt)

        # Stream and collect the assistant response.
        model_name = "baidu/ERNIE-4.5-0.3B-PT"
        assistant_reply = stream_response(model_name, st.session_state.messages)

        # Persist the assistant message so it survives the next rerun.
        st.session_state.messages.append(
            {"role": "assistant", "content": assistant_reply}
        )
83
 
84
# Script entry point (run via `streamlit run src/streamlit_app.py`).
if __name__ == "__main__":
    main()