halimbahae committed
Commit 47a7cf1 · verified · 1 Parent(s): ab2cbfd

Create app.py

Files changed (1)
  1. app.py +106 -0
app.py ADDED
@@ -0,0 +1,106 @@
import streamlit as st
from transformers import pipeline
from scrapegraphai.graphs import SmartScraperGraph
import torch

# Page config
st.set_page_config(
    page_title="Zephyr Chat & Scrape",
    page_icon="🤖",
    layout="wide"
)

# Initialize session state
if "messages" not in st.session_state:
    st.session_state.messages = []

if "scrape_results" not in st.session_state:
    st.session_state.scrape_results = None

# Load Zephyr model
@st.cache_resource
def load_model():
    return pipeline(
        "text-generation",
        model="HuggingFaceH4/zephyr-7b-beta",
        torch_dtype=torch.float16,
        device_map="auto",
    )

# Initialize the model
model = load_model()

# Sidebar for web scraping
with st.sidebar:
    st.title("Web Scraping")
    url = st.text_input("Enter URL to scrape")
    scrape_prompt = st.text_input("What information do you want to extract?")

    if st.button("Scrape"):
        try:
            # Configure scraper
            graph_config = {
                "llm": {
                    "model": "HuggingFaceH4/zephyr-7b-beta",
                    "temperature": 0.7,
                },
                "verbose": True
            }

            # Create scraper instance
            scraper = SmartScraperGraph(
                prompt=scrape_prompt,
                source=url,
                config=graph_config
            )

            # Run scraping
            st.session_state.scrape_results = scraper.run()
            st.success("Scraping completed!")

        except Exception as e:
            st.error(f"Error during scraping: {str(e)}")

# Main chat interface
st.title("Zephyr Chatbot 🤖")

# Display scraped results if available
if st.session_state.scrape_results:
    st.subheader("Scraped Information")
    st.json(st.session_state.scrape_results)

# Display chat messages
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

# Chat input
if prompt := st.chat_input("What's on your mind?"):
    # Add user message to chat history
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    # Generate response
    with st.chat_message("assistant"):
        with st.spinner("Thinking..."):
            # Include scraped content in context if available
            context = ""
            if st.session_state.scrape_results:
                context = f"Scraped information: {str(st.session_state.scrape_results)}\n"

            full_prompt = f"{context}User: {prompt}\nAssistant:"

            response = model(
                full_prompt,
                max_length=1000,
                temperature=0.7,
                top_p=0.95,
                repetition_penalty=1.15
            )[0]["generated_text"]

            # Clean up response to get only the assistant's reply
            response = response.split("Assistant:")[-1].strip()

            st.markdown(response)
            st.session_state.messages.append({"role": "assistant", "content": response})
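Note on prompt construction: zephyr-7b-beta is an instruction-tuned chat model, so the plain "User: ... Assistant:" string built above may work less well than the chat template that ships with the tokenizer. Below is a minimal sketch of that alternative, not part of the committed file; it assumes the same pipeline loaded in app.py, and the helper name build_zephyr_prompt is illustrative only.

from transformers import AutoTokenizer

# The tokenizer bundled with the checkpoint carries the model's chat template.
tokenizer = AutoTokenizer.from_pretrained("HuggingFaceH4/zephyr-7b-beta")

def build_zephyr_prompt(user_message, scraped=None):
    # Optionally fold the scraped results into a system message (assumption:
    # this mirrors how app.py prepends the scraped context as plain text).
    messages = []
    if scraped:
        messages.append({"role": "system", "content": f"Scraped information: {scraped}"})
    messages.append({"role": "user", "content": user_message})
    # add_generation_prompt=True appends the assistant-turn marker the model expects.
    return tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)

# Example call against the pipeline already loaded in app.py:
# response = model(
#     build_zephyr_prompt(prompt, st.session_state.scrape_results),
#     max_new_tokens=512, do_sample=True, temperature=0.7, top_p=0.95,
#     return_full_text=False,
# )[0]["generated_text"]

Using max_new_tokens instead of max_length keeps long scraped contexts from eating into the generation budget, and return_full_text=False returns only the newly generated text, which removes the need for the split on "Assistant:".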