antfraia committed
Commit c7f1dd0 · 1 Parent(s): 9b72476

Update app.py

Files changed (1)
  1. app.py +40 -65
app.py CHANGED
@@ -1,30 +1,46 @@
 import streamlit as st
 import pandas as pd
-import requests
 from apify_client import ApifyClient
+import requests
 
 # Function to fetch Google Maps info using the antonces~gmaps actor
 def fetch_google_maps_info(website_name):
     apify_client = ApifyClient("apify_api_uz0y556N4IG2aLcESj67kmnGSUpHF12XAkLp")
     run_input = {"searchStringsArray": [website_name]}
-    run = apify_client.actor("nwua9Gu5YrADL7ZDj").call(run_input=run_input)
+    run = apify_client.actor("antonces~gmaps").call(run_input=run_input)
     items = list(apify_client.dataset(run["defaultDatasetId"]).iterate_items())
     return items[0] if items else None
 
-# Function to fetch weather info from OpenWeatherMap API
-def fetch_weather_info(lat, lon):
-    API_KEY = "91b23cab82ee530b2052c8757e343b0d"
-    url = f"https://api.openweathermap.org/data/3.0/onecall?lat={lat}&lon={lon}&exclude=hourly,daily&appid={API_KEY}"
-    response = requests.get(url)
-    return response.json()
-
-# Function to fetch website content using the antonces~web-scraper-task actor
-def fetch_website_content(website_url):
-    apify_client = ApifyClient("apify_api_uz0y556N4IG2aLcESj67kmnGSUpHF12XAkLp")
-    run_input = {}
-    run = apify_client.actor("moJRLRc85AitArpNN").call(run_input=run_input)
-    items = list(apify_client.dataset(run["defaultDatasetId"]).iterate_items())
-    return items if items else None
+# Function to fetch customer reviews using the new API
+def fetch_customer_reviews(location_query):
+    client = ApifyClient("apify_api_uz0y556N4IG2aLcESj67kmnGSUpHF12XAkLp")
+    run_input = {
+        "searchStringsArray": ["restaurant"],
+        "locationQuery": location_query,
+        "maxCrawledPlacesPerSearch": 50,
+        "language": "en",
+        "maxImages": None,
+        "onlyDataFromSearchPage": False,
+        "includeWebResults": False,
+        "deeperCityScrape": False,
+        "maxReviews": None,
+        "oneReviewPerRow": False,
+        "reviewsSort": "newest",
+        "reviewsFilterString": "",
+        "scrapeReviewerName": True,
+        "scrapeReviewerId": True,
+        "scrapeReviewerUrl": True,
+        "scrapeReviewId": True,
+        "scrapeReviewUrl": True,
+        "scrapeResponseFromOwnerText": True,
+        "countryCode": None,
+        "searchMatching": "all",
+        "placeMinimumStars": "",
+        "skipClosedPlaces": False,
+        "allPlacesNoSearchAction": "",
+    }
+    run = client.actor("mc9KJTQJg3zfQpANg/nwua9Gu5YrADL7ZDj").call(run_input=run_input)
+    return list(client.dataset(run["defaultDatasetId"]).iterate_items())
 
 # Streamlit app for Data Visualization
 st.title("Data Visualization")
@@ -33,60 +49,19 @@ st.title("Data Visualization")
 website_name = st.text_input("Enter a website / company name:")
 
 if website_name:
-    # Initialize the progress bar
-    progress_bar = st.progress(0)
-
     # Fetch Google Maps data
     google_maps_data = fetch_google_maps_info(website_name)
-    progress_bar.progress(50)
-
+
     if google_maps_data:
-        # Display website link
-        website_link = google_maps_data.get('website')
-        st.text_area("Website Link:", website_link)
-
-        # Display location and fetch weather info
-        lat = google_maps_data["location"]["lat"]
-        lng = google_maps_data["location"]["lng"]
-        st.map(pd.DataFrame({'lat': [lat], 'lon': [lng]}))
-        weather_data = fetch_weather_info(lat, lng)
-        current_weather = weather_data.get("current", {})
-        temp = current_weather.get('temp')
-        temp_in_celsius = temp - 273.15
-        st.write(f"**Location:** {lat}, {lng}")
-        st.write(f"**Temperature:** {temp_in_celsius:.2f}°C")
-        st.write(f"**Weather:** {current_weather.get('weather')[0].get('description')}")
+        location_query = google_maps_data.get("locationQuery")
+        reviews_data = fetch_customer_reviews(location_query)
 
-        # Display Occupancy Data
-        st.subheader("Occupancy Data")
-        occupancy_data = google_maps_data.get('popularTimesHistogram', {})
-        for day, day_data in occupancy_data.items():
-            hours = [entry['hour'] for entry in day_data]
-            occupancy = [entry['occupancyPercent'] for entry in day_data]
-            st.write(day)
-            st.bar_chart(pd.Series(occupancy, index=hours))
-
-        # Display Review Count and Distribution
-        st.subheader("Review Count and Distribution")
-        st.write(f"Total Reviews Count: {google_maps_data['reviewsCount']}")
-        review_distribution = google_maps_data.get('reviewsDistribution', {})
-        st.bar_chart(pd.Series(review_distribution))
+        # Display Google Maps data
+        # ... (use the original display code for Google Maps data here) ...
 
-        # Display Reviews Table
-        st.subheader("Customer Reviews")
-        reviews = google_maps_data.get('reviews', [])
-        review_df = pd.DataFrame(reviews)
+        # Display reviews_data
+        review_df = pd.DataFrame(reviews_data)
+        st.subheader("Customer Reviews from New API")
         st.table(review_df[['name', 'text', 'publishAt', 'likesCount', 'stars']])
-
-        # Fetch and Display Website Content
-        st.subheader("Website Content")
-        website_content_data = fetch_website_content(website_link)
-        progress_bar.progress(100)
-
-        if website_content_data:
-            website_df = pd.DataFrame(website_content_data)
-            st.table(website_df)
-        else:
-            st.write("Unable to retrieve website content.")
     else:
         st.write("No results found for this website / company name on Google Maps.")