Upload 3 files
- Dockerfile +16 -0
- app.py +38 -0
- requirements.txt +4 -0
Dockerfile
ADDED
@@ -0,0 +1,16 @@
+# read the doc: https://huggingface.co/docs/hub/spaces-sdks-docker
+# you will also find guides on how best to write your Dockerfile
+
+FROM python:3.9
+
+RUN useradd -m -u 1000 user
+USER user
+ENV PATH="/home/user/.local/bin:$PATH"
+
+WORKDIR /app
+
+COPY --chown=user ./requirements.txt requirements.txt
+RUN pip install --no-cache-dir --upgrade -r requirements.txt
+
+COPY --chown=user . /app
+CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
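A quick smoke test once the image is running locally. This is a minimal sketch, assuming the container was built and started on this machine (for example with docker build -t scraper . and docker run -p 7860:7860 scraper; the tag "scraper" is an arbitrary placeholder, not part of this repo):

# Smoke test against the containerized server on port 7860,
# the port exposed by the CMD line above.
import requests

resp = requests.get("http://localhost:7860/")
resp.raise_for_status()
print(resp.json())  # expected: {"message": "Welcome to the FastAPI server!"}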
app.py
ADDED
@@ -0,0 +1,38 @@
+from fastapi import FastAPI, HTTPException
+import requests
+from bs4 import BeautifulSoup
+
+app = FastAPI()
+
+def scrape_web_page(url):
+    try:
+        # Send a GET request to the URL
+        response = requests.get(url)
+        response.raise_for_status()  # Raise an exception for bad response status
+
+        # Parse the HTML content using BeautifulSoup
+        soup = BeautifulSoup(response.content, 'html.parser')
+
+        # Find and extract relevant content from the HTML
+        text_content = soup.get_text(separator='\n').strip()  # Get all text content, strip whitespace
+
+        return text_content
+    except requests.exceptions.RequestException as e:
+        print(f"Error fetching URL: {e}")
+        return None
+
+@app.get("/")
+def read_root():
+    return {"message": "Welcome to the FastAPI server!"}
+
+@app.get("/scrape/")
+def scrape(url: str):
+    scraped_content = scrape_web_page(url)
+    if scraped_content:
+        return {"url": url, "scraped_content": scraped_content}
+    else:
+        raise HTTPException(status_code=500, detail="Failed to scrape content")
+
+if __name__ == "__main__":
+    import uvicorn
+    uvicorn.run(app, host="0.0.0.0", port=8000)
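An example client call against the /scrape/ endpoint. A minimal sketch, assuming the app is run directly so it listens on port 8000 as in the __main__ block above; the target URL is purely illustrative:

import requests

resp = requests.get(
    "http://localhost:8000/scrape/",
    params={"url": "https://example.com"},  # query parameter matches scrape(url: str)
)
resp.raise_for_status()
data = resp.json()
print(data["url"])
print(data["scraped_content"][:200])  # first 200 characters of the extracted page text

Note that inside the Space the server is started by the Dockerfile CMD on port 7860 instead, so the same call against the deployed container would use that port.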
requirements.txt
ADDED
@@ -0,0 +1,4 @@
+fastapi
+uvicorn
+requests
+beautifulsoup4
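All four dependencies are left unpinned, so each rebuild of the Space pulls the latest releases. For reproducible builds one might pin versions instead; the exact numbers below are illustrative assumptions, not part of this commit:

fastapi==0.110.0
uvicorn==0.29.0
requests==2.31.0
beautifulsoup4==4.12.3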