Commit f638bd7
Parent: 9b22eda
updating api
Files changed:
- .gitignore (+1, -0)
- Dockerfile (+16, -0)
- api.py (+21, -0)
- dps_challenge_notebook.ipynb (+0, -0)
- lib/encoder.pkl (+3, -0)
- lib/model.pkl (+3, -0)
- requirements.txt (+4, -0)
- routers/Prediction/prediction_route.py (+55, -0)
- utils/ProcessingClass.py (+57, -0)
.gitignore
ADDED
@@ -0,0 +1 @@
__pycache__
Dockerfile
ADDED
@@ -0,0 +1,16 @@
# Read the doc: https://huggingface.co/docs/hub/spaces-sdks-docker
# you will also find guides on how best to write your Dockerfile

FROM python:3.9

RUN useradd -m -u 1000 user
USER user
ENV PATH="/home/user/.local/bin:$PATH"

WORKDIR /app

COPY --chown=user ./requirements.txt requirements.txt
RUN pip install --no-cache-dir --upgrade -r requirements.txt

COPY --chown=user . /app
CMD ["uvicorn", "api:app", "--host", "0.0.0.0", "--port", "7860"]
api.py
ADDED
@@ -0,0 +1,21 @@
import pandas as pd
from fastapi import FastAPI, Response
from fastapi.middleware.cors import CORSMiddleware
from routers.Prediction.prediction_route import router as predict_router


app = FastAPI()

app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["GET", "POST", "PUT", "DELETE"],
    allow_headers=["*"],
)

app.include_router(predict_router)

@app.get('/')
def _default_router():
    return Response('Server is running!')
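For local testing outside the Space, a minimal sketch of serving this app through uvicorn's Python API. The module path api:app and port 7860 are taken from the Dockerfile's CMD; running it this way is an assumption for local use, not part of the commit.

# run_local.py -- hypothetical helper, not part of this commit
import uvicorn

if __name__ == "__main__":
    # Serve the FastAPI app defined in api.py on the same port the Dockerfile exposes
    uvicorn.run("api:app", host="0.0.0.0", port=7860)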
dps_challenge_notebook.ipynb
ADDED
The diff for this file is too large to render.
See raw diff
lib/encoder.pkl
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4aea61c11ea7f04fa415b58ab1de7afde98b47de958665b19f623fe9e4685f26
size 1141
lib/model.pkl
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:52f8f71d5b5eafa2bdf5921622bb442b5ce3a8708427a151eca3db3487671f45
size 729566
requirements.txt
ADDED
@@ -0,0 +1,4 @@
xgboost
fastapi
uvicorn
pandas
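One possible gap: the encoder loaded in prediction_route.py is used through transform and get_feature_names_out, which matches the scikit-learn preprocessing API; if the pickled object is indeed a scikit-learn encoder, scikit-learn would also need to appear in this file for unpickling to work. That is an assumption about the artifact, not something the diff confirms.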
routers/Prediction/prediction_route.py
ADDED
@@ -0,0 +1,55 @@
from fastapi import APIRouter
from pydantic import BaseModel
from utils.ProcessingClass import PreProcessingClass
import pickle

class RequestType(BaseModel):
    MONATSZAHL: str
    AUSPRAEGUNG: str
    JAHR: int
    MONAT: str

router = APIRouter(prefix='/predict')

global encoder, xgb_model

@router.on_event('startup')
def _loadPickleFiles():

    # Use forward slashes so the paths resolve inside the Linux container
    with open("lib/encoder.pkl", 'rb') as file:
        global encoder
        encoder = pickle.load(file)

    with open("lib/model.pkl", 'rb') as file:
        global xgb_model
        xgb_model = pickle.load(file)

    print("Pickle Files Loaded. Ready for Inference!")

def _do_inference(df):

    global xgb_model

    return xgb_model.predict(df)


@router.post("/")
def predict(data: RequestType):

    global encoder

    # Wrap the request fields in the preprocessing helper, reusing the fitted encoder
    pc = PreProcessingClass(
        MONATSZAHL = data.MONATSZAHL,
        AUSPRAEGUNG = data.AUSPRAEGUNG,
        JAHR = data.JAHR,
        MONAT = data.MONAT,
        encoder = encoder
    )

    date_processed_df = pc._convert_date()

    final_df = pc._one_hot(date_processed_df)

    results = _do_inference(final_df)

    return {"Final Predictions": results.tolist()[0]}
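A minimal sketch of calling this endpoint once the server is up. The host URL and the field values are hypothetical placeholders chosen only to match the RequestType schema; they are not taken from the commit.

# call_predict.py -- hypothetical client, not part of this commit
import requests

payload = {
    "MONATSZAHL": "Alkoholunfälle",   # placeholder category
    "AUSPRAEGUNG": "insgesamt",       # placeholder type
    "JAHR": 2021,                     # year as an int, per RequestType
    "MONAT": "202101",                # 'YYYYMM' string, as _convert_date expects
}

# The router is mounted with prefix='/predict'; the POST handler lives at '/'
response = requests.post("http://localhost:7860/predict/", json=payload)
print(response.json())   # e.g. {"Final Predictions": ...}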
utils/ProcessingClass.py
ADDED
@@ -0,0 +1,57 @@
import pandas as pd

class PreProcessingClass:
    def __init__(self, MONATSZAHL, AUSPRAEGUNG, JAHR, MONAT, encoder):

        # Single-row frame holding the raw request fields
        self.parent_df = pd.DataFrame({
            'MONATSZAHL': [MONATSZAHL],
            'AUSPRAEGUNG': [AUSPRAEGUNG],
            'JAHR': [JAHR],
            'MONAT': [MONAT]
        })

        self.encoder = encoder

    def _convert_date(self, column_name='MONAT', special_value='Summe'):

        # Maps the month part of a 'YYYYMM' value to its month name
        day_mapping = {
            '01': 'January',
            '02': 'February',
            '03': 'March',
            '04': 'April',
            '05': 'May',
            '06': 'June',
            '07': 'July',
            '08': 'August',
            '09': 'September',
            '10': 'October',
            '11': 'November',
            '12': 'December'
        }

        data_copy = self.parent_df.copy()
        data_copy[column_name] = data_copy[column_name].apply(lambda x: day_mapping[x[4:]] if x != special_value else x)

        return data_copy

    def _one_hot(self, data):

        columns_to_encode = ['MONATSZAHL', 'AUSPRAEGUNG', 'JAHR', 'MONAT']

        data_copy = data.copy()

        # Encode the categorical columns with the fitted encoder and rebuild a DataFrame
        encoded_columns = self.encoder.transform(data[columns_to_encode])

        encoded_column_names = self.encoder.get_feature_names_out(columns_to_encode)

        encoded_df = pd.DataFrame(encoded_columns, columns=encoded_column_names, index=data.index)

        final_df = pd.concat([data.drop(columns=columns_to_encode), encoded_df], axis=1)

        return final_df
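To see what the model actually receives, a small sketch that runs the two preprocessing steps by hand, assuming the pickled encoder exposes scikit-learn's transform/get_feature_names_out interface (as the class above expects); the field values are placeholders.

# preprocess_demo.py -- hypothetical walkthrough, not part of this commit
import pickle
from utils.ProcessingClass import PreProcessingClass

with open("lib/encoder.pkl", "rb") as f:
    encoder = pickle.load(f)

pc = PreProcessingClass(
    MONATSZAHL="Alkoholunfälle",  # placeholder values
    AUSPRAEGUNG="insgesamt",
    JAHR=2021,
    MONAT="202101",
    encoder=encoder,
)

# Step 1: '202101' -> 'January' in the MONAT column
dated = pc._convert_date()

# Step 2: one-hot encode the categorical columns with the fitted encoder
features = pc._one_hot(dated)

print(features.columns.tolist())  # the feature layout handed to the XGBoost model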