Commit · 2589dc0 · 1 Parent(s): 5f6ac1e
Update main.py
main.py (CHANGED)
@@ -3,6 +3,7 @@ from fastapi.middleware.cors import CORSMiddleware  # Import the CORS middleware
 from pydantic import BaseModel
 from huggingface_hub import InferenceClient
 from datetime import datetime
+from gradio_client import Client
 
 app = FastAPI()
 client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
@@ -49,7 +50,7 @@ def read_root(request: Request, input_data: InputData):
     generated_response = generate(input_text, history, temperature, max_new_tokens, top_p, repetition_penalty)
     return {"response": generated_response}
 
-@app.
+@app.post("/Immagine")
 def generate_image():
     client = Client("https://openskyml-fast-sdxl-stable-diffusion-xl.hf.space/--replicas/545b5tw7n/")
     result = client.predict(
@@ -62,7 +63,7 @@ def generate_image():
         453666937,
         fn_index=0
     )
-    return result
+    return {"response": result}
 
 @app.get("/")
 def read_general():