import os
import shutil

import google.generativeai as genai
from fastapi import FastAPI, File, UploadFile
from groq import AsyncGroq

# Groq client and system prompt for the commented-out /get_response endpoint below.
# The API key is expected in the GROQ_API_KEY environment variable.
# client = AsyncGroq(
#     api_key=os.environ["GROQ_API_KEY"],
# )
# SYSTEM_PROMPT = """
# You are an assistant, help people!
# """

app = FastAPI()

# Gemini configuration; the API key is expected in the GOOGLE_API_KEY environment variable.
genai.configure(api_key=os.environ["GOOGLE_API_KEY"])
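# Example environment setup before starting the server (illustrative sketch; the
# variable names follow the convention used above and the values are placeholders):
#
#   export GOOGLE_API_KEY="your-gemini-key"
#   export GROQ_API_KEY="your-groq-key"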
@app.post("/upload-image/{prompt}")
async def upload_image(prompt: str, file: UploadFile = File(...)):
    # Persist the uploaded file under ./uploads before handing it to Gemini.
    os.makedirs("uploads", exist_ok=True)
    file_location = f"uploads/{file.filename}"
    with open(file_location, "wb") as buffer:
        shutil.copyfileobj(file.file, buffer)

    # Upload the saved image to Gemini and generate a response to the prompt.
    myfile = genai.upload_file(file_location)
    model = genai.GenerativeModel("gemini-1.5-pro-latest")
    result = model.generate_content([myfile, "\n\n", prompt])
    return result.text
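
# Example client call for /upload-image (illustrative sketch, not part of the app:
# it assumes the server runs locally on port 8000, that "example.jpg" exists, and
# that the `requests` package is available — none of these are defined in this module):
#
#   import requests
#   with open("example.jpg", "rb") as f:
#       resp = requests.post(
#           "http://127.0.0.1:8000/upload-image/Describe this image",
#           files={"file": ("example.jpg", f, "image/jpeg")},
#       )
#   print(resp.text)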
# @app.post("/get_response")
# async def read_root(messages: list[dict]):
#     messages.insert(0, {
#         "role": "system",
#         "content": SYSTEM_PROMPT,
#     })
#     chat_completion = await client.chat.completions.create(
#         messages=messages,
#         model="llama3-70b-8192",
#     )
#     return chat_completion.choices[0].message.content
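
# To serve the app locally (a sketch; assumes this file is saved as main.py and that
# uvicorn is installed — adjust the module name to match the actual file name):
#
#   uvicorn main:app --reload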