Pavan Naik committed · Commit 000b4ac
Parent(s): 1dab07a

Initial project template.
Files changed:
- Makefile +21 -0
- requirements.txt +4 -0
- setup.py +10 -0
- src/app.py +73 -0
- tests/test_app.py +0 -0
Makefile
ADDED
@@ -0,0 +1,21 @@
install:
	pip install -e .
	pip install -r requirements.txt

lint:
	black src tests
	flake8 src tests
	PYTHONPATH=. pylint src/ tests/
	mypy src tests

test:
	pytest tests/ -v --cov=src/ --cov-report=term-missing

clean:
	rm -rf build/
	rm -rf dist/
	rm -rf *.egg-info
	find . -type d -name __pycache__ -exec rm -rf {} +
	find . -type f -name "*.pyc" -delete

all: install lint test clean
requirements.txt
ADDED
@@ -0,0 +1,4 @@
gradio
vllm
langchain
langchain_community
setup.py
ADDED
@@ -0,0 +1,10 @@
from setuptools import setup, find_packages

setup(
    name="my-project",
    version="0.1",
    packages=find_packages(),
    install_requires=[
        # Add your production dependencies here
    ],
)
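Note: the Makefile's install target installs requirements.txt separately, which is why install_requires is left empty above. If `pip install -e .` alone should also pull in the runtime dependencies, one option (a sketch, not part of this commit) is to read them straight from requirements.txt so the two files stay in sync:

from pathlib import Path
from setuptools import setup, find_packages

# Sketch: reuse requirements.txt as install_requires, skipping blanks and comments.
requirements = [
    req
    for req in (line.strip() for line in Path("requirements.txt").read_text().splitlines())
    if req and not req.startswith("#")
]

setup(
    name="my-project",
    version="0.1",
    packages=find_packages(),
    install_requires=requirements,
)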
src/app.py
ADDED
@@ -0,0 +1,73 @@
import gradio as gr
from langchain_community.llms import VLLM

# Initialize vLLM
llm = VLLM(
    model="meta-llama/Llama-3.2-1B",
    trust_remote_code=True,
    max_new_tokens=512,
    top_k=10,
    top_p=0.95,
    temperature=0.8,
)

# Recipe prompt template from previous example
RECIPE_PROMPT = """You are a skilled chef and culinary expert. Create a detailed recipe for {dish_name} with the following format:

Recipe: {dish_name}

INGREDIENTS: List each ingredient with exact quantity and estimated price (USD)

NUTRITIONAL INFO:
1. Total calories
2. Protein
3. Carbs
4. Fat
5. Serving size

COOKING INSTRUCTIONS:
1. Step-by-step numbered process
2. Include cooking temperatures and times
3. Note any specific techniques or tips

CULTURAL BACKGROUND:
1. Origin of dish
2. Traditional serving occasions
3. Cultural significance
4. Regional variations

PREPARATION TIME:
1. Prep time
2. Cooking time
3. Total time

DIFFICULTY LEVEL: [Easy/Medium/Hard]

TOOLS NEEDED: List essential kitchen equipment

TIPS:
1. Storage recommendations
2. Substitution options
3. Serving suggestions

"""


def generate_recipe(dish_name):
    prompt = RECIPE_PROMPT.format(dish_name=dish_name)
    response = llm(prompt)
    return response


# Gradio interface
demo = gr.Interface(
    fn=generate_recipe,
    inputs=gr.Textbox(label="Enter dish name"),
    outputs=gr.Textbox(label="Generated Recipe", lines=20),
    title="Recipe Genie",
    description="AI-powered recipe generator with costs, nutrition facts, and cultural insights.",
    examples=["Pad Thai", "Butter Chicken", "Paella"],
)

if __name__ == "__main__":
    demo.launch()
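A note on the generate_recipe helper above: `llm(prompt)` works, but recent LangChain releases (0.1 and later) deprecate calling an LLM object directly in favour of the Runnable `invoke` method. Since requirements.txt pins no versions, a hedged variant that avoids the deprecation warning on newer installs would be:

def generate_recipe(dish_name):
    # Same behaviour as the version committed above; `llm` and RECIPE_PROMPT
    # are the module-level objects defined in src/app.py. `.invoke` is the
    # Runnable API available on LangChain LLMs from 0.1 onwards.
    prompt = RECIPE_PROMPT.format(dish_name=dish_name)
    return llm.invoke(prompt)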
tests/test_app.py
ADDED
Empty file (added with no content).
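Since tests/test_app.py is committed empty, a minimal first test (a sketch, not part of this commit) could patch VLLM before importing the app so no model weights are loaded. It assumes the repository root is on sys.path when pytest runs, e.g. via a root-level conftest.py or PYTHONPATH=., as the Makefile's lint target already does:

# Hypothetical tests/test_app.py starting point.
from unittest.mock import MagicMock, patch


def _load_app_with_stubbed_llm():
    # Replace the VLLM class before src.app is imported, so the module-level
    # `llm = VLLM(...)` call returns a stub instead of loading a real model.
    with patch("langchain_community.llms.VLLM") as vllm_cls:
        vllm_cls.return_value = MagicMock(return_value="stub recipe")
        from src import app
    return app


def test_generate_recipe_uses_prompt_template():
    app = _load_app_with_stubbed_llm()
    assert "{dish_name}" in app.RECIPE_PROMPT
    assert app.generate_recipe("Pad Thai") == "stub recipe"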