Commit 4f2dcca · Pavan Naik
Parent: d12d397

update model to instruct
requirements.txt CHANGED
@@ -1,5 +1,6 @@
 huggingface-hub
 transformers
+gradio
 
 
 # Linting and formatting
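For context, a quick way to confirm the updated dependency set is usable in a fresh environment is a throwaway import check (a sketch, not part of the commit; it assumes pip install -r requirements.txt has already been run):

# Import smoke test for the runtime dependencies listed above.
import gradio
import huggingface_hub
import transformers

print("gradio", gradio.__version__)
print("huggingface_hub", huggingface_hub.__version__)
print("transformers", transformers.__version__)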
src/__pycache__/__init__.cpython-312.pyc DELETED (binary file, 137 Bytes)

src/__pycache__/app.cpython-312.pyc DELETED (binary file, 1.59 kB)
src/app.py CHANGED
@@ -3,33 +3,34 @@ from transformers import pipeline
 import os
 from huggingface_hub import login
 
-login(token=os.getenv(
+login(token=os.getenv("HF_TOKEN"))
 
 
 def setup_pipeline():
-    (previous setup_pipeline body not shown)
+    return pipeline(
+        "text-generation",
+        model="meta-llama/Llama-3.2-1B-instruct",  # Smaller model suitable for CPU
+        device=-1,  # Force CPU
+    )
+
 
 def generate_recipe(dish_name):
     if not dish_name:
         return "Please enter a dish name"
 
     try:
 
         prompt = f"""Create a recipe for {dish_name} including:
         - Ingredients with quantities
         - Steps to cook
         - Cultural background"""
 
         result = generator(prompt, max_length=500, num_return_sequences=1)
-        return result[0][
+        return result[0]["generated_text"]
     except Exception as e:
         return f"Error: {str(e)}"
 
+
 generator = setup_pipeline()
 
 demo = gr.Interface(
@@ -37,9 +38,8 @@ demo = gr.Interface(
     inputs=gr.Textbox(label="Enter dish name"),
     outputs=gr.Textbox(label="Generated Recipe", lines=20),
     title="RecipeGenie",
-    description="AI-powered recipe generator"
+    description="AI-powered recipe generator",
 )
 
 if __name__ == "__main__":
     demo.launch()
-
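For anyone exercising the updated generation path outside the Gradio UI, a minimal local smoke test could look like the sketch below. It is not part of the commit, and it assumes an HF_TOKEN environment variable that grants access to the gated meta-llama/Llama-3.2-1B-instruct checkpoint; the dish name "dosa" is only an example.

# Standalone sketch mirroring the updated src/app.py flow.
import os

from huggingface_hub import login
from transformers import pipeline

login(token=os.getenv("HF_TOKEN"))  # assumes HF_TOKEN is set in the environment

generator = pipeline(
    "text-generation",
    model="meta-llama/Llama-3.2-1B-instruct",
    device=-1,  # CPU, matching the Space's configuration
)

prompt = """Create a recipe for dosa including:
- Ingredients with quantities
- Steps to cook
- Cultural background"""

# The text-generation pipeline returns a list of dicts keyed by "generated_text".
result = generator(prompt, max_length=500, num_return_sequences=1)
print(result[0]["generated_text"])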
tests/__pycache__/__init__.cpython-312.pyc DELETED (binary file, 139 Bytes)

tests/__pycache__/test_app.cpython-312-pytest-8.3.4.pyc DELETED (binary file, 408 Bytes)
tests/test_app.py CHANGED
@@ -2,33 +2,36 @@ import pytest
 from unittest.mock import Mock, patch
 from src.app import generate_recipe, setup_pipeline
 
-(previous test file body not shown)
+
+@pytest.fixture
+def mock_pipeline():
+    with patch("src.app.pipeline") as mock:
+        mock_generator = Mock()
+        mock_generator.return_value = [{"generated_text": "Test recipe output"}]
+        mock.return_value = mock_generator
+        yield mock
+
+
+def test_empty_input():
+    result = generate_recipe("")
+    assert "Please enter a dish name" in result
+
+
+def test_generate_recipe_success(mock_pipeline):
+    result = generate_recipe("Pasta")
+    assert isinstance(result, str)
+    assert "Pasta" in result
+
+
+def test_generate_recipe_exception():
+    with patch("src.app.generator", side_effect=Exception("Test error")):
+        result = generate_recipe("Pasta")
+        assert "Error:" in result
+
+
+def test_pipeline_creation():
+    with patch("src.app.pipeline") as mock_pipeline:
+        setup_pipeline()
+        mock_pipeline.assert_called_once_with(
+            "text-generation", model="meta-llama/Llama-3.2-1B-instruct", device=-1
+        )