Emmanuel Frimpong Asante committed
Commit 29db0fc · 1 Parent(s): c579c97
update space

Files changed:
- .gitignore +1 -0
- .idea/workspace.xml +21 -19
- backend/app/services/llama_service.py +1 -1
.gitignore
CHANGED

@@ -1,2 +1,3 @@
 .venv
 .idea
+.env
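The new .gitignore entry keeps the project's .env file out of version control while it is still read locally at runtime. As a minimal sketch (an assumption, not part of this commit), the backend could load values from that file with python-dotenv; the variable name HF_TOKEN below is hypothetical:

import os
from dotenv import load_dotenv  # assumes python-dotenv is installed

load_dotenv()  # read key=value pairs from the local .env into os.environ
hf_token = os.getenv("HF_TOKEN")  # hypothetical name for a Hugging Face access token

Keeping .env untracked while reading it this way is the usual pattern when a gated checkpoint such as the meta-llama models typically requires an authenticated download.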
.idea/workspace.xml
CHANGED

@@ -5,8 +5,10 @@
     </component>
     <component name="ChangeListManager">
       <list default="true" id="27c9ae1a-a6fa-4472-8bcd-a7087620894b" name="Changes" comment="update space">
+        <change beforePath="$PROJECT_DIR$/.env" beforeDir="false" afterPath="$PROJECT_DIR$/.env" afterDir="false" />
+        <change beforePath="$PROJECT_DIR$/.gitignore" beforeDir="false" afterPath="$PROJECT_DIR$/.gitignore" afterDir="false" />
         <change beforePath="$PROJECT_DIR$/.idea/workspace.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/workspace.xml" afterDir="false" />
-        <change beforePath="$PROJECT_DIR$/backend/app/
+        <change beforePath="$PROJECT_DIR$/backend/app/services/llama_service.py" beforeDir="false" afterPath="$PROJECT_DIR$/backend/app/services/llama_service.py" afterDir="false" />
       </list>
       <option name="SHOW_DIALOG" value="false" />
       <option name="HIGHLIGHT_CONFLICTS" value="true" />
@@ -128,22 +130,6 @@
       <workItem from="1730454506390" duration="12672000" />
       <workItem from="1730494385249" duration="17097000" />
     </task>
-    <task id="LOCAL-00113" summary="update space">
-      <option name="closed" value="true" />
-      <created>1730483618538</created>
-      <option name="number" value="00113" />
-      <option name="presentableId" value="LOCAL-00113" />
-      <option name="project" value="LOCAL" />
-      <updated>1730483618538</updated>
-    </task>
-    <task id="LOCAL-00114" summary="update space">
-      <option name="closed" value="true" />
-      <created>1730484026472</created>
-      <option name="number" value="00114" />
-      <option name="presentableId" value="LOCAL-00114" />
-      <option name="project" value="LOCAL" />
-      <updated>1730484026472</updated>
-    </task>
     <task id="LOCAL-00115" summary="update space">
       <option name="closed" value="true" />
       <created>1730484388981</created>
@@ -520,7 +506,23 @@
       <option name="project" value="LOCAL" />
       <updated>1731746145508</updated>
     </task>
-    <
+    <task id="LOCAL-00162" summary="update space">
+      <option name="closed" value="true" />
+      <created>1731746527431</created>
+      <option name="number" value="00162" />
+      <option name="presentableId" value="LOCAL-00162" />
+      <option name="project" value="LOCAL" />
+      <updated>1731746527431</updated>
+    </task>
+    <task id="LOCAL-00163" summary="update space">
+      <option name="closed" value="true" />
+      <created>1731746870945</created>
+      <option name="number" value="00163" />
+      <option name="presentableId" value="LOCAL-00163" />
+      <option name="project" value="LOCAL" />
+      <updated>1731746870945</updated>
+    </task>
+    <option name="localTasksCounter" value="164" />
     <servers />
   </component>
   <component name="TypeScriptGeneratedFilesManager">
@@ -544,6 +546,6 @@
     <option name="LAST_COMMIT_MESSAGE" value="update space" />
   </component>
   <component name="com.intellij.coverage.CoverageDataManagerImpl">
-    <SUITE FILE_PATH="coverage/Generative_AI_with_poultry_disease_detection_system_v2$Unnamed.coverage" NAME="Unnamed Coverage Results" MODIFIED="
+    <SUITE FILE_PATH="coverage/Generative_AI_with_poultry_disease_detection_system_v2$Unnamed.coverage" NAME="Unnamed Coverage Results" MODIFIED="1731747013474" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="false" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="" />
   </component>
 </project>
backend/app/services/llama_service.py
CHANGED

@@ -33,7 +33,7 @@ def load_llama_model():
     global llama_model, llama_tokenizer
     try:
         logger.info("Loading Llama 3.2 model and tokenizer.")
-        model_name = "meta-llama/Llama-2-
+        model_name = "meta-llama/Llama-3.2-1B"  # Updated model name

         # Load tokenizer and model
         llama_tokenizer = AutoTokenizer.from_pretrained(model_name)