axel-darmouni committed
Commit 6122d6e · 1 parent: 28da508

updating model to mistral latest

Files changed (5)
  1. README.md +3 -3
  2. agent.py +9 -4
  3. app.py +1 -1
  4. followup_agent.py +1 -1
  5. tools/retrieval_tools.py +3 -2
README.md CHANGED
@@ -67,7 +67,7 @@ tag: agent-demo-track
 ### 1. Prerequisites
 
 - Python 3.8+
-- Google Gemini API key
+- Mistral API key
 
 ### 2. Installation
 
@@ -85,7 +85,7 @@ pip install -r requirements.txt
 Create a `.env` file in the project root:
 
 ```bash
-GEMINI_API_KEY=your_gemini_api_key_here
+Mistral_API_KEY=your_Mistral_api_key_here
 ```
 
 ### 4. Launch the Application
@@ -170,7 +170,7 @@ After the initial analysis is complete:
 
 - **Frontend**: Gradio with custom CSS and real-time progress
 - **AI Agents**:
-  - Primary SmolAgents powered by Gemini 2.5 Flash
+  - Primary SmolAgents powered by Mistral Medium Latest
   - Specialized follow-up agent for interactive analysis ⭐
 - **Search**: BM25 keyword matching with TF-IDF preprocessing
 - **Translation**: LLM-powered bilingual query translation
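Note that the code changed in this commit reads `os.getenv("MISTRAL_API_KEY")`, while the README hunk above documents the key as `Mistral_API_KEY`; environment variable names are case-sensitive, so the `.env` entry needs to match the code. A minimal sketch (not part of the commit, assuming `python-dotenv` is available) of how the key would be loaded and passed to the model:

```python
# Minimal sketch, not from the commit: load the key name the code expects.
import os

from dotenv import load_dotenv        # assumes python-dotenv is installed
from smolagents import LiteLLMModel

load_dotenv()  # reads .env from the project root

model = LiteLLMModel(
    model_id="mistral/mistral-medium-latest",   # LiteLLM's Mistral provider prefix
    api_key=os.getenv("MISTRAL_API_KEY"),       # must match the name used in .env
)
```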
agent.py CHANGED
@@ -26,7 +26,7 @@ def create_web_agent(step_callback):
     search_tool = DuckDuckGoSearchTool()
     model = LiteLLMModel(
         #model_id="gemini/gemini-2.5-flash-preview-05-20",
-        model="mistralai/mistral-medium-latest",
+        model_id="mistral/mistral-medium-latest",
         api_key=os.getenv("MISTRAL_API_KEY"),
     )
     web_agent = CodeAgent(
@@ -68,9 +68,14 @@ def generate_prompt(user_query=None, initial_search_results=None):
    - Get dataset description using get_dataset_description
 
 3. **Visualization Creation**:
-   - If geographic data (departments/regions) is available, create a map of France
-   - Create 3 additional non-map visualizations
-   - Save all visualizations as PNG files
+   - **French Map Creation**: If you have data by French departments or regions, use the plot_departments_data tool:
+     * Call: plot_departments_data(data_dict, title, filename, color_scheme='viridis')
+     * data_dict format: {"department_name": value, "department_code": value}
+     * Supports both department names (e.g., "Paris", "Bouches-du-Rhône") and codes (e.g., "75", "13")
+     * Example: plot_departments_data({"Paris": 2161, "Lyon": 513}, "Population by Department", "population_map.png")
+     * The tool automatically saves the map as PNG in generated_data folder
+   - Create 3 additional non-map visualizations using matplotlib/seaborn
+   - Save all visualizations as PNG files in generated_data folder
 
 4. **Report Generation**:
    - Write insightful analysis text for each visualization
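For illustration, this is roughly the call shape the new prompt asks the agent to emit; inside the CodeAgent's generated code the tool is available as a plain function, and the values below are taken from the prompt's own example rather than verified against the tool's source:

```python
# Sketch of the plot_departments_data call described in the prompt above.
# Argument names follow the prompt text; the tool itself is provided by the
# repo's tools package and is not reproduced here.
departments = {"Paris": 2161, "Lyon": 513}   # keys may be department names or codes
plot_departments_data(
    departments,
    "Population by Department",
    "population_map.png",                    # saved under generated_data/ per the prompt
    color_scheme="viridis",
)
```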
app.py CHANGED
@@ -33,7 +33,7 @@ def initialize_models():
     try:
         model = LiteLLMModel(
             #model_id="gemini/gemini-2.5-flash-preview-05-20",
-            model="mistralai/mistral-medium-latest",
+            model_id="mistral/mistral-medium-latest",
             api_key=os.getenv("MISTRAL_API_KEY")
         )
         llm_translator = CodeAgent(tools=[], model=model, max_steps=1)
followup_agent.py CHANGED
@@ -23,7 +23,7 @@ def create_followup_agent():
     search_tool = DuckDuckGoSearchTool()
     model = LiteLLMModel(
         #model_id="gemini/gemini-2.5-flash-preview-05-20",
-        model="mistralai/mistral-medium-latest",
+        model_id="mistral/mistral-medium-latest",
         api_key=os.getenv("MISTRAL_API_KEY"),
     )
 
tools/retrieval_tools.py CHANGED
@@ -33,8 +33,9 @@ def _initialize_retrieval_system():
     if _llm_translator is None:
         try:
             model = LiteLLMModel(
-                model_id="gemini/gemini-2.5-flash-preview-05-20",
-                api_key=os.getenv("GEMINI_API_KEY")
+                #model_id="gemini/gemini-2.5-flash-preview-05-20",
+                model_id="mistral/mistral-medium-latest",
+                api_key=os.getenv("MISTRAL_API_KEY")
             )
             _llm_translator = CodeAgent(tools=[], model=model, max_steps=1)
             print("✅ LLM translator initialized")