beyzacodeway committed
Commit 939b7f7 · verified · 1 Parent(s): dbd3785

Upload 6 files

Files changed (6)
  1. LICENSE +21 -0
  2. Makefile +67 -0
  3. README.md +112 -12
  4. langgraph.json +8 -0
  5. pyproject.toml +65 -0
  6. requirements.txt +30 -0
LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2024 LangChain
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
Makefile ADDED
@@ -0,0 +1,67 @@
+ .PHONY: all format lint test tests test_watch integration_tests docker_tests help extended_tests
+
+ # Default target executed when no arguments are given to make.
+ all: help
+
+ # Define a variable for the test file path.
+ TEST_FILE ?= tests/unit_tests/
+
+ test:
+ 	python -m pytest $(TEST_FILE)
+
+ integration_tests:
+ 	python -m pytest tests/integration_tests
+
+ test_watch:
+ 	python -m ptw --snapshot-update --now . -- -vv tests/unit_tests
+
+ test_profile:
+ 	python -m pytest -vv tests/unit_tests/ --profile-svg
+
+ extended_tests:
+ 	python -m pytest --only-extended $(TEST_FILE)
+
+
+ ######################
+ # LINTING AND FORMATTING
+ ######################
+
+ # Define a variable for Python and notebook files.
+ PYTHON_FILES=src/
+ MYPY_CACHE=.mypy_cache
+ lint format: PYTHON_FILES=.
+ lint_diff format_diff: PYTHON_FILES=$(shell git diff --name-only --diff-filter=d main | grep -E '\.py$$|\.ipynb$$')
+ lint_package: PYTHON_FILES=src
+ lint_tests: PYTHON_FILES=tests
+ lint_tests: MYPY_CACHE=.mypy_cache_test
+
+ lint lint_diff lint_package lint_tests:
+ 	python -m ruff check .
+ 	[ "$(PYTHON_FILES)" = "" ] || python -m ruff format $(PYTHON_FILES) --diff
+ 	[ "$(PYTHON_FILES)" = "" ] || python -m ruff check --select I $(PYTHON_FILES)
+ 	[ "$(PYTHON_FILES)" = "" ] || python -m mypy --strict $(PYTHON_FILES)
+ 	[ "$(PYTHON_FILES)" = "" ] || mkdir -p $(MYPY_CACHE) && python -m mypy --strict $(PYTHON_FILES) --cache-dir $(MYPY_CACHE)
+
+ format format_diff:
+ 	ruff format $(PYTHON_FILES)
+ 	ruff check --select I --fix $(PYTHON_FILES)
+
+ spell_check:
+ 	codespell --toml pyproject.toml
+
+ spell_fix:
+ 	codespell --toml pyproject.toml -w
+
+ ######################
+ # HELP
+ ######################
+
+ help:
+ 	@echo '----'
+ 	@echo 'format - run code formatters'
+ 	@echo 'lint - run linters'
+ 	@echo 'test - run unit tests'
+ 	@echo 'tests - run unit tests'
+ 	@echo 'test TEST_FILE=<test_file> - run all tests in file'
+ 	@echo 'test_watch - run unit tests in watch mode'
+
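For orientation, a few typical invocations of the targets defined above — a minimal sketch, assuming the dev tools (pytest, ruff, mypy, ptw, codespell) are installed and `tests/unit_tests/` exists; the specific test file name is illustrative:

```bash
# Run the default unit-test suite (TEST_FILE defaults to tests/unit_tests/)
make test

# Run a single file by overriding TEST_FILE (hypothetical path)
make test TEST_FILE=tests/unit_tests/test_graph.py

# Lint and format; the lint/format targets widen PYTHON_FILES to the whole tree (.)
make lint
make format
```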
README.md CHANGED
@@ -1,12 +1,112 @@
- ---
- title: Ai Data Question Answer Bot
- emoji: 😻
- colorFrom: indigo
- colorTo: blue
- sdk: gradio
- sdk_version: 5.45.0
- app_file: app.py
- pinned: false
- ---
-
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
+ # AI Data Analyst Chat
+
+ [![CI](https://github.com/langchain-ai/new-langgraph-project/actions/workflows/unit-tests.yml/badge.svg)](https://github.com/langchain-ai/new-langgraph-project/actions/workflows/unit-tests.yml)
+ [![Integration Tests](https://github.com/langchain-ai/new-langgraph-project/actions/workflows/integration-tests.yml/badge.svg)](https://github.com/langchain-ai/new-langgraph-project/actions/workflows/integration-tests.yml)
+
+ This is an AI-powered data analyst that can have interactive conversations about your datasets. Built using [LangGraph](https://github.com/langchain-ai/langgraph), it provides both a chat interface for ongoing conversations and a single-question mode for quick analysis.
+
+ <div align="center">
+   <img src="./static/studio_ui.png" alt="Graph view in LangGraph studio UI" width="75%" />
+ </div>
+
+ ## Features
+
+ - **Interactive Chat Interface**: Have ongoing conversations with the AI analyst about your data
+ - **Data Analysis**: Automatically generates Python code to analyze your datasets
+ - **Visualization**: Creates charts and visualizations based on your questions
+ - **Report Generation**: Generates markdown reports with analysis results
+ - **Conversation Memory**: Maintains context across multiple questions in a chat session
+
+ The core logic is defined in `src/question-answer/graph.py` and includes multiple agents that work together to provide comprehensive data analysis.
+
+ ## Getting Started
+
+ 1. Install dependencies:
+
+ ```bash
+ cd path/to/your/app
+ pip install -e . "langgraph-cli[inmem]"
+ ```
+
+ 2. (Optional) Create a `.env` file if you need to use secrets:
+
+ ```bash
+ cp .env.example .env
+ ```
+
+ If you want to enable LangSmith tracing, add your LangSmith API key to the `.env` file:
+
+ ```text
+ # .env
+ LANGSMITH_API_KEY=lsv2...
+ ```
+
+ 3. **Run the Chat Interface** (Recommended):
+
+ ```bash
+ cd src/question-answer
+ python graph.py
+ ```
+
+ This will start an interactive chat session where you can ask questions about your dataset. Type `quit` or `exit` to end the conversation.
+
+ 4. **Alternative: Run Single Question Mode**:
+
+ ```bash
+ cd src/question-answer
+ python -c "from graph import main; main()"
+ ```
+
+ 5. **Start the LangGraph Server** (for LangGraph Studio):
+
+ ```shell
+ langgraph dev
+ ```
+
+ For more information on getting started with LangGraph Server, [see here](https://langchain-ai.github.io/langgraph/tutorials/langgraph-platform/local-server/).
+
+ ## Usage Examples
+
+ ### Chat Interface
+ When you run the chat interface, you can have conversations like:
+
+ ```
+ 🤖 AI Data Analyst Chat
+ ==================================================
+ Ask me anything about your dataset! Type 'quit' or 'exit' to end the conversation.
+ ==================================================
+
+ 👤 You: What are the top 5 cereals by protein content?
+
+ 🤖 AI: Let me analyze that for you...
+ [Analysis results and charts are generated]
+
+ 👤 You: Can you show me a comparison of sugar vs calories?
+
+ 🤖 AI: Let me analyze that for you...
+ [New analysis building on previous context]
+
+ 👤 You: quit
+
+ 🤖 AI: Goodbye! Thanks for chatting with me.
+ ```
+
+ ### Single Question Mode
+ For quick one-off analysis, you can modify the question in the `main()` function and run it directly.
+
+ ## How to customize
+
+ 1. **Define runtime context**: Modify the `Context` class in the `graph.py` file to expose the arguments you want to configure per assistant. For example, in a chatbot application you may want to define a dynamic system prompt or LLM to use. For more information on runtime context in LangGraph, [see here](https://langchain-ai.github.io/langgraph/agents/context/?h=context#static-runtime-context).
+
+ 2. **Extend the graph**: The core logic of the application is defined in [graph.py](./src/agent/graph.py). You can modify this file to add new nodes, edges, or change the flow of information.
+
+ ## Development
+
+ While iterating on your graph in LangGraph Studio, you can edit past state and rerun your app from previous states to debug specific nodes. Local changes will be automatically applied via hot reload.
+
+ Follow-up requests extend the same thread. You can create an entirely new thread, clearing previous history, using the `+` button in the top right.
+
+ For more advanced features and examples, refer to the [LangGraph documentation](https://langchain-ai.github.io/langgraph/). These resources can help you adapt this template for your specific use case and build more sophisticated conversational agents.
+
+ LangGraph Studio also integrates with [LangSmith](https://smith.langchain.com/) for more in-depth tracing and collaboration with teammates, allowing you to analyze and optimize your chatbot's performance.
+
langgraph.json ADDED
@@ -0,0 +1,8 @@
+ {
+   "dependencies": ["."],
+   "graphs": {
+     "agent": "./src/agent/graph.py:graph"
+   },
+   "env": ".env",
+   "image_distro": "wolfi"
+ }
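This config is what `langgraph dev` reads: `graphs.agent` points at the `graph` object exported from `./src/agent/graph.py`, `"dependencies": ["."]` installs the local package, and `"env": ".env"` loads secrets. A minimal sketch of starting the local server with it, assuming the install step from the README has already been run:

```bash
# Install the local package plus the in-memory CLI (as in the README's Getting Started)
pip install -e . "langgraph-cli[inmem]"

# Launch the local LangGraph Server / Studio; it picks up langgraph.json in the current directory
langgraph dev
```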
pyproject.toml ADDED
@@ -0,0 +1,65 @@
+ [project]
+ name = "agent"
+ version = "0.0.1"
+ description = "Starter template for making a new agent LangGraph."
+ authors = [
+     { name = "William Fu-Hinthorn", email = "[email protected]" },
+ ]
+ readme = "README.md"
+ license = { text = "MIT" }
+ requires-python = ">=3.9"
+ dependencies = [
+     "langgraph>=0.2.6",
+     "python-dotenv>=1.0.1",
+ ]
+
+
+ [project.optional-dependencies]
+ dev = ["mypy>=1.11.1", "ruff>=0.6.1"]
+
+ [build-system]
+ requires = ["setuptools>=73.0.0", "wheel"]
+ build-backend = "setuptools.build_meta"
+
+ [tool.setuptools]
+ packages = ["langgraph.templates.agent", "agent"]
+ [tool.setuptools.package-dir]
+ "langgraph.templates.agent" = "src/agent"
+ "agent" = "src/agent"
+
+
+ [tool.setuptools.package-data]
+ "*" = ["py.typed"]
+
+ [tool.ruff]
+ lint.select = [
+     "E",    # pycodestyle
+     "F",    # pyflakes
+     "I",    # isort
+     "D",    # pydocstyle
+     "D401", # First line should be in imperative mood
+     "T201",
+     "UP",
+ ]
+ lint.ignore = [
+     "UP006",
+     "UP007",
+     # We actually do want to import from typing_extensions
+     "UP035",
+     # Relax the convention by _not_ requiring documentation for every function parameter.
+     "D417",
+     "E501",
+ ]
+ [tool.ruff.lint.per-file-ignores]
+ "tests/*" = ["D", "UP"]
+ [tool.ruff.lint.pydocstyle]
+ convention = "google"
+
+ [dependency-groups]
+ dev = [
+     "anyio>=4.7.0",
+     "langgraph-cli[inmem]>=0.2.8",
+     "mypy>=1.13.0",
+     "pytest>=8.3.5",
+     "ruff>=0.8.2",
+ ]
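Two kinds of dev dependencies are declared above: the `dev` extra under `[project.optional-dependencies]` (mypy, ruff) and a PEP 735 `[dependency-groups]` table aimed at group-aware installers such as uv. A hedged sketch of a plain-pip setup that uses only the extra:

```bash
# Editable install with the dev extra from [project.optional-dependencies]
pip install -e ".[dev]"
```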
requirements.txt ADDED
@@ -0,0 +1,30 @@
+ # Core dependencies for Hugging Face Spaces (Python 3.10 compatible)
+ gradio==4.20.0
+ pandas==2.2.3
+ numpy==1.26.4
+ matplotlib==3.10.3
+ seaborn==0.13.2
+ scikit-learn==1.6.1
+ plotly==6.0.1
+ kaleido==1.0.0
+
+ # LangGraph and LangChain dependencies
+ langgraph
+ langchain
+ langchain-core
+ langchain-community
+
+ # Additional data processing
+ scipy==1.13.1
+ Pillow==10.4.0
+ openpyxl==3.1.5
+
+ # Web and API dependencies
+ requests==2.32.4
+ aiohttp==3.12.14
+
+ # Daytona sandbox
+ daytona
+
+ # Fix Pydantic compatibility
+ pydantic==2.7.4
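These pins target the Hugging Face Space runtime (Python 3.10, per the comment at the top of the file), separate from the package metadata in `pyproject.toml`. To reproduce that environment locally, something along these lines should work — a sketch, assuming Python 3.10 is available as `python3.10`:

```bash
# Recreate the Space's pinned environment in a fresh virtual environment
python3.10 -m venv .venv
source .venv/bin/activate
pip install -r requirements.txt
```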