tfrere committed
Commit 5db3b83 · 1 Parent(s): 77af7b8

Files changed (2):
  1. client/src/App.jsx +91 -50
  2. server/server.py +117 -23
client/src/App.jsx CHANGED
@@ -1,105 +1,146 @@

Before:

- import { useState } from "react";
  import {
    Container,
    Paper,
-   TextField,
    Button,
    Box,
    Typography,
    List,
    ListItem,
    ListItemText,
  } from "@mui/material";
- import SendIcon from "@mui/icons-material/Send";
  import axios from "axios";

  function App() {
-   const [message, setMessage] = useState("");
-   const [chatHistory, setChatHistory] = useState([]);
    const [isLoading, setIsLoading] = useState(false);

-   const handleSubmit = async (e) => {
-     e.preventDefault();
-     if (!message.trim()) return;

-     const userMessage = message;
-     setMessage("");
-     setChatHistory((prev) => [...prev, { text: userMessage, isUser: true }]);
      setIsLoading(true);
-
      try {
        const response = await axios.post("http://localhost:8000/chat", {
-         message: userMessage,
        });

-       setChatHistory((prev) => [
-         ...prev,
-         { text: response.data.response, isUser: false },
-       ]);
      } catch (error) {
        console.error("Error:", error);
-       setChatHistory((prev) => [
          ...prev,
-         { text: "Désolé, une erreur s'est produite.", isUser: false },
        ]);
      } finally {
        setIsLoading(false);
      }
    };

    return (
      <Container maxWidth="md" sx={{ mt: 4 }}>
        <Paper
          elevation={3}
          sx={{ height: "80vh", display: "flex", flexDirection: "column", p: 2 }}
        >
-         <Typography variant="h4" component="h1" gutterBottom align="center">
-           Chat with AI
-         </Typography>

          <List sx={{ flexGrow: 1, overflow: "auto", mb: 2 }}>
-           {chatHistory.map((msg, index) => (
              <ListItem
                key={index}
-               sx={{ justifyContent: msg.isUser ? "flex-end" : "flex-start" }}
              >
                <Paper
                  elevation={1}
                  sx={{
                    p: 2,
-                   maxWidth: "70%",
-                   bgcolor: msg.isUser ? "primary.light" : "grey.100",
-                   color: msg.isUser ? "white" : "text.primary",
                  }}
                >
-                 <ListItemText primary={msg.text} />
                </Paper>
              </ListItem>
            ))}
          </List>

-         <Box
-           component="form"
-           onSubmit={handleSubmit}
-           sx={{ display: "flex", gap: 1 }}
-         >
-           <TextField
-             fullWidth
-             value={message}
-             onChange={(e) => setMessage(e.target.value)}
-             placeholder="Tapez votre message..."
-             disabled={isLoading}
-             variant="outlined"
-           />
-           <Button
-             type="submit"
-             variant="contained"
-             disabled={isLoading}
-             endIcon={<SendIcon />}
-           >
-             Envoyer
-           </Button>
-         </Box>
      </Paper>
    </Container>
  );

After:

+ import { useState, useEffect } from "react";
  import {
    Container,
    Paper,
    Button,
    Box,
    Typography,
    List,
    ListItem,
    ListItemText,
+   LinearProgress,
+   ButtonGroup,
  } from "@mui/material";
+ import RestartAltIcon from "@mui/icons-material/RestartAlt";
  import axios from "axios";

  function App() {
+   const [storySegments, setStorySegments] = useState([]);
+   const [currentChoices, setCurrentChoices] = useState([]);
    const [isLoading, setIsLoading] = useState(false);

+   // Start the story when the component mounts
+   useEffect(() => {
+     handleStoryAction("start");
+   }, []);

+   const handleStoryAction = async (action, choiceId = null) => {
      setIsLoading(true);
      try {
        const response = await axios.post("http://localhost:8000/chat", {
+         message: action,
+         choice_id: choiceId,
        });

+       if (action === "restart") {
+         setStorySegments([{ text: response.data.story_text, isChoice: false }]);
+       } else {
+         setStorySegments((prev) => [
+           ...prev,
+           { text: response.data.story_text, isChoice: false },
+         ]);
+       }
+
+       setCurrentChoices(response.data.choices);
      } catch (error) {
        console.error("Error:", error);
+       setStorySegments((prev) => [
          ...prev,
+         { text: "Connection lost with the storyteller...", isChoice: false },
        ]);
      } finally {
        setIsLoading(false);
      }
    };

+   const handleChoice = async (choiceId) => {
+     // Add the chosen option to the story
+     setStorySegments((prev) => [
+       ...prev,
+       {
+         text: currentChoices.find((c) => c.id === choiceId).text,
+         isChoice: true,
+       },
+     ]);
+     // Continue the story with this choice
+     await handleStoryAction("choice", choiceId);
+   };
+
    return (
      <Container maxWidth="md" sx={{ mt: 4 }}>
        <Paper
          elevation={3}
          sx={{ height: "80vh", display: "flex", flexDirection: "column", p: 2 }}
        >
+         <Box
+           sx={{
+             display: "flex",
+             justifyContent: "space-between",
+             alignItems: "center",
+             mb: 2,
+           }}
+         >
+           <Typography variant="h4" component="h1">
+             Echoes of Influence
+           </Typography>
+           <Button
+             variant="outlined"
+             startIcon={<RestartAltIcon />}
+             onClick={() => handleStoryAction("restart")}
+             disabled={isLoading}
+           >
+             Restart
+           </Button>
+         </Box>
+
+         {isLoading && <LinearProgress sx={{ mb: 2 }} />}

          <List sx={{ flexGrow: 1, overflow: "auto", mb: 2 }}>
+           {storySegments.map((segment, index) => (
              <ListItem
                key={index}
+               sx={{
+                 justifyContent: segment.isChoice ? "flex-end" : "flex-start",
+                 display: "flex",
+               }}
              >
                <Paper
                  elevation={1}
                  sx={{
                    p: 2,
+                   maxWidth: "80%",
+                   bgcolor: segment.isChoice ? "primary.light" : "grey.100",
+                   color: segment.isChoice ? "white" : "text.primary",
                  }}
                >
+                 <ListItemText
+                   primary={segment.isChoice ? "Your Choice" : "Story"}
+                   secondary={segment.text}
+                   primaryTypographyProps={{
+                     variant: "subtitle2",
+                     color: segment.isChoice ? "inherit" : "primary",
+                   }}
+                 />
                </Paper>
              </ListItem>
            ))}
          </List>

+         {currentChoices.length > 0 && (
+           <Box sx={{ display: "flex", justifyContent: "center", gap: 2 }}>
+             {currentChoices.map((choice) => (
+               <Button
+                 key={choice.id}
+                 variant="contained"
+                 onClick={() => handleChoice(choice.id)}
+                 disabled={isLoading}
+                 sx={{ minWidth: "200px" }}
+               >
+                 {choice.text}
+               </Button>
+             ))}
+           </Box>
+         )}
      </Paper>
    </Container>
  );
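This commit changes the client/server contract: instead of posting free-form text and reading back a flat `response` string, the component now sends a `message` action ("start", "restart", or "choice") plus an optional `choice_id`, and expects `story_text` and a `choices` array of `{id, text}` objects in return. A minimal sketch of that exchange in Python (using `requests`; assumes the server below is running on localhost:8000, the URL hard-coded in the client):

import requests

# Advance the story with choice 1; "start" and "restart" work the same
# way with choice_id left as None.
resp = requests.post(
    "http://localhost:8000/chat",
    json={"message": "choice", "choice_id": 1},
)
data = resp.json()

print(data["story_text"])        # rendered as a "Story" bubble
for choice in data["choices"]:   # rendered as buttons under the list
    print(choice["id"], choice["text"])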
server/server.py CHANGED
@@ -1,61 +1,155 @@

Before:

  from fastapi import FastAPI, HTTPException
  from fastapi.middleware.cors import CORSMiddleware
- from pydantic import BaseModel
  import os
  from dotenv import load_dotenv
  from langchain_mistralai.chat_models import ChatMistralAI
- from langchain.memory import ConversationBufferMemory
- from langchain.chains import ConversationChain
- from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler

  # Load environment variables
  load_dotenv()

- app = FastAPI(title="Mon API FastAPI")

  # Configure CORS
  app.add_middleware(
      CORSMiddleware,
-     allow_origins=["http://localhost:5173"],  # React app URL
      allow_credentials=True,
      allow_methods=["*"],
      allow_headers=["*"],
  )

  # Initialize Mistral Chat Model
  chat_model = ChatMistralAI(
      mistral_api_key=os.getenv("MISTRAL_API_KEY"),
-     model="mistral-tiny",  # You can change this to other models like "mistral-small" or "mistral-medium"
-     streaming=True,
-     callbacks=[StreamingStdOutCallbackHandler()]
  )

- # Initialize conversation memory
- memory = ConversationBufferMemory()
- conversation = ConversationChain(
-     llm=chat_model,
-     memory=memory,
-     verbose=True
  )

  class ChatMessage(BaseModel):
      message: str

  @app.get("/")
  async def read_root():
-     return {"message": "Bienvenue sur l'API FastAPI!"}

- @app.get("/health")
- async def health_check():
-     return {"status": "ok"}
-
- @app.post("/chat")
  async def chat_endpoint(chat_message: ChatMessage):
      try:
          # Get response from the model
-         response = conversation.predict(input=chat_message.message)
-         return {"response": response}
      except Exception as e:
          raise HTTPException(status_code=500, detail=str(e))

  if __name__ == "__main__":

After:

  from fastapi import FastAPI, HTTPException
  from fastapi.middleware.cors import CORSMiddleware
+ from pydantic import BaseModel, Field
+ from typing import List, Optional
  import os
  from dotenv import load_dotenv
  from langchain_mistralai.chat_models import ChatMistralAI
+ from langchain.output_parsers import PydanticOutputParser
+ from langchain.prompts import ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate
+ from langchain.schema import HumanMessage, SystemMessage

  # Load environment variables
  load_dotenv()

+ app = FastAPI(title="Echoes of Influence")

  # Configure CORS
  app.add_middleware(
      CORSMiddleware,
+     allow_origins=["http://localhost:5173"],
      allow_credentials=True,
      allow_methods=["*"],
      allow_headers=["*"],
  )

+ # Game state
+ story_beat = 0
+
+ # Define the structure we want the LLM to output
+ class StorySegment(BaseModel):
+     story_text: str = Field(description="The next segment of the story")
+     choices: List[str] = Field(description="Exactly two possible choices for the player", min_items=2, max_items=2)
+     is_death: bool = Field(description="Whether this segment ends in Sarah's death", default=False)
+
+ # Initialize the parser
+ parser = PydanticOutputParser(pydantic_object=StorySegment)
+
  # Initialize Mistral Chat Model
  chat_model = ChatMistralAI(
      mistral_api_key=os.getenv("MISTRAL_API_KEY"),
+     model="mistral-small",
+     temperature=0.7
  )

+ # Create the system prompt
+ system_template = """You are narrating a brutal and unforgiving dystopian story about Sarah, a former engineer on a suicide mission to deliver medical supplies through a deadly radiation-filled wasteland.
+
+ Core story elements:
+ - Sarah is deeply traumatized by the AI uprising that killed most of humanity
+ - She abandoned her sister during the Great Collapse, leaving her to die
+ - She's on a suicide mission, but a quick death is not redemption
+ - The radiation is lethal and gets worse with each step
+ - Wrong choices lead to immediate and graphic deaths
+ - The environment is extremely hostile (raiders, malfunctioning AI systems, radiation storms)
+
+ Death conditions (implement these strictly):
+ - Any direct exposure to high radiation zones is lethal within minutes
+ - Trusting the wrong people leads to death
+ - Using corrupted AI systems can kill instantly
+ - Hesitating too long in dangerous situations is fatal
+ - Taking too many risks in succession leads to death
+
+ Each response must contain:
+ 1. A tense story segment that puts Sarah in mortal danger
+ 2. Exactly two possible choices that represent different approaches:
+    - Each choice must have clear potential consequences
+    - At least one choice should always carry a significant risk of death
+    - Choices should reflect:
+      * Brutal pragmatism vs Emotional responses
+      * Quick but dangerous vs Slow but safer routes
+      * Trust vs Paranoia
+      * Using AI systems vs Manual alternatives
+
+ If a choice would realistically lead to death, you MUST end the story with a detailed death scene and set is_death to true.
+
+ {format_instructions}"""
+
+ human_template = """Current story beat: {story_beat}
+ Previous choice: {previous_choice}
+
+ Generate the next story segment and choices. Remember: this is a brutal and unforgiving world where wrong choices lead to death."""
+
+ # Create the chat prompt
+ prompt = ChatPromptTemplate(
+     messages=[
+         SystemMessagePromptTemplate.from_template(system_template),
+         HumanMessagePromptTemplate.from_template(human_template)
+     ],
+     partial_variables={"format_instructions": parser.get_format_instructions()}
  )

+ class Choice(BaseModel):
+     id: int
+     text: str
+
+ class StoryResponse(BaseModel):
+     story_text: str
+     choices: List[Choice]
+     is_death: bool = False
+
  class ChatMessage(BaseModel):
      message: str
+     choice_id: Optional[int] = None

  @app.get("/")
  async def read_root():
+     return {"message": "Welcome to Echoes of Influence"}

+ @app.post("/chat", response_model=StoryResponse)
  async def chat_endpoint(chat_message: ChatMessage):
+     global story_beat
+
      try:
+         # Prepare the context
+         if chat_message.message.lower() == "restart":
+             story_beat = 0
+             previous_choice = "none"
+         elif chat_message.choice_id is not None:
+             previous_choice = f"Choice {chat_message.choice_id}"
+         else:
+             previous_choice = "none"
+
+         # Get the formatted messages
+         messages = prompt.format_messages(
+             story_beat=story_beat,
+             previous_choice=previous_choice
+         )
+
          # Get response from the model
+         response = chat_model.invoke(messages)
+
+         # Parse the response
+         parsed_response = parser.parse(response.content)
+
+         # Only increment story beat if not dead
+         if not parsed_response.is_death:
+             story_beat += 1
+
+         # Convert to response format
+         choices = [] if parsed_response.is_death else [
+             Choice(id=i, text=choice.strip())
+             for i, choice in enumerate(parsed_response.choices, 1)
+         ]
+
+         return StoryResponse(
+             story_text=parsed_response.story_text,
+             choices=choices,
+             is_death=parsed_response.is_death
+         )
+
      except Exception as e:
+         print(f"Error: {str(e)}")
          raise HTTPException(status_code=500, detail=str(e))

  if __name__ == "__main__":
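The key mechanism in the new endpoint is the `PydanticOutputParser` round-trip: `parser.get_format_instructions()` is injected into the system prompt through the `{format_instructions}` placeholder, and `parser.parse(response.content)` validates the model's reply against `StorySegment`. A standalone sketch of that round-trip, assuming the same langchain/pydantic versions the server imports; the JSON string here is hand-written for illustration, not real model output:

from typing import List
from pydantic import BaseModel, Field
from langchain.output_parsers import PydanticOutputParser

class StorySegment(BaseModel):
    story_text: str = Field(description="The next segment of the story")
    choices: List[str] = Field(description="Exactly two possible choices for the player")
    is_death: bool = Field(description="Whether this segment ends in Sarah's death", default=False)

parser = PydanticOutputParser(pydantic_object=StorySegment)

# What {format_instructions} expands to in the system prompt: a JSON
# schema the model is asked to follow.
print(parser.get_format_instructions())

# A well-formed reply parses into a typed object; malformed output raises
# an OutputParserException, which the endpoint surfaces as a 500.
raw = '{"story_text": "Sarah crests the ridge as the geiger counter screams.", "choices": ["Descend into the valley now", "Wait for the storm to pass"], "is_death": false}'
segment = parser.parse(raw)
assert len(segment.choices) == 2 and not segment.is_death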