tommytracx commited on
Commit
a45c90b
·
verified ·
1 Parent(s): 0cd664c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +330 -117
app.py CHANGED
@@ -12,11 +12,16 @@ import re
12
  app = Flask(__name__)
13
 
14
  # Configure logging with file output in a writable directory
15
- log_path = '/app/openwebui.log'
16
- os.makedirs(os.path.dirname(log_path), exist_ok=True)
17
- log_handler = RotatingFileHandler(log_path, maxBytes=1000000, backupCount=5)
18
- log_handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
19
- logging.getLogger().addHandler(log_handler)
 
 
 
 
 
20
  logging.getLogger().setLevel(logging.INFO)
21
 
22
  # Configuration
@@ -28,7 +33,7 @@ TEMPERATURE = float(os.getenv('TEMPERATURE', '0.7'))
28
  class OllamaManager:
29
  def __init__(self, base_url: str):
30
  self.base_url = base_url.rstrip('/')
31
- self.available_models = ALLOWED_MODELS # Initialize with allowed models
32
  self.refresh_models()
33
 
34
  def refresh_models(self) -> None:
@@ -38,14 +43,19 @@ class OllamaManager:
38
  response.raise_for_status()
39
  data = response.json()
40
  models = [model['name'] for model in data.get('models', [])]
41
- # Filter models to only include those in ALLOWED_MODELS
42
  self.available_models = [model for model in models if model in ALLOWED_MODELS]
43
  if not self.available_models:
44
  self.available_models = ALLOWED_MODELS
45
  logging.warning("No allowed models found in API response, using ALLOWED_MODELS")
46
  logging.info(f"Available models: {self.available_models}")
 
 
 
 
 
 
47
  except Exception as e:
48
- logging.error(f"Error refreshing models: {e}")
49
  self.available_models = ALLOWED_MODELS
50
 
51
  def list_models(self) -> List[str]:
@@ -55,6 +65,7 @@ class OllamaManager:
55
  def generate(self, model_name: str, prompt: str, stream: bool = False, **kwargs) -> Any:
56
  """Generate text using a model, with optional streaming."""
57
  if model_name not in self.available_models:
 
58
  return {"status": "error", "message": f"Model {model_name} not available"}
59
 
60
  try:
@@ -72,14 +83,21 @@ class OllamaManager:
72
  response = requests.post(f"{self.base_url}/api/generate", json=payload, timeout=120)
73
  response.raise_for_status()
74
  data = response.json()
 
75
  return {
76
  "status": "success",
77
  "response": data.get('response', ''),
78
  "model": model_name,
79
  "usage": data.get('usage', {})
80
  }
 
 
 
 
 
 
81
  except Exception as e:
82
- logging.error(f"Error generating response: {e}")
83
  return {"status": "error", "message": str(e)}
84
 
85
  def health_check(self) -> Dict[str, Any]:
@@ -87,15 +105,22 @@ class OllamaManager:
87
  try:
88
  response = requests.get(f"{self.base_url}/api/tags", timeout=10)
89
  response.raise_for_status()
 
90
  return {"status": "healthy", "available_models": len(self.available_models)}
 
 
 
 
 
 
91
  except Exception as e:
92
- logging.error(f"Health check failed: {e}")
93
  return {"status": "unhealthy", "error": str(e)}
94
 
95
  # Initialize Ollama manager
96
  ollama_manager = OllamaManager(OLLAMA_BASE_URL)
97
 
98
- # HTML template for the chat interface with improved UI and Sandpack
99
  HTML_TEMPLATE = '''
100
  <!DOCTYPE html>
101
  <html lang="en">
@@ -115,28 +140,32 @@ HTML_TEMPLATE = '''
115
  </script>
116
  <style>
117
  :root {
118
- --primary-color: #667eea;
119
- --secondary-color: #764ba2;
120
  --text-color: #333;
121
- --bg-color: #fafbfc;
122
  --message-bg-user: var(--primary-color);
123
  --message-bg-assistant: white;
124
  --avatar-user: var(--primary-color);
125
- --avatar-assistant: #28a745;
126
- --border-color: #e9ecef;
127
  --input-bg: white;
 
 
128
  }
129
  .dark-mode {
130
  --primary-color: #3b4a8c;
131
  --secondary-color: #4a2e6b;
132
- --text-color: #f0f0f0;
133
- --bg-color: #1a1a1a;
134
  --message-bg-user: var(--primary-color);
135
- --message-bg-assistant: #2a2a2a;
136
  --avatar-user: var(--primary-color);
137
- --avatar-assistant: #1a7a3a;
138
- --border-color: #4a4a4a;
139
- --input-bg: #3a3a3a;
 
 
140
  }
141
  * {
142
  margin: 0;
@@ -148,55 +177,110 @@ HTML_TEMPLATE = '''
148
  background: linear-gradient(135deg, var(--primary-color) 0%, var(--secondary-color) 100%);
149
  color: var(--text-color);
150
  min-height: 100vh;
151
- padding: 20px;
152
  }
153
  .container {
154
- max-width: 1200px;
155
- margin: 0 auto;
 
156
  background: var(--bg-color);
157
- border-radius: 20px;
158
- box-shadow: 0 20px 40px rgba(0,0,0,0.1);
159
- overflow: hidden;
160
- position: relative;
161
  }
162
- .theme-toggle {
163
- position: absolute;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
164
  top: 10px;
165
- right: 10px;
166
- background: none;
 
167
  border: none;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
168
  cursor: pointer;
169
- font-size: 1.2rem;
 
 
 
 
 
 
170
  color: white;
171
  }
 
 
 
 
 
 
172
  .header {
173
  background: linear-gradient(135deg, var(--primary-color) 0%, var(--secondary-color) 100%);
174
  color: white;
175
- padding: 30px;
176
  text-align: center;
 
 
 
 
 
 
 
 
 
 
 
177
  }
178
  .header h1 {
179
- font-size: 2.5rem;
180
  margin-bottom: 10px;
181
  font-weight: 700;
182
  }
183
  .header p {
184
- font-size: 1.1rem;
185
  opacity: 0.9;
186
  }
187
  .controls {
188
- padding: 20px 30px;
189
  background: var(--bg-color);
190
  border-bottom: 1px solid var(--border-color);
191
  display: flex;
192
- gap: 15px;
193
- align-items: center;
194
  flex-wrap: wrap;
 
195
  }
196
  .control-group {
197
  display: flex;
198
  align-items: center;
199
  gap: 8px;
 
 
200
  }
201
  .control-group label {
202
  font-weight: 600;
@@ -205,7 +289,8 @@ HTML_TEMPLATE = '''
205
  }
206
  .control-group select,
207
  .control-group input {
208
- padding: 8px 12px;
 
209
  border: 2px solid var(--border-color);
210
  border-radius: 8px;
211
  font-size: 14px;
@@ -219,7 +304,7 @@ HTML_TEMPLATE = '''
219
  border-color: var(--primary-color);
220
  }
221
  .chat-container {
222
- height: 500px;
223
  overflow-y: auto;
224
  padding: 20px;
225
  background: var(--bg-color);
@@ -254,7 +339,7 @@ HTML_TEMPLATE = '''
254
  background: var(--message-bg-assistant);
255
  padding: 15px 20px;
256
  border-radius: 18px;
257
- max-width: 70%;
258
  box-shadow: 0 2px 10px rgba(0,0,0,0.1);
259
  line-height: 1.5;
260
  color: var(--text-color);
@@ -278,6 +363,7 @@ HTML_TEMPLATE = '''
278
  cursor: pointer;
279
  font-size: 12px;
280
  color: var(--text-color);
 
281
  }
282
  .code-button:hover {
283
  background: rgba(0,0,0,0.2);
@@ -289,24 +375,25 @@ HTML_TEMPLATE = '''
289
  overflow: hidden;
290
  }
291
  .input-container {
292
- padding: 20px 30px;
293
  background: var(--bg-color);
294
  border-top: 1px solid var(--border-color);
295
  }
296
  .input-form {
297
  display: flex;
298
- gap: 15px;
 
299
  }
300
  .input-field {
301
  flex: 1;
302
- padding: 15px 20px;
303
  border: 2px solid var(--border-color);
304
  border-radius: 25px;
305
  font-size: 16px;
306
  transition: border-color 0.3s;
307
  resize: none;
308
  min-height: 50px;
309
- max-height: 120px;
310
  background: var(--input-bg);
311
  color: var(--text-color);
312
  }
@@ -315,7 +402,7 @@ HTML_TEMPLATE = '''
315
  border-color: var(--primary-color);
316
  }
317
  .send-button {
318
- padding: 15px 30px;
319
  background: linear-gradient(135deg, var(--primary-color) 0%, var(--secondary-color) 100%);
320
  color: white;
321
  border: none;
@@ -324,7 +411,7 @@ HTML_TEMPLATE = '''
324
  font-weight: 600;
325
  cursor: pointer;
326
  transition: transform 0.2s;
327
- min-width: 100px;
328
  }
329
  .send-button:hover {
330
  transform: translateY(-2px);
@@ -353,107 +440,173 @@ HTML_TEMPLATE = '''
353
  border-radius: 18px;
354
  color: #6c757d;
355
  font-style: italic;
 
356
  }
357
  @keyframes fadeIn {
358
  from { opacity: 0; transform: translateY(10px); }
359
  to { opacity: 1; transform: translateY(0); }
360
  }
361
  @media (max-width: 768px) {
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
362
  .controls {
363
  flex-direction: column;
364
- align-items: stretch;
365
  }
366
  .control-group {
367
- justify-content: space-between;
 
 
 
 
 
368
  }
369
  .message-content {
370
- max-width: 85%;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
371
  }
372
  }
373
  </style>
374
  </head>
375
  <body>
 
376
  <div class="container">
377
- <div class="header">
378
- <h1>🤖 OpenWebUI</h1>
379
- <p>Chat with your local Ollama models through Hugging Face Spaces</p>
 
 
380
  </div>
381
-
382
- <div class="controls">
383
- <div class="control-group">
384
- <label for="model-select">Model:</label>
385
- <select id="model-select">
386
- <option value="">Select a model...</option>
387
- </select>
388
  </div>
389
- <div class="control-group">
390
- <label for="temperature">Temperature:</label>
391
- <input type="range" id="temperature" min="0" max="2" step="0.1" value="0.7">
392
- <span id="temp-value">0.7</span>
393
- </div>
394
- <div class="control-group">
395
- <label for="max-tokens">Max Tokens:</label>
396
- <input type="number" id="max-tokens" min="1" max="4096" value="2048">
 
 
 
 
 
 
 
 
 
 
 
397
  </div>
398
- </div>
399
-
400
- <div class="chat-container" id="chat-container">
401
- <div class="message assistant">
402
- <div class="message-avatar">AI</div>
403
- <div class="message-content">
404
- Hello! I'm your AI assistant powered by Ollama. How can I help you today?
405
  </div>
406
  </div>
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
407
  </div>
408
-
409
- <div class="typing-indicator" id="typing-indicator">
410
- AI is thinking...
411
- </div>
412
-
413
- <div class="input-container">
414
- <form class="input-form" id="chat-form">
415
- <textarea
416
- class="input-field"
417
- id="message-input"
418
- placeholder="Type your message here..."
419
- rows="1"
420
- ></textarea>
421
- <button type="submit" class="send-button" id="send-button">
422
- Send
423
- </button>
424
- </form>
425
- </div>
426
-
427
- <div class="status" id="status"></div>
428
  </div>
429
 
430
  <script type="module">
431
  import { Sandpack } from 'https://esm.sh/@codesandbox/sandpack-react@latest';
432
 
433
- let conversationHistory = [];
 
434
  let currentMessageDiv = null;
435
  let currentCodeBlocks = [];
436
 
437
  document.addEventListener('DOMContentLoaded', function() {
438
  loadModels();
 
439
  setupEventListeners();
440
  autoResizeTextarea();
441
  });
442
-
443
  function toggleTheme() {
444
  document.body.classList.toggle('dark-mode');
445
  const themeToggle = document.getElementById('theme-toggle');
446
  themeToggle.textContent = document.body.classList.contains('dark-mode') ? '☀️' : '🌙';
447
  localStorage.setItem('theme', document.body.classList.contains('dark-mode') ? 'dark' : 'light');
448
  }
449
-
450
  function loadTheme() {
451
  if (localStorage.getItem('theme') === 'dark') {
452
  document.body.classList.add('dark-mode');
453
  document.getElementById('theme-toggle').textContent = '☀️';
454
  }
455
  }
456
-
 
 
 
 
 
457
  async function loadModels() {
458
  const modelSelect = document.getElementById('model-select');
459
  modelSelect.innerHTML = '<option value="">Loading models...</option>';
@@ -485,7 +638,51 @@ HTML_TEMPLATE = '''
485
  showStatus('Failed to load models: ' + error.message, 'error');
486
  }
487
  }
488
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
489
  function setupEventListeners() {
490
  document.getElementById('chat-form').addEventListener('submit', handleSubmit);
491
  document.getElementById('temperature').addEventListener('input', function() {
@@ -493,13 +690,14 @@ HTML_TEMPLATE = '''
493
  });
494
  document.getElementById('message-input').addEventListener('input', autoResizeTextarea);
495
  document.getElementById('theme-toggle').addEventListener('click', toggleTheme);
 
496
  loadTheme();
497
  }
498
 
499
  function autoResizeTextarea() {
500
  const textarea = document.getElementById('message-input');
501
  textarea.style.height = 'auto';
502
- textarea.style.height = Math.min(textarea.scrollHeight, 120) + 'px';
503
  }
504
 
505
  async function handleSubmit(e) {
@@ -520,6 +718,12 @@ HTML_TEMPLATE = '''
520
  }
521
 
522
  addMessage(message, 'user');
 
 
 
 
 
 
523
  messageInput.value = '';
524
  autoResizeTextarea();
525
  showTypingIndicator(true);
@@ -566,11 +770,15 @@ HTML_TEMPLATE = '''
566
  }
567
 
568
  processCodeBlocks(currentMessageDiv, accumulatedResponse);
 
 
569
  showStatus(`Response generated using ${model}`, 'success');
570
  } catch (error) {
571
  showTypingIndicator(false);
572
  if (currentMessageDiv) {
573
  updateMessage(currentMessageDiv, 'Sorry, I encountered a network error.');
 
 
574
  } else {
575
  addMessage('Sorry, I encountered a network error.', 'assistant');
576
  }
@@ -578,7 +786,7 @@ HTML_TEMPLATE = '''
578
  }
579
  }
580
 
581
- function addMessage(content, sender, isStreaming = false) {
582
  const chatContainer = document.getElementById('chat-container');
583
  const messageDiv = document.createElement('div');
584
  messageDiv.className = `message ${sender}`;
@@ -596,8 +804,12 @@ HTML_TEMPLATE = '''
596
  chatContainer.appendChild(messageDiv);
597
  chatContainer.scrollTop = chatContainer.scrollHeight;
598
 
599
- if (!isStreaming) {
600
- conversationHistory.push({ role: sender, content: content });
 
 
 
 
601
  }
602
  return messageDiv;
603
  }
@@ -621,23 +833,19 @@ HTML_TEMPLATE = '''
621
  const code = match[2].trim();
622
  const startIndex = match.index;
623
 
624
- // Add text before the code block
625
  if (startIndex > lastIndex) {
626
  fragments.push({ type: 'text', content: content.slice(lastIndex, startIndex) });
627
  }
628
 
629
- // Add code block
630
  fragments.push({ type: 'code', language, content: code });
631
  currentCodeBlocks.push({ language, content: code });
632
  lastIndex = codeBlockRegex.lastIndex;
633
  }
634
 
635
- // Add remaining text
636
  if (lastIndex < content.length) {
637
  fragments.push({ type: 'text', content: content.slice(lastIndex) });
638
  }
639
 
640
- // Clear message content and rebuild with fragments
641
  messageContent.innerHTML = '';
642
  fragments.forEach((fragment, index) => {
643
  if (fragment.type === 'text') {
@@ -681,7 +889,7 @@ HTML_TEMPLATE = '''
681
  script.textContent = `
682
  import { Sandpack } from '@codesandbox/sandpack-react';
683
  import { createRoot } from 'react-dom';
684
- const root = createRoot(document.getElementById('sandpack-${index}'));
685
  root.render(
686
  React.createElement(Sandpack, {
687
  template: "${fragment.language === 'javascript' ? 'react' : fragment.language}",
@@ -699,14 +907,12 @@ HTML_TEMPLATE = '''
699
  `;
700
 
701
  const sandboxDiv = document.createElement('div');
702
- sandboxDiv.id = `sandpack-${index}`;
703
  codeContainer.appendChild(sandboxDiv);
704
  codeContainer.appendChild(script);
705
  messageContent.appendChild(codeContainer);
706
  }
707
  });
708
-
709
- conversationHistory.push({ role: 'assistant', content: content });
710
  }
711
 
712
  function showTypingIndicator(show) {
@@ -743,6 +949,7 @@ def chat():
743
  try:
744
  data = request.get_json()
745
  if not data or 'prompt' not in data or 'model' not in data:
 
746
  return jsonify({"status": "error", "message": "Prompt and model are required"}), 400
747
 
748
  prompt = data['prompt']
@@ -755,10 +962,15 @@ def chat():
755
 
756
  if stream and isinstance(result, requests.Response):
757
  def generate_stream():
758
- for chunk in result.iter_content(chunk_size=None):
759
- yield chunk
 
 
 
 
760
  return Response(generate_stream(), content_type='application/json')
761
  else:
 
762
  return jsonify(result), 200 if result["status"] == "success" else 500
763
  except Exception as e:
764
  logging.error(f"Chat endpoint error: {e}")
@@ -769,6 +981,7 @@ def get_models():
769
  """Get available models."""
770
  try:
771
  models = ollama_manager.list_models()
 
772
  return jsonify({
773
  "status": "success",
774
  "models": models,
@@ -794,7 +1007,7 @@ def health_check():
794
  "status": "unhealthy",
795
  "error": str(e),
796
  "timestamp": time.time()
797
- }), 500
798
 
799
  if __name__ == '__main__':
800
  app.run(host='0.0.0.0', port=7860, debug=False)
 
12
  app = Flask(__name__)
13
 
14
  # Configure logging with file output in a writable directory
15
+ log_dir = '/app/logs'
16
+ log_path = os.path.join(log_dir, 'openwebui.log')
17
+ try:
18
+ os.makedirs(log_dir, exist_ok=True)
19
+ log_handler = RotatingFileHandler(log_path, maxBytes=1000000, backupCount=5)
20
+ log_handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
21
+ logging.getLogger().addHandler(log_handler)
22
+ except Exception as e:
23
+ logging.getLogger().addHandler(logging.StreamHandler()) # Fallback to stderr
24
+ logging.error(f"Failed to initialize file logging: {e}")
25
  logging.getLogger().setLevel(logging.INFO)
26
 
27
  # Configuration
 
33
  class OllamaManager:
34
  def __init__(self, base_url: str):
35
  self.base_url = base_url.rstrip('/')
36
+ self.available_models = ALLOWED_MODELS
37
  self.refresh_models()
38
 
39
  def refresh_models(self) -> None:
 
43
  response.raise_for_status()
44
  data = response.json()
45
  models = [model['name'] for model in data.get('models', [])]
 
46
  self.available_models = [model for model in models if model in ALLOWED_MODELS]
47
  if not self.available_models:
48
  self.available_models = ALLOWED_MODELS
49
  logging.warning("No allowed models found in API response, using ALLOWED_MODELS")
50
  logging.info(f"Available models: {self.available_models}")
51
+ except requests.exceptions.ConnectionError as e:
52
+ logging.error(f"Connection error refreshing models: {e}")
53
+ self.available_models = ALLOWED_MODELS
54
+ except requests.exceptions.HTTPError as e:
55
+ logging.error(f"HTTP error refreshing models: {e}")
56
+ self.available_models = ALLOWED_MODELS
57
  except Exception as e:
58
+ logging.error(f"Unexpected error refreshing models: {e}")
59
  self.available_models = ALLOWED_MODELS
60
 
61
  def list_models(self) -> List[str]:
 
65
  def generate(self, model_name: str, prompt: str, stream: bool = False, **kwargs) -> Any:
66
  """Generate text using a model, with optional streaming."""
67
  if model_name not in self.available_models:
68
+ logging.warning(f"Attempted to generate with unavailable model: {model_name}")
69
  return {"status": "error", "message": f"Model {model_name} not available"}
70
 
71
  try:
 
83
  response = requests.post(f"{self.base_url}/api/generate", json=payload, timeout=120)
84
  response.raise_for_status()
85
  data = response.json()
86
+ logging.info(f"Generated response with model {model_name}")
87
  return {
88
  "status": "success",
89
  "response": data.get('response', ''),
90
  "model": model_name,
91
  "usage": data.get('usage', {})
92
  }
93
+ except requests.exceptions.ConnectionError as e:
94
+ logging.error(f"Connection error generating response: {e}")
95
+ return {"status": "error", "message": f"Connection error: {str(e)}"}
96
+ except requests.exceptions.HTTPError as e:
97
+ logging.error(f"HTTP error generating response: {e}")
98
+ return {"status": "error", "message": f"HTTP error: {str(e)}"}
99
  except Exception as e:
100
+ logging.error(f"Unexpected error generating response: {e}")
101
  return {"status": "error", "message": str(e)}
102
 
103
  def health_check(self) -> Dict[str, Any]:
 
105
  try:
106
  response = requests.get(f"{self.base_url}/api/tags", timeout=10)
107
  response.raise_for_status()
108
+ logging.info("Health check successful")
109
  return {"status": "healthy", "available_models": len(self.available_models)}
110
+ except requests.exceptions.ConnectionError as e:
111
+ logging.error(f"Health check connection error: {e}")
112
+ return {"status": "unhealthy", "error": f"Connection error: {str(e)}"}
113
+ except requests.exceptions.HTTPError as e:
114
+ logging.error(f"Health check HTTP error: {e}")
115
+ return {"status": "unhealthy", "error": f"HTTP error: {str(e)}"}
116
  except Exception as e:
117
+ logging.error(f"Health check unexpected error: {e}")
118
  return {"status": "unhealthy", "error": str(e)}
119
 
120
  # Initialize Ollama manager
121
  ollama_manager = OllamaManager(OLLAMA_BASE_URL)
122
 
123
+ # HTML template for the chat interface with comprehensive, mobile-optimized UI
124
  HTML_TEMPLATE = '''
125
  <!DOCTYPE html>
126
  <html lang="en">
 
140
  </script>
141
  <style>
142
  :root {
143
+ --primary-color: #5a4bff;
144
+ --secondary-color: #7b3fe4;
145
  --text-color: #333;
146
+ --bg-color: #f8fafc;
147
  --message-bg-user: var(--primary-color);
148
  --message-bg-assistant: white;
149
  --avatar-user: var(--primary-color);
150
+ --avatar-assistant: #2ea44f;
151
+ --border-color: #e2e8f0;
152
  --input-bg: white;
153
+ --sidebar-bg: #ffffff;
154
+ --sidebar-border: #e2e8f0;
155
  }
156
  .dark-mode {
157
  --primary-color: #3b4a8c;
158
  --secondary-color: #4a2e6b;
159
+ --text-color: #e2e8f0;
160
+ --bg-color: #1a202c;
161
  --message-bg-user: var(--primary-color);
162
+ --message-bg-assistant: #2d3748;
163
  --avatar-user: var(--primary-color);
164
+ --avatar-assistant: #276749;
165
+ --border-color: #4a5568;
166
+ --input-bg: #2d3748;
167
+ --sidebar-bg: #2d3748;
168
+ --sidebar-border: #4a5568;
169
  }
170
  * {
171
  margin: 0;
 
177
  background: linear-gradient(135deg, var(--primary-color) 0%, var(--secondary-color) 100%);
178
  color: var(--text-color);
179
  min-height: 100vh;
180
+ overflow-x: hidden;
181
  }
182
  .container {
183
+ display: flex;
184
+ max-width: 100%;
185
+ min-height: 100vh;
186
  background: var(--bg-color);
 
 
 
 
187
  }
188
+ .sidebar {
189
+ width: 250px;
190
+ background: var(--sidebar-bg);
191
+ border-right: 1px solid var(--sidebar-border);
192
+ padding: 20px;
193
+ position: fixed;
194
+ height: 100%;
195
+ transform: translateX(-100%);
196
+ transition: transform 0.3s ease;
197
+ z-index: 1000;
198
+ }
199
+ .sidebar.open {
200
+ transform: translateX(0);
201
+ }
202
+ .sidebar-toggle {
203
+ position: fixed;
204
  top: 10px;
205
+ left: 10px;
206
+ background: var(--primary-color);
207
+ color: white;
208
  border: none;
209
+ padding: 10px;
210
+ border-radius: 8px;
211
+ cursor: pointer;
212
+ z-index: 1100;
213
+ }
214
+ .sidebar h2 {
215
+ font-size: 1.5rem;
216
+ margin-bottom: 20px;
217
+ }
218
+ .chat-history {
219
+ list-style: none;
220
+ overflow-y: auto;
221
+ max-height: calc(100vh - 100px);
222
+ }
223
+ .chat-history-item {
224
+ padding: 10px;
225
+ border-radius: 8px;
226
+ margin-bottom: 10px;
227
  cursor: pointer;
228
+ transition: background 0.2s;
229
+ }
230
+ .chat-history-item:hover {
231
+ background: var(--border-color);
232
+ }
233
+ .chat-history-item.active {
234
+ background: var(--primary-color);
235
  color: white;
236
  }
237
+ .main-content {
238
+ flex: 1;
239
+ display: flex;
240
+ flex-direction: column;
241
+ min-height: 100vh;
242
+ }
243
  .header {
244
  background: linear-gradient(135deg, var(--primary-color) 0%, var(--secondary-color) 100%);
245
  color: white;
246
+ padding: 20px;
247
  text-align: center;
248
+ position: relative;
249
+ }
250
+ .theme-toggle {
251
+ position: absolute;
252
+ top: 20px;
253
+ right: 20px;
254
+ background: none;
255
+ border: none;
256
+ cursor: pointer;
257
+ font-size: 1.5rem;
258
+ color: white;
259
  }
260
  .header h1 {
261
+ font-size: 2rem;
262
  margin-bottom: 10px;
263
  font-weight: 700;
264
  }
265
  .header p {
266
+ font-size: 1rem;
267
  opacity: 0.9;
268
  }
269
  .controls {
270
+ padding: 15px 20px;
271
  background: var(--bg-color);
272
  border-bottom: 1px solid var(--border-color);
273
  display: flex;
274
+ gap: 10px;
 
275
  flex-wrap: wrap;
276
+ justify-content: center;
277
  }
278
  .control-group {
279
  display: flex;
280
  align-items: center;
281
  gap: 8px;
282
+ flex: 1;
283
+ min-width: 200px;
284
  }
285
  .control-group label {
286
  font-weight: 600;
 
289
  }
290
  .control-group select,
291
  .control-group input {
292
+ flex: 1;
293
+ padding: 10px;
294
  border: 2px solid var(--border-color);
295
  border-radius: 8px;
296
  font-size: 14px;
 
304
  border-color: var(--primary-color);
305
  }
306
  .chat-container {
307
+ flex: 1;
308
  overflow-y: auto;
309
  padding: 20px;
310
  background: var(--bg-color);
 
339
  background: var(--message-bg-assistant);
340
  padding: 15px 20px;
341
  border-radius: 18px;
342
+ max-width: 80%;
343
  box-shadow: 0 2px 10px rgba(0,0,0,0.1);
344
  line-height: 1.5;
345
  color: var(--text-color);
 
363
  cursor: pointer;
364
  font-size: 12px;
365
  color: var(--text-color);
366
+ transition: background 0.2s;
367
  }
368
  .code-button:hover {
369
  background: rgba(0,0,0,0.2);
 
375
  overflow: hidden;
376
  }
377
  .input-container {
378
+ padding: 15px 20px;
379
  background: var(--bg-color);
380
  border-top: 1px solid var(--border-color);
381
  }
382
  .input-form {
383
  display: flex;
384
+ gap: 10px;
385
+ align-items: center;
386
  }
387
  .input-field {
388
  flex: 1;
389
+ padding: 12px 15px;
390
  border: 2px solid var(--border-color);
391
  border-radius: 25px;
392
  font-size: 16px;
393
  transition: border-color 0.3s;
394
  resize: none;
395
  min-height: 50px;
396
+ max-height: 150px;
397
  background: var(--input-bg);
398
  color: var(--text-color);
399
  }
 
402
  border-color: var(--primary-color);
403
  }
404
  .send-button {
405
+ padding: 12px 20px;
406
  background: linear-gradient(135deg, var(--primary-color) 0%, var(--secondary-color) 100%);
407
  color: white;
408
  border: none;
 
411
  font-weight: 600;
412
  cursor: pointer;
413
  transition: transform 0.2s;
414
+ min-width: 80px;
415
  }
416
  .send-button:hover {
417
  transform: translateY(-2px);
 
440
  border-radius: 18px;
441
  color: #6c757d;
442
  font-style: italic;
443
+ margin: 20px;
444
  }
445
  @keyframes fadeIn {
446
  from { opacity: 0; transform: translateY(10px); }
447
  to { opacity: 1; transform: translateY(0); }
448
  }
449
  @media (max-width: 768px) {
450
+ .container {
451
+ flex-direction: column;
452
+ }
453
+ .sidebar {
454
+ width: 100%;
455
+ height: auto;
456
+ max-height: 80vh;
457
+ position: fixed;
458
+ top: 0;
459
+ left: 0;
460
+ transform: translateY(-100%);
461
+ border-right: none;
462
+ border-bottom: 1px solid var(--sidebar-border);
463
+ }
464
+ .sidebar.open {
465
+ transform: translateY(0);
466
+ }
467
+ .sidebar-toggle {
468
+ top: 10px;
469
+ left: 10px;
470
+ z-index: 1100;
471
+ }
472
+ .main-content {
473
+ margin-top: 60px;
474
+ }
475
  .controls {
476
  flex-direction: column;
477
+ gap: 15px;
478
  }
479
  .control-group {
480
+ flex-direction: column;
481
+ align-items: stretch;
482
+ }
483
+ .control-group select,
484
+ .control-group input {
485
+ width: 100%;
486
  }
487
  .message-content {
488
+ max-width: 90%;
489
+ }
490
+ .header {
491
+ padding: 15px;
492
+ }
493
+ .header h1 {
494
+ font-size: 1.8rem;
495
+ }
496
+ .header p {
497
+ font-size: 0.9rem;
498
+ }
499
+ .input-container {
500
+ padding: 10px 15px;
501
+ }
502
+ .send-button {
503
+ padding: 10px 15px;
504
+ min-width: 60px;
505
  }
506
  }
507
  </style>
508
  </head>
509
  <body>
510
+ <button class="sidebar-toggle" id="sidebar-toggle">☰</button>
511
  <div class="container">
512
+ <div class="sidebar" id="sidebar">
513
+ <h2>Chat History</h2>
514
+ <ul class="chat-history" id="chat-history">
515
+ <!-- Chat history items will be populated here -->
516
+ </ul>
517
  </div>
518
+ <div class="main-content">
519
+ <div class="header">
520
+ <button class="theme-toggle" id="theme-toggle">🌙</button>
521
+ <h1>🤖 OpenWebUI</h1>
522
+ <p>Chat with AI models powered by Ollama on Hugging Face Spaces</p>
 
 
523
  </div>
524
+
525
+ <div class="controls">
526
+ <div class="control-group">
527
+ <label for="model-select">Model:</label>
528
+ <select id="model-select">
529
+ <option value="">Select a model...</option>
530
+ </select>
531
+ </div>
532
+ <div class="control-group">
533
+ <label for="temperature">Temperature:</label>
534
+ <div style="flex: 1; display: flex; align-items: center; gap: 8px;">
535
+ <input type="range" id="temperature" min="0" max="2" step="0.1" value="0.7">
536
+ <span id="temp-value">0.7</span>
537
+ </div>
538
+ </div>
539
+ <div class="control-group">
540
+ <label for="max-tokens">Max Tokens:</label>
541
+ <input type="number" id="max-tokens" min="1" max="4096" value="2048">
542
+ </div>
543
  </div>
544
+
545
+ <div class="chat-container" id="chat-container">
546
+ <div class="message assistant">
547
+ <div class="message-avatar">AI</div>
548
+ <div class="message-content">
549
+ Hello! I'm your AI assistant powered by Ollama. How can I help you today?
550
+ </div>
551
  </div>
552
  </div>
553
+
554
+ <div class="typing-indicator" id="typing-indicator">
555
+ AI is thinking...
556
+ </div>
557
+
558
+ <div class="input-container">
559
+ <form class="input-form" id="chat-form">
560
+ <textarea
561
+ class="input-field"
562
+ id="message-input"
563
+ placeholder="Type your message here..."
564
+ rows="1"
565
+ ></textarea>
566
+ <button type="submit" class="send-button" id="send-button">
567
+ Send
568
+ </button>
569
+ </form>
570
+ </div>
571
+
572
+ <div class="status" id="status"></div>
573
  </div>
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
574
  </div>
575
 
576
  <script type="module">
577
  import { Sandpack } from 'https://esm.sh/@codesandbox/sandpack-react@latest';
578
 
579
+ let conversationHistory = JSON.parse(localStorage.getItem('chatHistory')) || [];
580
+ let currentConversationId = null;
581
  let currentMessageDiv = null;
582
  let currentCodeBlocks = [];
583
 
584
  document.addEventListener('DOMContentLoaded', function() {
585
  loadModels();
586
+ loadChatHistory();
587
  setupEventListeners();
588
  autoResizeTextarea();
589
  });
590
+
591
  function toggleTheme() {
592
  document.body.classList.toggle('dark-mode');
593
  const themeToggle = document.getElementById('theme-toggle');
594
  themeToggle.textContent = document.body.classList.contains('dark-mode') ? '☀️' : '🌙';
595
  localStorage.setItem('theme', document.body.classList.contains('dark-mode') ? 'dark' : 'light');
596
  }
597
+
598
  function loadTheme() {
599
  if (localStorage.getItem('theme') === 'dark') {
600
  document.body.classList.add('dark-mode');
601
  document.getElementById('theme-toggle').textContent = '☀️';
602
  }
603
  }
604
+
605
+ function toggleSidebar() {
606
+ const sidebar = document.getElementById('sidebar');
607
+ sidebar.classList.toggle('open');
608
+ }
609
+
610
  async function loadModels() {
611
  const modelSelect = document.getElementById('model-select');
612
  modelSelect.innerHTML = '<option value="">Loading models...</option>';
 
638
  showStatus('Failed to load models: ' + error.message, 'error');
639
  }
640
  }
641
+
642
+ function loadChatHistory() {
643
+ const chatHistoryList = document.getElementById('chat-history');
644
+ chatHistoryList.innerHTML = '';
645
+ conversationHistory.forEach((conv, index) => {
646
+ const li = document.createElement('li');
647
+ li.className = 'chat-history-item';
648
+ li.textContent = `Chat ${index + 1} - ${new Date(conv.timestamp).toLocaleString()}`;
649
+ li.dataset.convId = index;
650
+ li.addEventListener('click', () => loadConversation(index));
651
+ chatHistoryList.appendChild(li);
652
+ });
653
+ if (conversationHistory.length > 0) {
654
+ loadConversation(conversationHistory.length - 1);
655
+ }
656
+ }
657
+
658
// Render a previously saved conversation into the chat pane and mark its
// sidebar entry as active.
//
// convId: index into conversationHistory; becomes the new currentConversationId.
function loadConversation(convId) {
    const conversation = conversationHistory[convId];
    if (!conversation) {
        // Stale or out-of-range id (e.g. history cleared elsewhere) — the
        // original would throw on conversation.messages; ignore instead.
        return;
    }
    currentConversationId = convId;
    const chatContainer = document.getElementById('chat-container');
    chatContainer.innerHTML = '';
    conversation.messages.forEach(msg => {
        // save=false: these messages are already persisted; re-saving would duplicate them.
        const messageDiv = addMessage(msg.content, msg.role, false, false);
        if (msg.role === 'assistant') {
            processCodeBlocks(messageDiv, msg.content);
        }
    });
    // Highlight the matching sidebar entry; guard against the list not being
    // rendered yet (historyItems[convId] was previously dereferenced unchecked).
    const historyItems = document.querySelectorAll('.chat-history-item');
    historyItems.forEach(item => item.classList.remove('active'));
    if (historyItems[convId]) {
        historyItems[convId].classList.add('active');
    }
}
673
+
674
// Ensure a current conversation record exists, persist the full history to
// localStorage under 'chatHistory', and refresh the sidebar listing.
function saveConversation() {
    if (currentConversationId === null) {
        const fresh = { timestamp: Date.now(), messages: [] };
        // Array.push returns the new length, so length - 1 is the new index.
        currentConversationId = conversationHistory.push(fresh) - 1;
    }
    localStorage.setItem('chatHistory', JSON.stringify(conversationHistory));
    loadChatHistory();
}
685
+
686
  function setupEventListeners() {
687
  document.getElementById('chat-form').addEventListener('submit', handleSubmit);
688
  document.getElementById('temperature').addEventListener('input', function() {
 
690
  });
691
  document.getElementById('message-input').addEventListener('input', autoResizeTextarea);
692
  document.getElementById('theme-toggle').addEventListener('click', toggleTheme);
693
+ document.getElementById('sidebar-toggle').addEventListener('click', toggleSidebar);
694
  loadTheme();
695
  }
696
 
697
// Grow the message textarea to fit its content, capped at 150px.
// Resetting to 'auto' first lets scrollHeight shrink when text is deleted.
function autoResizeTextarea() {
    const inputEl = document.getElementById('message-input');
    inputEl.style.height = 'auto';
    const cappedHeight = Math.min(inputEl.scrollHeight, 150);
    inputEl.style.height = `${cappedHeight}px`;
}
702
 
703
  async function handleSubmit(e) {
 
718
  }
719
 
720
  addMessage(message, 'user');
721
+ if (currentConversationId === null) {
722
+ saveConversation();
723
+ }
724
+ conversationHistory[currentConversationId].messages.push({ role: 'user', content: message });
725
+ localStorage.setItem('chatHistory', JSON.stringify(conversationHistory));
726
+
727
  messageInput.value = '';
728
  autoResizeTextarea();
729
  showTypingIndicator(true);
 
770
  }
771
 
772
  processCodeBlocks(currentMessageDiv, accumulatedResponse);
773
+ conversationHistory[currentConversationId].messages.push({ role: 'assistant', content: accumulatedResponse });
774
+ localStorage.setItem('chatHistory', JSON.stringify(conversationHistory));
775
  showStatus(`Response generated using ${model}`, 'success');
776
  } catch (error) {
777
  showTypingIndicator(false);
778
  if (currentMessageDiv) {
779
  updateMessage(currentMessageDiv, 'Sorry, I encountered a network error.');
780
+ conversationHistory[currentConversationId].messages.push({ role: 'assistant', content: 'Sorry, I encountered a network error.' });
781
+ localStorage.setItem('chatHistory', JSON.stringify(conversationHistory));
782
  } else {
783
  addMessage('Sorry, I encountered a network error.', 'assistant');
784
  }
 
786
  }
787
  }
788
 
789
+ function addMessage(content, sender, isStreaming = false, save = true) {
790
  const chatContainer = document.getElementById('chat-container');
791
  const messageDiv = document.createElement('div');
792
  messageDiv.className = `message ${sender}`;
 
804
  chatContainer.appendChild(messageDiv);
805
  chatContainer.scrollTop = chatContainer.scrollHeight;
806
 
807
+ if (!isStreaming && save) {
808
+ if (currentConversationId === null) {
809
+ saveConversation();
810
+ }
811
+ conversationHistory[currentConversationId].messages.push({ role: sender, content: content });
812
+ localStorage.setItem('chatHistory', JSON.stringify(conversationHistory));
813
  }
814
  return messageDiv;
815
  }
 
833
  const code = match[2].trim();
834
  const startIndex = match.index;
835
 
 
836
  if (startIndex > lastIndex) {
837
  fragments.push({ type: 'text', content: content.slice(lastIndex, startIndex) });
838
  }
839
 
 
840
  fragments.push({ type: 'code', language, content: code });
841
  currentCodeBlocks.push({ language, content: code });
842
  lastIndex = codeBlockRegex.lastIndex;
843
  }
844
 
 
845
  if (lastIndex < content.length) {
846
  fragments.push({ type: 'text', content: content.slice(lastIndex) });
847
  }
848
 
 
849
  messageContent.innerHTML = '';
850
  fragments.forEach((fragment, index) => {
851
  if (fragment.type === 'text') {
 
889
  script.textContent = `
890
  import { Sandpack } from '@codesandbox/sandpack-react';
891
  import { createRoot } from 'react-dom';
892
+ const root = createRoot(document.getElementById('sandpack-${currentConversationId}-${index}'));
893
  root.render(
894
  React.createElement(Sandpack, {
895
  template: "${fragment.language === 'javascript' ? 'react' : fragment.language}",
 
907
  `;
908
 
909
  const sandboxDiv = document.createElement('div');
910
+ sandboxDiv.id = `sandpack-${currentConversationId}-${index}`;
911
  codeContainer.appendChild(sandboxDiv);
912
  codeContainer.appendChild(script);
913
  messageContent.appendChild(codeContainer);
914
  }
915
  });
 
 
916
  }
917
 
918
  function showTypingIndicator(show) {
 
949
  try:
950
  data = request.get_json()
951
  if not data or 'prompt' not in data or 'model' not in data:
952
+ logging.warning("Chat request missing 'prompt' or 'model' field")
953
  return jsonify({"status": "error", "message": "Prompt and model are required"}), 400
954
 
955
  prompt = data['prompt']
 
962
 
963
  if stream and isinstance(result, requests.Response):
964
  def generate_stream():
965
+ try:
966
+ for chunk in result.iter_content(chunk_size=None):
967
+ yield chunk
968
+ except Exception as e:
969
+ logging.error(f"Streaming error: {e}")
970
+ yield json.dumps({"status": "error", "message": str(e)}).encode()
971
  return Response(generate_stream(), content_type='application/json')
972
  else:
973
+ logging.info(f"Non-streaming chat response generated with model {model}")
974
  return jsonify(result), 200 if result["status"] == "success" else 500
975
  except Exception as e:
976
  logging.error(f"Chat endpoint error: {e}")
 
981
  """Get available models."""
982
  try:
983
  models = ollama_manager.list_models()
984
+ logging.info(f"Returning models: {models}")
985
  return jsonify({
986
  "status": "success",
987
  "models": models,
 
1007
  "status": "unhealthy",
1008
  "error": str(e),
1009
  "timestamp": time.time()
1010
+ }), 503
1011
 
1012
if __name__ == '__main__':
    # Bind to all interfaces on port 7860 (the Hugging Face Spaces convention);
    # debug stays off so the Werkzeug debugger is never exposed in production.
    app.run(host='0.0.0.0', port=7860, debug=False)