tommytracx committed
Commit 0cd664c · verified · Parent(s): 9ed65ee

Update app.py

Files changed (1): app.py (+115 -322)
app.py CHANGED
@@ -1,5 +1,5 @@
1
  # app.py
2
- from flask import Flask, request, jsonify, Response
3
  import os
4
  import requests
5
  import json
@@ -11,8 +11,10 @@ import re
11
 
12
  app = Flask(__name__)
13
 
14
- # Configure logging with file output
15
- log_handler = RotatingFileHandler('/home/ollama/openwebui.log', maxBytes=1000000, backupCount=5)
16
  log_handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
17
  logging.getLogger().addHandler(log_handler)
18
  logging.getLogger().setLevel(logging.INFO)
@@ -42,14 +44,8 @@ class OllamaManager:
42
  self.available_models = ALLOWED_MODELS
43
  logging.warning("No allowed models found in API response, using ALLOWED_MODELS")
44
  logging.info(f"Available models: {self.available_models}")
45
- except requests.exceptions.ConnectionError as e:
46
- logging.error(f"Connection error refreshing models: {e}")
47
- self.available_models = ALLOWED_MODELS
48
- except requests.exceptions.HTTPError as e:
49
- logging.error(f"HTTP error refreshing models: {e}")
50
- self.available_models = ALLOWED_MODELS
51
  except Exception as e:
52
- logging.error(f"Unexpected error refreshing models: {e}")
53
  self.available_models = ALLOWED_MODELS
54
 
55
  def list_models(self) -> List[str]:
@@ -59,7 +55,6 @@ class OllamaManager:
59
  def generate(self, model_name: str, prompt: str, stream: bool = False, **kwargs) -> Any:
60
  """Generate text using a model, with optional streaming."""
61
  if model_name not in self.available_models:
62
- logging.warning(f"Attempted to generate with unavailable model: {model_name}")
63
  return {"status": "error", "message": f"Model {model_name} not available"}
64
 
65
  try:
@@ -77,21 +72,14 @@ class OllamaManager:
77
  response = requests.post(f"{self.base_url}/api/generate", json=payload, timeout=120)
78
  response.raise_for_status()
79
  data = response.json()
80
- logging.info(f"Generated response with model {model_name}")
81
  return {
82
  "status": "success",
83
  "response": data.get('response', ''),
84
  "model": model_name,
85
  "usage": data.get('usage', {})
86
  }
87
- except requests.exceptions.ConnectionError as e:
88
- logging.error(f"Connection error generating response: {e}")
89
- return {"status": "error", "message": f"Connection error: {str(e)}"}
90
- except requests.exceptions.HTTPError as e:
91
- logging.error(f"HTTP error generating response: {e}")
92
- return {"status": "error", "message": f"HTTP error: {str(e)}"}
93
  except Exception as e:
94
- logging.error(f"Unexpected error generating response: {e}")
95
  return {"status": "error", "message": str(e)}
96
 
97
  def health_check(self) -> Dict[str, Any]:
@@ -99,22 +87,15 @@ class OllamaManager:
99
  try:
100
  response = requests.get(f"{self.base_url}/api/tags", timeout=10)
101
  response.raise_for_status()
102
- logging.info("Health check successful")
103
  return {"status": "healthy", "available_models": len(self.available_models)}
104
- except requests.exceptions.ConnectionError as e:
105
- logging.error(f"Health check connection error: {e}")
106
- return {"status": "unhealthy", "error": f"Connection error: {str(e)}"}
107
- except requests.exceptions.HTTPError as e:
108
- logging.error(f"Health check HTTP error: {e}")
109
- return {"status": "unhealthy", "error": f"HTTP error: {str(e)}"}
110
  except Exception as e:
111
- logging.error(f"Health check unexpected error: {e}")
112
  return {"status": "unhealthy", "error": str(e)}
113
 
114
  # Initialize Ollama manager
115
  ollama_manager = OllamaManager(OLLAMA_BASE_URL)
116
 
117
- # HTML template for the chat interface with comprehensive, mobile-optimized UI
118
  HTML_TEMPLATE = '''
119
  <!DOCTYPE html>
120
  <html lang="en">
@@ -134,32 +115,28 @@ HTML_TEMPLATE = '''
134
  </script>
135
  <style>
136
  :root {
137
- --primary-color: #5a4bff;
138
- --secondary-color: #7b3fe4;
139
  --text-color: #333;
140
- --bg-color: #f8fafc;
141
  --message-bg-user: var(--primary-color);
142
  --message-bg-assistant: white;
143
  --avatar-user: var(--primary-color);
144
- --avatar-assistant: #2ea44f;
145
- --border-color: #e2e8f0;
146
  --input-bg: white;
147
- --sidebar-bg: #ffffff;
148
- --sidebar-border: #e2e8f0;
149
  }
150
  .dark-mode {
151
  --primary-color: #3b4a8c;
152
  --secondary-color: #4a2e6b;
153
- --text-color: #e2e8f0;
154
- --bg-color: #1a202c;
155
  --message-bg-user: var(--primary-color);
156
- --message-bg-assistant: #2d3748;
157
  --avatar-user: var(--primary-color);
158
- --avatar-assistant: #276749;
159
- --border-color: #4a5568;
160
- --input-bg: #2d3748;
161
- --sidebar-bg: #2d3748;
162
- --sidebar-border: #4a5568;
163
  }
164
  * {
165
  margin: 0;
@@ -171,110 +148,55 @@ HTML_TEMPLATE = '''
171
  background: linear-gradient(135deg, var(--primary-color) 0%, var(--secondary-color) 100%);
172
  color: var(--text-color);
173
  min-height: 100vh;
174
- overflow-x: hidden;
175
  }
176
  .container {
177
- display: flex;
178
- max-width: 100%;
179
- min-height: 100vh;
180
  background: var(--bg-color);
 
181
  }
182
- .sidebar {
183
- width: 250px;
184
- background: var(--sidebar-bg);
185
- border-right: 1px solid var(--sidebar-border);
186
- padding: 20px;
187
- position: fixed;
188
- height: 100%;
189
- transform: translateX(-100%);
190
- transition: transform 0.3s ease;
191
- z-index: 1000;
192
- }
193
- .sidebar.open {
194
- transform: translateX(0);
195
- }
196
- .sidebar-toggle {
197
- position: fixed;
198
  top: 10px;
199
- left: 10px;
200
- background: var(--primary-color);
201
- color: white;
202
  border: none;
203
- padding: 10px;
204
- border-radius: 8px;
205
- cursor: pointer;
206
- z-index: 1100;
207
- }
208
- .sidebar h2 {
209
- font-size: 1.5rem;
210
- margin-bottom: 20px;
211
- }
212
- .chat-history {
213
- list-style: none;
214
- overflow-y: auto;
215
- max-height: calc(100vh - 100px);
216
- }
217
- .chat-history-item {
218
- padding: 10px;
219
- border-radius: 8px;
220
- margin-bottom: 10px;
221
  cursor: pointer;
222
- transition: background 0.2s;
223
- }
224
- .chat-history-item:hover {
225
- background: var(--border-color);
226
- }
227
- .chat-history-item.active {
228
- background: var(--primary-color);
229
  color: white;
230
  }
231
- .main-content {
232
- flex: 1;
233
- display: flex;
234
- flex-direction: column;
235
- min-height: 100vh;
236
- }
237
  .header {
238
  background: linear-gradient(135deg, var(--primary-color) 0%, var(--secondary-color) 100%);
239
  color: white;
240
- padding: 20px;
241
  text-align: center;
242
- position: relative;
243
- }
244
- .theme-toggle {
245
- position: absolute;
246
- top: 20px;
247
- right: 20px;
248
- background: none;
249
- border: none;
250
- cursor: pointer;
251
- font-size: 1.5rem;
252
- color: white;
253
  }
254
  .header h1 {
255
- font-size: 2rem;
256
  margin-bottom: 10px;
257
  font-weight: 700;
258
  }
259
  .header p {
260
- font-size: 1rem;
261
  opacity: 0.9;
262
  }
263
  .controls {
264
- padding: 15px 20px;
265
  background: var(--bg-color);
266
  border-bottom: 1px solid var(--border-color);
267
  display: flex;
268
- gap: 10px;
 
269
  flex-wrap: wrap;
270
- justify-content: center;
271
  }
272
  .control-group {
273
  display: flex;
274
  align-items: center;
275
  gap: 8px;
276
- flex: 1;
277
- min-width: 200px;
278
  }
279
  .control-group label {
280
  font-weight: 600;
@@ -283,8 +205,7 @@ HTML_TEMPLATE = '''
283
  }
284
  .control-group select,
285
  .control-group input {
286
- flex: 1;
287
- padding: 10px;
288
  border: 2px solid var(--border-color);
289
  border-radius: 8px;
290
  font-size: 14px;
@@ -298,7 +219,7 @@ HTML_TEMPLATE = '''
298
  border-color: var(--primary-color);
299
  }
300
  .chat-container {
301
- flex: 1;
302
  overflow-y: auto;
303
  padding: 20px;
304
  background: var(--bg-color);
@@ -333,7 +254,7 @@ HTML_TEMPLATE = '''
333
  background: var(--message-bg-assistant);
334
  padding: 15px 20px;
335
  border-radius: 18px;
336
- max-width: 80%;
337
  box-shadow: 0 2px 10px rgba(0,0,0,0.1);
338
  line-height: 1.5;
339
  color: var(--text-color);
@@ -357,7 +278,6 @@ HTML_TEMPLATE = '''
357
  cursor: pointer;
358
  font-size: 12px;
359
  color: var(--text-color);
360
- transition: background 0.2s;
361
  }
362
  .code-button:hover {
363
  background: rgba(0,0,0,0.2);
@@ -369,25 +289,24 @@ HTML_TEMPLATE = '''
369
  overflow: hidden;
370
  }
371
  .input-container {
372
- padding: 15px 20px;
373
  background: var(--bg-color);
374
  border-top: 1px solid var(--border-color);
375
  }
376
  .input-form {
377
  display: flex;
378
- gap: 10px;
379
- align-items: center;
380
  }
381
  .input-field {
382
  flex: 1;
383
- padding: 12px 15px;
384
  border: 2px solid var(--border-color);
385
  border-radius: 25px;
386
  font-size: 16px;
387
  transition: border-color 0.3s;
388
  resize: none;
389
  min-height: 50px;
390
- max-height: 150px;
391
  background: var(--input-bg);
392
  color: var(--text-color);
393
  }
@@ -396,7 +315,7 @@ HTML_TEMPLATE = '''
396
  border-color: var(--primary-color);
397
  }
398
  .send-button {
399
- padding: 12px 20px;
400
  background: linear-gradient(135deg, var(--primary-color) 0%, var(--secondary-color) 100%);
401
  color: white;
402
  border: none;
@@ -405,7 +324,7 @@ HTML_TEMPLATE = '''
405
  font-weight: 600;
406
  cursor: pointer;
407
  transition: transform 0.2s;
408
- min-width: 80px;
409
  }
410
  .send-button:hover {
411
  transform: translateY(-2px);
@@ -434,173 +353,107 @@ HTML_TEMPLATE = '''
434
  border-radius: 18px;
435
  color: #6c757d;
436
  font-style: italic;
437
- margin: 20px;
438
  }
439
  @keyframes fadeIn {
440
  from { opacity: 0; transform: translateY(10px); }
441
  to { opacity: 1; transform: translateY(0); }
442
  }
443
  @media (max-width: 768px) {
444
- .container {
445
- flex-direction: column;
446
- }
447
- .sidebar {
448
- width: 100%;
449
- height: auto;
450
- max-height: 80vh;
451
- position: fixed;
452
- top: 0;
453
- left: 0;
454
- transform: translateY(-100%);
455
- border-right: none;
456
- border-bottom: 1px solid var(--sidebar-border);
457
- }
458
- .sidebar.open {
459
- transform: translateY(0);
460
- }
461
- .sidebar-toggle {
462
- top: 10px;
463
- left: 10px;
464
- z-index: 1100;
465
- }
466
- .main-content {
467
- margin-top: 60px;
468
- }
469
  .controls {
470
- flex-direction: column;
471
- gap: 15px;
472
- }
473
- .control-group {
474
  flex-direction: column;
475
  align-items: stretch;
476
  }
477
- .control-group select,
478
- .control-group input {
479
- width: 100%;
480
  }
481
  .message-content {
482
- max-width: 90%;
483
- }
484
- .header {
485
- padding: 15px;
486
- }
487
- .header h1 {
488
- font-size: 1.8rem;
489
- }
490
- .header p {
491
- font-size: 0.9rem;
492
- }
493
- .input-container {
494
- padding: 10px 15px;
495
- }
496
- .send-button {
497
- padding: 10px 15px;
498
- min-width: 60px;
499
  }
500
  }
501
  </style>
502
  </head>
503
  <body>
504
- <button class="sidebar-toggle" id="sidebar-toggle">☰</button>
505
  <div class="container">
506
- <div class="sidebar" id="sidebar">
507
- <h2>Chat History</h2>
508
- <ul class="chat-history" id="chat-history">
509
- <!-- Chat history items will be populated here -->
510
- </ul>
511
  </div>
512
- <div class="main-content">
513
- <div class="header">
514
- <button class="theme-toggle" id="theme-toggle">🌙</button>
515
- <h1>🤖 OpenWebUI</h1>
516
- <p>Chat with AI models powered by Ollama on Hugging Face Spaces</p>
517
- </div>
518
-
519
- <div class="controls">
520
- <div class="control-group">
521
- <label for="model-select">Model:</label>
522
- <select id="model-select">
523
- <option value="">Select a model...</option>
524
- </select>
525
- </div>
526
- <div class="control-group">
527
- <label for="temperature">Temperature:</label>
528
- <div style="flex: 1; display: flex; align-items: center; gap: 8px;">
529
- <input type="range" id="temperature" min="0" max="2" step="0.1" value="0.7">
530
- <span id="temp-value">0.7</span>
531
- </div>
532
- </div>
533
- <div class="control-group">
534
- <label for="max-tokens">Max Tokens:</label>
535
- <input type="number" id="max-tokens" min="1" max="4096" value="2048">
536
- </div>
537
  </div>
538
-
539
- <div class="chat-container" id="chat-container">
540
- <div class="message assistant">
541
- <div class="message-avatar">AI</div>
542
- <div class="message-content">
543
- Hello! I'm your AI assistant powered by Ollama. How can I help you today?
544
- </div>
545
- </div>
546
  </div>
547
-
548
- <div class="typing-indicator" id="typing-indicator">
549
- AI is thinking...
550
  </div>
551
-
552
- <div class="input-container">
553
- <form class="input-form" id="chat-form">
554
- <textarea
555
- class="input-field"
556
- id="message-input"
557
- placeholder="Type your message here..."
558
- rows="1"
559
- ></textarea>
560
- <button type="submit" class="send-button" id="send-button">
561
- Send
562
- </button>
563
- </form>
564
  </div>
565
-
566
- <div class="status" id="status"></div>
567
  </div>
 
 
 
 
 
568
  </div>
569
 
570
  <script type="module">
571
  import { Sandpack } from 'https://esm.sh/@codesandbox/sandpack-react@latest';
572
 
573
- let conversationHistory = JSON.parse(localStorage.getItem('chatHistory')) || [];
574
- let currentConversationId = null;
575
  let currentMessageDiv = null;
576
  let currentCodeBlocks = [];
577
 
578
  document.addEventListener('DOMContentLoaded', function() {
579
  loadModels();
580
- loadChatHistory();
581
  setupEventListeners();
582
  autoResizeTextarea();
583
  });
584
-
585
  function toggleTheme() {
586
  document.body.classList.toggle('dark-mode');
587
  const themeToggle = document.getElementById('theme-toggle');
588
  themeToggle.textContent = document.body.classList.contains('dark-mode') ? '☀️' : '🌙';
589
  localStorage.setItem('theme', document.body.classList.contains('dark-mode') ? 'dark' : 'light');
590
  }
591
-
592
  function loadTheme() {
593
  if (localStorage.getItem('theme') === 'dark') {
594
  document.body.classList.add('dark-mode');
595
  document.getElementById('theme-toggle').textContent = '☀️';
596
  }
597
  }
598
-
599
- function toggleSidebar() {
600
- const sidebar = document.getElementById('sidebar');
601
- sidebar.classList.toggle('open');
602
- }
603
-
604
  async function loadModels() {
605
  const modelSelect = document.getElementById('model-select');
606
  modelSelect.innerHTML = '<option value="">Loading models...</option>';
@@ -632,51 +485,7 @@ HTML_TEMPLATE = '''
632
  showStatus('Failed to load models: ' + error.message, 'error');
633
  }
634
  }
635
-
636
- function loadChatHistory() {
637
- const chatHistoryList = document.getElementById('chat-history');
638
- chatHistoryList.innerHTML = '';
639
- conversationHistory.forEach((conv, index) => {
640
- const li = document.createElement('li');
641
- li.className = 'chat-history-item';
642
- li.textContent = `Chat ${index + 1} - ${new Date(conv.timestamp).toLocaleString()}`;
643
- li.dataset.convId = index;
644
- li.addEventListener('click', () => loadConversation(index));
645
- chatHistoryList.appendChild(li);
646
- });
647
- if (conversationHistory.length > 0) {
648
- loadConversation(conversationHistory.length - 1);
649
- }
650
- }
651
-
652
- function loadConversation(convId) {
653
- currentConversationId = convId;
654
- const chatContainer = document.getElementById('chat-container');
655
- chatContainer.innerHTML = '';
656
- const conversation = conversationHistory[convId];
657
- conversation.messages.forEach(msg => {
658
- const messageDiv = addMessage(msg.content, msg.role, false, false);
659
- if (msg.role === 'assistant') {
660
- processCodeBlocks(messageDiv, msg.content);
661
- }
662
- });
663
- const historyItems = document.querySelectorAll('.chat-history-item');
664
- historyItems.forEach(item => item.classList.remove('active'));
665
- historyItems[convId].classList.add('active');
666
- }
667
-
668
- function saveConversation() {
669
- if (!currentConversationId && currentConversationId !== 0) {
670
- conversationHistory.push({
671
- timestamp: Date.now(),
672
- messages: []
673
- });
674
- currentConversationId = conversationHistory.length - 1;
675
- }
676
- localStorage.setItem('chatHistory', JSON.stringify(conversationHistory));
677
- loadChatHistory();
678
- }
679
-
680
  function setupEventListeners() {
681
  document.getElementById('chat-form').addEventListener('submit', handleSubmit);
682
  document.getElementById('temperature').addEventListener('input', function() {
@@ -684,14 +493,13 @@ HTML_TEMPLATE = '''
684
  });
685
  document.getElementById('message-input').addEventListener('input', autoResizeTextarea);
686
  document.getElementById('theme-toggle').addEventListener('click', toggleTheme);
687
- document.getElementById('sidebar-toggle').addEventListener('click', toggleSidebar);
688
  loadTheme();
689
  }
690
 
691
  function autoResizeTextarea() {
692
  const textarea = document.getElementById('message-input');
693
  textarea.style.height = 'auto';
694
- textarea.style.height = Math.min(textarea.scrollHeight, 150) + 'px';
695
  }
696
 
697
  async function handleSubmit(e) {
@@ -712,12 +520,6 @@ HTML_TEMPLATE = '''
712
  }
713
 
714
  addMessage(message, 'user');
715
- if (currentConversationId === null) {
716
- saveConversation();
717
- }
718
- conversationHistory[currentConversationId].messages.push({ role: 'user', content: message });
719
- localStorage.setItem('chatHistory', JSON.stringify(conversationHistory));
720
-
721
  messageInput.value = '';
722
  autoResizeTextarea();
723
  showTypingIndicator(true);
@@ -764,15 +566,11 @@ HTML_TEMPLATE = '''
764
  }
765
 
766
  processCodeBlocks(currentMessageDiv, accumulatedResponse);
767
- conversationHistory[currentConversationId].messages.push({ role: 'assistant', content: accumulatedResponse });
768
- localStorage.setItem('chatHistory', JSON.stringify(conversationHistory));
769
  showStatus(`Response generated using ${model}`, 'success');
770
  } catch (error) {
771
  showTypingIndicator(false);
772
  if (currentMessageDiv) {
773
  updateMessage(currentMessageDiv, 'Sorry, I encountered a network error.');
774
- conversationHistory[currentConversationId].messages.push({ role: 'assistant', content: 'Sorry, I encountered a network error.' });
775
- localStorage.setItem('chatHistory', JSON.stringify(conversationHistory));
776
  } else {
777
  addMessage('Sorry, I encountered a network error.', 'assistant');
778
  }
@@ -780,7 +578,7 @@ HTML_TEMPLATE = '''
780
  }
781
  }
782
 
783
- function addMessage(content, sender, isStreaming = false, save = true) {
784
  const chatContainer = document.getElementById('chat-container');
785
  const messageDiv = document.createElement('div');
786
  messageDiv.className = `message ${sender}`;
@@ -798,12 +596,8 @@ HTML_TEMPLATE = '''
798
  chatContainer.appendChild(messageDiv);
799
  chatContainer.scrollTop = chatContainer.scrollHeight;
800
 
801
- if (!isStreaming && save) {
802
- if (currentConversationId === null) {
803
- saveConversation();
804
- }
805
- conversationHistory[currentConversationId].messages.push({ role: sender, content: content });
806
- localStorage.setItem('chatHistory', JSON.stringify(conversationHistory));
807
  }
808
  return messageDiv;
809
  }
@@ -827,19 +621,23 @@ HTML_TEMPLATE = '''
827
  const code = match[2].trim();
828
  const startIndex = match.index;
829
 
 
830
  if (startIndex > lastIndex) {
831
  fragments.push({ type: 'text', content: content.slice(lastIndex, startIndex) });
832
  }
833
 
 
834
  fragments.push({ type: 'code', language, content: code });
835
  currentCodeBlocks.push({ language, content: code });
836
  lastIndex = codeBlockRegex.lastIndex;
837
  }
838
 
 
839
  if (lastIndex < content.length) {
840
  fragments.push({ type: 'text', content: content.slice(lastIndex) });
841
  }
842
 
 
843
  messageContent.innerHTML = '';
844
  fragments.forEach((fragment, index) => {
845
  if (fragment.type === 'text') {
@@ -883,7 +681,7 @@ HTML_TEMPLATE = '''
883
  script.textContent = `
884
  import { Sandpack } from '@codesandbox/sandpack-react';
885
  import { createRoot } from 'react-dom';
886
- const root = createRoot(document.getElementById('sandpack-${currentConversationId}-${index}'));
887
  root.render(
888
  React.createElement(Sandpack, {
889
  template: "${fragment.language === 'javascript' ? 'react' : fragment.language}",
@@ -901,12 +699,14 @@ HTML_TEMPLATE = '''
901
  `;
902
 
903
  const sandboxDiv = document.createElement('div');
904
- sandboxDiv.id = `sandpack-${currentConversationId}-${index}`;
905
  codeContainer.appendChild(sandboxDiv);
906
  codeContainer.appendChild(script);
907
  messageContent.appendChild(codeContainer);
908
  }
909
  });
 
 
910
  }
911
 
912
  function showTypingIndicator(show) {
@@ -943,7 +743,6 @@ def chat():
943
  try:
944
  data = request.get_json()
945
  if not data or 'prompt' not in data or 'model' not in data:
946
- logging.warning("Chat request missing 'prompt' or 'model' field")
947
  return jsonify({"status": "error", "message": "Prompt and model are required"}), 400
948
 
949
  prompt = data['prompt']
@@ -956,15 +755,10 @@ def chat():
956
 
957
  if stream and isinstance(result, requests.Response):
958
  def generate_stream():
959
- try:
960
- for chunk in result.iter_content(chunk_size=None):
961
- yield chunk
962
- except Exception as e:
963
- logging.error(f"Streaming error: {e}")
964
- yield json.dumps({"status": "error", "message": str(e)}).encode()
965
  return Response(generate_stream(), content_type='application/json')
966
  else:
967
- logging.info(f"Non-streaming chat response generated with model {model}")
968
  return jsonify(result), 200 if result["status"] == "success" else 500
969
  except Exception as e:
970
  logging.error(f"Chat endpoint error: {e}")
@@ -975,7 +769,6 @@ def get_models():
975
  """Get available models."""
976
  try:
977
  models = ollama_manager.list_models()
978
- logging.info(f"Returning models: {models}")
979
  return jsonify({
980
  "status": "success",
981
  "models": models,
@@ -1001,7 +794,7 @@ def health_check():
1001
  "status": "unhealthy",
1002
  "error": str(e),
1003
  "timestamp": time.time()
1004
- }), 503
1005
 
1006
  if __name__ == '__main__':
1007
  app.run(host='0.0.0.0', port=7860, debug=False)
 
1
  # app.py
2
+ from flask import Flask, request, jsonify, render_template_string, Response
3
  import os
4
  import requests
5
  import json
 
11
 
12
  app = Flask(__name__)
13
 
14
+ # Configure logging with file output in a writable directory
15
+ log_path = '/app/openwebui.log'
16
+ os.makedirs(os.path.dirname(log_path), exist_ok=True)
17
+ log_handler = RotatingFileHandler(log_path, maxBytes=1000000, backupCount=5)
18
  log_handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
19
  logging.getLogger().addHandler(log_handler)
20
  logging.getLogger().setLevel(logging.INFO)
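Note on the new logging setup: it assumes /app exists and is writable inside the Space container. A minimal defensive variant (a sketch only, not part of this commit; the stderr fallback and the helper name are illustrative):

import logging
import os
import sys
from logging.handlers import RotatingFileHandler

def build_log_handler(preferred_path='/app/openwebui.log'):
    """Rotating file handler if the directory is usable, otherwise log to stderr."""
    try:
        os.makedirs(os.path.dirname(preferred_path), exist_ok=True)
        return RotatingFileHandler(preferred_path, maxBytes=1_000_000, backupCount=5)
    except OSError:
        # Read-only filesystem or missing permissions: keep the app running anyway.
        return logging.StreamHandler(sys.stderr)

handler = build_log_handler()
handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
logging.getLogger().addHandler(handler)
logging.getLogger().setLevel(logging.INFO)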
 
44
  self.available_models = ALLOWED_MODELS
45
  logging.warning("No allowed models found in API response, using ALLOWED_MODELS")
46
  logging.info(f"Available models: {self.available_models}")
 
 
47
  except Exception as e:
48
+ logging.error(f"Error refreshing models: {e}")
49
  self.available_models = ALLOWED_MODELS
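Collapsing the separate ConnectionError/HTTPError handlers into a single except block keeps the fallback but loses the distinction in the logs. If that distinction is wanted later, one option is to catch requests.RequestException first; a standalone sketch (the function name and return shape are illustrative, not from this commit):

import logging
import requests

def refresh_models_safely(base_url, allowed_models):
    """Fetch model names from Ollama's /api/tags, falling back to allowed_models."""
    try:
        resp = requests.get(f"{base_url}/api/tags", timeout=10)
        resp.raise_for_status()
        names = [m.get("name", "") for m in resp.json().get("models", [])]
        return [n for n in names if n in allowed_models] or allowed_models
    except requests.RequestException as e:  # ConnectionError, HTTPError, Timeout, ...
        logging.error(f"Request error refreshing models: {e}")
    except Exception as e:
        logging.error(f"Unexpected error refreshing models: {e}")
    return allowed_models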
50
 
51
  def list_models(self) -> List[str]:
 
55
  def generate(self, model_name: str, prompt: str, stream: bool = False, **kwargs) -> Any:
56
  """Generate text using a model, with optional streaming."""
57
  if model_name not in self.available_models:
 
58
  return {"status": "error", "message": f"Model {model_name} not available"}
59
 
60
  try:
 
72
  response = requests.post(f"{self.base_url}/api/generate", json=payload, timeout=120)
73
  response.raise_for_status()
74
  data = response.json()
 
75
  return {
76
  "status": "success",
77
  "response": data.get('response', ''),
78
  "model": model_name,
79
  "usage": data.get('usage', {})
80
  }
 
 
81
  except Exception as e:
82
+ logging.error(f"Error generating response: {e}")
83
  return {"status": "error", "message": str(e)}
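Only the non-streaming branch of generate() is visible in this hunk. The /chat handler further down checks isinstance(result, requests.Response), which implies the streaming branch returns the raw response object; a sketch of what that branch presumably looks like (assumption, not shown in the diff):

import requests

def generate_streaming(base_url, model_name, prompt, **kwargs):
    """Assumed streaming branch: return the raw response so Flask can relay its chunks."""
    payload = {"model": model_name, "prompt": prompt, "stream": True, **kwargs}
    resp = requests.post(f"{base_url}/api/generate", json=payload, stream=True, timeout=120)
    resp.raise_for_status()
    return resp  # later consumed with resp.iter_content(chunk_size=None)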
84
 
85
  def health_check(self) -> Dict[str, Any]:
 
87
  try:
88
  response = requests.get(f"{self.base_url}/api/tags", timeout=10)
89
  response.raise_for_status()
 
90
  return {"status": "healthy", "available_models": len(self.available_models)}
 
 
91
  except Exception as e:
92
+ logging.error(f"Health check failed: {e}")
93
  return {"status": "unhealthy", "error": str(e)}
94
 
95
  # Initialize Ollama manager
96
  ollama_manager = OllamaManager(OLLAMA_BASE_URL)
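For a quick sanity check outside Flask, the manager can be exercised directly. A sketch (assumes the Ollama server behind OLLAMA_BASE_URL is reachable and at least one allowed model is installed):

from app import OllamaManager, OLLAMA_BASE_URL  # assuming this file is importable as app.py

manager = OllamaManager(OLLAMA_BASE_URL)
print(manager.health_check())      # {"status": "healthy", "available_models": N} or an error dict
models = manager.list_models()
print(models)                      # names filtered against ALLOWED_MODELS
if models:
    result = manager.generate(models[0], "Say hello in one sentence.")
    print(result.get("response", result))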
97
 
98
+ # HTML template for the chat interface with improved UI and Sandpack
99
  HTML_TEMPLATE = '''
100
  <!DOCTYPE html>
101
  <html lang="en">
 
115
  </script>
116
  <style>
117
  :root {
118
+ --primary-color: #667eea;
119
+ --secondary-color: #764ba2;
120
  --text-color: #333;
121
+ --bg-color: #fafbfc;
122
  --message-bg-user: var(--primary-color);
123
  --message-bg-assistant: white;
124
  --avatar-user: var(--primary-color);
125
+ --avatar-assistant: #28a745;
126
+ --border-color: #e9ecef;
127
  --input-bg: white;
 
 
128
  }
129
  .dark-mode {
130
  --primary-color: #3b4a8c;
131
  --secondary-color: #4a2e6b;
132
+ --text-color: #f0f0f0;
133
+ --bg-color: #1a1a1a;
134
  --message-bg-user: var(--primary-color);
135
+ --message-bg-assistant: #2a2a2a;
136
  --avatar-user: var(--primary-color);
137
+ --avatar-assistant: #1a7a3a;
138
+ --border-color: #4a4a4a;
139
+ --input-bg: #3a3a3a;
 
 
140
  }
141
  * {
142
  margin: 0;
 
148
  background: linear-gradient(135deg, var(--primary-color) 0%, var(--secondary-color) 100%);
149
  color: var(--text-color);
150
  min-height: 100vh;
151
+ padding: 20px;
152
  }
153
  .container {
154
+ max-width: 1200px;
155
+ margin: 0 auto;
 
156
  background: var(--bg-color);
157
+ border-radius: 20px;
158
+ box-shadow: 0 20px 40px rgba(0,0,0,0.1);
159
+ overflow: hidden;
160
+ position: relative;
161
  }
162
+ .theme-toggle {
163
+ position: absolute;
 
 
 
 
164
  top: 10px;
165
+ right: 10px;
166
+ background: none;
 
167
  border: none;
 
 
 
 
 
168
  cursor: pointer;
169
+ font-size: 1.2rem;
 
 
 
170
  color: white;
171
  }
 
 
 
 
 
 
172
  .header {
173
  background: linear-gradient(135deg, var(--primary-color) 0%, var(--secondary-color) 100%);
174
  color: white;
175
+ padding: 30px;
176
  text-align: center;
 
 
 
 
 
177
  }
178
  .header h1 {
179
+ font-size: 2.5rem;
180
  margin-bottom: 10px;
181
  font-weight: 700;
182
  }
183
  .header p {
184
+ font-size: 1.1rem;
185
  opacity: 0.9;
186
  }
187
  .controls {
188
+ padding: 20px 30px;
189
  background: var(--bg-color);
190
  border-bottom: 1px solid var(--border-color);
191
  display: flex;
192
+ gap: 15px;
193
+ align-items: center;
194
  flex-wrap: wrap;
 
195
  }
196
  .control-group {
197
  display: flex;
198
  align-items: center;
199
  gap: 8px;
 
 
200
  }
201
  .control-group label {
202
  font-weight: 600;
 
205
  }
206
  .control-group select,
207
  .control-group input {
208
+ padding: 8px 12px;
 
209
  border: 2px solid var(--border-color);
210
  border-radius: 8px;
211
  font-size: 14px;
 
219
  border-color: var(--primary-color);
220
  }
221
  .chat-container {
222
+ height: 500px;
223
  overflow-y: auto;
224
  padding: 20px;
225
  background: var(--bg-color);
 
254
  background: var(--message-bg-assistant);
255
  padding: 15px 20px;
256
  border-radius: 18px;
257
+ max-width: 70%;
258
  box-shadow: 0 2px 10px rgba(0,0,0,0.1);
259
  line-height: 1.5;
260
  color: var(--text-color);
 
278
  cursor: pointer;
279
  font-size: 12px;
280
  color: var(--text-color);
 
281
  }
282
  .code-button:hover {
283
  background: rgba(0,0,0,0.2);
 
289
  overflow: hidden;
290
  }
291
  .input-container {
292
+ padding: 20px 30px;
293
  background: var(--bg-color);
294
  border-top: 1px solid var(--border-color);
295
  }
296
  .input-form {
297
  display: flex;
298
+ gap: 15px;
 
299
  }
300
  .input-field {
301
  flex: 1;
302
+ padding: 15px 20px;
303
  border: 2px solid var(--border-color);
304
  border-radius: 25px;
305
  font-size: 16px;
306
  transition: border-color 0.3s;
307
  resize: none;
308
  min-height: 50px;
309
+ max-height: 120px;
310
  background: var(--input-bg);
311
  color: var(--text-color);
312
  }
 
315
  border-color: var(--primary-color);
316
  }
317
  .send-button {
318
+ padding: 15px 30px;
319
  background: linear-gradient(135deg, var(--primary-color) 0%, var(--secondary-color) 100%);
320
  color: white;
321
  border: none;
 
324
  font-weight: 600;
325
  cursor: pointer;
326
  transition: transform 0.2s;
327
+ min-width: 100px;
328
  }
329
  .send-button:hover {
330
  transform: translateY(-2px);
 
353
  border-radius: 18px;
354
  color: #6c757d;
355
  font-style: italic;
 
356
  }
357
  @keyframes fadeIn {
358
  from { opacity: 0; transform: translateY(10px); }
359
  to { opacity: 1; transform: translateY(0); }
360
  }
361
  @media (max-width: 768px) {
 
 
 
 
 
362
  .controls {
 
 
 
 
363
  flex-direction: column;
364
  align-items: stretch;
365
  }
366
+ .control-group {
367
+ justify-content: space-between;
 
368
  }
369
  .message-content {
370
+ max-width: 85%;
 
 
 
 
 
371
  }
372
  }
373
  </style>
374
  </head>
375
  <body>
 
376
  <div class="container">
377
+ <div class="header">
378
+ <h1>🤖 OpenWebUI</h1>
379
+ <p>Chat with your local Ollama models through Hugging Face Spaces</p>
 
 
380
  </div>
381
+
382
+ <div class="controls">
383
+ <div class="control-group">
384
+ <label for="model-select">Model:</label>
385
+ <select id="model-select">
386
+ <option value="">Select a model...</option>
387
+ </select>
 
 
 
 
 
388
  </div>
389
+ <div class="control-group">
390
+ <label for="temperature">Temperature:</label>
391
+ <input type="range" id="temperature" min="0" max="2" step="0.1" value="0.7">
392
+ <span id="temp-value">0.7</span>
 
 
 
 
393
  </div>
394
+ <div class="control-group">
395
+ <label for="max-tokens">Max Tokens:</label>
396
+ <input type="number" id="max-tokens" min="1" max="4096" value="2048">
397
  </div>
398
+ </div>
399
+
400
+ <div class="chat-container" id="chat-container">
401
+ <div class="message assistant">
402
+ <div class="message-avatar">AI</div>
403
+ <div class="message-content">
404
+ Hello! I'm your AI assistant powered by Ollama. How can I help you today?
405
+ </div>
 
 
 
 
 
406
  </div>
 
 
407
  </div>
408
+
409
+ <div class="typing-indicator" id="typing-indicator">
410
+ AI is thinking...
411
+ </div>
412
+
413
+ <div class="input-container">
414
+ <form class="input-form" id="chat-form">
415
+ <textarea
416
+ class="input-field"
417
+ id="message-input"
418
+ placeholder="Type your message here..."
419
+ rows="1"
420
+ ></textarea>
421
+ <button type="submit" class="send-button" id="send-button">
422
+ Send
423
+ </button>
424
+ </form>
425
+ </div>
426
+
427
+ <div class="status" id="status"></div>
428
  </div>
429
 
430
  <script type="module">
431
  import { Sandpack } from 'https://esm.sh/@codesandbox/sandpack-react@latest';
432
 
433
+ let conversationHistory = [];
 
434
  let currentMessageDiv = null;
435
  let currentCodeBlocks = [];
436
 
437
  document.addEventListener('DOMContentLoaded', function() {
438
  loadModels();
 
439
  setupEventListeners();
440
  autoResizeTextarea();
441
  });
442
+
443
  function toggleTheme() {
444
  document.body.classList.toggle('dark-mode');
445
  const themeToggle = document.getElementById('theme-toggle');
446
  themeToggle.textContent = document.body.classList.contains('dark-mode') ? '☀️' : '🌙';
447
  localStorage.setItem('theme', document.body.classList.contains('dark-mode') ? 'dark' : 'light');
448
  }
449
+
450
  function loadTheme() {
451
  if (localStorage.getItem('theme') === 'dark') {
452
  document.body.classList.add('dark-mode');
453
  document.getElementById('theme-toggle').textContent = '☀️';
454
  }
455
  }
456
+
 
 
 
 
 
457
  async function loadModels() {
458
  const modelSelect = document.getElementById('model-select');
459
  modelSelect.innerHTML = '<option value="">Loading models...</option>';
 
485
  showStatus('Failed to load models: ' + error.message, 'error');
486
  }
487
  }
488
+
 
 
 
 
 
 
489
  function setupEventListeners() {
490
  document.getElementById('chat-form').addEventListener('submit', handleSubmit);
491
  document.getElementById('temperature').addEventListener('input', function() {
 
493
  });
494
  document.getElementById('message-input').addEventListener('input', autoResizeTextarea);
495
  document.getElementById('theme-toggle').addEventListener('click', toggleTheme);
 
496
  loadTheme();
497
  }
498
 
499
  function autoResizeTextarea() {
500
  const textarea = document.getElementById('message-input');
501
  textarea.style.height = 'auto';
502
+ textarea.style.height = Math.min(textarea.scrollHeight, 120) + 'px';
503
  }
504
 
505
  async function handleSubmit(e) {
 
520
  }
521
 
522
  addMessage(message, 'user');
 
 
523
  messageInput.value = '';
524
  autoResizeTextarea();
525
  showTypingIndicator(true);
 
566
  }
567
 
568
  processCodeBlocks(currentMessageDiv, accumulatedResponse);
 
 
569
  showStatus(`Response generated using ${model}`, 'success');
570
  } catch (error) {
571
  showTypingIndicator(false);
572
  if (currentMessageDiv) {
573
  updateMessage(currentMessageDiv, 'Sorry, I encountered a network error.');
 
 
574
  } else {
575
  addMessage('Sorry, I encountered a network error.', 'assistant');
576
  }
 
578
  }
579
  }
580
 
581
+ function addMessage(content, sender, isStreaming = false) {
582
  const chatContainer = document.getElementById('chat-container');
583
  const messageDiv = document.createElement('div');
584
  messageDiv.className = `message ${sender}`;
 
596
  chatContainer.appendChild(messageDiv);
597
  chatContainer.scrollTop = chatContainer.scrollHeight;
598
 
599
+ if (!isStreaming) {
600
+ conversationHistory.push({ role: sender, content: content });
 
 
 
 
601
  }
602
  return messageDiv;
603
  }
 
621
  const code = match[2].trim();
622
  const startIndex = match.index;
623
 
624
+ // Add text before the code block
625
  if (startIndex > lastIndex) {
626
  fragments.push({ type: 'text', content: content.slice(lastIndex, startIndex) });
627
  }
628
 
629
+ // Add code block
630
  fragments.push({ type: 'code', language, content: code });
631
  currentCodeBlocks.push({ language, content: code });
632
  lastIndex = codeBlockRegex.lastIndex;
633
  }
634
 
635
+ // Add remaining text
636
  if (lastIndex < content.length) {
637
  fragments.push({ type: 'text', content: content.slice(lastIndex) });
638
  }
639
 
640
+ // Clear message content and rebuild with fragments
641
  messageContent.innerHTML = '';
642
  fragments.forEach((fragment, index) => {
643
  if (fragment.type === 'text') {
 
681
  script.textContent = `
682
  import { Sandpack } from '@codesandbox/sandpack-react';
683
  import { createRoot } from 'react-dom';
684
+ const root = createRoot(document.getElementById('sandpack-${index}'));
685
  root.render(
686
  React.createElement(Sandpack, {
687
  template: "${fragment.language === 'javascript' ? 'react' : fragment.language}",
 
699
  `;
700
 
701
  const sandboxDiv = document.createElement('div');
702
+ sandboxDiv.id = `sandpack-${index}`;
703
  codeContainer.appendChild(sandboxDiv);
704
  codeContainer.appendChild(script);
705
  messageContent.appendChild(codeContainer);
706
  }
707
  });
708
+
709
+ conversationHistory.push({ role: 'assistant', content: content });
710
  }
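For reference, the fenced-code splitting done above in JavaScript maps to a few lines of Python; a rough equivalent for clarity (illustrative only, the regex is not taken from this commit):

import re

CODE_BLOCK_RE = re.compile(r"```(\w+)?\n([\s\S]*?)```")

def split_code_blocks(content):
    """Split markdown-ish text into ordered text/code fragments."""
    fragments, last = [], 0
    for match in CODE_BLOCK_RE.finditer(content):
        language = match.group(1) or "text"
        code = match.group(2).strip()
        if match.start() > last:                       # text before the fence
            fragments.append({"type": "text", "content": content[last:match.start()]})
        fragments.append({"type": "code", "language": language, "content": code})
        last = match.end()
    if last < len(content):                             # trailing text
        fragments.append({"type": "text", "content": content[last:]})
    return fragments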
711
 
712
  function showTypingIndicator(show) {
 
743
  try:
744
  data = request.get_json()
745
  if not data or 'prompt' not in data or 'model' not in data:
 
746
  return jsonify({"status": "error", "message": "Prompt and model are required"}), 400
747
 
748
  prompt = data['prompt']
 
755
 
756
  if stream and isinstance(result, requests.Response):
757
  def generate_stream():
758
+ for chunk in result.iter_content(chunk_size=None):
759
+ yield chunk
 
760
  return Response(generate_stream(), content_type='application/json')
761
  else:
 
762
  return jsonify(result), 200 if result["status"] == "success" else 500
763
  except Exception as e:
764
  logging.error(f"Chat endpoint error: {e}")
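With the try/except removed from generate_stream(), the endpoint now simply relays Ollama's newline-delimited JSON chunks. A client-side sketch of consuming that stream (the /api/chat path and the model name are assumptions; the route decorator sits outside this diff):

import json
import requests

with requests.post(
    "http://localhost:7860/api/chat",            # assumed route
    json={"model": "llama3", "prompt": "Hello!", "stream": True},
    stream=True,
    timeout=120,
) as resp:
    resp.raise_for_status()
    for line in resp.iter_lines():               # one JSON object per line
        if not line:
            continue
        chunk = json.loads(line)
        print(chunk.get("response", ""), end="", flush=True)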
 
769
  """Get available models."""
770
  try:
771
  models = ollama_manager.list_models()
 
772
  return jsonify({
773
  "status": "success",
774
  "models": models,
 
794
  "status": "unhealthy",
795
  "error": str(e),
796
  "timestamp": time.time()
797
+ }), 500
798
 
799
  if __name__ == '__main__':
800
  app.run(host='0.0.0.0', port=7860, debug=False)
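The app listens on 0.0.0.0:7860, the port Hugging Face Spaces routes to. Once running, the JSON endpoints can be smoke-tested directly; a sketch (the /api/models and /api/health paths are assumptions inferred from the handler names, since the route decorators are not part of this diff):

import requests

BASE = "http://localhost:7860"

print(requests.get(f"{BASE}/api/models", timeout=10).json())   # get_models()
print(requests.get(f"{BASE}/api/health", timeout=10).json())   # health_check()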