import json
import gradio as gr
import gspread
from oauth2client.service_account import ServiceAccountCredentials
from llama_index.core import VectorStoreIndex, Settings
from llama_index.core.node_parser import SentenceSplitter
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
from llama_index.llms.llama_cpp import LlamaCPP
from huggingface_hub import hf_hub_download
from llama_index.core.chat_engine.condense_plus_context import CondensePlusContextChatEngine
from llama_index.core.schema import Document

# ===================================
# 1️⃣ Read Google Spreadsheet data from multiple worksheets
# ===================================
def read_google_sheets():
    try:
        scope = ["https://www.googleapis.com/auth/spreadsheets", "https://www.googleapis.com/auth/drive"]
        creds = ServiceAccountCredentials.from_json_keyfile_name("credentials.json", scope)
        client = gspread.authorize(creds)

        SPREADSHEET_ID = "1e_cNMhwF-QYpyYUpqQh-XCw-OdhWS6EuYsoBUsVtdNg"
        sheet_names = ["datatarget", "datacuti", "dataabsen", "datalembur"]

        all_data = []
        spreadsheet = client.open_by_key(SPREADSHEET_ID)

        for sheet_name in sheet_names:
            try:
                sheet = spreadsheet.worksheet(sheet_name)
                data = sheet.get_all_values()
                all_data.append(f"=== Data from {sheet_name.upper()} ===")
                all_data.extend([" | ".join(row) for row in data])
                all_data.append("\n")
            except gspread.exceptions.WorksheetNotFound:
                all_data.append(f"❌ ERROR: Worksheet {sheet_name} not found.")

        return "\n".join(all_data)
    except Exception as e:
        return f"❌ ERROR: {str(e)}"

# ===================================
# 2️⃣ Read the PKB JSON file
# ===================================
def read_pkb_json():
    try:
        with open("pkb.json", "r", encoding="utf-8") as file:
            pkb_data = json.load(file)
        return json.dumps(pkb_data, indent=2, ensure_ascii=False)
    except Exception as e:
        return f"❌ ERROR: {str(e)}"

# ===================================
# 3️⃣ Download the Llama model
# ===================================
def initialize_llama_model():
    model_path = hf_hub_download(
        repo_id="TheBloke/zephyr-7b-beta-GGUF",
        filename="zephyr-7b-beta.Q4_K_M.gguf",
        cache_dir="./models",
    )
    return model_path

# ===================================
# 4️⃣ Initialize the model and settings
# ===================================
def initialize_settings(model_path):
    Settings.llm = LlamaCPP(
        model_path=model_path,
        temperature=0.7,
    )

# ===================================
# 5️⃣ Build the index from spreadsheet & PKB JSON data
# ===================================
def initialize_index():
    text_data = read_google_sheets()
    pkb_data = read_pkb_json()
    combined_text = text_data + "\n\n" + pkb_data

    documents = [Document(text=combined_text)]
    parser = SentenceSplitter(chunk_size=150, chunk_overlap=10)
    nodes = parser.get_nodes_from_documents(documents)

    Settings.embed_model = HuggingFaceEmbedding(
        model_name="sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2"
    )
    index = VectorStoreIndex(nodes)
    return index

# ===================================
# 6️⃣ Initialize the chat engine
# ===================================
def initialize_chat_engine(index):
    retriever = index.as_retriever(similarity_top_k=3)
    chat_engine = CondensePlusContextChatEngine.from_defaults(
        retriever=retriever,
        verbose=True,
    )
    return chat_engine

# ===================================
# 7️⃣ Generate chatbot responses
# ===================================
def generate_response(message, history, chat_engine):
    if history is None:
        history = []
    # Stream the reply, collect it into a single string, and record the turn.
    response = chat_engine.stream_chat(message)
    text = "".join(response.response_gen)
    history.append((message, text))
    return text

# ===================================
# 8️⃣ Main entry point
# ===================================
def main():
    model_path = initialize_llama_model()
    initialize_settings(model_path)
    index = initialize_index()
    chat_engine = initialize_chat_engine(index)

    def chatbot_response(message, history):
        return generate_response(message, history, chat_engine)

    # gr.ChatInterface passes (message, history) to the callback and expects
    # the reply text back, which matches chatbot_response.
    gr.ChatInterface(fn=chatbot_response).launch()

if __name__ == "__main__":
    main()
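
# ---------------------------------------------------------------------------
# Optional console check (an illustrative sketch, not part of the Gradio app):
# the snippet below assumes credentials.json, pkb.json, and the model download
# are all available, and runs a single question through the same pipeline that
# main() wires up. CondensePlusContextChatEngine.chat() returns a response
# object whose .response attribute holds the generated text.
#
#   >>> model_path = initialize_llama_model()
#   >>> initialize_settings(model_path)
#   >>> engine = initialize_chat_engine(initialize_index())
#   >>> print(engine.chat("What data sources can you answer questions about?").response)
# ---------------------------------------------------------------------------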