{
  "nbformat": 4,
  "nbformat_minor": 0,
  "metadata": {
    "colab": { "provenance": [], "gpuType": "T4" },
    "kernelspec": { "name": "python3", "display_name": "Python 3" },
    "language_info": { "name": "python" },
    "accelerator": "GPU"
  },
  "cells": [
    {
      "cell_type": "code",
      "source": [
        "# Mount Google Drive so files stored there are reachable from this Colab VM.\n",
        "from google.colab import drive\n",
        "drive.mount('/content/drive')"
      ],
      "metadata": { "id": "fZnNN5kHlpVa" },
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "code",
      "source": [
        "# Work from the directory that holds the model weights/config.\n",
        "%cd /content/files"
      ],
      "metadata": { "id": "3FNd407Xmmmf" },
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "code",
      "source": [
        "# All installs in one top-of-notebook cell.\n",
        "# %pip (rather than !pip) guarantees the packages land in the\n",
        "# environment of the running kernel.\n",
        "%pip install -q -U bitsandbytes torch transformers safetensors accelerate\n"
      ],
      "metadata": { "id": "JUhd_t-7pDYY" },
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": { "id": "puAjgEBAlYN0" },
      "outputs": [],
      "source": [
        "import torch\n",
        "from transformers import AutoModelForCausalLM, AutoTokenizer\n",
        "\n",
        "\n",
        "class ChatBot:\n",
        "    \"\"\"Thin wrapper around a local causal-LM checkpoint for interactive chat.\"\"\"\n",
        "\n",
        "    def __init__(self, model_path):\n",
        "        # Load tokenizer + model from a local directory (or hub id).\n",
        "        # device_map='auto' places layers on the available GPU/CPU;\n",
        "        # float16 halves memory versus the default float32.\n",
        "        self.tokenizer = AutoTokenizer.from_pretrained(model_path)\n",
        "        self.model = AutoModelForCausalLM.from_pretrained(\n",
        "            model_path,\n",
        "            device_map=\"auto\",\n",
        "            torch_dtype=torch.float16,\n",
        "        )\n",
        "\n",
        "    def chat(self, user_input, max_length=1000):\n",
        "        \"\"\"Generate a reply to ``user_input``.\n",
        "\n",
        "        ``max_length`` caps the number of NEW tokens generated\n",
        "        (passed to ``generate`` as ``max_new_tokens``), so a long\n",
        "        prompt can no longer eat the reply's token budget.\n",
        "        Returns only the model's reply, without the echoed prompt.\n",
        "        \"\"\"\n",
        "        inputs = self.tokenizer(user_input, return_tensors=\"pt\").to(self.model.device)\n",
        "\n",
        "        # no_grad: inference only — skip autograd bookkeeping.\n",
        "        with torch.no_grad():\n",
        "            outputs = self.model.generate(\n",
        "                **inputs,\n",
        "                max_new_tokens=max_length,\n",
        "                num_return_sequences=1,\n",
        "                temperature=0.7,\n",
        "                do_sample=True,\n",
        "                pad_token_id=self.tokenizer.eos_token_id,\n",
        "            )\n",
        "\n",
        "        # generate() returns prompt + completion; slice off the prompt\n",
        "        # tokens so the caller gets only the newly generated text.\n",
        "        new_tokens = outputs[0][inputs[\"input_ids\"].shape[-1]:]\n",
        "        return self.tokenizer.decode(new_tokens, skip_special_tokens=True)\n",
        "\n",
        "\n",
        "def start_chat():\n",
        "    \"\"\"Minimal REPL: read a line, print the model's reply; 'quit' exits.\"\"\"\n",
        "    print(\"Initializing chatbot... This may take a few minutes.\")\n",
        "    chatbot = ChatBot(\"/content/files\")  # Use the directory containing model files\n",
        "\n",
        "    print(\"\\nChat initialized! Type 'quit' to exit.\")\n",
        "    while True:\n",
        "        user_input = input(\"\\nYou: \")\n",
        "        if user_input.lower() == 'quit':\n",
        "            break\n",
        "\n",
        "        response = chatbot.chat(user_input)\n",
        "        print(f\"\\nBot: {response}\")\n",
        "\n",
        "\n",
        "# Start the chat\n",
        "if __name__ == \"__main__\":\n",
        "    start_chat()"
      ]
    }
  ]
}