import gradio as gr
from huggingface_hub import InferenceClient
import os
import json
import importlib
import sys
"""
For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
"""
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
########
import requests
import base64
GITHUB_API_KEY = os.environ.get('GITHUB_API_KEY')
GITHUB_API_URL_PY = os.environ.get('GITHUB_API_URL_PY')
GITHUB_API_URL_JSON = os.environ.get('GITHUB_API_URL_JSON')
GITHUB_API_URL = os.environ.get('GITHUB_API_URL')
HEADERS = {"Authorization": f"token {GITHUB_API_KEY}"}
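
# The exact values live in the Space's secrets, so the URL shape below is an
# assumption: GITHUB_API_URL is expected to be a GitHub "contents" API endpoint,
# e.g. https://api.github.com/repos/<owner>/<repo>/contents/<folder>
# A GET on it returns a JSON array of file descriptors, roughly:
#   [{"name": "main20.py", "url": "<api url for this file>", "download_url": "...", ...}, ...]
# and fetching an individual file's "url" returns its content base64-encoded.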
def fetch_and_save_files():
    """
    Fetches all Python files from a specific folder in a private GitHub repository
    and saves them locally.
    """
    response = requests.get(GITHUB_API_URL, headers=HEADERS)
    if response.status_code != 200:
        raise Exception(f"Failed to list files: {response.status_code} - {response.text}")
    files = response.json()
    py_files = [f for f in files if f['name'].endswith('.py')]
    os.makedirs("temp_modules", exist_ok=True)  # Create a local folder to save the files
    for file in py_files:
        file_name = file["name"]
        file_url = file["url"]  # API URL to fetch content
        # Fetch file content
        file_response = requests.get(file_url, headers=HEADERS)
        if file_response.status_code != 200:
            print(f"Failed to fetch {file_name}")
            continue
        file_content = base64.b64decode(file_response.json()["content"]).decode("utf-8")
        # Save file locally
        local_path = os.path.join("temp_modules", file_name)
        with open(local_path, "w") as f:
            f.write(file_content)
        print(f"Saved {file_name} to {local_path}")
    print("All Python files have been saved locally.")
def load_modules():
    """
    Dynamically loads all saved Python files as modules.
    """
    temp_folder = "temp_modules"
    sys.path.append(temp_folder)  # Add folder to Python path
    modules = {}
    for file_name in os.listdir(temp_folder):
        if file_name.endswith(".py"):
            module_name = file_name[:-3]  # Strip ".py" extension
            try:
                modules[module_name] = importlib.import_module(module_name)
                print(f"Loaded module: {module_name}")
            except Exception as e:
                print(f"Failed to load module {module_name}: {e}")
    return modules
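
# A minimal development helper, assuming the files in temp_modules/ may be
# re-fetched while the app runs; `reload_modules` is not part of the original
# repo and is only a sketch built on importlib.reload.
def reload_modules(modules):
    """Re-import already-loaded modules in place after their files change."""
    for name, mod in modules.items():
        try:
            modules[name] = importlib.reload(mod)
            print(f"Reloaded module: {name}")
        except Exception as e:
            print(f"Failed to reload {name}: {e}")
    return modules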
# Step 2: Fetch and save Python files, then load them dynamically
try:
    fetch_and_save_files()
    modules = load_modules()
    print("\nModules loaded successfully. You can now call their methods freely!")
    # Example usage
    main_module = modules.get("main20")  # Access the 'main20.py' module
    classes_module = modules.get("second_main")  # Access the 'second_main.py' module
    if main_module and hasattr(main_module, "main_function"):
        main_module.main_function()  # Call a function from main20.py
    if classes_module and hasattr(classes_module, "SomeClass"):
        obj = classes_module.SomeClass()  # Instantiate a class from second_main.py
        obj.some_method()  # Call a method
    # These calls depend on the fetched modules, so they stay inside the try
    # block; they would raise NameError if the fetch or import above failed.
    a = classes_module.clio(4)
    b = a.print_man()
    print(b)
    c = main_module.first_func(778)
    print(c)
except Exception as e:
    print("Error:", e)
########
def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    messages = [{"role": "system", "content": system_message}]
    for val in history:
        if val[0]:
            messages.append({"role": "user", "content": val[0]})
        if val[1]:
            messages.append({"role": "assistant", "content": val[1]})
    messages.append({"role": "user", "content": message})
    response = ""
    # Stream tokens; a distinct loop variable avoids shadowing the `message` argument
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content
        if token:  # some deltas (e.g. the final one) can carry None
            response += token
        yield response
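
# A quick sanity check of `respond` outside Gradio (hypothetical prompt and
# sampling values; needs network access to the Inference API):
#
#     for partial in respond("Hello!", [], "You are a friendly Chatbot.", 64, 0.7, 0.95):
#         pass
#     print(partial)  # the last yielded value is the full reply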
"""
For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
"""
demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(
            minimum=0.1,
            maximum=1.0,
            value=0.95,
            step=0.05,
            label="Top-p (nucleus sampling)",
        ),
    ],
)
if __name__ == "__main__":
    demo.launch()