"""Password-gated Gradio chat app that streams completions from either the
Hyperbolic.xyz or Hugging Face inference endpoints via the OpenAI client."""

import os
from typing import List, Tuple

import gradio as gr
from openai import OpenAI

# Available models: display name -> model ID. DeepSeek V3 appears twice
# because it is served by two different providers.
AVAILABLE_MODELS = {
    "DeepSeek V3 (Hyperbolic.xyz)": "deepseek-ai/DeepSeek-V3",
    "DeepSeek V3 (HuggingFace.co)": "deepseek-ai/DeepSeek-V3",
    "Llama3.3-70b-Instruct": "meta-llama/Llama-3.3-70B-Instruct",
    "Llama3.1-8b-Instruct": "meta-llama/Meta-Llama-3.1-8B-Instruct",
}

HYPERB_ENDPOINT_URL = "https://api.hyperbolic.xyz/v1"
HF_ENDPOINT_URL = "https://huggingface.co/api/inference-proxy/together"

HYPERB_API_KEY = os.getenv("HYPERBOLIC_XYZ_KEY")
HF_API_KEY = os.getenv("HF_KEY")
PASSWORD = os.getenv("PASSWD")  # Store the password in an environment variable

# One OpenAI-compatible client per provider.
hyperb_client = OpenAI(base_url=HYPERB_ENDPOINT_URL, api_key=HYPERB_API_KEY)
hf_client = OpenAI(base_url=HF_ENDPOINT_URL, api_key=HF_API_KEY)


def respond(
    message: str,
    history: List[Tuple[str, str]],
    system_message: str,
    model_choice: str,
    max_tokens: int,
    temperature: float,
    top_p: float,
):
    """Stream a chat completion for the latest user message, yielding the
    partial response so Gradio can render it incrementally."""
    # Rebuild the conversation in OpenAI chat format.
    messages = [{"role": "system", "content": system_message}]
    for user_msg, assistant_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    # Route the request to the provider named in the dropdown choice.
    if "(HuggingFace.co)" in model_choice:
        this_client = hf_client
    else:
        this_client = hyperb_client

    response = ""
    for chunk in this_client.chat.completions.create(
        model=AVAILABLE_MODELS[model_choice],  # Use the selected model
        messages=messages,
        max_tokens=max_tokens,
        temperature=temperature,
        top_p=top_p,
        stream=True,
    ):
        token = chunk.choices[0].delta.content or ""
        response += token
        yield response


def check_password(input_password):
    """Reveal the chat interface only when the correct password is entered."""
    if input_password == PASSWORD:
        return (
            gr.update(visible=False),   # hide the password box
            gr.update(visible=True),    # show the chat interface
            gr.update(visible=False),   # keep the error message hidden
        )
    # Wrong password: clear the box, keep the chat hidden, show the error.
    return (
        gr.update(value="", interactive=True),
        gr.update(visible=False),
        gr.update(value="Incorrect password.", visible=True),
    )


with gr.Blocks() as demo:
    # Simple password gate shown on launch.
    with gr.Column():
        password_input = gr.Textbox(
            type="password", label="Enter Password", interactive=True
        )
        submit_button = gr.Button("Submit")
        error_message = gr.Textbox(
            label="Error", visible=False, interactive=False
        )

    # Chat UI, hidden until the password check succeeds.
    with gr.Column(visible=False) as chat_interface:
        chat = gr.ChatInterface(
            respond,
            api_name=False,
            additional_inputs=[
                gr.Textbox(value="You are a helpful assistant.", label="System message"),
                gr.Dropdown(
                    choices=list(AVAILABLE_MODELS.keys()),
                    value=list(AVAILABLE_MODELS.keys())[0],
                    label="Select Model",
                ),
                gr.Slider(minimum=1, maximum=30000, value=2048, step=100, label="Max new tokens"),
                gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
                gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"),
            ],
        )

    submit_button.click(
        check_password,
        inputs=password_input,
        outputs=[password_input, chat_interface, error_message],
    )

if __name__ == "__main__":
    demo.launch(share=True)