Spaces:
Runtime error
```python
import gradio as gr
from llama_cpp import Llama
from huggingface_hub import hf_hub_download

# Download the quantized WizardLM GGML weights and load them with llama.cpp
llm = Llama(model_path=hf_hub_download(repo_id="TheBloke/WizardLM-13B-V1.2-GGML", filename="wizardlm-13b-v1.2.ggmlv3.q5_1.bin"))

def generate_text(input_text):
    # Q/A-style prompt; generation stops at the next "Q:" or newline,
    # and echo=True returns the prompt together with the completion
    output = llm(f"Q: {input_text} A:", max_tokens=256, stop=["Q:", "\n"], echo=True)
    return output['choices'][0]['text']

input_text = gr.Textbox(lines=10, label="Enter your input text")
output_text = gr.Textbox(label="Output text")
description = "llama.cpp implementation in python [https://github.com/abetlen/llama-cpp-python]"

# gr.Interface expects one value per example row for its single input component,
# so the expected answers are kept as comments only
examples = [
    ["What is the capital of France?"],              # "The capital of France is Paris."
    ["Who wrote the novel 'Pride and Prejudice'?"],  # "The novel 'Pride and Prejudice' was written by Jane Austen."
    ["What is the square root of 64?"],              # "The square root of 64 is 8."
]

gr.Interface(fn=generate_text, inputs=input_text, outputs=output_text, title="Llama Language Model", description=description, examples=examples).launch()
```
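For the Space to build, a requirements file along these lines is also needed; this is a minimal sketch inferred from the imports above, with versions deliberately left unpinned.

```text
# requirements.txt (sketch, inferred from the imports in app.py)
gradio
llama-cpp-python   # recent releases load GGUF files only; the ggmlv3 .bin above needs an older release or a GGUF re-quantization
huggingface_hub
```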