import gradio as gr
import os

# The Keras backend must be selected before keras/keras_nlp are imported.
os.environ["KERAS_BACKEND"] = "tensorflow"
import keras
import keras_nlp
css = """
html, body {
margin: 0;
padding: 0;
height: 100%;
overflow: hidden;
}
body::before {
content: '';
position: fixed;
top: 0;
left: 0;
width: 100vw;
height: 100vh;
background-image: url('https://github.com/ShebMichel/kagglex_imagebot/blob/main/geoBot_to_github.gif');
background-size: cover;
background-repeat: no-repeat;
opacity: 0.65; /* Faint background image */
background-position: center;
z-index: -1; /* Keep the background behind text */
}
.gradio-container {
display: flex;
justify-content: center;
align-items: center;
height: 100vh; /* Ensure the content is vertically centered */
}
"""
# Load the fine-tuned causal language model from the Hugging Face Hub.
geomodel_llm = keras_nlp.models.CausalLM.from_preset("hf://ShebMichel/geobot_teacher-v0")
def launch(question):
    # Format the question with the instruction/response template used during fine-tuning.
    template = "Instruction:\n{instruction}\n\nResponse:\n{response}"
    prompt = template.format(
        instruction=question,
        response="",
    )
    out = geomodel_llm.generate(prompt, max_length=1024)
    # Return only the text that follows the "Response:\n" marker in the generated output.
    ind = out.index("Response") + len("Response") + 2
    return out[ind:]
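# Example (sketch, not wired into the UI below): calling launch() directly with a
# question string returns only the text the model generates after "Response:", e.g.
#   launch("Explain the difference between a mineral and a rock.")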
# Handle the three interface inputs in order: the uploaded file, the typed
# response, and the selected question type.
def analyze_response(file_input, text_input, question_type):
    # Placeholder logic: echo what was received until grading is wired in.
    response = f"Question type: {question_type}\n"
    response += f"Received text: {text_input}\n"
    if file_input is not None:
        response += f"File uploaded: {file_input.name}"
    else:
        response += "No file uploaded."
    return response
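# A minimal sketch (not part of the original app) of how the uploaded file's
# contents could be read before being passed to the model. The helper name is
# illustrative; it assumes plain-text or JSON uploads, since PDF/DOCX parsing
# would need extra libraries.
def read_upload_text(file_input):
    if file_input is None:
        return ""
    # Depending on the Gradio version, gr.File yields either a path string or a
    # temp-file wrapper whose .name attribute holds the path on disk.
    path = file_input if isinstance(file_input, str) else file_input.name
    try:
        with open(path, "r", encoding="utf-8", errors="ignore") as fh:
            return fh.read()
    except OSError:
        return ""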
# Set up Gradio Interface
# Earlier interface drafts, kept commented out for reference.
#
# iface = gr.Interface(
#     # fn=chatbot,
#     inputs=[
#         gr.File(label="Upload a file (PDF, JSON, DOCX)"),
#         gr.Textbox(label="Your Message"),
#         "state",  # Keeps chat history between turns
#     ],
#     outputs="chatbot",
#     live=True,
#     description="Drag and drop a file and start chatting with the bot based on its contents.",
# )
# iface.launch()
#
# title="Hola-Hello-Bonjour-Mbote-你好 <br><br> I am geobot-teacher the student marker! <br><br> I am here to analyse each question to determine whether the response qualifies as a pass or fail. <br><br> Try me :)",
# description="Fine-tuned on ~1k synthetic QA pairs on top of Gemma_2b_en."
#
# iface = gr.Interface(
#     launch,
#     inputs="text",
#     outputs="text",
#     css=css,
#     title="Hi, I am geobot-teacher: The Student Marker",
#     description="Hola/Hello/Bonjour/Mbote/你好 \n\n"
#                 "I am here to analyse each question to determine whether the response qualifies as a pass or fail.\n\n"
#                 "Try me :)",
# )
iface = gr.Interface(
    fn=analyze_response,
    inputs=[
        gr.File(label="Upload a file (PDF, JSON, DOCX)"),
        gr.Textbox(label="Enter your response"),
        gr.Radio(
            ["QCM", "short_answer_questions", "long_answer_questions"],
            label="Question type",
        ),
    ],
    outputs="text",
    css=css,
    title="Hi, I am geobot-teacher: The Student Marker",
    description="Hola/Hello/Bonjour/Mbote/你好 \n\n"
                "I am here to analyse each question to determine whether the response qualifies as a pass or fail.\n\n"
                "Try me :)",
)
iface.launch(share=True)