import { pipeline, env } from 'https://cdn.jsdelivr.net/npm/@xenova/transformers@2.17.2';

// Since we will download the model from the Hugging Face Hub, we can skip the local model check
env.allowLocalModels = false;

// Get references to the DOM elements we need
const status = document.getElementById('status');
const msg = document.getElementById('message');
const checkMessage = document.getElementById('checkMessage');

// Create a text-classification pipeline (downloads the model from the Hub on first run)
status.textContent = 'Loading model...';
const pipe = await pipeline('text-classification', 'bajrangCoder/roberta_spam_onnx_quantised');
status.textContent = 'Ready';
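
// Optionally, you can surface download progress while the model loads.
// A minimal sketch: progress_callback is a standard Transformers.js pipeline
// option, but the exact event fields (status/progress) used below are an
// assumption -- check the library docs before relying on them.
//
// const pipe = await pipeline('text-classification', 'bajrangCoder/roberta_spam_onnx_quantised', {
//   progress_callback: (data) => {
//     if (data.status === 'progress') {
//       status.textContent = `Loading model... ${Math.round(data.progress)}%`;
//     }
//   },
// });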

checkMessage.onclick = async () => {
  try {
    status.textContent = 'Analysing...';
    // Run the classifier on the message text
    const output = await pipe(msg.value);
    console.log(output);
    status.textContent = JSON.stringify(output);
  } catch (err) {
    console.error('Error:', err);
    status.textContent = 'Something went wrong. See the console for details.';
  }
};
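
// For a friendlier UI, you could unpack the prediction instead of showing raw
// JSON. A sketch, assuming the pipeline returns [{ label, score }] and that
// 'LABEL_1' means spam -- confirm the label mapping on the model card:
//
// const [{ label, score }] = output;
// const verdict = label === 'LABEL_1' ? 'Spam' : 'Not spam';
// status.textContent = `${verdict} (${(score * 100).toFixed(1)}% confidence)`;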