# models.py
from transformers import pipeline

# Load all models here


def load_sentiment_model():
    """Loads the sentiment analysis model."""
    return pipeline(
        "sentiment-analysis",
        model="distilbert-base-uncased-finetuned-sst-2-english",
        device=-1  # Force CPU
    )


def load_summarization_model():
    """Loads the text summarization model."""
    return pipeline(
        "summarization",
        model="sshleifer/distilbart-cnn-12-6",
        device=-1  # Force CPU
    )


def load_translation_model():
    """Loads the English-to-French translation model."""
    return pipeline(
        "translation_en_to_fr",
        model="t5-small",
        device=-1  # Force CPU
    )


# New model loading functions

def load_question_answering_model():
    """Loads a question answering model."""
    return pipeline(
        "question-answering",
        model="distilbert-base-cased-distilled-squad",
        device=-1  # Force CPU
    )


def load_text_generation_model():
    """Loads a text generation model."""
    return pipeline(
        "text-generation",
        model="gpt2",
        device=-1  # Force CPU
    )


def load_ner_model():
    """Loads a named entity recognition model."""
    return pipeline(
        "ner",
        model="dbmdz/bert-large-cased-finetuned-conll03-english",
        aggregation_strategy="simple",
        device=-1  # Force CPU
    )


def load_text_classification_model():
    """Loads a zero-shot classification model."""
    return pipeline(
        "zero-shot-classification",
        model="facebook/bart-large-mnli",
        device=-1  # Force CPU
    )


# Remove text-to-sql model for now to fix the import error
# def load_text_to_sql_model():
#     """Loads a text-to-SQL model."""
#     return pipeline(
#         "text2text-generation",
#         model="mrm8488/t5-base-finetuned-wikiSQL",
#         device=-1  # Force CPU
#     )
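

# ---------------------------------------------------------------------------
# Minimal usage sketch (illustrative only): the loaders above are presumably
# called once at startup by whichever app module imports this file; that
# caller is not shown here, so the snippet below is just an assumption of how
# the pipelines might be exercised for a quick local smoke test.
if __name__ == "__main__":
    sentiment = load_sentiment_model()
    print(sentiment("I really enjoyed this movie!"))
    # e.g. [{'label': 'POSITIVE', 'score': 0.99...}]

    qa = load_question_answering_model()
    print(qa(question="Where does Sam live?",
             context="My name is Sam and I live in Paris."))
    # e.g. {'score': ..., 'start': ..., 'end': ..., 'answer': 'Paris'}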