# modules/semantic/semantic_interface.py
import streamlit as st
from streamlit_float import *
from streamlit_antd_components import *
from streamlit.components.v1 import html
import io
from io import BytesIO
import base64
import matplotlib.pyplot as plt
import pandas as pd
import re
import logging

from .semantic_process import (
    process_semantic_input,
    format_semantic_results
)
from ..utils.widget_utils import generate_unique_key
from ..database.semantic_mongo_db import store_student_semantic_result
from ..database.semantic_export import export_user_interactions

logger = logging.getLogger(__name__)

def display_semantic_interface(lang_code, nlp_models, semantic_t):
    """
    Interface for semantic analysis.

    Args:
        lang_code: Current language code
        nlp_models: Loaded spaCy models
        semantic_t: Dictionary of semantic translations
    """
    # Initialize the input state
    input_key = f"semantic_input_{lang_code}"
    if input_key not in st.session_state:
        st.session_state[input_key] = ""

    # Initialize the analysis counter if it does not exist
    if 'semantic_analysis_counter' not in st.session_state:
        st.session_state.semantic_analysis_counter = 0

    # Text input field
    text_input = st.text_area(
        semantic_t.get('text_input_label', 'Enter text to analyze'),
        height=150,
        placeholder=semantic_t.get('text_input_placeholder', 'Enter your text here...'),
        value=st.session_state[input_key],
        key=f"text_area_{lang_code}_{st.session_state.semantic_analysis_counter}"
    )

    # Option to upload a file
    uploaded_file = st.file_uploader(
        semantic_t.get('file_uploader', 'Or upload a text file'),
        type=['txt'],
        key=f"file_uploader_{lang_code}_{st.session_state.semantic_analysis_counter}"
    )

    if st.button(
        semantic_t.get('analyze_button', 'Analyze text'),
        key=f"analyze_button_{lang_code}_{st.session_state.semantic_analysis_counter}"
    ):
        if text_input or uploaded_file is not None:
            try:
                with st.spinner(semantic_t.get('processing', 'Processing...')):
                    # Get the text to analyze; an uploaded file takes precedence
                    text_content = uploaded_file.getvalue().decode('utf-8') if uploaded_file else text_input

                    # Run the analysis
                    analysis_result = process_semantic_input(
                        text_content,
                        lang_code,
                        nlp_models,
                        semantic_t
                    )

                    # Store the result in session state
                    st.session_state.semantic_result = analysis_result
                    st.session_state.semantic_analysis_counter += 1

                    # Display the results
                    display_semantic_results(
                        st.session_state.semantic_result,
                        lang_code,
                        semantic_t
                    )
            except Exception as e:
                logger.error(f"Error in semantic analysis: {str(e)}")
                st.error(semantic_t.get('error_processing', f'Error processing text: {str(e)}'))
        else:
            st.warning(semantic_t.get('warning_message', 'Please enter text or upload a file'))

    # If the button was not pressed, check for previous results
    elif 'semantic_result' in st.session_state and st.session_state.semantic_result is not None:
        display_semantic_results(
            st.session_state.semantic_result,
            lang_code,
            semantic_t
        )
    else:
        st.info(semantic_t.get('initial_message', 'Enter text to begin analysis'))
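
# ---------------------------------------------------------------------------
# Expected result shape (an assumption inferred from the rendering code
# below; the authoritative structure is built by
# semantic_process.process_semantic_input, which is not shown here):
#
#   {
#       'success': True,
#       'analysis': {
#           'key_concepts': [('concept', 0.87), ...],   # (term, frequency) pairs
#           'concept_graph': ...,                        # anything st.image accepts
#           'entity_graph': ...,                         # anything st.image accepts
#           'entities': {'PERSON': ['Ada', ...], ...},   # optional
#       },
#   }
# ---------------------------------------------------------------------------
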
def display_semantic_results(result, lang_code, semantic_t):
    """
    Displays the results of the semantic analysis.

    Args:
        result: Analysis results
        lang_code: Language code
        semantic_t: Dictionary of translations
    """
    if result is None or not result['success']:
        st.warning(semantic_t.get('no_results', 'No results available'))
        return

    analysis = result['analysis']

    # Show key concepts
    with st.expander(semantic_t.get('key_concepts', 'Key Concepts'), expanded=True):
        concept_text = " | ".join([
            f"{concept} ({frequency:.2f})"
            for concept, frequency in analysis['key_concepts']
        ])
        st.write(concept_text)

    # Show the conceptual relations graph
    with st.expander(semantic_t.get('conceptual_relations', 'Conceptual Relations'), expanded=True):
        st.image(analysis['concept_graph'])

    # Show the entity relations graph
    with st.expander(semantic_t.get('entity_relations', 'Entity Relations'), expanded=True):
        st.image(analysis['entity_graph'])

    # Show the identified entities
    if 'entities' in analysis:
        with st.expander(semantic_t.get('identified_entities', 'Identified Entities'), expanded=True):
            for entity_type, entities in analysis['entities'].items():
                st.subheader(entity_type)
                st.write(", ".join(entities))

    # Export button
    if st.button(semantic_t.get('export_button', 'Export Analysis')):
        pdf_buffer = export_user_interactions(st.session_state.username, 'semantic')
        st.download_button(
            label=semantic_t.get('download_pdf', 'Download PDF'),
            data=pdf_buffer,
            file_name="semantic_analysis.pdf",
            mime="application/pdf"
        )
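
# ---------------------------------------------------------------------------
# Usage sketch (an assumption, not part of the original module): how a main
# Streamlit page might wire this interface up. It is left commented out
# because the relative imports above require this file to be imported as part
# of the `modules` package. `load_nlp_models` and `SEMANTIC_TRANSLATIONS` are
# hypothetical names; the real app presumably loads its spaCy pipelines and
# translation dictionaries elsewhere.
#
#   import streamlit as st
#   import spacy
#   from modules.semantic.semantic_interface import display_semantic_interface
#
#   @st.cache_resource
#   def load_nlp_models():
#       # Hypothetical loader: one spaCy pipeline per supported language.
#       return {
#           'en': spacy.load('en_core_web_sm'),
#           'es': spacy.load('es_core_news_sm'),
#       }
#
#   SEMANTIC_TRANSLATIONS = {'en': {}, 'es': {}}  # hypothetical; empty dicts
#                                                 # fall back to the defaults
#                                                 # baked into semantic_t.get()
#
#   st.session_state.setdefault('username', 'demo_user')  # needed for export
#   lang_code = st.sidebar.selectbox('Language', ['en', 'es'])
#   display_semantic_interface(lang_code, load_nlp_models(),
#                              SEMANTIC_TRANSLATIONS[lang_code])
# ---------------------------------------------------------------------------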