# Source: v4/modules/semantic/semantic_interface.py
# (exported from hosted repo, commit c7330d5, 6.87 kB)
#modules/semantic/semantic_interface.py
# Importaciones necesarias
import streamlit as st
from streamlit_float import *
from streamlit_antd_components import *
from streamlit.components.v1 import html
import io
from io import BytesIO
import base64
import matplotlib.pyplot as plt
import pandas as pd
import re
import logging
# Configuración del logger
logger = logging.getLogger(__name__)
# Importaciones locales
from .semantic_process import (
process_semantic_input,
format_semantic_results
)
from ..utils.widget_utils import generate_unique_key
from ..database.semantic_mongo_db import store_student_semantic_result
from ..database.semantics_export import export_user_interactions
def display_semantic_interface(lang_code, nlp_models, semantic_t):
    """
    Render the semantic-analysis interface: text input, optional file upload,
    the analysis trigger, and the resulting display.

    Args:
        lang_code: Current language code (scopes the input state and selects the spaCy model)
        nlp_models: Loaded spaCy models, keyed by language code
        semantic_t: Dict of translated UI strings for the semantic module
    """
    try:
        # Per-language key so switching languages keeps separate draft inputs
        input_key = f"semantic_input_{lang_code}"
        if input_key not in st.session_state:
            st.session_state[input_key] = ""

        # The counter is baked into every widget key so each completed
        # analysis forces Streamlit to create fresh widgets on rerun.
        if 'semantic_analysis_counter' not in st.session_state:
            st.session_state.semantic_analysis_counter = 0

        text_input = st.text_area(
            semantic_t.get('text_input_label', 'Enter text to analyze'),
            height=150,
            placeholder=semantic_t.get('text_input_placeholder', 'Enter your text here...'),
            value=st.session_state[input_key],
            key=f"semantic_text_area_{st.session_state.semantic_analysis_counter}"
        )

        uploaded_file = st.file_uploader(
            semantic_t.get('file_uploader', 'Or upload a text file'),
            type=['txt'],
            key=f"semantic_file_uploader_{st.session_state.semantic_analysis_counter}"
        )

        analyze_button = st.button(
            semantic_t.get('analyze_button', 'Analyze text'),
            key=f"semantic_analyze_button_{st.session_state.semantic_analysis_counter}"
        )

        if analyze_button:
            if text_input or uploaded_file is not None:
                try:
                    with st.spinner(semantic_t.get('processing', 'Processing...')):
                        # An uploaded file takes precedence over the text area
                        text_content = uploaded_file.getvalue().decode('utf-8') if uploaded_file else text_input

                        analysis_result = process_semantic_input(
                            text_content,
                            lang_code,
                            nlp_models,
                            semantic_t
                        )

                        if analysis_result['success']:
                            st.session_state.semantic_result = analysis_result
                            st.session_state.semantic_analysis_counter += 1

                            # Persist before displaying, but a failed save must
                            # not hide the results the user just computed: show
                            # them in either branch and only vary the status line.
                            if store_student_semantic_result(
                                st.session_state.username,
                                text_content,
                                analysis_result['analysis']
                            ):
                                st.success(semantic_t.get('success_message', 'Analysis saved successfully'))
                            else:
                                st.error(semantic_t.get('error_message', 'Error saving analysis'))

                            display_semantic_results(
                                analysis_result,
                                lang_code,
                                semantic_t
                            )
                        else:
                            # Processing-level failure reported by the pipeline
                            st.error(analysis_result['message'])

                except Exception as e:
                    logger.error(f"Error en análisis semántico: {str(e)}")
                    st.error(semantic_t.get('error_processing', f'Error processing text: {str(e)}'))
            else:
                st.warning(semantic_t.get('warning_message', 'Please enter text or upload a file'))

        # No new analysis requested: re-display the previous result, if any
        elif 'semantic_result' in st.session_state and st.session_state.semantic_result is not None:
            display_semantic_results(
                st.session_state.semantic_result,
                lang_code,
                semantic_t
            )
        else:
            st.info(semantic_t.get('initial_message', 'Enter text to begin analysis'))

    except Exception as e:
        # Top-level boundary: log and show a generic, user-facing error
        logger.error(f"Error general en interfaz semántica: {str(e)}")
        st.error("Se produjo un error. Por favor, intente de nuevo.")
def display_semantic_results(result, lang_code, semantic_t):
    """
    Render the results of a semantic analysis.

    Args:
        result: Dict produced by process_semantic_input; expected keys are
                'success' and 'analysis' (with 'key_concepts', 'concept_graph',
                'entity_graph' and optionally 'entities').
        lang_code: Current language code (unused here; kept for interface symmetry)
        semantic_t: Dict of translated UI strings
    """
    # Guard with .get so a malformed result dict cannot raise KeyError
    # from inside the guard itself.
    if result is None or not result.get('success'):
        st.warning(semantic_t.get('no_results', 'No results available'))
        return

    analysis = result['analysis']

    # Key concepts rendered as "concept (weight)" separated by pipes;
    # .get(..., []) keeps the expander harmless when the key is absent.
    with st.expander(semantic_t.get('key_concepts', 'Key Concepts'), expanded=True):
        concept_text = " | ".join(
            f"{concept} ({frequency:.2f})"
            for concept, frequency in analysis.get('key_concepts', [])
        )
        st.write(concept_text)

    # Concept-relation graph (image produced by the analysis step)
    with st.expander(semantic_t.get('conceptual_relations', 'Conceptual Relations'), expanded=True):
        st.image(analysis['concept_graph'])

    # Entity-relation graph
    with st.expander(semantic_t.get('entity_relations', 'Entity Relations'), expanded=True):
        st.image(analysis['entity_graph'])

    # Named entities grouped by type
    if 'entities' in analysis:
        with st.expander(semantic_t.get('identified_entities', 'Identified Entities'), expanded=True):
            for entity_type, entities in analysis['entities'].items():
                st.subheader(entity_type)
                st.write(", ".join(entities))

    # The counter may not exist yet if this is called outside the main
    # interface flow; default to 0 instead of raising AttributeError.
    counter = st.session_state.get('semantic_analysis_counter', 0)
    if st.button(semantic_t.get('export_button', 'Export Analysis'),
                 key=f"semantic_export_{counter}"):
        pdf_buffer = export_user_interactions(st.session_state.username, 'semantic')
        st.download_button(
            label=semantic_t.get('download_pdf', 'Download PDF'),
            data=pdf_buffer,
            file_name="semantic_analysis.pdf",
            mime="application/pdf",
            key=f"semantic_download_{counter}"
        )