# modules/semantic/semantic_interface.py
# Required imports
import streamlit as st
from streamlit_float import *
from streamlit_antd_components import *
from streamlit.components.v1 import html
import io
from io import BytesIO
import base64
import matplotlib.pyplot as plt
import pandas as pd
import re
import logging

# Logger configuration
logger = logging.getLogger(__name__)

# Local imports
from .semantic_process import (
    process_semantic_input,
    format_semantic_results
)

from ..utils.widget_utils import generate_unique_key
from ..database.semantic_mongo_db import store_student_semantic_result
from ..database.semantics_export import export_user_interactions


def display_semantic_interface(lang_code, nlp_models, semantic_t):
    """
    Interfaz para el análisis semántico
    Args:
        lang_code: Código del idioma actual
        nlp_models: Modelos de spaCy cargados
        semantic_t: Diccionario de traducciones semánticas
    """
    try:
        # Initialize the input state
        input_key = f"semantic_input_{lang_code}"
        if input_key not in st.session_state:
            st.session_state[input_key] = ""
            
        if 'semantic_analysis_counter' not in st.session_state:
            st.session_state.semantic_analysis_counter = 0

        # Text input field with a unique key
        text_input = st.text_area(
            semantic_t.get('text_input_label', 'Enter text to analyze'),
            height=150,
            placeholder=semantic_t.get('text_input_placeholder', 'Enter your text here...'),
            value=st.session_state[input_key],
            key=f"semantic_text_area_{st.session_state.semantic_analysis_counter}"
        )

        # File upload option with a unique key
        uploaded_file = st.file_uploader(
            semantic_t.get('file_uploader', 'Or upload a text file'),
            type=['txt'],
            key=f"semantic_file_uploader_{st.session_state.semantic_analysis_counter}"
        )

        # Analysis button with a unique key
        analyze_button = st.button(
            semantic_t.get('analyze_button', 'Analyze text'),
            key=f"semantic_analyze_button_{st.session_state.semantic_analysis_counter}"
        )

        if analyze_button:
            if text_input or uploaded_file is not None:
                try:
                    with st.spinner(semantic_t.get('processing', 'Processing...')):
                        text_content = uploaded_file.getvalue().decode('utf-8') if uploaded_file else text_input
                        
                        analysis_result = process_semantic_input(
                            text_content, 
                            lang_code,
                            nlp_models,
                            semantic_t
                        )
                        
                        if analysis_result['success']:
                            st.session_state.semantic_result = analysis_result
                            st.session_state.semantic_analysis_counter += 1
                            
                            # Save to the database before displaying results
                            if store_student_semantic_result(
                                st.session_state.username,
                                text_content,
                                analysis_result['analysis']
                            ):
                                st.success(semantic_t.get('success_message', 'Analysis saved successfully'))
                                # Display results
                                display_semantic_results(
                                    analysis_result,
                                    lang_code,
                                    semantic_t
                                )
                            else:
                                st.error(semantic_t.get('error_message', 'Error saving analysis'))
                        else:
                            st.error(analysis_result['message'])
                except Exception as e:
                    logger.error(f"Error in semantic analysis: {str(e)}")
                    st.error(semantic_t.get('error_processing', f'Error processing text: {str(e)}'))
            else:
                st.warning(semantic_t.get('warning_message', 'Please enter text or upload a file'))
        
        # Show previous results
        elif 'semantic_result' in st.session_state and st.session_state.semantic_result is not None:
            display_semantic_results(
                st.session_state.semantic_result,
                lang_code,
                semantic_t
            )
        else:
            st.info(semantic_t.get('initial_message', 'Enter text to begin analysis'))

    except Exception as e:
        logger.error(f"General error in semantic interface: {str(e)}")
        st.error("An error occurred. Please try again.")

def display_semantic_results(result, lang_code, semantic_t):
    """
    Muestra los resultados del análisis semántico
    """
    if result is None or not result.get('success'):
        st.warning(semantic_t.get('no_results', 'No results available'))
        return

    analysis = result['analysis']
    
    # Display key concepts
    with st.expander(semantic_t.get('key_concepts', 'Key Concepts'), expanded=True):
        concept_text = " | ".join([
            f"{concept} ({frequency:.2f})" 
            for concept, frequency in analysis['key_concepts']
        ])
        st.write(concept_text)

    # Display the conceptual relations graph
    with st.expander(semantic_t.get('conceptual_relations', 'Conceptual Relations'), expanded=True):
        st.image(analysis['concept_graph'])

    # Display the entity graph
    with st.expander(semantic_t.get('entity_relations', 'Entity Relations'), expanded=True):
        st.image(analysis['entity_graph'])

    # Display identified entities
    if 'entities' in analysis:
        with st.expander(semantic_t.get('identified_entities', 'Identified Entities'), expanded=True):
            for entity_type, entities in analysis['entities'].items():
                st.subheader(entity_type)
                st.write(", ".join(entities))

    # Export button
    if st.button(semantic_t.get('export_button', 'Export Analysis'),
                 key=f"semantic_export_{st.session_state.semantic_analysis_counter}"):
        pdf_buffer = export_user_interactions(st.session_state.username, 'semantic')
        st.download_button(
            label=semantic_t.get('download_pdf', 'Download PDF'),
            data=pdf_buffer,
            file_name="semantic_analysis.pdf",
            mime="application/pdf",
            key=f"semantic_download_{st.session_state.semantic_analysis_counter}"
        )
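
# Usage sketch (illustrative only, kept as a comment so importing this module has no side
# effects). In the main Streamlit app this interface would be wired up roughly as follows;
# the helpers `load_nlp_models` and `get_translations` are assumptions for the example and
# are not defined in this file:
#
#     lang_code = st.session_state.get('lang_code', 'es')
#     nlp_models = load_nlp_models()                          # spaCy models keyed by language
#     semantic_t = get_translations(lang_code).get('SEMANTIC', {})
#     display_semantic_interface(lang_code, nlp_models, semantic_t)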