# NOTE(review): removed stray page-scrape artifacts ("Spaces:", "Runtime error")
# that preceded the script and were not valid Python.
# -*- coding: utf-8 -*-
"""Copia de Training_fruit_vegetable.ipynb

Automatically generated by Colaboratory.

Original file is located at
    https://colab.research.google.com/drive/1h-zNQjkVokq9MDVJb61PPvYb4f1eJcF6

Trains a CNN to recognise fruit and vegetable images stored on Google Drive
(the notebook mounts the drive before this script runs).
"""

# Library imports
import tensorflow as tf
import matplotlib.pyplot as plt
"""Preprocesamiento de datos""" | |
#Preprocesamiento de imagenes del conjunto de entrenamiento | |
training_set = tf.keras.utils.image_dataset_from_directory( | |
'/content/drive/MyDrive/TallerIII/FruitTrainingDataset/train', | |
labels="inferred", | |
label_mode="categorical", | |
class_names=None, | |
color_mode="rgb", | |
batch_size=32, | |
image_size=(64, 64), | |
shuffle=True, | |
seed=None, | |
validation_split=None, | |
subset=None, | |
interpolation="bilinear", | |
follow_links=False, | |
crop_to_aspect_ratio=False | |
) | |
# --- Data preprocessing: validation set ---
# Same loading pipeline as the training set, pointed at the validation
# directory; non-default arguments only.
validation_set = tf.keras.utils.image_dataset_from_directory(
    '/content/drive/MyDrive/TallerIII/FruitTrainingDataset/validation',
    labels="inferred",
    label_mode="categorical",
    color_mode="rgb",
    batch_size=32,
    image_size=(64, 64),
    shuffle=True,
    interpolation="bilinear",
)
"""Crear el modelo""" | |
model = tf.keras.models.Sequential() | |
"""Capa de convoluci贸n""" | |
model.add(tf.keras.layers.Conv2D(filters=32,kernel_size=3,padding='same',activation='relu',input_shape=[64,64,3])) | |
model.add(tf.keras.layers.Conv2D(filters=32,kernel_size=3,activation='relu')) | |
model.add(tf.keras.layers.MaxPool2D(pool_size=2,strides=2)) | |
model.add(tf.keras.layers.Dropout(0.25)) | |
model.add(tf.keras.layers.Conv2D(filters=64,kernel_size=3,padding='same',activation='relu')) | |
model.add(tf.keras.layers.Conv2D(filters=64,kernel_size=3,activation='relu')) | |
model.add(tf.keras.layers.MaxPool2D(pool_size=2,strides=2)) | |
model.add(tf.keras.layers.Dropout(0.25)) | |
model.add(tf.keras.layers.Flatten()) | |
model.add(tf.keras.layers.Dense(units=512,activation='relu')) | |
model.add(tf.keras.layers.Dense(units=256,activation='relu')) | |
model.add(tf.keras.layers.Dropout(0.5)) #To avoid overfitting | |
#Output Layer | |
model.add(tf.keras.layers.Dense(units=36,activation='softmax')) | |
"""Capas del modelo""" | |
model.summary() | |
"""Compilaci贸n del modelo""" | |
model.compile(optimizer='adam',loss='categorical_crossentropy',metrics=['accuracy',"mean_absolute_error","Precision","Recall",tf.keras.metrics.AUC()]) | |
"""Configuraci贸n tensorboard""" | |
# Commented out IPython magic to ensure Python compatibility. | |
from tensorflow.keras.callbacks import ModelCheckpoint | |
import tensorflow as tf | |
import datetime | |
# %load_ext tensorboard | |
#!rm -rf ./logs/ | |
log_dir = "logs/fit/" + datetime.datetime.now().strftime("%Y%m%d-%H%M%S") | |
tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=log_dir, histogram_freq=1) | |
"""Ruta""" | |
ruta_rfv="/content/drive/MyDrive/TallerIII/Reconocimiento_frutas_verduras" | |
"""Punto de control del modelo y devoluci贸n de llamada""" | |
checkpoint_callback = ModelCheckpoint( | |
filepath=ruta_rfv + '/peso2/weights.{epoch:1d}.h5', | |
save_weights_only=True, | |
save_best_only=False, | |
verbose=1 | |
) | |
"""Entrenamiento del modelo""" | |
epochs = 10 | |
history=model.fit(x=training_set,validation_data=validation_set,epochs=epochs, callbacks=[tensorboard_callback, checkpoint_callback]) | |
import os
import re

# --- Resume training from the most recent checkpoint ---

def _latest_epoch(weights_dir):
    """Return the highest epoch number among weights.<N>.h5 files in
    *weights_dir*, or None when no checkpoint file is present."""
    epochs_found = []
    for filename in os.listdir(weights_dir):
        # Only consider the checkpoint files written by ModelCheckpoint.
        if filename.startswith("weights.") and filename.endswith(".h5"):
            match = re.search(r'\d+', filename)
            if match:
                epochs_found.append(int(match.group()))
    print(epochs_found)
    return max(epochs_found) if epochs_found else None

# BUG FIX: the original hard-coded ".../Taller3/..." here while every other
# path in this file uses ruta_rfv (".../TallerIII/..."), so it scanned a
# directory that does not contain the checkpoints. Derive it from ruta_rfv.
dir_path = ruta_rfv + '/peso2'

max_num = _latest_epoch(dir_path)
print(max_num)

# BUG FIX: when no checkpoint existed, max_num was None, producing the path
# "weights.None.h5" (load_weights crash) and initial_epoch=None (fit crash).
# Guard the resume step instead.
if max_num is not None:
    max_num_string = str(max_num)
    print(max_num_string)
    # Path of the weights file from the last completed epoch.
    ruta = ruta_rfv + '/peso2/weights.' + max_num_string + '.h5'
    print(ruta)
    # Load the latest weights and continue training from that epoch using
    # the initial_epoch parameter.
    model.load_weights(ruta)
    history = model.fit(
        x=training_set,
        validation_data=validation_set,
        epochs=epochs,
        initial_epoch=max_num,
        callbacks=[tensorboard_callback, checkpoint_callback],
    )
else:
    print("No checkpoint files found in", dir_path, "- skipping resume step.")
"""Curvas de entrenamiento""" | |
# Commented out IPython magic to ensure Python compatibility. | |
# %tensorboard --logdir logs/fit | |
"""Evaluar el modelo entrenado""" | |
#Precisi贸n del conjunto de entrenamiento | |
train_loss, train_acc = model.evaluate(training_set) | |
print('Training accuracy:', train_acc) | |
#Precisi贸n del conjunto de validaci贸n | |
val_loss, val_acc = model.evaluate(validation_set) | |
print('Validation accuracy:', val_acc) | |
"""Guardar el modelo""" | |
ruta_modelo=ruta_rfv + "/modelo/modeloRFV.h5" | |
model.save(ruta_modelo) | |
"""Guardar pesos""" | |
ruta_pesos=ruta_rfv + "/modelo/pesosRFV.h5" | |
model.save_weights(ruta_pesos) |