Explanation
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.tree import DecisionTreeClassifier, plot_tree
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score, confusion_matrix, ConfusionMatrixDisplay
from sklearn.decomposition import PCA
# Load the Pokémon dataset and keep the six base stats plus the primary type
data = pd.read_csv('Pokemon.csv')
data = data[['HP', 'Attack', 'Defense', 'Sp. Atk', 'Sp. Def', 'Speed', 'Type 1']]
data = data.dropna()
data['Type 1'] = data['Type 1'].astype('category')
data['Type Code'] = data['Type 1'].cat.codes  # Store the type labels as numeric codes
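The feature matrix X, the target y, the class names, and the train/test split are referenced by the functions below but are not visible in this export. A minimal sketch of what that cell presumably contains, assuming the six base stats as features, Type Code as the target, and an arbitrary 80/20 split:
# Assumed setup (not shown in the export): features, target, class names, and split
X = data[['HP', 'Attack', 'Defense', 'Sp. Atk', 'Sp. Def', 'Speed']]
y = data['Type Code']
type_names = data['Type 1'].cat.categories  # used later as class_names in plot_tree
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)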
def graficar_importancia(importances):
    plt.figure()
    plt.barh(X.columns, importances, color='skyblue')
    plt.xlabel("Importancia")
    plt.title("Importancia de Características")
    plt.grid(True)
    plt.show()
def graficar_precision_vs_profundidad(criterion):
    depths = range(1, 11)
    accuracies = []
    for d in depths:
        model = DecisionTreeClassifier(max_depth=d, criterion=criterion)
        model.fit(X_train, y_train)
        pred = model.predict(X_test)
        acc = accuracy_score(y_test, pred)
        accuracies.append(acc)
    plt.figure()
    plt.plot(depths, accuracies, marker='o', linestyle='--', color='orange')
    plt.xlabel('Profundidad del Árbol')
    plt.ylabel('Precisión')
    plt.title('Precisión vs Profundidad del Árbol')
    plt.grid(True)
    plt.show()
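This helper is not called in any visible cell; a call such as the following (hypothetical) would plot test accuracy for depths 1 through 10 under the Gini criterion:
graficar_precision_vs_profundidad('gini')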
def graficar_pca():
    pca = PCA(n_components=2)
    X_pca = pca.fit_transform(X)
    plt.figure(figsize=(8, 6))
    scatter = plt.scatter(X_pca[:, 0], X_pca[:, 1], c=y, cmap='tab20', alpha=0.6)
plt.xlabel("Componente Principal 1")
plt.ylabel("Componente Principal 2")
plt.title("Visualización PCA del dataset Pokémon")
plt.colorbar(scatter, label='Tipo')
plt.show()
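No call to this function appears in the visible cells either; invoking it directly, as below, would project the stats onto two principal components and color each Pokémon by its Type Code:
graficar_pca()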
# Train, evaluate, and plot for the hyperparameters chosen in the widgets
# (the enclosing function definition is implied by the call to entrenar_y_graficar below)
def entrenar_y_graficar(max_depth, criterion):
    # Train the model
    model = DecisionTreeClassifier(max_depth=max_depth, criterion=criterion)
    model.fit(X_train, y_train)

    # Evaluate on the held-out test set
    predictions = model.predict(X_test)
    accuracy = accuracy_score(y_test, predictions)
    print(f"\n✅ Precisión del modelo: {accuracy:.2f}")

    # Feature importances
    importances = model.feature_importances_
    graficar_importancia(importances)

    # Plot the decision tree (only if it has more than a single node)
    if model.tree_.node_count > 1:
        plt.figure(figsize=(20, 10))
        plot_tree(model, feature_names=X.columns, class_names=type_names.astype(str), filled=True)
        plt.title("Árbol de Decisión")
        plt.show()
    else:
        print("⚠️ El árbol es demasiado simple para graficar. Aumenta la profundidad.")
def on_button_click(b):
    plt.close('all')  # Close any previous figures
    entrenar_y_graficar(max_depth_slider.value, criterion_dropdown.value)

button.on_click(on_button_click)
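Presumably the controls are then rendered with IPython's display, roughly:
from IPython.display import display
display(max_depth_slider, criterion_dropdown, button)  # assumed; renders the controls shown below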
[Rendered widget output: "Max Depth" slider at 5, "Criterion" dropdown set to gini, "Generar Árbol" button]