feat: add Cohen's Kappa metric to classifier evaluation and output

Andrew 2026-03-15 12:33:10 +07:00
parent e1f727831f
commit 5a9b8469bd
2 changed files with 5 additions and 2 deletions

@@ -95,6 +95,7 @@ def main():
     print(f"Training samples: {metrics['train_samples']}")
     print(f"Validation samples: {metrics['val_samples']}")
     print(f"Accuracy: {metrics['accuracy']:.2%}")
+    print(f"Cohen's Kappa: {metrics['kappa']:.4f}")
     print(f"Classes: {metrics['classes']}")
     print()

@@ -6,7 +6,7 @@ import numpy as np
 import rasterio
 from rasterio.transform import from_bounds
 from sklearn.model_selection import train_test_split
-from sklearn.metrics import classification_report, accuracy_score, confusion_matrix
+from sklearn.metrics import classification_report, accuracy_score, confusion_matrix, cohen_kappa_score
 from .data import RasterData, VectorData, load_raster, load_vector, extract_raster_values_by_polygons
 from .strategies import ClassificationStrategy
@@ -80,11 +80,13 @@ class GISClassifier:
         # Evaluate
         y_pred = self.strategy.predict(X_val)
         accuracy = accuracy_score(y_val, y_pred)
+        kappa = cohen_kappa_score(y_val, y_pred)
         return {
             "train_samples": len(X_train),
             "val_samples": len(X_val),
             "accuracy": accuracy,
+            "kappa": kappa,
             "classes": list(self._classes),
         }
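
Note: Cohen's kappa measures agreement between predicted and true labels corrected for chance agreement, via kappa = (p_o - p_e) / (1 - p_e); a value of 1 means perfect agreement and 0 means chance-level agreement, which makes it more informative than raw accuracy when class frequencies are imbalanced, as land-cover classes often are. The following standalone sketch (the y_val/y_pred arrays are made-up illustration data, not taken from this repo) reproduces sklearn's cohen_kappa_score from the confusion matrix:

import numpy as np
from sklearn.metrics import cohen_kappa_score, confusion_matrix

# Hypothetical labels, for illustration only (not from this repo).
y_val = np.array([0, 0, 1, 1, 2, 2, 2, 0])
y_pred = np.array([0, 1, 1, 1, 2, 2, 0, 0])

# kappa = (p_o - p_e) / (1 - p_e), where p_o is the observed agreement
# (diagonal of the confusion matrix) and p_e is the agreement expected
# by chance from the row/column marginals.
cm = confusion_matrix(y_val, y_pred)
n = cm.sum()
p_o = np.trace(cm) / n
p_e = (cm.sum(axis=0) * cm.sum(axis=1)).sum() / n**2
kappa_manual = (p_o - p_e) / (1 - p_e)

assert np.isclose(kappa_manual, cohen_kappa_score(y_val, y_pred))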