feat: add Cohen's Kappa metric to classifier evaluation and output
This commit is contained in:
parent
e1f727831f
commit
5a9b8469bd
2 changed files with 5 additions and 2 deletions
1
main.py
1
main.py
|
|
@@ -95,6 +95,7 @@ def main():
|
|||
print(f"Training samples: {metrics['train_samples']}")
|
||||
print(f"Validation samples: {metrics['val_samples']}")
|
||||
print(f"Accuracy: {metrics['accuracy']:.2%}")
|
||||
print(f"Cohen's Kappa: {metrics['kappa']:.4f}")
|
||||
print(f"Classes: {metrics['classes']}")
|
||||
print()
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue