kernel_authorship/src/evaluation.py

from sklearn.metrics import accuracy_score, f1_score


def evaluation(y_true, y_pred):
    """Print and return accuracy, macro-F1, and micro-F1 for the given labels."""
    acc = accuracy_score(y_true, y_pred)
    macrof1 = f1_score(y_true, y_pred, average='macro')
    microf1 = f1_score(y_true, y_pred, average='micro')
    # Accuracy is reported as a percentage; both F1 scores stay in [0, 1].
    print(f'acc={acc * 100:.4f}%')
    print(f'macro-f1={macrof1:.4f}')
    print(f'micro-f1={microf1:.4f}')
    return acc, macrof1, microf1
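

# A minimal usage sketch (not part of the original file): the label lists below
# are hypothetical placeholders illustrating how evaluation() would be called.
if __name__ == '__main__':
    y_true = [0, 1, 2, 2, 1, 0]  # hypothetical ground-truth class labels
    y_pred = [0, 1, 2, 1, 1, 0]  # hypothetical predicted class labels
    evaluation(y_true, y_pred)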