From 7365253fc36c35a488ac498e5ddea3b3b8fbf558 Mon Sep 17 00:00:00 2001 From: abhi1nandy2 Date: Fri, 9 Apr 2021 12:38:07 +0530 Subject: [PATCH] Added classification report in `evaluate` function Shows tag-wise Precision, Recall, F1-Score, and Support --- main.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/main.py b/main.py index 3a8449e..dc30cc5 100644 --- a/main.py +++ b/main.py @@ -167,6 +167,20 @@ def evaluate(data, model, name, nbest=None): gold_results += gold_label decode_time = time.time() - start_time speed = len(instances)/decode_time + + gold_ = [] + pred_ = [] + + for it_ in gold_results: + for it_2 in it_: + gold_.append(it_2) + + for it_ in pred_results: + for it_2 in it_: + pred_.append(it_2) + + print(classification_report(gold_, pred_)) + acc, p, r, f = get_ner_fmeasure(gold_results, pred_results, data.tagScheme) if nbest: return speed, acc, p, r, f, nbest_pred_results, pred_scores