
'''
Module for classification metrics
https://en.wikipedia.org/wiki/Confusion_matrix

Includes the base class and derived metrics following the nomenclature:
    ConstraintValueMetric
Where:
    Constraint is the lookup criterion (ex: FPR in ROC curve)
    Value is the desired value (ex: TPR in ROC curve)

This module is organized by metric and prediction dependencies:
    1) Base classes with methods and utilities
    2) Aggregate metrics (single value output)
        2a) Single values computed via the predict method (operating points)
        2b) Single values computed via the proba method (aggregated over the curve)
    3) Curve metrics (constraint: value)
        3a) Threshold: confusion matrix metrics
        3b) Confusion matrix metrics: threshold or other metrics
'''
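
# Nomenclature example (illustrative, not part of the original docstring):
# FprTprMetric scores the ROC curve as a constraint -> value mapping, i.e.
# metric.values == {fpr: best tpr achievable at that fpr}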

from simpleml.metrics.base_metric import Metric
from simpleml import TRAIN_SPLIT, VALIDATION_SPLIT, TEST_SPLIT
from sklearn.metrics import confusion_matrix, roc_auc_score, accuracy_score, f1_score
import numpy as np
import pandas as pd


__author__ = 'Elisha Yadgaran'


############################### BASE ###############################

class ClassificationMetric(Metric):
    '''
    TODO: Figure out multiclass generalizations
    '''

    def __init__(self, dataset_split, **kwargs):
        '''
        :param dataset_split: string denoting which dataset split to use
            can be one of: `TRAIN`, `VALIDATION`, Other. Other gets no prefix
            Default is train split to stay consistent with no split mapping
            to Train in Pipeline
        '''
        name = kwargs.pop('name', '')
        self.dataset_split = dataset_split

        # Explicitly call out in sample or validation metrics
        if dataset_split == TRAIN_SPLIT:
            name = 'in_sample_' + name
        elif dataset_split == VALIDATION_SPLIT:
            name = 'validation_' + name

        super(ClassificationMetric, self).__init__(name=name, **kwargs)

    @property
    def labels(self):
        return self.model.get_labels(dataset_split=self.dataset_split)

    @property
    def probabilities(self):
        return self.model.predict_proba(X=None, dataset_split=self.dataset_split)

    @property
    def predictions(self):
        return self.model.predict(X=None, dataset_split=self.dataset_split)

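# Naming behavior (illustrative): the dataset split prefixes the metric name,
# e.g. AccuracyMetric(dataset_split=TRAIN_SPLIT) is named
# 'in_sample_classification_accuracy', while TEST_SPLIT gets no prefix.
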
class BinaryClassificationMetric(ClassificationMetric):
    @property
    def probabilities(self):
        probabilities = self.model.predict_proba(X=None, dataset_split=self.dataset_split)

        if len(probabilities.shape) > 1 and probabilities.shape[1] > 1:
            # Indicates multiple class probabilities are returned (class_0, class_1)
            probabilities = probabilities[:, 1]

        return probabilities

    @property
    def predictions(self):
        predictions = self.model.predict(X=None, dataset_split=self.dataset_split)

        if len(predictions.shape) > 1 and predictions.shape[1] > 1:
            # Indicates multiple class predictions are returned (class_0, class_1)
            predictions = predictions[:, 1]

        return predictions

    @property
    def confusion_matrix(self):
        '''
        Property method to return (or generate) dataframe of confusion
        matrix at each threshold
        '''
        if not hasattr(self, '_confusion_matrix') or self._confusion_matrix is None:
            self.create_confusion_matrix()

        return self._confusion_matrix

    def create_confusion_matrix(self):
        '''
        Iterate through each threshold and compute confusion matrix
        '''
        # Thresholds to compute confusion matrix at (default every 0.005 increment)
        thresholds = np.linspace(0, 1, 201)
        probabilities = self.probabilities
        labels = self.labels

        results = []
        for threshold in thresholds:
            predictions = np.where(probabilities >= threshold, 1, 0)
            tn, fp, fn, tp = confusion_matrix(labels, predictions).ravel()
            results.append((threshold, tn, fp, fn, tp))

        self._confusion_matrix = pd.DataFrame(results, columns=['threshold', 'tn', 'fp', 'fn', 'tp'])
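
    # Worked example (illustrative): with labels [0, 1, 1] and probabilities
    # [0.2, 0.6, 0.9], the predictions at threshold 0.5 are [0, 1, 1], so that
    # row of the dataframe is (threshold, tn, fp, fn, tp) = (0.5, 1, 0, 0, 2).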

    @staticmethod
    def dedupe_curve(keys, values, maximize=True, round_places=3):
        '''
        Method to deduplicate multiple values for the same key on a curve
        (ex multiple thresholds with the same fpr and different tpr for roc)

        :param maximize: Boolean, whether to choose the maximum value for
            each unique key or the minimum
        '''
        # Round arbitrary decimal places to dedupe
        keys = [round(i, round_places) for i in keys]
        values = [round(i, round_places) for i in values]

        df = pd.DataFrame(list(zip(keys, values)), columns=['keys', 'values'])
        agg = 'max' if maximize else 'min'

        return df.groupby('keys').agg({'values': agg}).to_dict()['values']
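
    # Example (illustrative): keys that round to the same value collapse to a
    # single entry, keeping the max (or min, per `maximize`) of their values:
    #   >>> BinaryClassificationMetric.dedupe_curve([0.1, 0.1, 0.2], [0.5, 0.7, 0.9], maximize=True)
    #   {0.1: 0.7, 0.2: 0.9}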

    @property
    def thresholds(self):
        '''
        Convenience property for the probability thresholds
        '''
        return self.confusion_matrix.threshold

    @property
    def true_positive_rate(self):
        '''
        Convenience property for the True Positive Rate (TP/TP+FN)
        '''
        return self.confusion_matrix.tp / (self.confusion_matrix.tp + self.confusion_matrix.fn)

    @property
    def false_positive_rate(self):
        '''
        Convenience property for the False Positive Rate (FP/FP+TN)
        '''
        return self.confusion_matrix.fp / (self.confusion_matrix.fp + self.confusion_matrix.tn)

    @property
    def true_negative_rate(self):
        '''
        Convenience property for the True Negative Rate (TN/FP+TN)
        '''
        return self.confusion_matrix.tn / (self.confusion_matrix.fp + self.confusion_matrix.tn)

    @property
    def false_negative_rate(self):
        '''
        Convenience property for the False Negative Rate (FN/TP+FN)
        '''
        return self.confusion_matrix.fn / (self.confusion_matrix.tp + self.confusion_matrix.fn)

    @property
    def false_discovery_rate(self):
        '''
        Convenience property for the False Discovery Rate (FP/FP+TP)
        '''
        return self.confusion_matrix.fp / (self.confusion_matrix.fp + self.confusion_matrix.tp)

    @property
    def false_omission_rate(self):
        '''
        Convenience property for the False Omission Rate (FN/TN+FN)
        '''
        return self.confusion_matrix.fn / (self.confusion_matrix.tn + self.confusion_matrix.fn)

    @property
    def positive_predictive_value(self):
        '''
        Convenience property for the Positive Predictive Value (TP/FP+TP)
        '''
        return self.confusion_matrix.tp / (self.confusion_matrix.fp + self.confusion_matrix.tp)

    @property
    def negative_predictive_value(self):
        '''
        Convenience property for the Negative Predictive Value (TN/TN+FN)
        '''
        return self.confusion_matrix.tn / (self.confusion_matrix.tn + self.confusion_matrix.fn)

    @property
    def predicted_positive_rate(self):
        '''
        Convenience property for the Predicted Positive Rate (TP+FP/TP+FP+TN+FN)
        '''
        return (self.confusion_matrix.tp + self.confusion_matrix.fp) /\
            (self.confusion_matrix.fp + self.confusion_matrix.tn + self.confusion_matrix.tp + self.confusion_matrix.fn)

    @property
    def predicted_negative_rate(self):
        '''
        Convenience property for the Predicted Negative Rate (TN+FN/TP+FP+TN+FN)
        '''
        return (self.confusion_matrix.tn + self.confusion_matrix.fn) /\
            (self.confusion_matrix.fp + self.confusion_matrix.tn + self.confusion_matrix.tp + self.confusion_matrix.fn)

    @property
    def accuracy(self):
        '''
        Convenience property for the Accuracy Rate (TP+TN/TP+FP+TN+FN)
        '''
        return (self.confusion_matrix.tp + self.confusion_matrix.tn) /\
            (self.confusion_matrix.fp + self.confusion_matrix.tn + self.confusion_matrix.tp + self.confusion_matrix.fn)

    @property
    def f1(self):
        '''
        Convenience property for the F1 Score (2*TP/2*TP+FP+FN)
        '''
        return (2.0 * self.confusion_matrix.tp) /\
            (2.0 * self.confusion_matrix.tp + self.confusion_matrix.fp + self.confusion_matrix.fn)

    @property
    def matthews_correlation_coefficient(self):
        '''
        Convenience property for the Matthews Correlation Coefficient
        (TP*TN-FP*FN/((FP+TP)*(TP+FN)*(TN+FP)*(TN+FN))^0.5)
        '''
        return (self.confusion_matrix.tp * self.confusion_matrix.tn - self.confusion_matrix.fp * self.confusion_matrix.fn) /\
            ((self.confusion_matrix.fp + self.confusion_matrix.tp) * (self.confusion_matrix.tp + self.confusion_matrix.fn) *
             (self.confusion_matrix.tn + self.confusion_matrix.fp) * (self.confusion_matrix.tn + self.confusion_matrix.fn))**0.5

    @property
    def informedness(self):
        '''
        Convenience property for the Informedness (TPR+TNR-1)
        '''
        return self.true_positive_rate + self.true_negative_rate - 1

    @property
    def markedness(self):
        '''
        Convenience property for the Markedness (PPV+NPV-1)
        '''
        return self.positive_predictive_value + self.negative_predictive_value - 1
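
    # Worked example (illustrative): at a threshold with tn=50, fp=10, fn=5, tp=35:
    #   true_positive_rate = 35 / (35 + 5)  = 0.875
    #   true_negative_rate = 50 / (10 + 50) ~ 0.833
    #   informedness       = 0.875 + 0.833 - 1 ~ 0.708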

############################### AGGREGATE METRICS ###############################

''' Pointwise metrics using only the predict scoring method (fixed operating point) '''

class AccuracyMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'classification_accuracy'
        super(AccuracyMetric, self).__init__(**kwargs)

    def score(self):
        predictions = self.predictions
        labels = self.labels
        accuracy = accuracy_score(y_true=labels, y_pred=predictions)

        self.values = {'agg': accuracy}

class TprMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'tpr'
        super(TprMetric, self).__init__(**kwargs)

    def score(self):
        predictions = self.predictions
        labels = self.labels
        tn, fp, fn, tp = confusion_matrix(labels, predictions).ravel()
        tpr = float(tp) / (tp + fn)

        self.values = {'agg': tpr}

class FprMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fpr'
        super(FprMetric, self).__init__(**kwargs)

    def score(self):
        predictions = self.predictions
        labels = self.labels
        tn, fp, fn, tp = confusion_matrix(labels, predictions).ravel()
        fpr = float(fp) / (fp + tn)

        self.values = {'agg': fpr}

class F1ScoreMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'f1_score'
        super(F1ScoreMetric, self).__init__(**kwargs)

    def score(self):
        predictions = self.predictions
        labels = self.labels
        f1_score_ = f1_score(y_true=labels, y_pred=predictions)

        self.values = {'agg': f1_score_}

''' Aggregate metrics computed by evaluating over entire curves (Requires proba method) '''

class RocAucMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'roc_auc'
        super(RocAucMetric, self).__init__(**kwargs)

    def score(self):
        probabilities = self.probabilities
        labels = self.labels
        auc = roc_auc_score(y_true=labels, y_score=probabilities)

        self.values = {'agg': auc}
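
# Usage sketch (hypothetical wiring -- the real SimpleML persistence flow may
# differ; the metric only requires that `self.model` expose get_labels/
# predict/predict_proba for the chosen split):
#   metric = RocAucMetric(dataset_split=TEST_SPLIT)
#   metric.add_model(fitted_model)  # assumed helper on the base Metric
#   metric.score()
#   metric.values                   # e.g. {'agg': 0.97}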

############################### CURVE METRICS ###############################

''' Threshold Constrained Metrics '''

class ThresholdTprMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'threshold_tpr_curve'
        super(ThresholdTprMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.thresholds, self.true_positive_rate, maximize=True)


class ThresholdTnrMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'threshold_tnr_curve'
        super(ThresholdTnrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.thresholds, self.true_negative_rate, maximize=True)


class ThresholdFnrMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'threshold_fnr_curve'
        super(ThresholdFnrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.thresholds, self.false_negative_rate, maximize=False)


class ThresholdFprMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'threshold_fpr_curve'
        super(ThresholdFprMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.thresholds, self.false_positive_rate, maximize=False)


class ThresholdFdrMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'threshold_fdr_curve'
        super(ThresholdFdrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.thresholds, self.false_discovery_rate, maximize=False)


class ThresholdForMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'threshold_for_curve'
        super(ThresholdForMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.thresholds, self.false_omission_rate, maximize=False)


class ThresholdPpvMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'threshold_ppv_curve'
        super(ThresholdPpvMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.thresholds, self.positive_predictive_value, maximize=True)


class ThresholdNpvMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'threshold_npv_curve'
        super(ThresholdNpvMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.thresholds, self.negative_predictive_value, maximize=True)


class ThresholdPredictedPositiveRateMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'threshold_predicted_positive_rate_curve'
        super(ThresholdPredictedPositiveRateMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.thresholds, self.predicted_positive_rate, maximize=True)


class ThresholdPredictedNegativeRateMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'threshold_predicted_negative_rate_curve'
        super(ThresholdPredictedNegativeRateMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.thresholds, self.predicted_negative_rate, maximize=True)


class ThresholdAccuracyMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'threshold_accuracy_curve'
        super(ThresholdAccuracyMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.thresholds, self.accuracy, maximize=True)


class ThresholdF1ScoreMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'threshold_f1_score_curve'
        super(ThresholdF1ScoreMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.thresholds, self.f1, maximize=True)


class ThresholdMccMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'threshold_mcc_curve'
        super(ThresholdMccMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.thresholds, self.matthews_correlation_coefficient, maximize=True)


class ThresholdInformednessMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'threshold_informedness_curve'
        super(ThresholdInformednessMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.thresholds, self.informedness, maximize=True)


class ThresholdMarkednessMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'threshold_markedness_curve'
        super(ThresholdMarkednessMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.thresholds, self.markedness, maximize=True)
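
# Illustrative output shape for the curve metrics above: `values` is a dict of
# {constraint: value}, e.g. ThresholdTprMetric might produce
# {0.0: 1.0, 0.5: 0.875, 1.0: 0.0, ...} mapping threshold -> best TPR.
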
''' FPR Constrained Metrics '''

class FprThresholdMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fpr_threshold_curve'
        super(FprThresholdMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_positive_rate, self.thresholds, maximize=False)


class FprTprMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fpr_tpr_curve'
        super(FprTprMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_positive_rate, self.true_positive_rate, maximize=True)


class FprTnrMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fpr_tnr_curve'
        super(FprTnrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_positive_rate, self.true_negative_rate, maximize=True)


class FprFnrMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fpr_fnr_curve'
        super(FprFnrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_positive_rate, self.false_negative_rate, maximize=False)


class FprFdrMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fpr_fdr_curve'
        super(FprFdrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_positive_rate, self.false_discovery_rate, maximize=False)


class FprForMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fpr_for_curve'
        super(FprForMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_positive_rate, self.false_omission_rate, maximize=False)


class FprPpvMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fpr_ppv_curve'
        super(FprPpvMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_positive_rate, self.positive_predictive_value, maximize=True)


class FprNpvMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fpr_npv_curve'
        super(FprNpvMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_positive_rate, self.negative_predictive_value, maximize=True)


class FprPredictedPositiveRateMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fpr_predicted_positive_rate_curve'
        super(FprPredictedPositiveRateMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_positive_rate, self.predicted_positive_rate, maximize=True)


class FprPredictedNegativeRateMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fpr_predicted_negative_rate_curve'
        super(FprPredictedNegativeRateMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_positive_rate, self.predicted_negative_rate, maximize=True)


class FprAccuracyMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fpr_accuracy_curve'
        super(FprAccuracyMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_positive_rate, self.accuracy, maximize=True)


class FprF1ScoreMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fpr_f1_score_curve'
        super(FprF1ScoreMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_positive_rate, self.f1, maximize=True)


class FprMccMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fpr_mcc_curve'
        super(FprMccMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_positive_rate, self.matthews_correlation_coefficient, maximize=True)


class FprInformednessMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fpr_informedness_curve'
        super(FprInformednessMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_positive_rate, self.informedness, maximize=True)


class FprMarkednessMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fpr_markedness_curve'
        super(FprMarkednessMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_positive_rate, self.markedness, maximize=True)

''' TPR Constrained Metrics '''

class TprThresholdMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'tpr_threshold_curve'
        super(TprThresholdMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.true_positive_rate, self.thresholds, maximize=False)


class TprFprMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'tpr_fpr_curve'
        super(TprFprMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.true_positive_rate, self.false_positive_rate, maximize=True)


class TprTnrMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'tpr_tnr_curve'
        super(TprTnrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.true_positive_rate, self.true_negative_rate, maximize=True)


class TprFnrMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'tpr_fnr_curve'
        super(TprFnrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.true_positive_rate, self.false_negative_rate, maximize=False)


class TprFdrMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'tpr_fdr_curve'
        super(TprFdrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.true_positive_rate, self.false_discovery_rate, maximize=False)


class TprForMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'tpr_for_curve'
        super(TprForMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.true_positive_rate, self.false_omission_rate, maximize=False)


class TprPpvMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'tpr_ppv_curve'
        super(TprPpvMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.true_positive_rate, self.positive_predictive_value, maximize=True)


class TprNpvMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'tpr_npv_curve'
        super(TprNpvMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.true_positive_rate, self.negative_predictive_value, maximize=True)


class TprPredictedPositiveRateMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'tpr_predicted_positive_rate_curve'
        super(TprPredictedPositiveRateMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.true_positive_rate, self.predicted_positive_rate, maximize=True)


class TprPredictedNegativeRateMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'tpr_predicted_negative_rate_curve'
        super(TprPredictedNegativeRateMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.true_positive_rate, self.predicted_negative_rate, maximize=True)


class TprAccuracyMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'tpr_accuracy_curve'
        super(TprAccuracyMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.true_positive_rate, self.accuracy, maximize=True)


class TprF1ScoreMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'tpr_f1_score_curve'
        super(TprF1ScoreMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.true_positive_rate, self.f1, maximize=True)


class TprMccMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'tpr_mcc_curve'
        super(TprMccMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.true_positive_rate, self.matthews_correlation_coefficient, maximize=True)


class TprInformednessMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'tpr_informedness_curve'
        super(TprInformednessMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.true_positive_rate, self.informedness, maximize=True)


class TprMarkednessMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'tpr_markedness_curve'
        super(TprMarkednessMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.true_positive_rate, self.markedness, maximize=True)

''' TNR Constrained Metrics '''

class TnrThresholdMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'tnr_threshold_curve'
        super(TnrThresholdMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.true_negative_rate, self.thresholds, maximize=False)


class TnrFprMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'tnr_fpr_curve'
        super(TnrFprMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.true_negative_rate, self.false_positive_rate, maximize=True)


class TnrTprMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'tnr_tpr_curve'
        super(TnrTprMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.true_negative_rate, self.true_positive_rate, maximize=True)


class TnrFnrMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'tnr_fnr_curve'
        super(TnrFnrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.true_negative_rate, self.false_negative_rate, maximize=False)


class TnrFdrMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'tnr_fdr_curve'
        super(TnrFdrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.true_negative_rate, self.false_discovery_rate, maximize=False)


class TnrForMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'tnr_for_curve'
        super(TnrForMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.true_negative_rate, self.false_omission_rate, maximize=False)


class TnrPpvMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'tnr_ppv_curve'
        super(TnrPpvMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.true_negative_rate, self.positive_predictive_value, maximize=True)


class TnrNpvMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'tnr_npv_curve'
        super(TnrNpvMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.true_negative_rate, self.negative_predictive_value, maximize=True)


class TnrPredictedPositiveRateMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'tnr_predicted_positive_rate_curve'
        super(TnrPredictedPositiveRateMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.true_negative_rate, self.predicted_positive_rate, maximize=True)


class TnrPredictedNegativeRateMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'tnr_predicted_negative_rate_curve'
        super(TnrPredictedNegativeRateMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.true_negative_rate, self.predicted_negative_rate, maximize=True)


class TnrAccuracyMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'tnr_accuracy_curve'
        super(TnrAccuracyMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.true_negative_rate, self.accuracy, maximize=True)


class TnrF1ScoreMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'tnr_f1_score_curve'
        super(TnrF1ScoreMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.true_negative_rate, self.f1, maximize=True)


class TnrMccMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'tnr_mcc_curve'
        super(TnrMccMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.true_negative_rate, self.matthews_correlation_coefficient, maximize=True)


class TnrInformednessMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'tnr_informedness_curve'
        super(TnrInformednessMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.true_negative_rate, self.informedness, maximize=True)


class TnrMarkednessMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'tnr_markedness_curve'
        super(TnrMarkednessMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.true_negative_rate, self.markedness, maximize=True)

''' FNR Constrained Metrics '''

class FnrThresholdMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fnr_threshold_curve'
        super(FnrThresholdMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_negative_rate, self.thresholds, maximize=False)


class FnrFprMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fnr_fpr_curve'
        super(FnrFprMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_negative_rate, self.false_positive_rate, maximize=True)


class FnrTprMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fnr_tpr_curve'
        super(FnrTprMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_negative_rate, self.true_positive_rate, maximize=True)


class FnrTnrMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fnr_tnr_curve'
        super(FnrTnrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_negative_rate, self.true_negative_rate, maximize=True)


class FnrFdrMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fnr_fdr_curve'
        super(FnrFdrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_negative_rate, self.false_discovery_rate, maximize=False)


class FnrForMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fnr_for_curve'
        super(FnrForMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_negative_rate, self.false_omission_rate, maximize=False)


class FnrPpvMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fnr_ppv_curve'
        super(FnrPpvMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_negative_rate, self.positive_predictive_value, maximize=True)


class FnrNpvMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fnr_npv_curve'
        super(FnrNpvMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_negative_rate, self.negative_predictive_value, maximize=True)


class FnrPredictedPositiveRateMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fnr_predicted_positive_rate_curve'
        super(FnrPredictedPositiveRateMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_negative_rate, self.predicted_positive_rate, maximize=True)


class FnrPredictedNegativeRateMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fnr_predicted_negative_rate_curve'
        super(FnrPredictedNegativeRateMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_negative_rate, self.predicted_negative_rate, maximize=True)


class FnrAccuracyMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fnr_accuracy_curve'
        super(FnrAccuracyMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_negative_rate, self.accuracy, maximize=True)


class FnrF1ScoreMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fnr_f1_score_curve'
        super(FnrF1ScoreMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_negative_rate, self.f1, maximize=True)


class FnrMccMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fnr_mcc_curve'
        super(FnrMccMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_negative_rate, self.matthews_correlation_coefficient, maximize=True)


class FnrInformednessMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fnr_informedness_curve'
        super(FnrInformednessMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_negative_rate, self.informedness, maximize=True)


class FnrMarkednessMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fnr_markedness_curve'
        super(FnrMarkednessMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_negative_rate, self.markedness, maximize=True)

''' FDR Constrained Metrics '''

class FdrThresholdMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fdr_threshold_curve'
        super(FdrThresholdMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_discovery_rate, self.thresholds, maximize=False)


class FdrFprMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fdr_fpr_curve'
        super(FdrFprMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_discovery_rate, self.false_positive_rate, maximize=True)


class FdrTprMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fdr_tpr_curve'
        super(FdrTprMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_discovery_rate, self.true_positive_rate, maximize=True)


class FdrTnrMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fdr_tnr_curve'
        super(FdrTnrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_discovery_rate, self.true_negative_rate, maximize=True)


class FdrFnrMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fdr_fnr_curve'
        super(FdrFnrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_discovery_rate, self.false_negative_rate, maximize=False)


class FdrForMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fdr_for_curve'
        super(FdrForMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_discovery_rate, self.false_omission_rate, maximize=False)


class FdrPpvMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fdr_ppv_curve'
        super(FdrPpvMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_discovery_rate, self.positive_predictive_value, maximize=True)


class FdrNpvMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fdr_npv_curve'
        super(FdrNpvMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_discovery_rate, self.negative_predictive_value, maximize=True)


class FdrPredictedPositiveRateMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fdr_predicted_positive_rate_curve'
        super(FdrPredictedPositiveRateMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_discovery_rate, self.predicted_positive_rate, maximize=True)


class FdrPredictedNegativeRateMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fdr_predicted_negative_rate_curve'
        super(FdrPredictedNegativeRateMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_discovery_rate, self.predicted_negative_rate, maximize=True)


class FdrAccuracyMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fdr_accuracy_curve'
        super(FdrAccuracyMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_discovery_rate, self.accuracy, maximize=True)


class FdrF1ScoreMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fdr_f1_score_curve'
        super(FdrF1ScoreMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_discovery_rate, self.f1, maximize=True)


class FdrMccMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fdr_mcc_curve'
        super(FdrMccMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_discovery_rate, self.matthews_correlation_coefficient, maximize=True)


class FdrInformednessMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fdr_informedness_curve'
        super(FdrInformednessMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_discovery_rate, self.informedness, maximize=True)


class FdrMarkednessMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'fdr_markedness_curve'
        super(FdrMarkednessMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_discovery_rate, self.markedness, maximize=True)

''' FOR Constrained Metrics '''

class ForThresholdMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'for_threshold_curve'
        super(ForThresholdMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_omission_rate, self.thresholds, maximize=False)


class ForFprMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'for_fpr_curve'
        super(ForFprMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_omission_rate, self.false_positive_rate, maximize=True)


class ForTprMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'for_tpr_curve'
        super(ForTprMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_omission_rate, self.true_positive_rate, maximize=True)


class ForTnrMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'for_tnr_curve'
        super(ForTnrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_omission_rate, self.true_negative_rate, maximize=True)


class ForFnrMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'for_fnr_curve'
        super(ForFnrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_omission_rate, self.false_negative_rate, maximize=False)


class ForFdrMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'for_fdr_curve'
        super(ForFdrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_omission_rate, self.false_discovery_rate, maximize=False)


class ForPpvMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'for_ppv_curve'
        super(ForPpvMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_omission_rate, self.positive_predictive_value, maximize=True)


class ForNpvMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'for_npv_curve'
        super(ForNpvMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_omission_rate, self.negative_predictive_value, maximize=True)


class ForPredictedPositiveRateMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'for_predicted_positive_rate_curve'
        super(ForPredictedPositiveRateMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_omission_rate, self.predicted_positive_rate, maximize=True)


class ForPredictedNegativeRateMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'for_predicted_negative_rate_curve'
        super(ForPredictedNegativeRateMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_omission_rate, self.predicted_negative_rate, maximize=True)


class ForAccuracyMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'for_accuracy_curve'
        super(ForAccuracyMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_omission_rate, self.accuracy, maximize=True)


class ForF1ScoreMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'for_f1_score_curve'
        super(ForF1ScoreMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_omission_rate, self.f1, maximize=True)


class ForMccMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'for_mcc_curve'
        super(ForMccMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_omission_rate, self.matthews_correlation_coefficient, maximize=True)


class ForInformednessMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'for_informedness_curve'
        super(ForInformednessMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_omission_rate, self.informedness, maximize=True)


class ForMarkednessMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'for_markedness_curve'
        super(ForMarkednessMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.false_omission_rate, self.markedness, maximize=True)

''' PPV Constrained Metrics '''

class PpvThresholdMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'ppv_threshold_curve'
        super(PpvThresholdMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.positive_predictive_value, self.thresholds, maximize=False)


class PpvFprMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'ppv_fpr_curve'
        super(PpvFprMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.positive_predictive_value, self.false_positive_rate, maximize=True)


class PpvTprMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'ppv_tpr_curve'
        super(PpvTprMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.positive_predictive_value, self.true_positive_rate, maximize=True)


class PpvTnrMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'ppv_tnr_curve'
        super(PpvTnrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.positive_predictive_value, self.true_negative_rate, maximize=True)


class PpvFnrMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'ppv_fnr_curve'
        super(PpvFnrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.positive_predictive_value, self.false_negative_rate, maximize=False)


class PpvFdrMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'ppv_fdr_curve'
        super(PpvFdrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.positive_predictive_value, self.false_discovery_rate, maximize=False)


class PpvForMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'ppv_for_curve'
        super(PpvForMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.positive_predictive_value, self.false_omission_rate, maximize=False)


class PpvNpvMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'ppv_npv_curve'
        super(PpvNpvMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.positive_predictive_value, self.negative_predictive_value, maximize=True)


class PpvPredictedPositiveRateMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'ppv_predicted_positive_rate_curve'
        super(PpvPredictedPositiveRateMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.positive_predictive_value, self.predicted_positive_rate, maximize=True)


class PpvPredictedNegativeRateMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'ppv_predicted_negative_rate_curve'
        super(PpvPredictedNegativeRateMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.positive_predictive_value, self.predicted_negative_rate, maximize=True)


class PpvAccuracyMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'ppv_accuracy_curve'
        super(PpvAccuracyMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.positive_predictive_value, self.accuracy, maximize=True)


class PpvF1ScoreMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'ppv_f1_score_curve'
        super(PpvF1ScoreMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.positive_predictive_value, self.f1, maximize=True)


class PpvMccMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'ppv_mcc_curve'
        super(PpvMccMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.positive_predictive_value, self.matthews_correlation_coefficient, maximize=True)


class PpvInformednessMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'ppv_informedness_curve'
        super(PpvInformednessMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.positive_predictive_value, self.informedness, maximize=True)


class PpvMarkednessMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'ppv_markedness_curve'
        super(PpvMarkednessMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.positive_predictive_value, self.markedness, maximize=True)

''' NPV Constrained Metrics '''
[docs]class NpvThresholdMetric(BinaryClassificationMetric): def __init__(self, **kwargs): # Drop whatever name was passed and explicitly rename kwargs['name'] = 'npv_threshold_curve' super(NpvThresholdMetric, self).__init__(**kwargs)
[docs] def score(self): self.values = self.dedupe_curve(self.negative_predictive_value, self.thresholds, maximize=False)
[docs]class NpvFprMetric(BinaryClassificationMetric): def __init__(self, **kwargs): # Drop whatever name was passed and explicitly rename kwargs['name'] = 'npv_fpr_curve' super(NpvFprMetric, self).__init__(**kwargs)
[docs] def score(self): self.values = self.dedupe_curve(self.negative_predictive_value, self.false_positive_rate, maximize=True)
class NpvTprMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'npv_tpr_curve'
        super(NpvTprMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.negative_predictive_value, self.true_positive_rate, maximize=True)


class NpvTnrMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'npv_tnr_curve'
        super(NpvTnrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.negative_predictive_value, self.true_negative_rate, maximize=True)


class NpvFnrMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'npv_fnr_curve'
        super(NpvFnrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.negative_predictive_value, self.false_negative_rate, maximize=False)


class NpvFdrMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'npv_fdr_curve'
        super(NpvFdrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.negative_predictive_value, self.false_discovery_rate, maximize=False)


class NpvForMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'npv_for_curve'
        super(NpvForMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.negative_predictive_value, self.false_omission_rate, maximize=False)


class NpvPpvMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'npv_ppv_curve'
        super(NpvPpvMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.negative_predictive_value, self.positive_predictive_value, maximize=True)


class NpvPredictedPositiveRateMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'npv_predicted_positive_rate_curve'
        super(NpvPredictedPositiveRateMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.negative_predictive_value, self.predicted_positive_rate, maximize=True)


class NpvPredictedNegativeRateMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'npv_predicted_negative_rate_curve'
        super(NpvPredictedNegativeRateMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.negative_predictive_value, self.predicted_negative_rate, maximize=True)


class NpvAccuracyMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'npv_accuracy_curve'
        super(NpvAccuracyMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.negative_predictive_value, self.accuracy, maximize=True)


class NpvF1ScoreMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'npv_f1_score_curve'
        super(NpvF1ScoreMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.negative_predictive_value, self.f1, maximize=True)


class NpvMccMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'npv_mcc_curve'
        super(NpvMccMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.negative_predictive_value, self.matthews_correlation_coefficient, maximize=True)


class NpvInformednessMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'npv_informedness_curve'
        super(NpvInformednessMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.negative_predictive_value, self.informedness, maximize=True)


class NpvMarkednessMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'npv_markedness_curve'
        super(NpvMarkednessMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.negative_predictive_value, self.markedness, maximize=True)
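# Illustrative aside: every score() in this family reduces two aligned curve
# vectors to a {constraint: value} mapping via self.dedupe_curve (defined on
# BinaryClassificationMetric earlier in this module). The helper below is a
# standalone sketch of that pattern, not the authoritative implementation, and
# its name is hypothetical: group by the constraint axis and keep the best
# achievable value at each point -- the max when maximize=True, the min
# otherwise. It relies on the module-level pandas import.
def _dedupe_curve_sketch(constraints, values, maximize=True):
    curve = pd.DataFrame({'constraint': constraints, 'value': values})
    agg = 'max' if maximize else 'min'
    return curve.groupby('constraint')['value'].agg(agg).to_dict()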
''' Predicted Positive Rate Constrained Metrics '''
class PredictedPositiveRateThresholdMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'predicted_positive_rate_threshold_curve'
        super(PredictedPositiveRateThresholdMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.predicted_positive_rate, self.thresholds, maximize=False)


class PredictedPositiveRateFprMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'predicted_positive_rate_fpr_curve'
        super(PredictedPositiveRateFprMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.predicted_positive_rate, self.false_positive_rate, maximize=True)


class PredictedPositiveRateTprMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'predicted_positive_rate_tpr_curve'
        super(PredictedPositiveRateTprMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.predicted_positive_rate, self.true_positive_rate, maximize=True)


class PredictedPositiveRateTnrMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'predicted_positive_rate_tnr_curve'
        super(PredictedPositiveRateTnrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.predicted_positive_rate, self.true_negative_rate, maximize=True)


class PredictedPositiveRateFnrMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'predicted_positive_rate_fnr_curve'
        super(PredictedPositiveRateFnrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.predicted_positive_rate, self.false_negative_rate, maximize=False)


class PredictedPositiveRateFdrMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'predicted_positive_rate_fdr_curve'
        super(PredictedPositiveRateFdrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.predicted_positive_rate, self.false_discovery_rate, maximize=False)


class PredictedPositiveRateForMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'predicted_positive_rate_for_curve'
        super(PredictedPositiveRateForMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.predicted_positive_rate, self.false_omission_rate, maximize=False)


class PredictedPositiveRatePpvMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'predicted_positive_rate_ppv_curve'
        super(PredictedPositiveRatePpvMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.predicted_positive_rate, self.positive_predictive_value, maximize=True)


class PredictedPositiveRateNpvMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'predicted_positive_rate_npv_curve'
        super(PredictedPositiveRateNpvMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.predicted_positive_rate, self.negative_predictive_value, maximize=True)


class PredictedPositiveRatePredictedNegativeRateMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'predicted_positive_rate_predicted_negative_rate_curve'
        super(PredictedPositiveRatePredictedNegativeRateMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.predicted_positive_rate, self.predicted_negative_rate, maximize=True)


class PredictedPositiveRateAccuracyMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'predicted_positive_rate_accuracy_curve'
        super(PredictedPositiveRateAccuracyMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.predicted_positive_rate, self.accuracy, maximize=True)


class PredictedPositiveRateF1ScoreMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'predicted_positive_rate_f1_score_curve'
        super(PredictedPositiveRateF1ScoreMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.predicted_positive_rate, self.f1, maximize=True)


class PredictedPositiveRateMccMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'predicted_positive_rate_mcc_curve'
        super(PredictedPositiveRateMccMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.predicted_positive_rate, self.matthews_correlation_coefficient, maximize=True)


class PredictedPositiveRateInformednessMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'predicted_positive_rate_informedness_curve'
        super(PredictedPositiveRateInformednessMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.predicted_positive_rate, self.informedness, maximize=True)


class PredictedPositiveRateMarkednessMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'predicted_positive_rate_markedness_curve'
        super(PredictedPositiveRateMarkednessMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.predicted_positive_rate, self.markedness, maximize=True)
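# Usage sketch for the predicted-positive-rate family (all names below are
# illustrative: `model` stands in for any fitted SimpleML classification model,
# and the add_model()/score() lifecycle is assumed from the base Metric class,
# not shown here). A capacity question like "if we can only act on a fixed
# share of cases, what is the lowest false discovery rate we can achieve?"
# reads directly off the deduped curve:
#
#     metric = PredictedPositiveRateFdrMetric(dataset_split=TEST_SPLIT)
#     metric.add_model(model)
#     metric.score()
#     # metric.values: {predicted_positive_rate: lowest achievable false_discovery_rate}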
''' Predicted Negative Rate Constrained Metrics '''
class PredictedNegativeRateThresholdMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'predicted_negative_rate_threshold_curve'
        super(PredictedNegativeRateThresholdMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.predicted_negative_rate, self.thresholds, maximize=False)


class PredictedNegativeRateFprMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'predicted_negative_rate_fpr_curve'
        super(PredictedNegativeRateFprMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.predicted_negative_rate, self.false_positive_rate, maximize=True)


class PredictedNegativeRateTprMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'predicted_negative_rate_tpr_curve'
        super(PredictedNegativeRateTprMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.predicted_negative_rate, self.true_positive_rate, maximize=True)


class PredictedNegativeRateTnrMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'predicted_negative_rate_tnr_curve'
        super(PredictedNegativeRateTnrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.predicted_negative_rate, self.true_negative_rate, maximize=True)


class PredictedNegativeRateFnrMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'predicted_negative_rate_fnr_curve'
        super(PredictedNegativeRateFnrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.predicted_negative_rate, self.false_negative_rate, maximize=False)


class PredictedNegativeRateFdrMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'predicted_negative_rate_fdr_curve'
        super(PredictedNegativeRateFdrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.predicted_negative_rate, self.false_discovery_rate, maximize=False)


class PredictedNegativeRateForMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'predicted_negative_rate_for_curve'
        super(PredictedNegativeRateForMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.predicted_negative_rate, self.false_omission_rate, maximize=False)


class PredictedNegativeRatePpvMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'predicted_negative_rate_ppv_curve'
        super(PredictedNegativeRatePpvMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.predicted_negative_rate, self.positive_predictive_value, maximize=True)


class PredictedNegativeRateNpvMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'predicted_negative_rate_npv_curve'
        super(PredictedNegativeRateNpvMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.predicted_negative_rate, self.negative_predictive_value, maximize=True)


class PredictedNegativeRatePredictedPositiveRateMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'predicted_negative_rate_predicted_positive_rate_curve'
        super(PredictedNegativeRatePredictedPositiveRateMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.predicted_negative_rate, self.predicted_positive_rate, maximize=True)


class PredictedNegativeRateAccuracyMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'predicted_negative_rate_accuracy_curve'
        super(PredictedNegativeRateAccuracyMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.predicted_negative_rate, self.accuracy, maximize=True)


class PredictedNegativeRateF1ScoreMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'predicted_negative_rate_f1_score_curve'
        super(PredictedNegativeRateF1ScoreMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.predicted_negative_rate, self.f1, maximize=True)


class PredictedNegativeRateMccMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'predicted_negative_rate_mcc_curve'
        super(PredictedNegativeRateMccMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.predicted_negative_rate, self.matthews_correlation_coefficient, maximize=True)


class PredictedNegativeRateInformednessMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'predicted_negative_rate_informedness_curve'
        super(PredictedNegativeRateInformednessMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.predicted_negative_rate, self.informedness, maximize=True)


class PredictedNegativeRateMarkednessMetric(BinaryClassificationMetric):
    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs['name'] = 'predicted_negative_rate_markedness_curve'
        super(PredictedNegativeRateMarkednessMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.predicted_negative_rate, self.markedness, maximize=True)
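# Closing sketch: the constraint vectors used in the last two sections derive
# from the per-threshold confusion matrix dataframe (columns: threshold, tn,
# fp, fn, tp). Below is an approximate standalone recreation of the two rates,
# shown for orientation only -- the authoritative property implementations
# live earlier in this module, and the function name is hypothetical.
def _prediction_rates_sketch(cm):
    # cm: dataframe with integer columns tn, fp, fn, tp (one row per threshold)
    total = cm.tn + cm.fp + cm.fn + cm.tp
    predicted_positive_rate = (cm.tp + cm.fp) / total
    predicted_negative_rate = (cm.tn + cm.fn) / total
    return predicted_positive_rate, predicted_negative_rate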