"""
Module for classification metrics
https://en.wikipedia.org/wiki/Confusion_matrix
Includes base class and derived metrics following the nomenclature:
ConstraintValueMetric
Where:
Constraint is the lookup criteria (ex FPR in ROC curve)
Value is desired value (ex TPR in ROC curve)
This module is organized by metric and prediction dependencies:
1) Base classes with methods and utilities
2) Aggregate Metrics (single value output)
2a) Single values computed via Predict method (operating points)
2b) Single values computed via proba method (agg over curve)
3) Curve Metrics (constraint: value)
3a) Threshold: confusion matrix metrics
3b) confusion matrix metrics: threshold or other metrics
"""
__author__ = "Elisha Yadgaran"

import logging
from abc import abstractmethod
from typing import Any, Optional

import numpy as np
import pandas as pd
from sklearn.metrics import accuracy_score, confusion_matrix, f1_score, roc_auc_score

from simpleml.constants import TEST_SPLIT, TRAIN_SPLIT, VALIDATION_SPLIT
from simpleml.metrics.base_metric import Metric
from simpleml.utils.errors import MetricError

# Module-level logger (lazy %-style args used throughout)
LOGGER = logging.getLogger(__name__)
############################### BASE ###############################
class ClassificationMetric(Metric):
    """
    Base class for classification metrics.

    Provides dataset-split-aware retrieval of labels, predictions and
    probabilities from the attached model/dataset pair.

    TODO: Figure out multiclass generalizations
    """

    def __init__(self, dataset_split: Optional[str] = None, **kwargs):
        """
        :param dataset_split: string denoting which dataset split to use
            can be one of: `TRAIN`, `VALIDATION`, Other. Other gets no prefix
            Default is train split to stay consistent with no split mapping to Train
            in Pipeline
        """
        name = kwargs.pop("name", "")
        # Explicitly call out in sample or validation metrics
        # Only relevant if using a split dataset. No split pipelines will return
        # all data by default on null input, while split ones will return empty splits
        if dataset_split == TRAIN_SPLIT:
            LOGGER.warning(
                "Metric renaming due to dataset split is deprecated and name will be migrated in a future release"
            )
            name = "in_sample_" + name
        elif dataset_split == VALIDATION_SPLIT:
            LOGGER.warning(
                "Metric renaming due to dataset split is deprecated and name will be migrated in a future release"
            )
            name = "validation_" + name
        super(ClassificationMetric, self).__init__(name=name, **kwargs)
        self.config["dataset_split"] = dataset_split

    def _get_split(self, column: str) -> Any:
        """
        Return the configured split for `column`, preferring the pipeline's
        split when the metric's dataset is the same one the model trained on.
        """
        if self.dataset.id == self.model.pipeline.dataset_id:
            LOGGER.debug(
                "Dataset is the same as model dataset, using pipeline dataset split instead of raw dataset one"
            )
            return self._get_pipeline_split(
                column=column, split=self.config.get("dataset_split")
            )
        return self._get_dataset_split(
            column=column, split=self.config.get("dataset_split")
        )

    @property
    def labels(self) -> Any:
        """Ground-truth labels for the configured split."""
        if self.dataset is None:
            raise MetricError("Must set dataset before scoring classification metrics!")
        return self._get_split(column="y")

    @property
    def probabilities(self) -> Any:
        """Model probability outputs for the configured split (validated non-empty)."""
        if self.dataset is None:
            raise MetricError("Must set dataset before scoring classification metrics!")
        probabilities = self.model.predict_proba(
            X=self._get_split(column="X"), transform=True
        )
        self.validate_predictions(probabilities)
        return probabilities

    @property
    def predictions(self) -> Any:
        """Model hard predictions for the configured split (validated non-empty)."""
        if self.dataset is None:
            raise MetricError("Must set dataset before scoring classification metrics!")
        preds = self.model.predict(X=self._get_split(column="X"), transform=True)
        self.validate_predictions(preds)
        return preds

    @staticmethod
    def validate_predictions(predictions: Any) -> None:
        """Raise MetricError when predictions are None or empty (pandas or numpy)."""
        invalid = None
        if predictions is None:
            invalid = True
        elif isinstance(predictions, (pd.DataFrame, pd.Series)) and predictions.empty:
            invalid = True
        elif isinstance(predictions, np.ndarray) and predictions.size == 0:
            invalid = True
        if invalid:
            raise MetricError("Attempting to score an empty dataset")
class BinaryClassificationMetric(ClassificationMetric):
    """
    Classification metric restricted to binary (0/1) labels.

    Adds label validation, reduction of two-column proba/prediction outputs
    to the positive class column, and a threshold-indexed confusion matrix
    with derived rate properties (tpr, fpr, ppv, mcc, ...).
    """

    @property
    def labels(self):
        # extends parent label retrieval with a validation step for binary values
        labels = super(BinaryClassificationMetric, self).labels
        self.validate_labels(labels)
        return labels

    @staticmethod
    def validate_labels(labels):
        """Raise MetricError unless labels are a non-null subset of {0, 1}."""
        invalid = None
        if labels is None:
            invalid = True
        else:
            invalid = len(set(labels) - {0, 1}) > 0
        if invalid:
            raise MetricError(
                "Attempting to score a binary metric with labels outside of {0,1}"
            )

    @property
    def probabilities(self):
        """Positive-class probabilities (reduces (n, 2) outputs to column 1)."""
        probabilities = super(BinaryClassificationMetric, self).probabilities
        if len(probabilities.shape) > 1 and probabilities.shape[1] > 1:
            # Indicates multiple class probabilities are returned (class_0, class_1)
            probabilities = probabilities[:, 1]
        return probabilities

    @property
    def predictions(self):
        """Positive-class predictions (reduces (n, 2) outputs to column 1)."""
        predictions = super(BinaryClassificationMetric, self).predictions
        if len(predictions.shape) > 1 and predictions.shape[1] > 1:
            # Indicates multiple class predictions are returned (class_0, class_1)
            predictions = predictions[:, 1]
        return predictions

    @property
    def confusion_matrix(self):
        """
        Property method to return (or generate) dataframe of confusion
        matrix at each threshold
        """
        if not hasattr(self, "_confusion_matrix") or self._confusion_matrix is None:
            self.create_confusion_matrix()
        return self._confusion_matrix

    @staticmethod
    def _create_confusion_matrix(thresholds, probabilities, labels):
        """
        Independent computation method (easier testing)

        Returns a dataframe with columns threshold, tn, fp, fn, tp - one row
        per threshold.
        """
        results = []
        for threshold in thresholds:
            predictions = np.where(probabilities >= threshold, 1, 0)
            tn, fp, fn, tp = confusion_matrix(
                labels, predictions, labels=[0, 1]
            ).ravel()
            results.append((threshold, tn, fp, fn, tp))
        return pd.DataFrame(results, columns=["threshold", "tn", "fp", "fn", "tp"])

    def create_confusion_matrix(self):
        """
        Iterate through each threshold and compute confusion matrix
        """
        # Thresholds to compute confusion matrix at (default every 0.005 increment)
        thresholds = np.linspace(0, 1, 201)
        probabilities = self.probabilities
        labels = self.labels
        self._confusion_matrix = self._create_confusion_matrix(
            thresholds, probabilities, labels
        )

    @staticmethod
    def dedupe_curve(keys, values, maximize=True, round_places=3):
        """
        Method to deduplicate multiple values for the same key on a curve
        (ex multiple thresholds with the same fpr and different tpr for roc)

        :param maximize: Boolean, whether to choose the maximum value for each
            unique key or the minimum
        """
        # Round arbitrary decimal places to dedupe
        keys = [round(i, round_places) for i in keys]
        values = [round(i, round_places) for i in values]
        df = pd.DataFrame(list(zip(keys, values)), columns=["keys", "values"])
        df.dropna(axis=0, inplace=True)
        agg = "max" if maximize else "min"
        return df.groupby("keys").agg({"values": agg}).to_dict()["values"]

    @property
    def thresholds(self):
        """
        Convenience property for the probability thresholds
        """
        return self.confusion_matrix.threshold

    @property
    def true_positive_rate(self):
        """
        Convenience property for the True Positive Rate (TP/TP+FN)
        """
        return self.confusion_matrix.tp / (
            self.confusion_matrix.tp + self.confusion_matrix.fn
        )

    @property
    def false_positive_rate(self):
        """
        Convenience property for the False Positive Rate (FP/FP+TN)
        """
        return self.confusion_matrix.fp / (
            self.confusion_matrix.fp + self.confusion_matrix.tn
        )

    @property
    def true_negative_rate(self):
        """
        Convenience property for the True Negative Rate (TN/FP+TN)
        """
        return self.confusion_matrix.tn / (
            self.confusion_matrix.fp + self.confusion_matrix.tn
        )

    @property
    def false_negative_rate(self):
        """
        Convenience property for the False Negative Rate (FN/TP+FN)
        """
        return self.confusion_matrix.fn / (
            self.confusion_matrix.tp + self.confusion_matrix.fn
        )

    @property
    def false_discovery_rate(self):
        """
        Convenience property for the False Discovery Rate (FP/FP+TP)
        """
        return self.confusion_matrix.fp / (
            self.confusion_matrix.fp + self.confusion_matrix.tp
        )

    @property
    def false_omission_rate(self):
        """
        Convenience property for the False Omission Rate (FN/TN+FN)
        """
        return self.confusion_matrix.fn / (
            self.confusion_matrix.tn + self.confusion_matrix.fn
        )

    @property
    def positive_predictive_value(self):
        """
        Convenience property for the Positive Predictive Value (TP/FP+TP)
        """
        return self.confusion_matrix.tp / (
            self.confusion_matrix.fp + self.confusion_matrix.tp
        )

    @property
    def negative_predictive_value(self):
        """
        Convenience property for the Negative Predictive Value (TN/TN+FN)
        """
        return self.confusion_matrix.tn / (
            self.confusion_matrix.tn + self.confusion_matrix.fn
        )

    @property
    def predicted_positive_rate(self):
        """
        Convenience property for the Predicted Positive Rate (TP+FP/TP+FP+TN+FN)
        """
        return (self.confusion_matrix.tp + self.confusion_matrix.fp) / (
            self.confusion_matrix.fp
            + self.confusion_matrix.tn
            + self.confusion_matrix.tp
            + self.confusion_matrix.fn
        )

    @property
    def predicted_negative_rate(self):
        """
        Convenience property for the Predicted Negative Rate (TN+FN/TP+FP+TN+FN)
        """
        return (self.confusion_matrix.tn + self.confusion_matrix.fn) / (
            self.confusion_matrix.fp
            + self.confusion_matrix.tn
            + self.confusion_matrix.tp
            + self.confusion_matrix.fn
        )

    @property
    def accuracy(self):
        """
        Convenience property for the Accuracy Rate (TP+TN/TP+FP+TN+FN)
        """
        return (self.confusion_matrix.tp + self.confusion_matrix.tn) / (
            self.confusion_matrix.fp
            + self.confusion_matrix.tn
            + self.confusion_matrix.tp
            + self.confusion_matrix.fn
        )

    @property
    def f1(self):
        """
        Convenience property for the F1 Score (2*TP/2*TP+FP+FN)
        """
        return (2.0 * self.confusion_matrix.tp) / (
            2.0 * self.confusion_matrix.tp
            + self.confusion_matrix.fp
            + self.confusion_matrix.fn
        )

    @property
    def matthews_correlation_coefficient(self):
        """
        Convenience property for the Matthews Correlation Coefficient (TP*TN-FP*FN/((FP+TP)*(TP+FN)*(TN+FP)*(TN+FN))^0.5)
        """
        numerator = (
            self.confusion_matrix.tp * self.confusion_matrix.tn
            - self.confusion_matrix.fp * self.confusion_matrix.fn
        )
        denominator = (
            (self.confusion_matrix.fp + self.confusion_matrix.tp)
            * (self.confusion_matrix.tp + self.confusion_matrix.fn)
            * (self.confusion_matrix.tn + self.confusion_matrix.fp)
            * (self.confusion_matrix.tn + self.confusion_matrix.fn)
        ) ** 0.5
        return numerator / denominator

    # NOTE: the extracted source had a duplicated @property decorator here,
    # which would have wrapped markedness in property(property(...)) - fixed.
    @property
    def markedness(self):
        """
        Convenience property for the Markedness (PPV+NPV-1)
        """
        return self.positive_predictive_value + self.negative_predictive_value - 1
############################### AGGREGATE METRICS ###############################
"""
Pointwise metrics using only the predict scoring method
(fixed operating point)
"""
class AggregateBinaryClassificationMetric(BinaryClassificationMetric):
    """Base for single-value metrics computed from hard predictions (fixed operating point)."""

    @staticmethod
    @abstractmethod
    def _score(predictions, labels):
        """
        Each aggregate needs to define a separate private method to actually
        calculate the aggregate
        Separated from the public score method to enable easier testing and
        extension (values can be passed from non internal properties)
        """

    def score(self):
        """
        Main scoring method. Uses internal values and passes to class level
        aggregation method
        """
        predictions = self.predictions
        labels = self.labels
        self.values = {"agg": self._score(predictions, labels)}
class AccuracyMetric(AggregateBinaryClassificationMetric):
    """Aggregate metric: sklearn accuracy score."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "classification_accuracy"
        super(AccuracyMetric, self).__init__(**kwargs)

    @staticmethod
    def _score(predictions, labels):
        return accuracy_score(y_true=labels, y_pred=predictions)
class TprMetric(AggregateBinaryClassificationMetric):
    """Aggregate metric: true positive rate at the model's operating point."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "tpr"
        super(TprMetric, self).__init__(**kwargs)

    @staticmethod
    def _score(predictions, labels):
        tn, fp, fn, tp = confusion_matrix(labels, predictions).ravel()
        return float(tp) / (tp + fn)
class FprMetric(AggregateBinaryClassificationMetric):
    """Aggregate metric: false positive rate at the model's operating point."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "fpr"
        super(FprMetric, self).__init__(**kwargs)

    @staticmethod
    def _score(predictions, labels):
        tn, fp, fn, tp = confusion_matrix(labels, predictions).ravel()
        return float(fp) / (fp + tn)
class F1ScoreMetric(AggregateBinaryClassificationMetric):
    """Aggregate metric: sklearn F1 score."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "f1_score"
        super(F1ScoreMetric, self).__init__(**kwargs)

    @staticmethod
    def _score(predictions, labels):
        return f1_score(y_true=labels, y_pred=predictions)
"""
Aggregate metrics computed by evaluating over entire curves
(Requires proba method)
"""
class RocAucMetric(BinaryClassificationMetric):
    """Aggregate metric computed over the full curve: ROC AUC (requires proba)."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "roc_auc"
        super(RocAucMetric, self).__init__(**kwargs)

    @staticmethod
    def _score(probabilities, labels):
        return roc_auc_score(y_true=labels, y_score=probabilities)

    def score(self):
        probabilities = self.probabilities
        labels = self.labels
        self.values = {"agg": self._score(probabilities, labels)}
############################### CURVE METRICS ###############################
"""
Threshold Constrained Metrics
"""
class ThresholdTprMetric(BinaryClassificationMetric):
    """Curve metric: TPR at each probability threshold."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "threshold_tpr_curve"
        super(ThresholdTprMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.thresholds, self.true_positive_rate, maximize=True
        )
class ThresholdTnrMetric(BinaryClassificationMetric):
    """Curve metric: TNR at each probability threshold."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "threshold_tnr_curve"
        super(ThresholdTnrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.thresholds, self.true_negative_rate, maximize=True
        )
class ThresholdFnrMetric(BinaryClassificationMetric):
    """Curve metric: FNR at each probability threshold."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "threshold_fnr_curve"
        super(ThresholdFnrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.thresholds, self.false_negative_rate, maximize=False
        )
class ThresholdFprMetric(BinaryClassificationMetric):
    """Curve metric: FPR at each probability threshold."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "threshold_fpr_curve"
        super(ThresholdFprMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.thresholds, self.false_positive_rate, maximize=False
        )
class ThresholdFdrMetric(BinaryClassificationMetric):
    """Curve metric: FDR at each probability threshold."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "threshold_fdr_curve"
        super(ThresholdFdrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.thresholds, self.false_discovery_rate, maximize=False
        )
class ThresholdPpvMetric(BinaryClassificationMetric):
    """Curve metric: PPV (precision) at each probability threshold."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "threshold_ppv_curve"
        super(ThresholdPpvMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.thresholds, self.positive_predictive_value, maximize=True
        )
class ThresholdNpvMetric(BinaryClassificationMetric):
    """Curve metric: NPV at each probability threshold."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "threshold_npv_curve"
        super(ThresholdNpvMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.thresholds, self.negative_predictive_value, maximize=True
        )
class ThresholdPredictedPositiveRateMetric(BinaryClassificationMetric):
    """Curve metric: predicted positive rate at each probability threshold."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "threshold_predicted_positive_rate_curve"
        super(ThresholdPredictedPositiveRateMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.thresholds, self.predicted_positive_rate, maximize=True
        )
class ThresholdPredictedNegativeRateMetric(BinaryClassificationMetric):
    """Curve metric: predicted negative rate at each probability threshold."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "threshold_predicted_negative_rate_curve"
        super(ThresholdPredictedNegativeRateMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.thresholds, self.predicted_negative_rate, maximize=True
        )
class ThresholdAccuracyMetric(BinaryClassificationMetric):
    """Curve metric: accuracy at each probability threshold."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "threshold_accuracy_curve"
        super(ThresholdAccuracyMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.thresholds, self.accuracy, maximize=True)
class ThresholdF1ScoreMetric(BinaryClassificationMetric):
    """Curve metric: F1 score at each probability threshold."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "threshold_f1_score_curve"
        super(ThresholdF1ScoreMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.thresholds, self.f1, maximize=True)
class ThresholdMccMetric(BinaryClassificationMetric):
    """Curve metric: Matthews correlation coefficient at each probability threshold."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "threshold_mcc_curve"
        super(ThresholdMccMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.thresholds, self.matthews_correlation_coefficient, maximize=True
        )
class ThresholdMarkednessMetric(BinaryClassificationMetric):
    """Curve metric: markedness at each probability threshold."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "threshold_markedness_curve"
        super(ThresholdMarkednessMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.thresholds, self.markedness, maximize=True)
"""
FPR Constrained Metrics
"""
class FprThresholdMetric(BinaryClassificationMetric):
    """Curve metric: threshold as a function of FPR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "fpr_threshold_curve"
        super(FprThresholdMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.false_positive_rate, self.thresholds, maximize=False
        )
class FprTprMetric(BinaryClassificationMetric):
    """Curve metric: TPR as a function of FPR (ROC curve)."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "fpr_tpr_curve"
        super(FprTprMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.false_positive_rate, self.true_positive_rate, maximize=True
        )
class FprTnrMetric(BinaryClassificationMetric):
    """Curve metric: TNR as a function of FPR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "fpr_tnr_curve"
        super(FprTnrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.false_positive_rate, self.true_negative_rate, maximize=True
        )
class FprFnrMetric(BinaryClassificationMetric):
    """Curve metric: FNR as a function of FPR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "fpr_fnr_curve"
        super(FprFnrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.false_positive_rate, self.false_negative_rate, maximize=False
        )
class FprFdrMetric(BinaryClassificationMetric):
    """Curve metric: FDR as a function of FPR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "fpr_fdr_curve"
        super(FprFdrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.false_positive_rate, self.false_discovery_rate, maximize=False
        )
class FprPpvMetric(BinaryClassificationMetric):
    """Curve metric: PPV as a function of FPR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "fpr_ppv_curve"
        super(FprPpvMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.false_positive_rate, self.positive_predictive_value, maximize=True
        )
class FprNpvMetric(BinaryClassificationMetric):
    """Curve metric: NPV as a function of FPR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "fpr_npv_curve"
        super(FprNpvMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.false_positive_rate, self.negative_predictive_value, maximize=True
        )
class FprPredictedPositiveRateMetric(BinaryClassificationMetric):
    """Curve metric: predicted positive rate as a function of FPR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "fpr_predicted_positive_rate_curve"
        super(FprPredictedPositiveRateMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.false_positive_rate, self.predicted_positive_rate, maximize=True
        )
class FprPredictedNegativeRateMetric(BinaryClassificationMetric):
    """Curve metric: predicted negative rate as a function of FPR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "fpr_predicted_negative_rate_curve"
        super(FprPredictedNegativeRateMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.false_positive_rate, self.predicted_negative_rate, maximize=True
        )
class FprAccuracyMetric(BinaryClassificationMetric):
    """Curve metric: accuracy as a function of FPR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "fpr_accuracy_curve"
        super(FprAccuracyMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.false_positive_rate, self.accuracy, maximize=True
        )
class FprF1ScoreMetric(BinaryClassificationMetric):
    """Curve metric: F1 score as a function of FPR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "fpr_f1_score_curve"
        super(FprF1ScoreMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.false_positive_rate, self.f1, maximize=True
        )
class FprMccMetric(BinaryClassificationMetric):
    """Curve metric: Matthews correlation coefficient as a function of FPR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "fpr_mcc_curve"
        super(FprMccMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.false_positive_rate,
            self.matthews_correlation_coefficient,
            maximize=True,
        )
class FprMarkednessMetric(BinaryClassificationMetric):
    """Curve metric: markedness as a function of FPR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "fpr_markedness_curve"
        super(FprMarkednessMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.false_positive_rate, self.markedness, maximize=True
        )
"""
TPR Constrained Metrics
"""
class TprThresholdMetric(BinaryClassificationMetric):
    """Curve metric: threshold as a function of TPR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "tpr_threshold_curve"
        super(TprThresholdMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.true_positive_rate, self.thresholds, maximize=False
        )
class TprFprMetric(BinaryClassificationMetric):
    """Curve metric: FPR as a function of TPR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "tpr_fpr_curve"
        super(TprFprMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.true_positive_rate, self.false_positive_rate, maximize=True
        )
class TprTnrMetric(BinaryClassificationMetric):
    """Curve metric: TNR as a function of TPR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "tpr_tnr_curve"
        super(TprTnrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.true_positive_rate, self.true_negative_rate, maximize=True
        )
class TprFnrMetric(BinaryClassificationMetric):
    """Curve metric: FNR as a function of TPR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "tpr_fnr_curve"
        super(TprFnrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.true_positive_rate, self.false_negative_rate, maximize=False
        )
class TprFdrMetric(BinaryClassificationMetric):
    """Curve metric: FDR as a function of TPR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "tpr_fdr_curve"
        super(TprFdrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.true_positive_rate, self.false_discovery_rate, maximize=False
        )
class TprPpvMetric(BinaryClassificationMetric):
    """Curve metric: PPV as a function of TPR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "tpr_ppv_curve"
        super(TprPpvMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.true_positive_rate, self.positive_predictive_value, maximize=True
        )
class TprNpvMetric(BinaryClassificationMetric):
    """Curve metric: NPV as a function of TPR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "tpr_npv_curve"
        super(TprNpvMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.true_positive_rate, self.negative_predictive_value, maximize=True
        )
class TprPredictedPositiveRateMetric(BinaryClassificationMetric):
    """Curve metric: predicted positive rate as a function of TPR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "tpr_predicted_positive_rate_curve"
        super(TprPredictedPositiveRateMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.true_positive_rate, self.predicted_positive_rate, maximize=True
        )
class TprPredictedNegativeRateMetric(BinaryClassificationMetric):
    """Curve metric: predicted negative rate as a function of TPR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "tpr_predicted_negative_rate_curve"
        super(TprPredictedNegativeRateMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.true_positive_rate, self.predicted_negative_rate, maximize=True
        )
class TprAccuracyMetric(BinaryClassificationMetric):
    """Curve metric: accuracy as a function of TPR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "tpr_accuracy_curve"
        super(TprAccuracyMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.true_positive_rate, self.accuracy, maximize=True
        )
class TprF1ScoreMetric(BinaryClassificationMetric):
    """Curve metric: F1 score as a function of TPR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "tpr_f1_score_curve"
        super(TprF1ScoreMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.true_positive_rate, self.f1, maximize=True)
class TprMccMetric(BinaryClassificationMetric):
    """Curve metric: Matthews correlation coefficient as a function of TPR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "tpr_mcc_curve"
        super(TprMccMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.true_positive_rate,
            self.matthews_correlation_coefficient,
            maximize=True,
        )
class TprMarkednessMetric(BinaryClassificationMetric):
    """Curve metric: markedness as a function of TPR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "tpr_markedness_curve"
        super(TprMarkednessMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.true_positive_rate, self.markedness, maximize=True
        )
"""
TNR Constrained Metrics
"""
class TnrThresholdMetric(BinaryClassificationMetric):
    """Curve metric: threshold as a function of TNR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "tnr_threshold_curve"
        super(TnrThresholdMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.true_negative_rate, self.thresholds, maximize=False
        )
class TnrFprMetric(BinaryClassificationMetric):
    """Curve metric: FPR as a function of TNR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "tnr_fpr_curve"
        super(TnrFprMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.true_negative_rate, self.false_positive_rate, maximize=True
        )
class TnrTprMetric(BinaryClassificationMetric):
    """Curve metric: TPR as a function of TNR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "tnr_tpr_curve"
        super(TnrTprMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.true_negative_rate, self.true_positive_rate, maximize=True
        )
class TnrFnrMetric(BinaryClassificationMetric):
    """Curve metric: FNR as a function of TNR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "tnr_fnr_curve"
        super(TnrFnrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.true_negative_rate, self.false_negative_rate, maximize=False
        )
class TnrFdrMetric(BinaryClassificationMetric):
    """Curve metric: FDR as a function of TNR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "tnr_fdr_curve"
        super(TnrFdrMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.true_negative_rate, self.false_discovery_rate, maximize=False
        )
class TnrPpvMetric(BinaryClassificationMetric):
    """Curve metric: PPV as a function of TNR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "tnr_ppv_curve"
        super(TnrPpvMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.true_negative_rate, self.positive_predictive_value, maximize=True
        )
class TnrNpvMetric(BinaryClassificationMetric):
    """Curve metric: NPV as a function of TNR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "tnr_npv_curve"
        super(TnrNpvMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.true_negative_rate, self.negative_predictive_value, maximize=True
        )
class TnrPredictedPositiveRateMetric(BinaryClassificationMetric):
    """Curve metric: predicted positive rate as a function of TNR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "tnr_predicted_positive_rate_curve"
        super(TnrPredictedPositiveRateMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.true_negative_rate, self.predicted_positive_rate, maximize=True
        )
class TnrPredictedNegativeRateMetric(BinaryClassificationMetric):
    """Curve metric: predicted negative rate as a function of TNR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "tnr_predicted_negative_rate_curve"
        super(TnrPredictedNegativeRateMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.true_negative_rate, self.predicted_negative_rate, maximize=True
        )
class TnrAccuracyMetric(BinaryClassificationMetric):
    """Curve metric: accuracy as a function of TNR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "tnr_accuracy_curve"
        super(TnrAccuracyMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.true_negative_rate, self.accuracy, maximize=True
        )
class TnrF1ScoreMetric(BinaryClassificationMetric):
    """Curve metric: F1 score as a function of TNR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "tnr_f1_score_curve"
        super(TnrF1ScoreMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(self.true_negative_rate, self.f1, maximize=True)
class TnrMccMetric(BinaryClassificationMetric):
    """Curve metric: Matthews correlation coefficient as a function of TNR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "tnr_mcc_curve"
        super(TnrMccMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.true_negative_rate,
            self.matthews_correlation_coefficient,
            maximize=True,
        )
class TnrMarkednessMetric(BinaryClassificationMetric):
    """Curve metric: markedness as a function of TNR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "tnr_markedness_curve"
        super(TnrMarkednessMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.true_negative_rate, self.markedness, maximize=True
        )
"""
FNR Constrained Metrics
"""
class FnrThresholdMetric(BinaryClassificationMetric):
    """Curve metric: threshold as a function of FNR."""

    def __init__(self, **kwargs):
        # Drop whatever name was passed and explicitly rename
        kwargs["name"] = "fnr_threshold_curve"
        super(FnrThresholdMetric, self).__init__(**kwargs)

    def score(self):
        self.values = self.dedupe_curve(
            self.false_negative_rate, self.thresholds, maximize=False
        )
[docs]class FnrFprMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "fnr_fpr_curve"
super(FnrFprMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_negative_rate, self.false_positive_rate, maximize=True
)
[docs]class FnrTprMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "fnr_tpr_curve"
super(FnrTprMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_negative_rate, self.true_positive_rate, maximize=True
)
[docs]class FnrTnrMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "fnr_tnr_curve"
super(FnrTnrMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_negative_rate, self.true_negative_rate, maximize=True
)
[docs]class FnrFdrMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "fnr_fdr_curve"
super(FnrFdrMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_negative_rate, self.false_discovery_rate, maximize=False
)
)
[docs]class FnrPpvMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "fnr_ppv_curve"
super(FnrPpvMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_negative_rate, self.positive_predictive_value, maximize=True
)
[docs]class FnrNpvMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "fnr_npv_curve"
super(FnrNpvMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_negative_rate, self.negative_predictive_value, maximize=True
)
[docs]class FnrPredictedPositiveRateMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "fnr_predicted_positive_rate_curve"
super(FnrPredictedPositiveRateMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_negative_rate, self.predicted_positive_rate, maximize=True
)
[docs]class FnrPredictedNegativeRateMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "fnr_predicted_negative_rate_curve"
super(FnrPredictedNegativeRateMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_negative_rate, self.predicted_negative_rate, maximize=True
)
[docs]class FnrAccuracyMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "fnr_accuracy_curve"
super(FnrAccuracyMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_negative_rate, self.accuracy, maximize=True
)
[docs]class FnrF1ScoreMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "fnr_f1_score_curve"
super(FnrF1ScoreMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_negative_rate, self.f1, maximize=True
)
[docs]class FnrMccMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "fnr_mcc_curve"
super(FnrMccMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_negative_rate,
self.matthews_correlation_coefficient,
maximize=True,
)
)
[docs]class FnrMarkednessMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "fnr_markedness_curve"
super(FnrMarkednessMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_negative_rate, self.markedness, maximize=True
)
"""
FDR Constrained Metrics
"""
[docs]class FdrThresholdMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "fdr_threshold_curve"
super(FdrThresholdMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_discovery_rate, self.thresholds, maximize=False
)
[docs]class FdrFprMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "fdr_fpr_curve"
super(FdrFprMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_discovery_rate, self.false_positive_rate, maximize=True
)
[docs]class FdrTprMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "fdr_tpr_curve"
super(FdrTprMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_discovery_rate, self.true_positive_rate, maximize=True
)
[docs]class FdrTnrMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "fdr_tnr_curve"
super(FdrTnrMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_discovery_rate, self.true_negative_rate, maximize=True
)
[docs]class FdrFnrMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "fdr_fnr_curve"
super(FdrFnrMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_discovery_rate, self.false_negative_rate, maximize=False
)
)
[docs]class FdrPpvMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "fdr_ppv_curve"
super(FdrPpvMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_discovery_rate, self.positive_predictive_value, maximize=True
)
[docs]class FdrNpvMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "fdr_npv_curve"
super(FdrNpvMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_discovery_rate, self.negative_predictive_value, maximize=True
)
[docs]class FdrPredictedPositiveRateMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "fdr_predicted_positive_rate_curve"
super(FdrPredictedPositiveRateMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_discovery_rate, self.predicted_positive_rate, maximize=True
)
[docs]class FdrPredictedNegativeRateMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "fdr_predicted_negative_rate_curve"
super(FdrPredictedNegativeRateMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_discovery_rate, self.predicted_negative_rate, maximize=True
)
[docs]class FdrAccuracyMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "fdr_accuracy_curve"
super(FdrAccuracyMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_discovery_rate, self.accuracy, maximize=True
)
[docs]class FdrF1ScoreMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "fdr_f1_score_curve"
super(FdrF1ScoreMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_discovery_rate, self.f1, maximize=True
)
[docs]class FdrMccMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "fdr_mcc_curve"
super(FdrMccMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_discovery_rate,
self.matthews_correlation_coefficient,
maximize=True,
)
)
[docs]class FdrMarkednessMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "fdr_markedness_curve"
super(FdrMarkednessMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_discovery_rate, self.markedness, maximize=True
)
"""
FOR Constrained Metrics
"""
[docs]class ForThresholdMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "for_threshold_curve"
super(ForThresholdMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_omission_rate, self.thresholds, maximize=False
)
[docs]class ForFprMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "for_fpr_curve"
super(ForFprMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_omission_rate, self.false_positive_rate, maximize=True
)
[docs]class ForTprMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "for_tpr_curve"
super(ForTprMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_omission_rate, self.true_positive_rate, maximize=True
)
[docs]class ForTnrMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "for_tnr_curve"
super(ForTnrMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_omission_rate, self.true_negative_rate, maximize=True
)
[docs]class ForFnrMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "for_fnr_curve"
super(ForFnrMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_omission_rate, self.false_negative_rate, maximize=False
)
[docs]class ForFdrMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "for_fdr_curve"
super(ForFdrMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_omission_rate, self.false_discovery_rate, maximize=False
)
[docs]class ForPpvMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "for_ppv_curve"
super(ForPpvMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_omission_rate, self.positive_predictive_value, maximize=True
)
[docs]class ForNpvMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "for_npv_curve"
super(ForNpvMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_omission_rate, self.negative_predictive_value, maximize=True
)
[docs]class ForPredictedPositiveRateMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "for_predicted_positive_rate_curve"
super(ForPredictedPositiveRateMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_omission_rate, self.predicted_positive_rate, maximize=True
)
[docs]class ForPredictedNegativeRateMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "for_predicted_negative_rate_curve"
super(ForPredictedNegativeRateMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_omission_rate, self.predicted_negative_rate, maximize=True
)
[docs]class ForAccuracyMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "for_accuracy_curve"
super(ForAccuracyMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_omission_rate, self.accuracy, maximize=True
)
[docs]class ForF1ScoreMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "for_f1_score_curve"
super(ForF1ScoreMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.false_omission_rate, self.f1, maximize=True
)
)
)
)
"""
PPV Constrained Metrics
"""
[docs]class PpvThresholdMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "ppv_threshold_curve"
super(PpvThresholdMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.positive_predictive_value, self.thresholds, maximize=False
)
[docs]class PpvFprMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "ppv_fpr_curve"
super(PpvFprMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.positive_predictive_value, self.false_positive_rate, maximize=True
)
[docs]class PpvTprMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "ppv_tpr_curve"
super(PpvTprMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.positive_predictive_value, self.true_positive_rate, maximize=True
)
[docs]class PpvTnrMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "ppv_tnr_curve"
super(PpvTnrMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.positive_predictive_value, self.true_negative_rate, maximize=True
)
[docs]class PpvFnrMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "ppv_fnr_curve"
super(PpvFnrMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.positive_predictive_value, self.false_negative_rate, maximize=False
)
[docs]class PpvFdrMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "ppv_fdr_curve"
super(PpvFdrMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.positive_predictive_value, self.false_discovery_rate, maximize=False
)
)
[docs]class PpvNpvMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "ppv_npv_curve"
super(PpvNpvMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.positive_predictive_value,
self.negative_predictive_value,
maximize=True,
)
[docs]class PpvPredictedPositiveRateMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "ppv_predicted_positive_rate_curve"
super(PpvPredictedPositiveRateMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.positive_predictive_value, self.predicted_positive_rate, maximize=True
)
[docs]class PpvPredictedNegativeRateMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "ppv_predicted_negative_rate_curve"
super(PpvPredictedNegativeRateMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.positive_predictive_value, self.predicted_negative_rate, maximize=True
)
[docs]class PpvAccuracyMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "ppv_accuracy_curve"
super(PpvAccuracyMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.positive_predictive_value, self.accuracy, maximize=True
)
[docs]class PpvF1ScoreMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "ppv_f1_score_curve"
super(PpvF1ScoreMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.positive_predictive_value, self.f1, maximize=True
)
[docs]class PpvMccMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "ppv_mcc_curve"
super(PpvMccMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.positive_predictive_value,
self.matthews_correlation_coefficient,
maximize=True,
)
)
[docs]class PpvMarkednessMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "ppv_markedness_curve"
super(PpvMarkednessMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.positive_predictive_value, self.markedness, maximize=True
)
"""
NPV Constrained Metrics
"""
[docs]class NpvThresholdMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "npv_threshold_curve"
super(NpvThresholdMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.negative_predictive_value, self.thresholds, maximize=False
)
[docs]class NpvFprMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "npv_fpr_curve"
super(NpvFprMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.negative_predictive_value, self.false_positive_rate, maximize=True
)
[docs]class NpvTprMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "npv_tpr_curve"
super(NpvTprMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.negative_predictive_value, self.true_positive_rate, maximize=True
)
[docs]class NpvTnrMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "npv_tnr_curve"
super(NpvTnrMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.negative_predictive_value, self.true_negative_rate, maximize=True
)
[docs]class NpvFnrMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "npv_fnr_curve"
super(NpvFnrMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.negative_predictive_value, self.false_negative_rate, maximize=False
)
[docs]class NpvFdrMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "npv_fdr_curve"
super(NpvFdrMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.negative_predictive_value, self.false_discovery_rate, maximize=False
)
)
[docs]class NpvPpvMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "npv_ppv_curve"
super(NpvPpvMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.negative_predictive_value,
self.positive_predictive_value,
maximize=True,
)
[docs]class NpvPredictedPositiveRateMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "npv_predicted_positive_rate_curve"
super(NpvPredictedPositiveRateMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.negative_predictive_value, self.predicted_positive_rate, maximize=True
)
[docs]class NpvPredictedNegativeRateMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "npv_predicted_negative_rate_curve"
super(NpvPredictedNegativeRateMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.negative_predictive_value, self.predicted_negative_rate, maximize=True
)
[docs]class NpvAccuracyMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "npv_accuracy_curve"
super(NpvAccuracyMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.negative_predictive_value, self.accuracy, maximize=True
)
[docs]class NpvF1ScoreMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "npv_f1_score_curve"
super(NpvF1ScoreMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.negative_predictive_value, self.f1, maximize=True
)
[docs]class NpvMccMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "npv_mcc_curve"
super(NpvMccMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.negative_predictive_value,
self.matthews_correlation_coefficient,
maximize=True,
)
)
[docs]class NpvMarkednessMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "npv_markedness_curve"
super(NpvMarkednessMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.negative_predictive_value, self.markedness, maximize=True
)
"""
Predicted Positive Rate Constrained Metrics
"""
[docs]class PredictedPositiveRateThresholdMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "predicted_positive_rate_threshold_curve"
super(PredictedPositiveRateThresholdMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.predicted_positive_rate, self.thresholds, maximize=False
)
[docs]class PredictedPositiveRateFprMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "predicted_positive_rate_fpr_curve"
super(PredictedPositiveRateFprMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.predicted_positive_rate, self.false_positive_rate, maximize=True
)
[docs]class PredictedPositiveRateTprMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "predicted_positive_rate_tpr_curve"
super(PredictedPositiveRateTprMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.predicted_positive_rate, self.true_positive_rate, maximize=True
)
[docs]class PredictedPositiveRateTnrMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "predicted_positive_rate_tnr_curve"
super(PredictedPositiveRateTnrMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.predicted_positive_rate, self.true_negative_rate, maximize=True
)
[docs]class PredictedPositiveRateFnrMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "predicted_positive_rate_fnr_curve"
super(PredictedPositiveRateFnrMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.predicted_positive_rate, self.false_negative_rate, maximize=False
)
[docs]class PredictedPositiveRateFdrMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "predicted_positive_rate_fdr_curve"
super(PredictedPositiveRateFdrMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.predicted_positive_rate, self.false_discovery_rate, maximize=False
)
)
[docs]class PredictedPositiveRatePpvMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "predicted_positive_rate_ppv_curve"
super(PredictedPositiveRatePpvMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.predicted_positive_rate, self.positive_predictive_value, maximize=True
)
[docs]class PredictedPositiveRateNpvMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "predicted_positive_rate_npv_curve"
super(PredictedPositiveRateNpvMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.predicted_positive_rate, self.negative_predictive_value, maximize=True
)
[docs]class PredictedPositiveRatePredictedNegativeRateMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "predicted_positive_rate_predicted_negative_rate_curve"
super(PredictedPositiveRatePredictedNegativeRateMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.predicted_positive_rate, self.predicted_negative_rate, maximize=True
)
[docs]class PredictedPositiveRateAccuracyMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "predicted_positive_rate_accuracy_curve"
super(PredictedPositiveRateAccuracyMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.predicted_positive_rate, self.accuracy, maximize=True
)
[docs]class PredictedPositiveRateF1ScoreMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "predicted_positive_rate_f1_score_curve"
super(PredictedPositiveRateF1ScoreMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.predicted_positive_rate, self.f1, maximize=True
)
[docs]class PredictedPositiveRateMccMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "predicted_positive_rate_mcc_curve"
super(PredictedPositiveRateMccMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.predicted_positive_rate,
self.matthews_correlation_coefficient,
maximize=True,
)
)
[docs]class PredictedPositiveRateMarkednessMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "predicted_positive_rate_markedness_curve"
super(PredictedPositiveRateMarkednessMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.predicted_positive_rate, self.markedness, maximize=True
)
"""
Predicted Negative Rate Constrained Metrics
"""
[docs]class PredictedNegativeRateThresholdMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "predicted_negative_rate_threshold_curve"
super(PredictedNegativeRateThresholdMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.predicted_negative_rate, self.thresholds, maximize=False
)
[docs]class PredictedNegativeRateFprMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "predicted_negative_rate_fpr_curve"
super(PredictedNegativeRateFprMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.predicted_negative_rate, self.false_positive_rate, maximize=True
)
[docs]class PredictedNegativeRateTprMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "predicted_negative_rate_tpr_curve"
super(PredictedNegativeRateTprMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.predicted_negative_rate, self.true_positive_rate, maximize=True
)
[docs]class PredictedNegativeRateTnrMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "predicted_negative_rate_tnr_curve"
super(PredictedNegativeRateTnrMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.predicted_negative_rate, self.true_negative_rate, maximize=True
)
[docs]class PredictedNegativeRateFnrMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "predicted_negative_rate_fnr_curve"
super(PredictedNegativeRateFnrMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.predicted_negative_rate, self.false_negative_rate, maximize=False
)
[docs]class PredictedNegativeRateFdrMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "predicted_negative_rate_fdr_curve"
super(PredictedNegativeRateFdrMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.predicted_negative_rate, self.false_discovery_rate, maximize=False
)
)
[docs]class PredictedNegativeRatePpvMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "predicted_negative_rate_ppv_curve"
super(PredictedNegativeRatePpvMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.predicted_negative_rate, self.positive_predictive_value, maximize=True
)
[docs]class PredictedNegativeRateNpvMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "predicted_negative_rate_npv_curve"
super(PredictedNegativeRateNpvMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.predicted_negative_rate, self.negative_predictive_value, maximize=True
)
[docs]class PredictedNegativeRatePredictedPositiveRateMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "predicted_negative_rate_predicted_positive_rate_curve"
super(PredictedNegativeRatePredictedPositiveRateMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.predicted_negative_rate, self.predicted_positive_rate, maximize=True
)
[docs]class PredictedNegativeRateAccuracyMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "predicted_negative_rate_accuracy_curve"
super(PredictedNegativeRateAccuracyMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.predicted_negative_rate, self.accuracy, maximize=True
)
[docs]class PredictedNegativeRateF1ScoreMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "predicted_negative_rate_f1_score_curve"
super(PredictedNegativeRateF1ScoreMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.predicted_negative_rate, self.f1, maximize=True
)
[docs]class PredictedNegativeRateMccMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "predicted_negative_rate_mcc_curve"
super(PredictedNegativeRateMccMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.predicted_negative_rate,
self.matthews_correlation_coefficient,
maximize=True,
)
)
[docs]class PredictedNegativeRateMarkednessMetric(BinaryClassificationMetric):
def __init__(self, **kwargs):
# Drop whatever name was passed and explicitly rename
kwargs["name"] = "predicted_negative_rate_markedness_curve"
super(PredictedNegativeRateMarkednessMetric, self).__init__(**kwargs)
[docs] def score(self):
self.values = self.dedupe_curve(
self.predicted_negative_rate, self.markedness, maximize=True
)