negative entropy confidence added

This commit is contained in:
Lorenzo Volpi 2023-11-05 14:15:43 +01:00
parent 14326b2122
commit 5f26bc7059
2 changed files with 43 additions and 4 deletions
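In short: alongside the existing "max_conf" option, this commit adds a negative-entropy confidence measure (confidence="entropy") to the accuracy estimators and to the grid-search spaces. A minimal standalone sketch of the quantity being added, in plain NumPy rather than the repository's API:

import numpy as np

# Posteriors for two samples over three classes: one confident, one uniform.
probas = np.asarray([[0.90, 0.05, 0.05],
                     [1 / 3, 1 / 3, 1 / 3]])

# Negative entropy: sum_i p_i * log(p_i). For K classes it lies in
# [-log(K), 0] and is higher (closer to 0) the more confident the posterior;
# the 1e-20 term guards against log(0), as in the diff below.
neg_entropy = np.sum(probas * np.log(probas + 1e-20), axis=1)
print(neg_entropy)  # approx. [-0.394, -1.099]

This value is then attached as an extra feature next to the posteriors, as the change to BaseAccuracyEstimator further below shows.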

View File

@@ -91,6 +91,32 @@ def mulmc_sld(c_model, validation, protocol) -> EvaluationReport:
    )


@method
def binne_sld(c_model, validation, protocol) -> EvaluationReport:
    est = BQAE(
        c_model,
        SLD(LogisticRegression()),
        confidence="entropy",
    ).fit(validation)
    return evaluation_report(
        estimator=est,
        protocol=protocol,
    )


@method
def mulne_sld(c_model, validation, protocol) -> EvaluationReport:
    est = MCAE(
        c_model,
        SLD(LogisticRegression()),
        confidence="entropy",
    ).fit(validation)
    return evaluation_report(
        estimator=est,
        protocol=protocol,
    )


@method
def bin_sld_gs(c_model, validation, protocol) -> EvaluationReport:
    v_train, v_val = validation.split_stratified(0.6, random_state=0)
@@ -101,7 +127,7 @@ def bin_sld_gs(c_model, validation, protocol) -> EvaluationReport:
            "q__classifier__C": np.logspace(-3, 3, 7),
            "q__classifier__class_weight": [None, "balanced"],
            "q__recalib": [None, "bcts", "vs"],
-            "confidence": [None, "max_conf"],
+            "confidence": [None, "max_conf", "entropy"],
        },
        refit=False,
        protocol=UPP(v_val, repeats=100),
@@ -123,7 +149,7 @@ def mul_sld_gs(c_model, validation, protocol) -> EvaluationReport:
            "q__classifier__C": np.logspace(-3, 3, 7),
            "q__classifier__class_weight": [None, "balanced"],
            "q__recalib": [None, "bcts", "vs"],
-            "confidence": [None, "max_conf"],
+            "confidence": [None, "max_conf", "entropy"],
        },
        refit=False,
        protocol=UPP(v_val, repeats=100),
@@ -200,6 +226,7 @@ def bin_pacc_gs(c_model, validation, protocol) -> EvaluationReport:
        param_grid={
            "q__classifier__C": np.logspace(-3, 3, 7),
            "q__classifier__class_weight": [None, "balanced"],
+            "confidence": [None, "max_conf", "entropy"],
        },
        refit=False,
        protocol=UPP(v_val, repeats=100),
@@ -220,6 +247,7 @@ def mul_pacc_gs(c_model, validation, protocol) -> EvaluationReport:
        param_grid={
            "q__classifier__C": np.logspace(-3, 3, 7),
            "q__classifier__class_weight": [None, "balanced"],
+            "confidence": [None, "max_conf", "entropy"],
        },
        refit=False,
        protocol=UPP(v_val, repeats=100),
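For context, a hedged usage sketch of what the grid-search methods above now sweep over: the confidence argument of the estimators can be None, "max_conf", or the new "entropy". BQAE, SLD, evaluation_report and the fixtures c_model, validation and protocol are assumed to be the same objects used in this file; only the loop is new.

from sklearn.linear_model import LogisticRegression

# Manually sweeping the three confidence options (assumed fixtures as above).
for confidence in [None, "max_conf", "entropy"]:
    est = BQAE(
        c_model,
        SLD(LogisticRegression()),
        confidence=confidence,
    ).fit(validation)
    report = evaluation_report(estimator=est, protocol=protocol)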

View File

@@ -31,11 +31,22 @@ class BaseAccuracyEstimator(BaseQuantifier):
        self.classifier = classifier

    def __get_confidence(self):
        def max_conf(probas):
            _mc = np.max(probas, axis=-1)
            _min = 1.0 / probas.shape[1]
            _norm_mc = (_mc - _min) / (1.0 - _min)
            return _norm_mc

        def entropy(probas):
            _ent = np.sum(np.multiply(probas, np.log(probas + 1e-20)), axis=1)
            return _ent

        if self.confidence is None:
            return None

        __confs = {
-            "max_conf": lambda probas: np.max(probas, axis=-1).reshape((len(probas), 1))
+            "max_conf": max_conf,
+            "entropy": entropy,
        }
        return __confs.get(self.confidence, None)
@@ -43,7 +54,7 @@ class BaseAccuracyEstimator(BaseQuantifier):
        _ext = pred_proba
        _f_conf = self.__get_confidence()
        if _f_conf is not None:
-            _confs = _f_conf(pred_proba)
+            _confs = _f_conf(pred_proba).reshape((len(pred_proba), 1))
            _ext = np.concatenate((_confs, pred_proba), axis=1)
        return _ext
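As a sanity check on the extend change above (the reshape now happens once in extend rather than inside the lambda), a small standalone NumPy sketch mirroring max_conf and entropy from __get_confidence and the concatenation step:

import numpy as np

probas = np.asarray([[0.90, 0.05, 0.05],
                     [1 / 3, 1 / 3, 1 / 3]])

# max_conf: maximum posterior, rescaled so a uniform posterior gives 0
# and a one-hot posterior gives 1 (mirrors max_conf above).
_min = 1.0 / probas.shape[1]
max_conf = (np.max(probas, axis=-1) - _min) / (1.0 - _min)     # ~[0.85, 0.00]

# entropy: unnormalised negative entropy in [-log(K), 0] (mirrors entropy above).
neg_entropy = np.sum(probas * np.log(probas + 1e-20), axis=1)  # ~[-0.39, -1.10]

# extend(): whichever confidence is selected becomes one extra column
# prepended to the posteriors.
extended = np.concatenate((neg_entropy.reshape((len(probas), 1)), probas), axis=1)
print(extended.shape)  # (2, 4)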