launching experiments
parent a3f0008a2a
commit 881e1033f1
@@ -48,14 +48,14 @@ from sklearn.base import clone
 def methods():
     acc_hyper = {}
-    hdy_hyper = {'n_bins': [3,4,5,8,16,32]}
+    hdy_hyper = {'nbins': [3,4,5,8,16,32]}
     kdey_hyper = {'bandwidth': [0.001, 0.005, 0.01, 0.05, 0.1, 0.2]}
     wrap_hyper = lambda dic: {f'quantifier__{k}':v for k,v in dic.items()}
     # yield 'BootstrapACC', AggregativeBootstrap(ACC(LR()), n_test_samples=1000, random_state=0), wrap_hyper(acc_hyper)
-    # yield 'BootstrapHDy', AggregativeBootstrap(DMy(LR(), divergence='HD'), n_test_samples=1000, random_state=0), wrap_hyper(hdy_hyper)
+    yield 'BootstrapHDy', AggregativeBootstrap(DMy(LR(), divergence='HD'), n_test_samples=1000, random_state=0), wrap_hyper(hdy_hyper)
     # yield 'BootstrapKDEy', AggregativeBootstrap(KDEyML(LR()), n_test_samples=1000, random_state=0), wrap_hyper(kdey_hyper)
     # yield 'BayesianACC', BayesianCC(LR(), mcmc_seed=0), acc_hyper
-    # yield 'BayesianHDy', PQ(LR(), stan_seed=0), hdy_hyper
+    yield 'BayesianHDy', PQ(LR(), stan_seed=0), hdy_hyper
     # yield 'BayesianKDEy', BayesianKDEy(LR(), mcmc_seed=0), kdey_hyper
 
 
 
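Note: the hyperparameter grids above are wrapped with a 'quantifier__' prefix so that a model-selection wrapper can route each value to the inner quantifier. A minimal sketch of how such a wrapped grid expands, using sklearn's ParameterGrid (the loop below is illustrative, not part of this commit):

from sklearn.model_selection import ParameterGrid

hdy_hyper = {'nbins': [3, 4, 5, 8, 16, 32]}
wrap_hyper = lambda dic: {f'quantifier__{k}': v for k, v in dic.items()}

# one candidate configuration per value of 'quantifier__nbins'
for config in ParameterGrid(wrap_hyper(hdy_hyper)):
    print(config)  # e.g. {'quantifier__nbins': 3}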
@@ -137,6 +137,10 @@ if __name__ == '__main__':
     for setup in [binary, multiclass]:
         qp.environ['SAMPLE_SIZE'] = setup['sample_size']
         for data_name in setup['datasets']:
+            print(f'dataset={data_name}')
+            if data_name=='breast-cancer' or data_name.startswith("cmc") or data_name.startswith("ctg"):
+                print(f'skipping dataset: {data_name}')
+                continue
             data = setup['fetch_fn'](data_name)
             is_binary = data.n_classes==2
             result_subdir = result_dir / ('binary' if is_binary else 'multiclass')
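Note: the added skip condition hard-codes three dataset families. A hypothetical refactor for readability (SKIP_DATASETS, SKIP_PREFIXES and should_skip are names introduced here for illustration only):

SKIP_DATASETS = {'breast-cancer'}
SKIP_PREFIXES = ('cmc', 'ctg')

def should_skip(data_name: str) -> bool:
    # str.startswith accepts a tuple, covering every 'cmc*' and 'ctg*' variant
    return data_name in SKIP_DATASETS or data_name.startswith(SKIP_PREFIXES)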
@@ -1,6 +1,10 @@
 """
 Utility functions for `Bayesian quantification <https://arxiv.org/abs/2302.09159>`_ methods.
 """
+import contextlib
+import os
+import sys
 
 import numpy as np
+import importlib.resources
 
@@ -10,6 +14,8 @@ try:
     import numpyro
     import numpyro.distributions as dist
     import stan
+    import logging
+    import stan.common
 
     DEPENDENCIES_INSTALLED = True
 except ImportError:
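Note: the try/except records availability in a module-level flag instead of failing at import time. A standalone sketch of this optional-dependency pattern, including a hypothetical guard that consumes the flag (the error message mirrors the one raised in PQ.__init__ further below):

try:
    import numpyro  # noqa: F401
    import stan     # noqa: F401
    DEPENDENCIES_INSTALLED = True
except ImportError:
    DEPENDENCIES_INSTALLED = False

def require_bayesian_dependencies():
    # fail lazily, only when a Bayesian method is actually requested
    if not DEPENDENCIES_INSTALLED:
        raise ImportError(
            "Some dependencies for Bayesian quantification are missing. "
            "Run `$ pip install quapy[bayes]` to install them.")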
@@ -86,6 +92,18 @@ def sample_posterior(
 def load_stan_file():
     return importlib.resources.files('quapy.method').joinpath('stan/pq.stan').read_text(encoding='utf-8')
 
+
+@contextlib.contextmanager
+def _suppress_stan_logging():
+    with open(os.devnull, "w") as devnull:
+        old_stderr = sys.stderr
+        sys.stderr = devnull
+        try:
+            yield
+        finally:
+            sys.stderr = old_stderr
+
+
 def pq_stan(stan_code, n_bins, pos_hist, neg_hist, test_hist, number_of_samples, num_warmup, stan_seed):
     """
     Perform Bayesian prevalence estimation using a Stan model for probabilistic quantification.
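Note: _suppress_stan_logging silences Python-level writes to sys.stderr while Stan builds and samples. The standard library offers an equivalent via contextlib.redirect_stderr; a minimal sketch (not part of this commit; like the hand-rolled version, it does not capture C-level writes to file descriptor 2):

import contextlib, os, sys

with open(os.devnull, 'w') as devnull, contextlib.redirect_stderr(devnull):
    print('discarded', file=sys.stderr)    # swallowed
print('visible again', file=sys.stderr)    # stderr is restored on exit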
@@ -121,6 +139,8 @@ def pq_stan(stan_code, n_bins, pos_hist, neg_hist, test_hist, number_of_samples,
     Each element corresponds to one draw from the posterior distribution.
     """
 
+    logging.getLogger("stan.common").setLevel(logging.ERROR)
+
     stan_data = {
         'n_bucket': n_bins,
         'train_neg': neg_hist.tolist(),
@@ -129,6 +149,7 @@ def pq_stan(stan_code, n_bins, pos_hist, neg_hist, test_hist, number_of_samples,
         'posterior': 1
     }
 
-    stan_model = stan.build(stan_code, data=stan_data, random_seed=stan_seed)
-    fit = stan_model.sample(num_chains=1, num_samples=number_of_samples,num_warmup=num_warmup)
+    with _suppress_stan_logging():
+        stan_model = stan.build(stan_code, data=stan_data, random_seed=stan_seed)
+        fit = stan_model.sample(num_chains=1, num_samples=number_of_samples,num_warmup=num_warmup)
 
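Note: with pystan 3, posterior draws can then be pulled from the returned fit by parameter name. A hedged sketch of a continuation (the name 'prevalence' is an assumption; the actual name is whatever pq.stan declares in its parameters block):

# parameter dimensions come first, the sample dimension last
draws = fit['prevalence']
point_estimate = draws.mean(axis=-1)  # posterior-mean prevalence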
@@ -642,6 +642,7 @@ class PQ(AggregativeSoftQuantifier, BinaryAggregativeQuantifier):
                 "Run `$ pip install quapy[bayes]` to install them.")
 
         super().__init__(classifier, fit_classifier, val_split)
 
         self.nbins = nbins
         self.fixed_bins = fixed_bins
+        self.num_warmup = num_warmup
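Note: a minimal usage sketch of PQ following QuAPy's usual fit/quantify conventions; the dataset fetch call, the dataset name, and the exact fit signature are assumptions for illustration, not part of this commit:

import quapy as qp
from sklearn.linear_model import LogisticRegression as LR

# 'haberman' is an illustrative binary UCI dataset name
train, test = qp.datasets.fetch_UCIDataset('haberman').train_test

pq = PQ(LR(), stan_seed=0)   # as instantiated in methods() above
pq.fit(train)                # assumed: fit on a LabelledCollection
prev = pq.quantify(test.X)   # point estimate of the class prevalences
print(prev)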