Simplifying the minimal working example in the README
parent 365a9e626c
commit a6ff00f96b
@@ -1,9 +1,5 @@
 Change Log 0.1.9
 ----------------
-- [TODO] add LeQua2024 and normalized match distance to qp.error
-- [TODO] add CDE-iteration and Bayes-CDE methods
-- [TODO] add Friedman's method and DeBias
-- [TODO] check ignore warning stuff (check https://docs.python.org/3/library/warnings.html#temporarily-suppressing-warnings)
 
 - Added LeQua 2024 datasets and normalized match distance to qp.error
 
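The retained entry mentions a normalized match distance added to qp.error. A hedged sketch of how that metric might be called is given below; the function name qp.error.nmd and its (true_prevs, estim_prevs) signature are assumptions, not taken from this commit.

```python
# Hedged sketch, not part of this commit: exercises the "normalized match
# distance" that the change log says was added to qp.error. The name
# qp.error.nmd and the (true_prevs, estim_prevs) signature are assumptions.
import numpy as np
import quapy as qp

true_prev = np.asarray([0.15, 0.25, 0.35, 0.15, 0.10])   # true prevalence over 5 ordered classes
estim_prev = np.asarray([0.20, 0.20, 0.30, 0.20, 0.10])  # estimated prevalence

nmd = qp.error.nmd(true_prev, estim_prev)  # assumed to range in [0, 1], 0 meaning a perfect match
print(f'NMD={nmd:.4f}')
```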
README.md (13 lines changed)
@@ -45,19 +45,18 @@ of the test set.
 
 ```python
 import quapy as qp
-from sklearn.linear_model import LogisticRegression
 
-dataset = qp.datasets.fetch_twitter('semeval16')
+dataset = qp.datasets.fetch_UCIBinaryDataset("yeast")
+training, test = dataset.train_test
 
 # create an "Adjusted Classify & Count" quantifier
-model = qp.method.aggregative.ACC(LogisticRegression())
-model.fit(dataset.training)
+model = qp.method.aggregative.ACC()
+model.fit(training)
 
-estim_prevalence = model.quantify(dataset.test.instances)
-true_prevalence = dataset.test.prevalence()
+estim_prevalence = model.quantify(test.X)
+true_prevalence = test.prevalence()
 
 error = qp.error.mae(true_prevalence, estim_prevalence)
 
 print(f'Mean Absolute Error (MAE)={error:.3f}')
 ```
 
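For readability, this is the simplified minimal working example as it stands after this commit, reassembled from the new side of the hunk above (the added comments are editorial):

```python
import quapy as qp

# load a UCI binary dataset and take its predefined training/test split
dataset = qp.datasets.fetch_UCIBinaryDataset("yeast")
training, test = dataset.train_test

# create an "Adjusted Classify & Count" quantifier (its default classifier is used)
model = qp.method.aggregative.ACC()
model.fit(training)

# estimate the class prevalence of the test set and compare it with the true one
estim_prevalence = model.quantify(test.X)
true_prevalence = test.prevalence()

error = qp.error.mae(true_prevalence, estim_prevalence)

print(f'Mean Absolute Error (MAE)={error:.3f}')
```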
TODO.txt (6 lines changed)
@@ -0,0 +1,6 @@
+- [TODO] add ensemble methods SC-MQ, MC-SQ, MC-MQ
+- [TODO] add HistNetQ
+- [TODO] add CDE-iteration and Bayes-CDE methods
+- [TODO] add Friedman's method and DeBias
+- [TODO] check ignore warning stuff
+  check https://docs.python.org/3/library/warnings.html#temporarily-suppressing-warnings
@@ -33,9 +33,9 @@ quantifier = KDEyML(classifier=LogisticRegression())
 
 # model selection
 param_grid = {
     'classifier__C': np.logspace(-3, 3, 7),          # classifier-dependent: inverse of regularization strength
     'classifier__class_weight': ['balanced', None],  # classifier-dependent: weights of each class
     'bandwidth': np.linspace(0.01, 0.2, 20)          # quantifier-dependent: bandwidth of the kernel
 }
 model_selection = GridSearchQ(quantifier, param_grid, protocol=val_generator, error='mrae', refit=False, verbose=True)
 quantifier = model_selection.fit(training)
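The model-selection snippet above assumes that `quantifier`, `training`, and `val_generator` are defined earlier in the file. Below is a self-contained sketch of the same grid search, under the assumptions that the data is the UCI "yeast" dataset from the README example, that validation samples are drawn with QuaPy's APP protocol, and that the sample size is 100; none of these choices come from this commit.

```python
# Self-contained sketch (not part of this commit) of the grid search shown in
# the hunk above. Assumed: UCI "yeast" data, APP validation protocol, SAMPLE_SIZE=100.
import numpy as np
import quapy as qp
from quapy.method.aggregative import KDEyML
from quapy.model_selection import GridSearchQ
from quapy.protocol import APP
from sklearn.linear_model import LogisticRegression

qp.environ['SAMPLE_SIZE'] = 100  # assumed sample size, used by APP and by 'mrae'

dataset = qp.datasets.fetch_UCIBinaryDataset("yeast")
training, test = dataset.train_test

# hold out part of the training data to generate validation samples
train, val = training.split_stratified(train_prop=0.6)
val_generator = APP(val)

quantifier = KDEyML(classifier=LogisticRegression())

# model selection (same grid as in the hunk above)
param_grid = {
    'classifier__C': np.logspace(-3, 3, 7),          # classifier-dependent: inverse of regularization strength
    'classifier__class_weight': ['balanced', None],  # classifier-dependent: weights of each class
    'bandwidth': np.linspace(0.01, 0.2, 20)          # quantifier-dependent: bandwidth of the kernel
}
model_selection = GridSearchQ(quantifier, param_grid, protocol=val_generator, error='mrae', refit=False, verbose=True)
quantifier = model_selection.fit(train)

# the selected configuration can then be used to quantify the test set
estim_prevalence = quantifier.quantify(test.X)
```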