Skip to content

Commit

Permalink
passing Cleynen et al status to 'published'
Browse files Browse the repository at this point in the history
  • Loading branch information
jchiquet committed Dec 14, 2023
1 parent 7141a21 commit 7369519
Show file tree
Hide file tree
Showing 2 changed files with 16 additions and 14 deletions.
14 changes: 0 additions & 14 deletions _bibliography/in_production.bib
Original file line number Diff line number Diff line change
@@ -1,14 +0,0 @@
@article{cleynen_local,
  bibtex_show = {true},
  author      = {Cleynen, Alice and Raynal, Louis and Marin, Jean-Michel},
  title       = {Local tree methods for classification: a review and some dead ends},
  journal     = {Computo},
  year        = 2023,
  abstract    = {Random Forests (RF) [@breiman:2001] are very popular machine learning methods. They perform well even with little or no tuning, and have some theoretical guarantees, especially for sparse problems [@biau:2012;@scornet:etal:2015]. These learning strategies have been used in several contexts, also outside the field of classification and regression. To perform Bayesian model selection in the case of intractable likelihoods, the ABC Random Forests (ABC-RF) strategy of @pudlo:etal:2016 consists in applying Random Forests on training sets composed of simulations coming from the Bayesian generative models. The ABC-RF technique is based on an underlying RF for which the training and prediction phases are separated. The training phase does not take into account the data to be predicted. This seems to be suboptimal as in the ABC framework only one observation is of interest for the prediction. In this paper, we study tree-based methods that are built to predict a specific instance in a classification setting. This type of methods falls within the scope of local (lazy/instance-based/case specific) classification learning. We review some existing strategies and propose two new ones. The first consists in modifying the tree splitting rule by using kernels, the second in using a first RF to compute some local variable importance that is used to train a second, more local, RF. Unfortunately, these approaches, although interesting, do not provide conclusive results.},
  repository  = {published-202312-cleynen-local},
  type        = {Research article},
  language    = {R},
  domain      = {Statistics},
  keywords    = {classification, Random Forests, local methods},
  issn        = {2824-7795},
}
16 changes: 16 additions & 0 deletions _bibliography/published.bib
Original file line number Diff line number Diff line change
@@ -1,3 +1,19 @@
@article{cleynen_local,
  bibtex_show = {true},
  author      = {Cleynen, Alice and Raynal, Louis and Marin, Jean-Michel},
  title       = {Local tree methods for classification: a review and some dead ends},
  journal     = {Computo},
  year        = 2023,
  abstract    = {Random Forests (RF) [@breiman:2001] are very popular machine learning methods. They perform well even with little or no tuning, and have some theoretical guarantees, especially for sparse problems [@biau:2012;@scornet:etal:2015]. These learning strategies have been used in several contexts, also outside the field of classification and regression. To perform Bayesian model selection in the case of intractable likelihoods, the ABC Random Forests (ABC-RF) strategy of @pudlo:etal:2016 consists in applying Random Forests on training sets composed of simulations coming from the Bayesian generative models. The ABC-RF technique is based on an underlying RF for which the training and prediction phases are separated. The training phase does not take into account the data to be predicted. This seems to be suboptimal as in the ABC framework only one observation is of interest for the prediction. In this paper, we study tree-based methods that are built to predict a specific instance in a classification setting. This type of methods falls within the scope of local (lazy/instance-based/case specific) classification learning. We review some existing strategies and propose two new ones. The first consists in modifying the tree splitting rule by using kernels, the second in using a first RF to compute some local variable importance that is used to train a second, more local, RF. Unfortunately, these approaches, although interesting, do not provide conclusive results.},
  doi         = {10.57750/3j8m-8d57},
  repository  = {published-202312-cleynen-local},
  type        = {Research article},
  language    = {R},
  domain      = {Statistics},
  keywords    = {classification, Random Forests, local methods},
  issn        = {2824-7795},
}

@article{delattre_fim,
bibtex_show = {true},
author = {Delattre, Maud and Kuhn, Estelle},
Expand Down

0 comments on commit 7369519

Please sign in to comment.