2016
Wicker, Jörg; Tyukin, Andrey; Kramer, Stefan
A Nonlinear Label Compression and Transformation Method for Multi-Label Classification using Autoencoders Proceedings Article
In: Bailey, James; Khan, Latifur; Washio, Takashi; Dobbie, Gill; Huang, Zhexue Joshua; Wang, Ruili (Eds.): The 20th Pacific-Asia Conference on Knowledge Discovery and Data Mining (PAKDD), pp. 328-340, Springer International Publishing, Switzerland, 2016, ISBN: 978-3-319-31753-3.
@inproceedings{wicker2016nonlinear,
title = {A Nonlinear Label Compression and Transformation Method for Multi-Label Classification using Autoencoders},
author = {J\"{o}rg Wicker and Andrey Tyukin and Stefan Kramer},
editor = {James Bailey and Latifur Khan and Takashi Washio and Gill Dobbie and Zhexue Joshua Huang and Ruili Wang},
url = {http://dx.doi.org/10.1007/978-3-319-31753-3_27},
doi = {10.1007/978-3-319-31753-3_27},
isbn = {978-3-319-31753-3},
year = {2016},
date = {2016-04-16},
booktitle = {The 20th Pacific-Asia Conference on Knowledge Discovery and Data Mining (PAKDD)},
volume = {9651},
pages = {328-340},
publisher = {Springer International Publishing},
address = {Switzerland},
series = {Lecture Notes in Computer Science},
abstract = {Multi-label classification targets the prediction of multiple interdependent and non-exclusive binary target variables. Transformation-based algorithms transform the data set such that regular single-label algorithms can be applied to the problem. A special type of transformation-based classifiers are label compression methods, which compress the labels and then mostly use single-label classifiers to predict the compressed labels. So far, there are no compression-based algorithms that follow a problem transformation approach and address non-linear dependencies in the labels. In this paper, we propose a new algorithm, called Maniac (Multi-lAbel classificatioN usIng AutoenCoders), which extracts the non-linear dependencies by compressing the labels using autoencoders. We adapt the training process of autoencoders to make them more suitable for parameter optimization in the context of this algorithm. The method is evaluated on eight standard multi-label data sets. Experiments show that despite not producing a good ranking, Maniac generates a particularly good bipartition of the labels into positives and negatives. This is caused by rather strong predictions with either very high or very low probability. Additionally, the algorithm seems to perform better given more labels and a higher label cardinality in the data set.},
keywords = {autoencoders, label compression, machine learning, multi-label classification},
pubstate = {published},
tppubtype = {inproceedings}
}
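The abstract above outlines the Maniac pipeline: compress the binary label matrix with an autoencoder, learn a mapping from the features to the compressed label space with an off-the-shelf learner, then decode the predictions and threshold them into a bipartition. The following is only a minimal sketch of that idea, not the authors' implementation; the tensorflow.keras autoencoder, the RandomForestRegressor, the toy data, and all layer sizes and thresholds are assumptions chosen purely for illustration.

# Hypothetical sketch of autoencoder-based label compression (Maniac-style idea).
# All names, sizes, and data below are illustrative, not from the paper.
import numpy as np
from tensorflow import keras
from sklearn.ensemble import RandomForestRegressor

rng = np.random.default_rng(0)
X = rng.normal(size=(500, 20)).astype("float32")              # feature matrix
Y = (rng.random((500, 40)) < 0.1).astype("float32")           # sparse binary label matrix

n_labels, n_code = Y.shape[1], 8

# 1) Autoencoder that compresses the label vectors non-linearly.
label_in = keras.Input(shape=(n_labels,))
encode = keras.layers.Dense(n_code, activation="tanh")
decode = keras.layers.Dense(n_labels, activation="sigmoid")
autoencoder = keras.Model(label_in, decode(encode(label_in)))
autoencoder.compile(optimizer="adam", loss="binary_crossentropy")
autoencoder.fit(Y, Y, epochs=50, batch_size=32, verbose=0)

# Encoder and decoder reuse the trained layers (shared weights).
encoder = keras.Model(label_in, encode(label_in))
code_in = keras.Input(shape=(n_code,))
decoder = keras.Model(code_in, decode(code_in))

# 2) Train a single multi-output regressor from features to compressed labels.
Z = encoder.predict(Y, verbose=0)
reg = RandomForestRegressor(n_estimators=100, random_state=0).fit(X, Z)

# 3) Predict compressed labels, decode them back to label space, and
#    threshold to obtain the final bipartition into positives and negatives.
Z_pred = reg.predict(X)
Y_prob = decoder.predict(Z_pred.astype("float32"), verbose=0)
Y_pred = (Y_prob >= 0.5).astype(int)

The point of the non-linear encoder is that correlated labels can share a compact code, so a single regressor on a few code dimensions stands in for many per-label classifiers; the decoder's sigmoid outputs are then thresholded, matching the strong high/low probabilities mentioned in the abstract.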
2015
Tyukin, Andrey; Kramer, Stefan; Wicker, Jörg
Scavenger – A Framework for the Efficient Evaluation of Dynamic and Modular Algorithms Proceedings Article
In: Bifet, Albert; May, Michael; Zadrozny, Bianca; Gavalda, Ricard; Pedreschi, Dino; Cardoso, Jaime; Spiliopoulou, Myra (Eds.): Machine Learning and Knowledge Discovery in Databases, pp. 325-328, Springer International Publishing, 2015, ISBN: 978-3-319-23460-1.
@inproceedings{tyukin2015scavenger,
title = {Scavenger - A Framework for the Efficient Evaluation of Dynamic and Modular Algorithms},
author = {Andrey Tyukin and Stefan Kramer and J\"{o}rg Wicker},
editor = {Albert Bifet and Michael May and Bianca Zadrozny and Ricard Gavalda and Dino Pedreschi and Jaime Cardoso and Myra Spiliopoulou},
url = {http://dx.doi.org/10.1007/978-3-319-23461-8_40},
doi = {10.1007/978-3-319-23461-8_40},
isbn = {978-3-319-23460-1},
year = {2015},
date = {2015-01-01},
booktitle = {Machine Learning and Knowledge Discovery in Databases},
volume = {9286},
pages = {325-328},
publisher = {Springer International Publishing},
series = {Lecture Notes in Computer Science},
abstract = {Machine Learning methods and algorithms are often highly modular in the sense that they rely on a large number of subalgorithms that are in principle interchangeable. For example, it is often possible to use various kinds of pre- and post-processing and various base classifiers or regressors as components of the same modular approach. We propose a framework, called Scavenger, that allows whole families of conceptually similar algorithms to be evaluated efficiently. The algorithms are represented as compositions, couplings and products of atomic subalgorithms. This allows partial results to be cached and shared between different instances of a modular algorithm, so that potentially expensive partial results need not be recomputed multiple times. Furthermore, our framework handles parallel execution, load balancing, and the backup of partial results in case of implementation or runtime errors. Scavenger is licensed under the GPLv3 and can be downloaded freely at https://github.com/jorro/scavenger.},
keywords = {autoencoders, distributed processing, framework, large-scale, Scavenger},
pubstate = {published},
tppubtype = {inproceedings}
}
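The core idea in the Scavenger abstract, representing a modular algorithm as a composition of atomic subalgorithms so that expensive partial results are cached and shared rather than recomputed, can be illustrated in a few lines. Scavenger itself is a JVM framework (see the GitHub link in the abstract), so this Python sketch borrows none of its API; every name below is hypothetical.

# Illustrative sketch only: caching a shared intermediate result so that
# different compositions of the same atomic steps do not recompute it.
from functools import lru_cache

@lru_cache(maxsize=None)
def preprocess(data, method):
    # Stands in for an expensive, reusable atomic subalgorithm.
    print(f"computing preprocess(method={method!r})")
    return tuple(x * 2 if method == "scale" else x + 1 for x in data)

def modular_algorithm(data, method, predictor):
    # A composition of atomic steps; the cached step is shared across instances.
    features = preprocess(data, method)
    return predictor(features)

data = (1.0, 2.0, 3.0)
mean_predictor = lambda xs: sum(xs) / len(xs)
max_predictor = max

# Two conceptually similar algorithms that differ only in the final step:
print(modular_algorithm(data, "scale", mean_predictor))
print(modular_algorithm(data, "scale", max_predictor))  # preprocess() is reused from the cache

In Scavenger this bookkeeping additionally covers parallel execution, load balancing, and backing up partial results against implementation or runtime errors, which a plain memoization decorator like the one above does not address.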