%Aigaion2 BibTeX export from Knowledge Engineering Publications
%Saturday 17 April 2021 10:50:38 AM

@INPROCEEDINGS{nam15wsabieH,
     author = {Nam, Jinseok and Loza Menc{\'{\i}}a, Eneldo and Kim, Hyunwoo J. and F{\"{u}}rnkranz, Johannes},
      title = {Predicting Unseen Labels using Label Hierarchies in Large-Scale Multi-label Learning},
  booktitle = {Proceedings of the European Conference on Machine Learning and Principles and Practice of Knowledge Discovery in Databases},
       year = {2015},
      pages = {102--118},
  publisher = {Springer International Publishing},
   location = {Porto, Portugal},
       issn = {0302-9743},
       isbn = {978-3-319-23527-1},
        url = {http://www.ke.tu-darmstadt.de/publications/papers/ECML2015Nam.pdf},
        doi = {10.1007/978-3-319-23528-8_7},
   abstract = {An important problem in multi-label classification is to capture label patterns or underlying structures that have an impact on such patterns. One way of learning underlying structures over labels is to project both instances and labels into the same space where an instance and its relevant labels tend to have similar representations. In this paper, we present a novel method to learn a joint space of instances and labels by leveraging a hierarchy of labels. We also present an efficient method for pretraining vector representations of labels, namely label embeddings, from large amounts of label co-occurrence patterns and hierarchical structures of labels. This approach also allows us to make predictions on labels that have not been seen during training. We empirically show that the use of pretrained label embeddings allows us to obtain higher accuracies on unseen labels even when the number of labels are quite large. Our experimental results also demonstrate qualitatively that the proposed method is able to learn regularities among labels by exploiting a label hierarchy as well as label co-occurrences.}
}