Time-dependent genre recognition by means of instantaneous frequency spectrum based on Hilbert-Huang transform. Endrjukaite, T. & Kosugi, N. In Proceedings of the 14th International Conference on Information Integration and Web-based Applications & Services - IIWAS '12, pages 64-71, 2012. ACM Press.
abstract   bibtex   
This paper describes a new method of music genre recognition. Even for people it is difficult to define musical genres, because a genre is something more than a set of rules. Automation of this task could improve the work of multiple audio-related WEB portals, such as audio-libraries, and could simplify human activity in other music-related areas. For music genre recognition, we introduce the instantaneous frequency spectrum (IFS) whose calculation is based on the Hilbert-Huang transform. In our method, IFSs of audio signals are generated from their instantaneous frequencies and used to calculate music genre templates. The experimental results for three test music pieces show that the method can accurately detect and differentiate genres of tunes. Slicing test music into frames and recognizing genres for short fragments of a whole music piece gives a precise description of a piece’s internal structure, which could help to enhance people’s understanding of the music. Presentation of this information also is an advance in music visualization.
@inproceedings{Endrjukaite2012,
 id = {da6c1420-20b7-325a-ab7d-9fbf0b5d8f11},
 title = {Time-dependent genre recognition by means of instantaneous frequency spectrum based on {Hilbert-Huang} transform},
 type = {inProceedings},
 year = {2012},
 keywords = {Hilbert transform (HT),Music genre recognition,empirical mode decomposition (EMD),instantaneous frequency spectrum (IFS)},
 created = {2014-12-11T00:06:05.000Z},
 pages = {64--71},
 websites = {http://dl.acm.org/citation.cfm?id=2428750,http://dl.acm.org/citation.cfm?doid=2428736.2428750},
 publisher = {ACM Press},
 address = {New York, New York, USA},
 accessed = {2014-12-10},
 file_attached = {false},
 profile_id = {1a39a66b-a9cd-3b44-8498-85d3ad34e830},
 group_id = {424eb3e1-9b50-35ed-91d3-65c78c3164d7},
 last_modified = {2014-12-11T05:27:35.000Z},
 read = {false},
 starred = {false},
 authored = {false},
 confirmed = {true},
 hidden = {false},
 citation_key = {Endrjukaite2012},
 abstract = {This paper describes a new method of music genre recognition. Even for people it is difficult to define musical genres, because a genre is something more than a set of rules. Automation of this task could improve the work of multiple audio-related WEB portals, such as audio-libraries, and could simplify human activity in other music-related areas. For music genre recognition, we introduce the instantaneous frequency spectrum (IFS) whose calculation is based on the Hilbert-Huang transform. In our method, IFSs of audio signals are generated from their instantaneous frequencies and used to calculate music genre templates. The experimental results for three test music pieces show that the method can accurately detect and differentiate genres of tunes. Slicing test music into frames and recognizing genres for short fragments of a whole music piece gives a precise description of a piece’s internal structure, which could help to enhance people’s understanding of the music. Presentation of this information also is an advance in music visualization.},
 bibtype = {inProceedings},
 author = {Endrjukaite, Tatiana and Kosugi, Naoko},
 booktitle = {Proceedings of the 14th International Conference on Information Integration and Web-based Applications \& Services - {IIWAS} '12}
}
Downloads: 0