Entry Saraswathi:2007:CPE from talip.bib
Last update: Sun Oct 15 02:55:04 MDT 2017
Top |
Symbols |
Numbers |
Math |
A |
B |
C |
D |
E |
F |
G |
H |
I |
J |
K |
L |
M |
N |
O |
P |
Q |
R |
S |
T |
U |
V |
W |
X |
Y |
Z
BibTeX entry
@Article{Saraswathi:2007:CPE,
  author =       "S. Saraswathi and T. V. Geetha",
  title =        "Comparison of performance of enhanced morpheme-based
                 language model with different word-based language
                 models for improving the performance of {Tamil} speech
                 recognition system",
  journal =      j-TALIP,
  volume =       "6",
  number =       "3",
  pages =        "9:1--9:??",
  month =        nov,
  year =         "2007",
  CODEN =        "????",
  DOI =          "10.1145/1290002.1290003",
  ISSN =         "1530-0226 (print), 1558-3430 (electronic)",
  ISSN-L =       "1530-0226",
  bibdate =      "Mon Jun 16 17:11:45 MDT 2008",
  bibsource =    "http://portal.acm.org/;
                 http://www.math.utah.edu/pub/tex/bib/talip.bib",
  abstract =     "This paper describes a new technique of language
                 modeling for a highly inflectional Dravidian language,
                 Tamil. It aims to alleviate the main problems
                 encountered in processing of Tamil language, like
                 enormous vocabulary growth caused by the large number
                 of different forms derived from one word. The size of
                 the vocabulary was reduced by decomposing the words
                 into stems and endings and storing these sub word units
                 (morphemes) in the vocabulary separately. An enhanced
                 morpheme-based language model was designed for the
                 inflectional language Tamil. The enhanced
                 morpheme-based language model was trained on the
                 decomposed corpus. The perplexity and Word Error Rate
                 (WER) were obtained to check the efficiency of the
                 model for Tamil speech recognition system. The results
                 were compared with word-based bigram and trigram
                 language models, distance based language model,
                 dependency based language model and class based
                 language model. From the results it was analyzed that
                 the enhanced morpheme-based trigram model with Katz
                 back-off smoothing effect improved the performance of
                 the Tamil speech recognition system when compared to
                 the word-based language models.",
  acknowledgement = ack-nhfb,
  articleno =    "9",
  fjournal =     "ACM Transactions on Asian Language Information
                 Processing",
  journal-URL =  "http://portal.acm.org/browse_dl.cfm?&idx=J820",
  keywords =     "language model; morphemes; perplexity; word error rate
                 and speech recognition",
}
Related entries
- aim,
8(1)2,
8(2)8,
9(3)10,
10(2)10,
10(4)20,
11(4)17,
12(4)16
- alleviate,
7(3)10,
8(2)9,
9(1)1
- analyzed,
8(1)2,
10(1)2
- based,
1(3)269,
2(4)301,
3(2)113,
4(3)357,
4(4)377,
5(2)121,
5(2)165,
5(3)185,
6(2)8,
6(3)11,
6(4)3,
7(2)6,
7(3)10,
7(4)12,
8(1)4,
8(3)11,
8(3)12,
8(4)17,
8(4)19,
9(1)2,
9(2)5,
9(2)6,
9(2)7,
9(3)11,
10(1)4,
10(1)6,
10(2)7,
10(3)14,
10(3)16,
11(2)6,
11(3)8,
11(3)11,
11(4)14,
11(4)15,
11(4)16,
11(4)17,
11(4)18,
12(1)1,
12(1)2,
12(1)4,
12(3)9,
12(3)10,
12(3)11,
13(1)2,
13(1)3,
13(1)4,
13(2)10,
13(3)13,
13(4)17,
13(4)18
- based, word-,
1(3)173,
1(4)297,
9(2)7,
9(3)11,
11(2)7
- bigram,
1(4)297
- class,
9(2)7,
10(1)2,
10(2)9,
10(3)14
- compared,
5(2)121,
6(3)11,
8(4)16,
9(1)1,
9(3)11,
9(3)12,
10(4)17,
11(2)6,
12(1)2,
12(4)17,
13(1)2,
13(1)4,
13(2)6,
13(3)12
- comparison,
1(2)145,
1(3)225,
2(3)193,
8(1)2,
9(3)11,
9(3)12,
11(2)6,
13(1)3
- corpus,
4(1)18,
4(4)400,
5(2)165,
6(3)11,
7(2)6,
7(3)9,
7(4)13,
8(4)17,
9(2)5,
9(2)7,
9(4)14,
10(2)7,
11(2)5,
11(3)9,
11(3)10,
11(3)11,
11(4)16,
12(1)1,
12(1)2,
12(1)3,
13(1)3
- decomposed,
12(3)10
- decomposing,
6(4)1
- dependency,
8(3)10,
9(2)7,
10(3)15,
11(2)6,
11(4)18
- derived,
5(2)165,
7(4)11,
8(1)3,
8(4)17,
9(3)11,
10(3)13,
12(1)2,
12(3)9,
13(3)12
- describe,
6(4)3,
7(2)5,
7(2)6,
7(2)7,
7(3)9,
7(4)12,
8(4)14,
9(3)10,
9(3)11,
10(1)5,
11(2)6,
12(1)3,
12(2)5,
13(1)1,
13(1)4,
13(2)9
- designed,
6(2)8,
7(4)11,
7(4)12,
10(1)3,
13(4)16
- different,
5(2)89,
6(4)3,
7(2)7,
7(3)8,
7(4)13,
8(1)2,
8(2)7,
8(2)8,
8(3)11,
8(4)16,
8(4)17,
9(1)1,
9(1)4,
9(2)5,
9(2)6,
9(3)12,
10(1)4,
10(1)5,
10(3)12,
10(4)17,
10(4)19,
11(3)8,
11(3)11,
11(4)16,
11(4)17,
11(4)18,
12(1)2,
12(3)11,
12(4)17,
13(2)6,
13(3)11,
13(4)16
- distance,
8(3)10,
10(1)3,
11(3)8,
13(1)2
- effect,
5(3)264,
8(2)9,
9(1)2,
11(4)14,
12(2)7,
12(4)14,
13(1)2
- efficiency,
5(2)89,
6(2)8,
7(3)8,
13(4)17,
13(4)18
- enhanced,
8(1)4
- error,
4(1)18,
7(1)2,
7(3)10,
9(1)2,
9(2)6,
10(1)2,
10(1)5,
10(1)6,
10(2)7,
10(2)10,
11(1)3,
11(2)7,
11(4)18,
12(1)2,
13(2)8,
13(3)14
- form,
7(1)3,
8(1)3,
8(4)18,
9(1)3,
9(3)12,
9(4)13,
10(2)8,
11(2)7,
11(4)13,
12(1)4,
12(3)11
- growth,
9(4)15
- highly,
5(2)121,
6(4)2,
7(3)9,
8(4)18,
10(2)8,
10(4)20,
11(2)4,
12(2)5,
13(1)4,
13(4)17
- improved,
7(4)12,
8(2)6,
8(2)7,
9(3)11,
10(1)3,
10(4)17,
11(3)10,
12(4)14,
12(4)17
- improving,
2(4)301,
4(3)321,
5(4)413,
7(1)2,
8(4)17,
10(1)5,
10(4)18,
12(4)15
- inflectional,
7(3)9,
9(3)11,
10(1)4,
10(2)8
- large,
6(2)8,
6(3)11,
7(3)8,
7(3)9,
7(4)11,
8(1)2,
8(3)12,
8(4)18,
9(4)14,
10(2)7,
10(4)20,
10(4)21,
11(1)2,
11(3)8,
12(2)6,
12(3)9,
13(1)3,
13(2)7,
13(2)8,
13(3)13,
13(4)17,
13(4)18
- like,
11(3)8,
12(3)11,
12(4)14
- main,
7(1)1,
7(1)2,
8(2)7,
8(3)12,
9(3)11,
11(3)8,
12(4)16,
13(1)4
- modeling,
1(1)3,
1(3)173,
3(2)87,
3(3)169,
6(1)z,
6(2)6,
7(3)10,
7(4)13,
8(1)2,
8(1)4,
9(4)14,
10(4)18,
10(4)21,
11(2)5,
12(2)5,
13(3)12,
13(4)16
- morpheme-based,
1(1)65
- morphemes,
9(1)3
- new,
5(1)74,
5(2)121,
6(2)8,
7(2)7,
7(4)11,
8(2)6,
9(1)4,
9(2)7,
10(1)4,
10(3)15,
10(4)17,
10(4)20,
11(2)6,
12(1)1,
12(1)3,
12(3)8,
12(3)9,
12(4)14,
13(1)1,
13(1)2,
13(2)6
- number,
6(2)8,
6(3)11,
7(1)3,
7(3)9,
7(4)11,
8(1)3,
8(4)18,
9(3)12,
10(1)5,
10(2)8,
10(4)21,
11(1)2,
11(3)8,
11(3)9,
12(1)2,
12(2)7,
12(3)9,
13(2)7,
13(3)12,
13(3)13
- obtained,
7(2)7,
8(2)9,
8(3)11,
8(4)17,
9(3)11,
11(4)16,
11(4)17,
12(3)10,
12(4)17,
13(1)4
- one,
5(2)89,
5(2)121,
6(2)6,
6(4)3,
7(3)8,
7(3)9,
7(4)11,
7(4)13,
8(2)9,
8(4)16,
8(4)17,
9(1)1,
9(2)5,
9(2)7,
9(3)12,
9(4)14,
10(1)5,
10(3)12,
10(3)13,
10(4)19,
11(2)4,
11(2)6,
11(2)7,
11(4)14,
12(1)1,
12(1)2,
12(2)5,
12(2)7,
12(3)11,
12(4)16,
13(1)4,
13(2)10,
13(4)17,
13(4)18
- paper,
5(2)89,
5(3)183,
6(3)10,
6(3)11
- performance,
5(2)121,
5(2)165,
6(2)8,
6(4)1,
6(4)3,
7(1)1,
7(1)2,
7(2)5,
7(2)6,
7(2)7,
7(3)9,
7(3)10,
7(4)13,
8(1)2,
8(1)3,
8(2)7,
8(2)8,
8(2)9,
8(3)10,
8(4)16,
8(4)17,
8(4)18,
9(1)2,
9(1)4,
9(2)5,
9(2)6,
9(3)11,
9(3)12,
9(4)14,
10(2)8,
10(3)13,
10(3)14,
11(2)7,
11(3)10,
11(3)11,
11(4)14,
11(4)15,
11(4)17,
12(1)2,
12(3)9,
12(3)11,
12(4)14,
12(4)15,
12(4)16,
13(1)3,
13(1)4,
13(2)7,
13(2)9,
13(4)16,
13(4)17
- perplexity,
9(2)7,
12(4)14
- problem,
6(2)7,
6(3)11,
6(4)1,
7(1)2,
7(2)7,
7(3)10,
8(1)2,
8(2)9,
8(3)10,
8(4)19,
9(1)1,
9(1)3,
9(2)5,
9(4)13,
10(1)2,
10(1)4,
10(3)14,
10(3)16,
10(4)21,
11(3)8,
11(3)11,
11(4)17,
11(4)18,
12(1)2,
12(1)3,
12(2)7,
12(3)10,
12(3)12,
12(4)16,
13(2)8,
13(3)14,
13(4)17
- processing,
3(1)1,
3(4)213,
4(4)375,
5(2)121,
5(4)291,
6(2)7,
6(3)10,
6(4)3,
7(1)1,
7(1)3,
7(2)7,
7(4)13,
8(1)2,
8(2)9,
8(4)13,
8(4)14,
8(4)16,
8(4)18,
8(4)19,
9(2)5,
9(2)6,
9(3)11,
9(4)15,
10(3)11,
10(3)14,
10(4)20,
11(1)2,
11(3)9,
11(4)15,
12(1)3,
12(3)10,
13(1)1,
13(2)7
- rate,
7(1)2,
7(3)10,
10(1)2,
10(1)6,
10(2)7,
12(1)1,
12(1)4,
13(2)8
- recognition,
1(1)83,
1(4)297,
2(1)27,
2(3)290,
5(1)4,
5(2)165,
6(2)6,
6(4)3,
7(1)2,
7(3)10,
8(1)2,
8(3)11,
8(4)18,
9(1)2,
9(2)7,
10(1)6,
10(2)7,
10(2)9,
10(3)13,
11(1)1,
11(1)2,
11(4)13,
11(4)16,
11(4)17,
11(4)18,
12(1)4,
12(3)10,
13(3)12,
13(4)16
- reduced,
12(3)9
- result,
4(2)135,
5(2)121,
5(2)146,
5(2)165,
6(2)6,
6(2)7,
6(3)11,
6(4)3,
7(1)2,
7(2)5,
7(2)6,
7(2)7,
7(3)8,
7(3)10,
7(4)11,
7(4)12,
7(4)13,
8(1)2,
8(1)3,
8(1)4,
8(2)6,
8(2)9,
8(3)10,
8(3)12,
8(4)14,
8(4)15,
8(4)16,
8(4)17,
8(4)18,
8(4)19,
9(1)1,
9(1)2,
9(2)5,
9(2)6,
9(2)7,
9(3)11,
9(3)12,
9(4)14,
10(1)2,
10(2)7,
11(2)4,
11(2)5,
11(3)8,
11(3)9,
11(3)11,
11(4)13,
11(4)14,
11(4)15,
12(1)3,
12(1)4,
12(2)5,
12(2)7,
12(3)9,
12(3)10,
12(3)11,
12(4)14,
12(4)16,
13(1)1,
13(1)4,
13(2)6,
13(2)7,
13(2)9,
13(3)11,
13(3)12,
13(3)14
- size,
5(3)245,
9(2)7,
9(3)12
- smoothing,
6(2)7
- speech,
1(1)83,
4(1)38,
7(1)2,
7(3)10,
8(1)2,
8(1)4,
8(4)14,
8(4)18,
9(1)2,
9(2)7,
10(1)6,
10(2)7,
11(1)2,
11(3)10,
13(4)16
- stem,
6(4)2,
8(4)16,
13(3)14
- storing,
11(3)11
- Tamil,
11(1)1,
12(1)4
- technique,
2(1)63,
3(4)243,
5(2)89,
6(1)z,
6(2)7,
6(2)8,
6(4)2,
6(4)3,
7(2)5,
7(2)7,
7(3)8,
7(4)12,
8(4)18,
9(1)3,
9(2)5,
10(1)4,
10(2)9,
10(2)10,
10(4)21,
11(1)1,
12(2)6,
12(3)9,
12(3)11,
12(4)14,
13(2)8,
13(3)12,
13(3)13,
13(4)18
- trained,
7(4)12,
8(2)6,
8(4)15,
12(3)9,
13(1)3
- unit,
9(3)12,
10(1)6,
11(2)5,
11(2)7,
11(4)16,
11(4)18,
13(2)9
- vocabulary,
8(1)2,
9(3)12,
11(2)7,
12(4)14
- was,
5(2)146,
6(3)11,
8(4)18,
8(4)19,
9(2)7,
9(3)10,
10(4)18,
11(1)2,
11(3)10,
12(4)14,
13(1)4,
13(4)16
- were,
9(2)7,
10(1)2,
10(2)10,
11(3)10,
11(4)13,
12(4)17,
13(2)6,
13(4)16
- when,
5(2)89,
6(3)11,
7(2)6,
7(4)11,
7(4)13,
8(1)3,
8(4)16,
8(4)17,
9(1)3,
9(2)5,
9(3)11,
9(3)12,
9(4)13,
11(2)4,
12(1)2,
12(4)14,
13(2)8,
13(3)12
- word-based,
1(3)173,
1(4)297,
9(2)7,
9(3)11,
11(2)7