Vidal's library
Title: Discrete Sequence Prediction and Its Applications
Authors: Philip Laird and Ronald Saul
Journal: Machine Learning
Volume: 15
Number: 1
Pages: 43--68
Publisher: Kluwer Academic Publishers
Year: 1994
DOI: 10.1023/A:1022661103485
Abstract: Learning from experience to predict sequences of discrete symbols is a fundamental problem in machine learning with many applications. We present a simple and practical algorithm (TDAG) for discrete sequence prediction. Based on a text-compression method, the TDAG algorithm limits the growth of storage by retaining the most likely prediction contexts and discarding (forgetting) less likely ones. The storage/speed tradeoffs are parameterized so that the algorithm can be used in a variety of applications. Our experiments verify its performance on data compression tasks and show how it applies to two problems: dynamically optimizing Prolog programs for good average-case behavior and maintaining a cache for a database on mass storage.
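
The abstract describes TDAG only at a high level, so below is a minimal illustrative sketch (in Python) of the general idea, not the authors' algorithm: it keeps a trie of recent prediction contexts, stores a longer context only after its shorter parent has been seen often enough (a crude stand-in for TDAG's retain/forget policy), and predicts the most frequent next symbol under the deepest matching context. The class and parameter names (PruningPredictor, max_depth, extend_threshold) are invented for this illustration.

# Illustrative sketch only -- not the TDAG code from the paper.
from collections import defaultdict


class Node:
    __slots__ = ("count", "next_counts", "children")

    def __init__(self):
        self.count = 0                       # visits to this context
        self.next_counts = defaultdict(int)  # symbol -> how often it followed
        self.children = {}                   # symbol -> one-symbol-longer context


class PruningPredictor:
    """Keeps contexts up to max_depth symbols long; a longer context is stored
    only after its shorter parent has been visited extend_threshold times."""

    def __init__(self, max_depth=4, extend_threshold=3):
        self.root = Node()
        self.max_depth = max_depth
        self.extend_threshold = extend_threshold
        self.history = []

    def _walk(self, create=False):
        """Yield the stored context nodes that match the recent history,
        shortest (empty context) first, deepest last."""
        node = self.root
        yield node
        for sym in reversed(self.history[-self.max_depth:]):
            child = node.children.get(sym)
            if child is None:
                # Only remember a longer context once the shorter one is common.
                if create and node.count >= self.extend_threshold:
                    child = node.children[sym] = Node()
                else:
                    return
            node = child
            yield node

    def update(self, symbol):
        """Record that `symbol` followed the current history."""
        for node in self._walk(create=True):
            node.count += 1
            node.next_counts[symbol] += 1
        self.history.append(symbol)

    def predict(self):
        """Most frequent next symbol under the deepest matching context."""
        best = None
        for node in self._walk():
            if node.next_counts:
                best = max(node.next_counts, key=node.next_counts.get)
        return best


if __name__ == "__main__":
    p = PruningPredictor(max_depth=3, extend_threshold=2)
    for ch in "abcabcabcabcabcabc":
        p.update(ch)
    print(p.predict())  # 'a': every stored context ending in 'c' was followed by 'a'

Unlike TDAG, this sketch never discards a context it has already stored and offers only two crude knobs rather than the parameterized storage/speed tradeoff described in the paper; see the paper for the actual algorithm and its analysis.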

@Article{laird94a,
  author =	 {Philip Laird and Ronald Saul},
  title =	 {Discrete Sequence Prediction and Its Applications},
  googleid =	 {Zm8fkJzXLaoJ:scholar.google.com/},
  journal =	 {Machine Learning},
  volume =	 15,
  number =	 1,
  year =	 1994,
  issn =	 {0885-6125},
  pages =	 {43--68},
  abstract =	 {Learning from experience to predict sequences of
                  discrete symbols is a fundamental problem in machine
                  learning with many applications. We present a simple
                  and practical algorithm (TDAG) for discrete sequence
                  prediction. Based on a text-compression method, the
                  TDAG algorithm limits the growth of storage by
                  retaining the most likely prediction contexts and
                  discarding (forgetting) less likely ones. The
                  storage/speed tradeoffs are parameterized so that
                  the algorithm can be used in a variety of
                  applications. Our experiments verify its performance
                  on data compression tasks and show how it applies to
                  two problems: dynamically optimizing Prolog programs
                  for good average-case behavior and maintaining a
                  cache for a database on mass storage.},
  keywords =     {learning ai},
  doi =		 {10.1023/A:1022661103485},
  publisher =	 {Kluwer Academic Publishers},
  url =		 {http://jmvidal.cse.sc.edu/library/laird94a.pdf},
  cluster = 	 {12262694427832577894}
}