@inproceedings{c8d54b7589834ce78f5b1642a4ee4212,
  author    = {Vyugin, Michael V. and V'yugin, Vladimir V.},
  title     = {Predictive Complexity and Information},
  booktitle = {Computational Learning Theory -- 15th Annual Conference on Computational Learning Theory, {COLT} 2002, Proceedings},
  editor    = {Kivinen, Jyrki and Sloan, {Robert H.}},
  series    = {Lecture Notes in Artificial Intelligence},
  volume    = {2375},
  publisher = {Springer},
  year      = {2002},
  pages     = {90--105},
  language  = {English},
  abstract  = {A new notion of predictive complexity and corresponding amount of information are considered. Predictive complexity is a generalization of Kolmogorov complexity which bounds the ability of any algorithm to predict elements of a sequence of outcomes. We consider predictive complexity for a wide class of bounded loss functions which are generalizations of square-loss function. Relations between unconditional KG(x) and conditional KG(x|y) predictive complexities are studied. We define an algorithm which has some ``expanding property''. It transforms with positive probability sequences of given predictive complexity into sequences of essentially bigger predictive complexity. A concept of amount of predictive information IG(y : x) is studied. We show that this information is non-commutative in a very strong sense and present asymptotic relations between values IG(y : x), IG(x : y), KG(x) and KG(y).},
  note      = {15th Annual Conference on Computational Learning Theory, COLT 2002 ; Conference date: 08-07-2002 Through 10-07-2002},
}