@inproceedings{jacob_snyder_2015,
  title={Learning Rule for Associative Memory in Recurrent Neural Networks},
  DOI={10.1109/ijcnn.2015.7280532},
  abstractNote={We present a new learning rule for intralayer connections in neural networks. The rule is based on Hebbian learning principles and is derived from information-theoretic considerations. A simple network trained using the rule is shown to have associative-memory-like properties. The network acts by building connections between correlated data points, under constraints.},
  booktitle={2015 International Joint Conference on Neural Networks (IJCNN)},
  author={Jacob, T. and Snyder, W.},
  year={2015}
}

@article{baron_jacob_2012,
  title={Variable Length Compression of Codeword Indices for Lossy Compression},
  volume={19},
  ISSN={1070-9908},
  DOI={10.1109/lsp.2012.2223462},
  abstractNote={Many problems in information theory feature an index into a random codebook being encoded with a fixed-length scheme. We propose to purposefully select the index in a manner that skews its distribution, thus making variable-length entropy coding of the index more attractive. In an application to lossy compression of a Bernoulli source, we illustrate that variable-length coding yields a rate reduction over fixed-length coding and allows a requisite rate-distortion performance level to be reached using a smaller codebook.},
  number={12},
  journal={IEEE Signal Processing Letters},
  author={Baron, Dror and Jacob, Theju},
  year={2012},
  month={Dec},
  pages={849–852}
}