@article{Lerke_Heßling_2022,
  author   = {Lerke, Arthur and Heßling, Hermann},
  title    = {On Strange Memory Effects in Long-term Forecasts using Regularised Recurrent Neural Networks},
  journal  = {International Journal of Computing},
  volume   = {21},
  number   = {1},
  pages    = {19--24},
  year     = {2022},
  month    = mar,
  doi      = {10.47839/ijc.21.1.2513},
  url      = {https://computingonline.net/computing/article/view/2513},
  abstract = {Recurrent neural networks (RNN) based on a long short-term memory (LSTM) are used for predicting the future out of a given set of time series data. Usually, only one future time step is predicted. In this article, the capability of LSTM networks for a wide look into the future is explored. The time series data are taken from the evolution of share prices from stock trading. As expected, the longer the view into the future the stronger the deviations between prediction and reality. However, strange memory effects are observed. They range from periodic predictions (with time periods of the order of one month) to predictions that are an exact copy of a long-term sequence from far previous data. The trigger mechanisms for recalling memory in LSTM networks seem to be rather independent of the behaviour of the time-series data within the last ``sliding window'' or ``batch''. Similar periodic predictions are also observed for GRU networks and if the trainable parameters are reduced drastically. A better understanding of the influence of regularisations details of RNNs may be helpful for improving their predictive power.},
}