@inproceedings{906cff4b9fb049aead665fd081699608,
title = "Learning nonadjacent dependencies in thought, language, and action: Not so hard after all…",
abstract = "Learning to represent hierarchical structure and its nonadjacent dependencies (NDs) is thought to be difficult. I present three simulations of ND learning using a simple recurrent network (SRN). In Simulation 1, I show that the model can learn distance-invariant representations of nonadjacent dependencies. In Simulation 2, I show that purely localist SRNs can learn abstract rule-like relationships. In Simulation 3, I show that SRNs exhibit facilitated learning when there are correlated perceptual and semantic cues to the structure (just as people do). Together, these simulations show that (contrary to previous claims) SRNs are capable of learning abstract and rule-like nonadjacent dependencies, and show critical perceptual- and semantics-syntax interactions during learning. The studies refute the claim that neural networks and other associative models are fundamentally incapable of representing hierarchical structure, and show how recurrent networks can provide insight about principles underlying human learning and the representation of hierarchical structure.",
keywords = "hierarchical structure, nonadjacent dependencies, recurrent connectionist networks",
author = "Willits, {Jon A.}",
note = "Publisher Copyright: {\textcopyright} CogSci 2013.All rights reserved.; 35th Annual Meeting of the Cognitive Science Society - Cooperative Minds: Social Interaction and Group Dynamics, CogSci 2013 ; Conference date: 31-07-2013 Through 03-08-2013",
year = "2013",
language = "English (US)",
series = "Cooperative Minds: Social Interaction and Group Dynamics - Proceedings of the 35th Annual Meeting of the Cognitive Science Society, CogSci 2013",
publisher = "The Cognitive Science Society",
pages = "1605--1610",
editor = "Markus Knauff and Natalie Sebanz and Michael Pauen and Ipke Wachsmuth",
booktitle = "Cooperative Minds",
}