@inproceedings{yang_shen_lim_2021,
  author       = {Yang, Shuai and Shen, Xipeng and Lim, Seung-Hwan},
  title        = {Revisit the Scalability of Deep Auto-Regressive Models for Graph Generation},
  booktitle    = {2021 International Joint Conference on Neural Networks ({IJCNN})},
  year         = {2021},
  publisher    = {IEEE},
  issn         = {2161-4393},
  doi          = {10.1109/IJCNN52387.2021.9534206},
  abstractNote = {As a new promising approach to graph generations, deep auto-regressive graph generation has drawn increasing attention. It however has been commonly deemed as hard to scale up to work with large graphs. In existing studies, it is perceived that the consideration of the full non-local graph dependences is indispensable for this approach to work, which entails the needs for keeping the entire graph's info in memory and hence the perceived ``inherent'' scalability limitation of the approach. This paper revisits the common perception. It proposes three ways to relax the dependences and conducts a series of empirical measurements. It concludes that the perceived ``inherent'' scalability limitation is a misperception; with the right design and implementation, deep auto-regressive graph generation can be applied to graphs much larger than the device memory. The rectified perception removes a fundamental barrier for this approach to meet practical needs.},
}