@article{meyer_2018, title={Rank My Update, Please}, volume={125}, ISSN={1930-0972}, DOI={10.1080/00029890.2017.1389199}, abstractNote={Updating a given m × n matrix A by a rank-one matrix B = cd^T, where c and d are appropriately sized column vectors, is a common practice throughout all applied areas of mathematics, science, and engineering. Because rank is often tied to the number of degrees of freedom or the level of independence in underlying models or data, it can be imperative to know exactly how the update term affects rank. While it is well known that a rank-one update can only increase or decrease rank by at most one, there is not a widely known formula for exactly how this occurs. This note presents an expression in simply stated terms for the exact rank of a rank-one updated matrix.}, number={1}, journal={AMERICAN MATHEMATICAL MONTHLY}, author={Meyer, Carl D.}, year={2018}, pages={61–64} }
@article{meyer_2015, title={Continuity of the Perron root}, volume={63}, ISSN={1563-5139}, DOI={10.1080/03081087.2014.934233}, abstractNote={That the Perron root of a square nonnegative matrix A varies continuously with the entries in A is a corollary of theorems regarding continuity of eigenvalues or roots of polynomial equations, the proofs of which necessarily involve complex numbers. But since continuity of the Perron root is a question that is entirely in the field of real numbers, it seems reasonable that there should exist a development involving only real analysis. This article presents a simple and completely self-contained development that depends only on real numbers and first principles.}, number={7}, journal={LINEAR & MULTILINEAR ALGEBRA}, author={Meyer, Carl D.}, year={2015}, month={Jul}, pages={1332–1336} }
@article{meyer_wessell_2012, title={STOCHASTIC DATA CLUSTERING}, volume={33}, ISSN={1095-7162}, DOI={10.1137/100804395}, abstractNote={In 1961 Herbert Simon and Albert Ando [Econometrica, 29 (1961), pp. 111--138] published the theory behind the long-term behavior of a dynamical system that can be described by a nearly uncoupled matrix. Over the past fifty years this theory has been used in a variety of contexts, including queueing theory, brain organization, and ecology. In all of these applications, the structure of the system is known and the point of interest is the various stages the system passes through on its way to some long-term equilibrium. This paper looks at this problem from the other direction. That is, we develop a technique for using the evolution of the system to tell us about its initial structure, and then use this technique to develop an algorithm that takes the varied solutions from multiple data clustering algorithms to arrive at a single data clustering solution.}, number={4}, journal={SIAM JOURNAL ON MATRIX ANALYSIS AND APPLICATIONS}, author={Meyer, Carl D. and Wessell, Charles D.}, year={2012}, pages={1214–1236} }
@article{langville_meyer_2012, title={Who's #1?}, volume={307}, ISSN={0036-8733}, DOI={10.1038/scientificamerican0712-21}, number={1}, journal={SCIENTIFIC AMERICAN}, author={Langville, Amy N. and Meyer, Carl D.}, year={2012}, month={Jul}, pages={21–21} }
@article{langville_meyer_2006a, title={A reordering for the PageRank problem}, volume={27}, ISSN={1095-7197}, DOI={10.1137/040607551}, abstractNote={We describe a reordering particularly suited to the PageRank problem, which reduces the computation of the PageRank vector to that of solving a much smaller system and then using forward substitution to get the full solution vector. We compare the theoretical rates of convergence of the original PageRank algorithm to that of the new reordered PageRank algorithm, showing that the new algorithm can do no worse than the original algorithm. We present results of an experimental comparison on five datasets, which demonstrate that the reordered PageRank algorithm can provide a speedup of as much as a factor of 6. We also note potential additional benefits that result from the proposed reordering.}, number={6}, journal={SIAM JOURNAL ON SCIENTIFIC COMPUTING}, author={Langville, AN and Meyer, CD}, year={2006}, pages={2112–2120} }
@book{langville_meyer_2006b, title={Google's PageRank and beyond: The science of search engine rankings}, ISBN={0691122024}, DOI={10.1515/9781400830329}, abstractNote={Why is Google so good at what it does? There are a variety of reasons, but the fundamental thing that distinguishes Google and has put them so far ahead of other search engines is their patented PageRank concept. PageRank has revolutionized Web search to the extent that it has been charged in Federal Court with driving the direction of commerce on the Internet. Many mathematicians are therefore surprised when they learn that a technology of such consequence is predicated on the same mathematics that is available to undergraduate students. This talk will survey some of these concepts.}, publisher={Princeton, NJ: Princeton University Press}, author={Langville, A. N. and Meyer, C. D.}, year={2006} }
@article{langville_meyer_2006c, title={Updating Markov chains with an eye on Google's PageRank}, volume={27}, ISSN={1095-7162}, DOI={10.1137/040619028}, abstractNote={An iterative algorithm based on aggregation/disaggregation principles is presented for updating the stationary distribution of a finite homogeneous irreducible Markov chain. The focus is on large-scale problems of the kind that are characterized by Google's PageRank application, but the algorithm is shown to work well in general contexts. The algorithm is flexible in that it allows for changes to the transition probabilities as well as for the creation or deletion of states. In addition to establishing the rate of convergence, it is proven that the algorithm is globally convergent. Results of numerical experiments are presented.}, number={4}, journal={SIAM JOURNAL ON MATRIX ANALYSIS AND APPLICATIONS}, author={Langville, AN and Meyer, CD}, year={2006}, pages={968–987} }
@article{langville_meyer_2005, title={A survey of eigenvector methods for Web information retrieval}, volume={47}, ISSN={1095-7200}, DOI={10.1137/S0036144503424786}, abstractNote={Web information retrieval is significantly more challenging than traditional well-controlled, small document collection information retrieval. One main difference between traditional information retrieval and Web information retrieval is the Web's hyperlink structure. This structure has been exploited by several of today's leading Web search engines, particularly Google and Teoma.
In this survey paper, we focus on Web information retrieval methods that use eigenvector computations, presenting the three popular methods of HITS, PageRank, and SALSA.}, number={1}, journal={SIAM REVIEW}, author={Langville, AN and Meyer, CD}, year={2005}, month={Mar}, pages={135–161} }
@article{langville_meyer_2004, title={Deeper Inside PageRank}, volume={1}, ISSN={1542-7951}, url={http://dx.doi.org/10.1080/15427951.2004.10129091}, DOI={10.1080/15427951.2004.10129091}, abstractNote={This paper serves as a companion or extension to the "Inside PageRank" paper by Bianchini et al. [Bianchini et al. 03]. It is a comprehensive survey of all issues associated with PageRank, covering the basic PageRank model, available and recommended solution methods, storage issues, existence, uniqueness, and convergence properties, possible alterations to the basic model, suggested alternatives to the traditional solution methods, sensitivity and conditioning, and finally the updating problem. We introduce a few new results, provide an extensive reference list, and speculate about exciting areas of future research.}, number={3}, journal={Internet Mathematics}, publisher={Internet Mathematics}, author={Langville, Amy and Meyer, Carl}, year={2004}, month={Jan}, pages={335–380} }
@article{meyer_2004, title={Injective properties of complex matrices}, volume={111}, DOI={10.2307/4145059}, number={8}, journal={American Mathematical Monthly}, author={Meyer, C.}, year={2004}, pages={728} }
@article{chandler_meyer_rose_2003, title={Eudoxus meets Cayley}, volume={110}, ISSN={0002-9890}, DOI={10.2307/3647962}, number={10}, journal={AMERICAN MATHEMATICAL MONTHLY}, author={Chandler, RE and Meyer, CD and Rose, NJ}, year={2003}, month={Dec}, pages={912–927} }
@article{cho_meyer_2001, title={Comparison of perturbation bounds for the stationary distribution of a Markov chain}, volume={335}, ISSN={0024-3795}, DOI={10.1016/S0024-3795(01)00320-2}, abstractNote={The purpose of this paper is to review and compare the existing perturbation bounds for the stationary distribution of a finite, irreducible, homogeneous Markov chain.}, journal={LINEAR ALGEBRA AND ITS APPLICATIONS}, author={Cho, GE and Meyer, CD}, year={2001}, month={Sep}, pages={137–150} }
@article{cho_meyer_2000, title={Markov chain sensitivity measured by mean first passage times}, volume={316}, ISSN={1873-1856}, DOI={10.1016/S0024-3795(99)00263-3}, abstractNote={The purpose of this article is to present results concerning the sensitivity of the stationary probabilities for an n-state, time-homogeneous, irreducible Markov chain in terms of the mean first passage times in the chain.}, number={1-3}, journal={LINEAR ALGEBRA AND ITS APPLICATIONS}, author={Cho, GE and Meyer, CD}, year={2000}, month={Sep}, pages={21–28} }
@book{meyer_2000, title={Matrix analysis and applied linear algebra}, ISBN={0898714540}, publisher={Philadelphia: Society for Industrial and Applied Mathematics}, author={Meyer, C. D.}, year={2000} }
@article{hartfiel_meyer_1998, title={On the structure of stochastic matrices with a subdominant eigenvalue near 1}, volume={272}, ISSN={0024-3795}, DOI={10.1016/s0024-3795(97)00333-9}, abstractNote={An n × n irreducible stochastic matrix P can possess a subdominant eigenvalue, say λ2(P), near λ = 1. In this article we clarify the relationship between the nearness of these eigenvalues and the near-uncoupling (some authors say "nearly completely decomposable") of P.
We prove that for fixed n, if λ2(P) is sufficiently close to λ = 1, then P is nearly uncoupled. We then provide examples which show that λ2(P) must, in general, be remarkably close to 1 before such uncoupling occurs.}, journal={LINEAR ALGEBRA AND ITS APPLICATIONS}, author={Hartfiel, DJ and Meyer, CD}, year={1998}, month={Mar}, pages={193–203} }
@article{ipsen_meyer_1998, title={The idea behind Krylov methods}, volume={105}, ISSN={1930-0972}, DOI={10.2307/2589281}, number={10}, journal={AMERICAN MATHEMATICAL MONTHLY}, author={Ipsen, ICF and Meyer, CD}, year={1998}, month={Dec}, pages={889–899} }