@article{davis_papp_2024, title={Rational dual certificates for weighted sums-of-squares polynomials with boundable bit size}, volume={121}, ISSN={1095-855X}, url={https://doi.org/10.1016/j.jsc.2023.102254}, DOI={10.1016/j.jsc.2023.102254}, abstractNote={In Davis and Papp (2022), the authors introduced the concept of dual certificates of (weighted) sum-of-squares polynomials, which are vectors from the dual cone of weighted sums of squares (WSOS) polynomials that can be interpreted as nonnegativity certificates. This initial theoretical work showed that for every polynomial in the interior of a WSOS cone, there exists a rational dual certificate proving that the polynomial is WSOS. In this article, we analyze the complexity of rational dual certificates of WSOS polynomials by bounding the bit sizes of integer dual certificates as a function of parameters such as the degree and the number of variables of the polynomials, or their distance from the boundary of the cone. After providing a general bound, we explore several special cases, such as univariate polynomials nonnegative over the real line or a bounded interval, represented in different commonly used bases. We also provide an algorithm which runs in rational arithmetic and computes a rational certificate with boundable bit size for a WSOS lower bound of the input polynomial.}, journal={JOURNAL OF SYMBOLIC COMPUTATION}, author={Davis, Maria M. and Papp, David}, year={2024} } @article{torelli_papp_unkelbach_2023, title={Spatiotemporal fractionation schemes for stereotactic radiosurgery of multiple brain metastases}, number={6}, ISSN={2473-4209}, DOI={10.1002/mp.16457}, abstractNote={Background: Stereotactic radiosurgery (SRS) is an established treatment for patients with brain metastases (BMs). However, damage to the healthy brain may limit the tumor dose for patients with multiple lesions. Purpose: In this study, we investigate the potential of spatiotemporal fractionation schemes to reduce the biological dose received by the healthy brain in SRS of multiple BMs, and also demonstrate a novel concept of spatiotemporal fractionation for polymetastatic cancer patients that faces fewer hurdles for clinical implementation. Methods: Spatiotemporal fractionation (STF) schemes aim at partial hypofractionation in the metastases along with more uniform fractionation in the healthy brain. This is achieved by delivering distinct dose distributions in different fractions, which are designed based on their cumulative biologically effective dose (BED) such that each fraction contributes with high doses to complementary parts of the target volume, while similar dose baths are delivered to the normal tissue. For patients with multiple brain metastases, a novel constrained approach to spatiotemporal fractionation (cSTF) is proposed, which is more robust against setup and biological uncertainties. The approach aims at irradiating entire metastases with possibly different doses, but spatially similar dose distributions in every fraction, where the optimal dose contribution of every fraction to each metastasis is determined using a new planning objective added to the BED-based treatment plan optimization problem. The benefits of spatiotemporal fractionation schemes are evaluated for three patients, each with >25 BMs. Results: For the same tumor BED10 and the same brain volume exposed to high doses in all plans, the mean brain BED2 can be reduced compared to uniformly fractionated plans by 9%–12% with the cSTF plans and by 13%–19% with the STF plans.
In contrast to the STF plans, the cSTF plans avoid partial irradiation of the individual metastases and are less sensitive to misalignments of the fractional dose distributions when setup errors occur. Conclusions: Spatiotemporal fractionation schemes represent an approach to lower the biological dose to the healthy brain in SRS-based treatments of multiple BMs. Although cSTF cannot achieve the full BED reduction of STF, it improves on uniform fractionation and is more robust against both setup errors and biological uncertainties related to partial tumor irradiation.}, journal={MEDICAL PHYSICS}, author={Torelli, Nathan and Papp, David and Unkelbach, Jan}, year={2023}, month={Jun} } @article{papp_regos_domokos_bozoki_2023, title={The smallest mono-unstable convex polyhedron with point masses has 8 faces and 11 vertices}, volume={310}, ISSN={1872-6860}, url={https://doi.org/10.1016/j.ejor.2023.04.028}, DOI={10.1016/j.ejor.2023.04.028}, abstractNote={In the study of monostatic polyhedra, initiated by John H. Conway in 1966, the main question is to construct such an object with the minimal number of faces and vertices. By distinguishing between various material distributions and stability types, this expands into a small family of related questions. While many upper and lower bounds on the necessary numbers of faces and vertices have been established, none of these questions has so far been resolved. Adapting an algorithm presented in Bozóki et al. (2022), here we offer the first complete answer to a question from this family: by using the toolbox of semidefinite optimization to efficiently generate the hundreds of thousands of infeasibility certificates, we provide the first-ever proof for the existence of a monostatic polyhedron with point masses, having the minimal number (V=11) of vertices (Theorem 3) and the minimal number (F=8) of faces. We also show that V=11 is the smallest number of vertices that a mono-unstable polyhedron can have in all dimensions greater than 1 (Corollary 6).}, number={2}, journal={EUROPEAN JOURNAL OF OPERATIONAL RESEARCH}, author={Papp, David and Regos, Krisztina and Domokos, Gabor and Bozoki, Sandor}, year={2023}, month={Oct}, pages={511–517} } @article{fabiano_torelli_papp_unkelbach_2022, title={A novel stochastic optimization method for handling misalignments of proton and photon doses in combined treatments}, volume={67}, ISSN={1361-6560}, DOI={10.1088/1361-6560/ac858f}, abstractNote={Objective. Combined proton–photon treatments, where most fractions are delivered with photons and only a few are delivered with protons, may represent a practical approach to optimally use limited proton resources. It has been shown that, when organs at risk (OARs) are located within or near the tumor, the optimal multi-modality treatment uses protons to hypofractionate parts of the target volume and photons to achieve near-uniform fractionation in dose-limiting healthy tissues, thus exploiting the fractionation effect. These plans may be sensitive to range and setup errors, especially misalignments between proton and photon doses. Thus, we developed a novel stochastic optimization method to directly incorporate these uncertainties into the biologically effective dose (BED)-based simultaneous optimization of proton and photon plans. Approach. The method considers the expected value $E_b$ and standard deviation $\sigma_b$ of the cumulative BED $b$ in every voxel of a structure.
For the target, a piecewise quadratic penalty function of the form $\left(b^{\min} - (E_b - 2\sigma_b)\right)_+^2$ is minimized, aiming for plans in which the expected BED minus two times the standard deviation exceeds the prescribed BED $b^{\min}$. Analogously, $\left((E_b + 2\sigma_b) - b^{\max}\right)_+^2$ is considered for OARs. Main results. Using a spinal metastasis case and a liver cancer patient, it is demonstrated that the novel stochastic optimization method yields robust combined treatment plans. Tumor coverage and a good sparing of the main OARs are maintained despite range and setup errors, and especially misalignments between proton and photon doses. This is achieved without explicitly considering all combinations of proton and photon error scenarios. Significance. Concerns about range and setup errors for safe clinical implementation of optimized proton–photon radiotherapy can be addressed through an appropriate stochastic planning method.}, number={18}, journal={PHYSICS IN MEDICINE AND BIOLOGY}, author={Fabiano, Silvia and Torelli, Nathan and Papp, David and Unkelbach, Jan}, year={2022}, month={Sep} } @article{papp_yildiz_2021, title={alfonso: Matlab Package for Nonsymmetric Conic Optimization}, volume={34}, ISSN={1526-5528}, url={https://doi.org/10.1287/ijoc.2021.1058}, DOI={10.1287/ijoc.2021.1058}, abstractNote={We present alfonso, an open-source Matlab package for solving conic optimization problems over nonsymmetric convex cones. The implementation is based on the authors’ corrected analysis of a method of Skajaa and Ye. It enables optimization over any convex cone as long as a logarithmically homogeneous self-concordant barrier is available for the cone or its dual. This includes many nonsymmetric cones, for example, hyperbolicity cones and their duals (such as sum-of-squares cones), semidefinite and second-order cone representable cones, power cones, and the exponential cone. Besides enabling the solution of problems that cannot be cast as optimization problems over a symmetric cone, algorithms for nonsymmetric conic optimization also offer performance advantages for problems whose symmetric cone programming representation requires a large number of auxiliary variables or has a special structure that can be exploited in the barrier computation. The worst-case iteration complexity of alfonso is the best known for nonsymmetric cone optimization: $O(\sqrt{\nu}\log(1/\varepsilon))$ iterations to reach an ε-optimal solution, where ν is the barrier parameter of the barrier function used in the optimization. Alfonso can be interfaced with a Matlab function (supplied by the user) that computes the Hessian of a barrier function for the cone. A simplified interface is also available to optimize over the direct product of cones for which a barrier function has already been built into the software. This interface can be easily extended to include new cones. Both interfaces are illustrated by solving linear programs. The oracle interface and the efficiency of alfonso are also demonstrated using an optimal design of experiments problem in which the tailored barrier computation greatly decreases the solution time compared with using state-of-the-art, off-the-shelf conic optimization software. Summary of Contribution: The paper describes an open-source Matlab package for optimization over nonsymmetric cones.
A particularly important feature of this software is that, unlike other conic optimization software, it enables optimization over any convex cone as long as a suitable barrier function is available for the cone or its dual, not limiting the user to a small number of specific cones. Nonsymmetric cones for which such barriers are already known include, for example, hyperbolicity cones and their duals (such as sum-of-squares cones), semidefinite and second-order cone representable cones, power cones, and the exponential cone. Thus, the scope of this software is far larger than most current conic optimization software. This does not come at the price of efficiency, as the worst-case iteration complexity of our algorithm matches the iteration complexity of the most successful interior-point methods for symmetric cones. Besides enabling the solution of problems that cannot be cast as optimization problems over a symmetric cone, our software can also offer performance advantages for problems whose symmetric cone programming representation requires a large number of auxiliary variables or has a special structure that can be exploited in the barrier computation. This is also demonstrated in this paper via an example in which our code significantly outperforms Mosek 9 and SCS 2.}, number={1}, journal={INFORMS JOURNAL ON COMPUTING}, publisher={Institute for Operations Research and the Management Sciences (INFORMS)}, author={Papp, David and Yildiz, Sercan}, year={2021}, month={Sep} } @article{davis_papp_2022, title={Dual certificates and efficient rational sum-of-squares decompositions for polynomial optimization over compact sets}, volume={32}, ISSN={1095-7189}, url={https://doi.org/10.1137/21M1422574}, DOI={10.1137/21M1422574}, abstractNote={We study the problem of computing weighted sum-of-squares (WSOS) certificates for positive polynomials over a compact semialgebraic set. Building on the theory of interior-point methods for convex optimization, we introduce the concept of dual cone certificates, which allows us to interpret vectors from the dual of the sum-of-squares cone as rigorous nonnegativity certificates of a WSOS polynomial. Whereas conventional WSOS certificates are alternative representations of the polynomials they certify, dual certificates are distinct from the certified polynomials; moreover, each dual certificate certifies a full-dimensional convex cone of WSOS polynomials. As a result, rational WSOS certificates can be constructed from numerically computed dual certificates at little additional cost, without any rounding or projection steps applied to the numerical certificates. As an additional algorithmic application, we present an almost entirely numerical hybrid algorithm for computing the optimal WSOS lower bound of a given polynomial along with a rational dual certificate, with a polynomial-time computational cost per iteration and linear rate of convergence.}, number={4}, journal={SIAM JOURNAL ON OPTIMIZATION}, author={Davis, Maria M. and Papp, David}, year={2022}, pages={2461–2492} } @article{papp_2023, title={Duality of sum of nonnegative circuit polynomials and optimal SONC bounds}, volume={114}, ISSN={1095-855X}, DOI={10.1016/j.jsc.2022.04.015}, abstractNote={Circuit polynomials are polynomials with properties that make it easy to compute sharp and certifiable global lower bounds for them. Consequently, one may use them to find certifiable lower bounds for any polynomial by writing it as a sum of circuit polynomials with known lower bounds.
Recent work has shown that sums of nonnegative circuit polynomials (or SONC polynomials for short) can be used to compute global lower bounds (called SONC bounds) for polynomials in this manner very efficiently both in theory and in practice, if the polynomial is bounded from below and its support satisfies a certain nondegeneracy assumption. The quality of the SONC bound depends on the circuits used in the computation, but finding the set of circuits that yields the best attainable SONC bound among the astronomical number of candidate circuits is a non-trivial task that has not been addressed so far. We propose an efficient method to compute the optimal SONC lower bound by iteratively identifying the optimal circuits to use in the SONC bounding process. The method is derived from a new proof of the result that every SONC polynomial decomposes into SONC polynomials on the same support. This proof is based on convex programming duality and motivates a column generation approach that is particularly attractive for sparse polynomials of high degree and with many unknowns. The method is implemented and tested on a large set of sparse polynomial optimization problems with up to 40 unknowns, of degree up to 60, and up to 3000 monomials in the support. The results indicate that the method is efficient in practice and requires only a small number of iterations to identify the optimal circuits.}, journal={JOURNAL OF SYMBOLIC COMPUTATION}, author={Papp, David}, year={2023}, pages={246–266} } @article{papp_unkelbach_2022, title={Technical note: Optimal allocation of limited proton therapy resources using model-based patient selection}, number={6}, ISSN={2473-4209}, DOI={10.1002/mp.15812}, abstractNote={Purpose: We consider the following scenario: A radiotherapy clinic has a limited number of proton therapy slots available each day to treat cancer patients of a given tumor site. The clinic's goal is to minimize the expected number of complications in the cohort of all patients of that tumor site treated at the clinic, and thereby maximize the benefit of its limited proton resources. Methods: To address this problem, we extend the normal tissue complication probability (NTCP) model-based approach to proton therapy patient selection to the situation of limited resources at a given institution. We assume that, on each day, a newly diagnosed patient is scheduled for treatment at the clinic with some probability and with some benefit from protons over photons, which is drawn from a probability distribution. When a new patient is scheduled for treatment, a decision for protons or photons must be made, and a patient may wait only for a limited amount of time for a proton slot becoming available. The goal is to determine the thresholds for selecting a patient for proton therapy, which optimally balance the competing goals of making use of all available slots while not blocking slots with patients with low benefit. This problem can be formulated as a Markov decision process (MDP) and the optimal thresholds can be determined via a value-policy iteration method. Results: The optimal thresholds depend on the number of available proton slots, the average number of patients under treatment, and the distribution of values. In addition, the optimal thresholds depend on the current utilization of the facility.
For example, if one proton slot is available and a second frees up shortly, the optimal threshold is lower compared to a situation where all but one slot remain blocked for longer. Conclusions: MDP methodology can be used to extend current NTCP model-based patient selection methods to the situation in which, on any given day, the number of proton slots is limited. The optimal threshold then depends on the current utilization of the proton facility. Although the optimal policy yields only a small nominal benefit over a constant threshold, it is more robust against variations in patient load.}, journal={MEDICAL PHYSICS}, author={Papp, David and Unkelbach, Jan}, year={2022}, month={Jun} } @article{papp_yıldız_2021, title={Code and Data Repository for alfonso: Matlab Package for Nonsymmetric Conic Optimization}, url={https://doi.org/10.1287/ijoc.2021.1058.cd}, DOI={10.1287/ijoc.2021.1058.cd}, abstractNote={The software in this repository is a snapshot (last revised on 2020/07/20) of the software that was used in the research reported in the paper alfonso: Matlab package for nonsymmetric conic optimization by Dávid Papp and Sercan Yıldız.}, journal={INFORMS Journal on Computing}, author={Papp, Dávid and Yıldız, Sercan}, year={2021}, month={Sep} } @article{loizeau_fabiano_papp_stuetzer_jakobi_bandurska-luque_troost_richter_unkelbach_2021, title={Optimal Allocation of Proton Therapy Slots in Combined Proton-Photon Radiation Therapy}, volume={111}, ISSN={1879-355X}, DOI={10.1016/j.ijrobp.2021.03.054}, abstractNote={Purpose: Proton therapy is a limited resource that is not available to all patients who may benefit from it. We investigated combined proton-photon treatments, in which some fractions are delivered with protons and the remaining fractions with photons, as an approach to maximize the benefit of limited proton therapy resources at a population level. Methods and Materials: To quantify differences in normal-tissue complication probability (NTCP) between protons and photons, we considered a cohort of 45 patients with head and neck cancer for whom intensity modulated radiation therapy and intensity modulated proton therapy plans were previously created, in combination with NTCP models for xerostomia and dysphagia considered in the Netherlands for proton patient selection. Assuming limited availability of proton slots, we developed methods to optimally assign proton fractions in combined proton-photon treatments to minimize the average NTCP on a population level. The combined treatments were compared with patient selection strategies in which patients are assigned to single-modality proton or photon treatments. Results: There is a benefit of combined proton-photon treatments compared with patient selection, owing to the nonlinearity of NTCP functions; that is, the initial proton fractions are the most beneficial, whereas additional proton fractions have a decreasing benefit when a flatter part of the NTCP curve is reached. This effect was small for the patient cohort and NTCP models considered, but it may be larger if dose-response relationships are better known. In addition, when proton slots are limited, patient selection methods face a trade-off between leaving slots unused and blocking slots for future patients who may have a larger benefit. Combined proton-photon treatments with flexible proton slot assignment provide a method to make optimal use of all available resources. Conclusions: Combined proton-photon treatments allow for better use of limited proton therapy resources.
The benefit over patient selection schemes depends on the NTCP models and the dose differences between protons and photons.}, number={1}, journal={INTERNATIONAL JOURNAL OF RADIATION ONCOLOGY BIOLOGY PHYSICS}, author={Loizeau, Nicolas and Fabiano, Silvia and Papp, David and Stuetzer, Kristin and Jakobi, Annika and Bandurska-Luque, Anna and Troost, Esther G. C. and Richter, Christian and Unkelbach, Jan}, year={2021}, month={Sep}, pages={196–207} } @article{gaddy_unkelbach_papp_2019, title={Robust spatiotemporal fractionation schemes in the presence of patient setup uncertainty}, volume={46}, ISSN={2473-4209}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-85067341334&partnerID=MN8TOARS}, DOI={10.1002/mp.13593}, abstractNote={Purpose: Spatiotemporal fractionation schemes for photon radiotherapy have recently arisen as a promising technique for healthy tissue sparing. Because spatiotemporally fractionated treatments have a characteristic pattern of delivering high doses to different parts of the tumor in each fraction, uncertainty in patient positioning is an even more pressing concern than in conventional uniform fractionation.
Until now, such concerns about patient setup uncertainty have not been addressed in the context of spatiotemporal fractionation. Methods: A stochastic optimization model is used to incorporate patient setup uncertainty to optimize spatiotemporally fractionated plans using expected penalties for deviations from prescription values. First, a robust uniform reference plan is optimized with a stochastic optimization model. Then, a spatiotemporal plan is optimized with a constrained stochastic optimization model that minimizes a primary clinical objective and constrains the spatiotemporal plan to be at least as good as the uniform reference plan with respect to all other objectives. A discrete probability distribution is defined to characterize the random setup error occurring in each fraction. For the optimization of uniform plans, the expected penalties are computed exactly by exploiting the symmetry of the fractions, and for the spatiotemporal plans, quasi-Monte Carlo sampling is used to approximate the expectation. Results: Using five clinical liver cases, it is demonstrated that spatiotemporally fractionated treatment plans maintain the same robust tumor coverage as a stochastic uniform reference plan and exhibit a reduction in the expected mean BED of the uninvolved liver. This is observed for a spectrum of probability distributions of random setup errors with shifts in the patient position of up to 5 mm from the nominal position. For probability distributions with small variance in the patient position, the spatiotemporal plans exhibit an 8–30% reduction in expected mean BED in the healthy liver tissue for shifts up to 2.5 mm and reductions of 5–25% for shifts up to 5 mm. Conclusions: In the presence of patient setup uncertainty, spatiotemporally fractionated treatment plans exhibit the same robust tumor coverage as their uniformly fractionated counterparts and still retain the benefit in sparing healthy tissues.}, number={7}, journal={MEDICAL PHYSICS}, author={Gaddy, Melissa R. and Unkelbach, Jan and Papp, David}, year={2019}, month={Jul}, pages={2988–3000} } @article{papp_yildiz_2019, title={Sum-of-squares optimization without semidefinite programming}, volume={29}, ISSN={1095-7189}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-85065410798&partnerID=MN8TOARS}, DOI={10.1137/17M1160124}, abstractNote={We propose a homogeneous primal-dual interior-point method to solve sum-of-squares optimization problems by combining non-symmetric conic optimization techniques and polynomial interpolation. The approach optimizes directly over the sum-of-squares cone and its dual, circumventing the semidefinite programming (SDP) reformulation which requires a large number of auxiliary variables. As a result, it has substantially lower theoretical time and space complexity than the conventional SDP-based approach. Although our approach circumvents the semidefinite programming reformulation, an optimal solution to the semidefinite program can be recovered with little additional effort.
Computational results confirm that for problems involving high-degree polynomials, the proposed method is several orders of magnitude faster than semidefinite programming.}, number={1}, journal={SIAM JOURNAL ON OPTIMIZATION}, author={Papp, David and Yildiz, Sercan}, year={2019}, pages={822–851} } @article{semi-infinite_programming_2019, title={Semi-infinite programming}, url={http://dx.doi.org/10.1002/9781118445112.stat02391.pub2}, DOI={10.1002/9781118445112.stat02391.pub2}, abstractNote={Semi-infinite programs are optimization problems with either infinitely many constraints or infinitely many variables (but not both). This article reviews the fundamental duality theory and optimality conditions of such optimization problems and the most popular algorithmic strategies for their numerical solution. Easily implementable general methods are presented first, which require the solution of a sequence of conventional (finite) nonlinear optimization problems. Conic optimization problems, for which more efficient specialized methods are available, are also discussed.}, journal={Wiley StatsRef: Statistics Reference Online}, year={2019}, month={Aug} } @article{gaddy_yildiz_unkelbach_papp_2018, title={Optimization of spatiotemporally fractionated radiotherapy treatments with bounds on the achievable benefit}, volume={63}, ISSN={1361-6560}, url={http://dx.doi.org/10.1088/1361-6560/aa9975}, DOI={10.1088/1361-6560/aa9975}, abstractNote={Spatiotemporal fractionation schemes, that is, treatments delivering different dose distributions in different fractions, can potentially lower treatment side effects without compromising tumor control. This can be achieved by hypofractionating parts of the tumor while delivering approximately uniformly fractionated doses to the surrounding tissue. Plan optimization for such treatments is based on biologically effective dose (BED); however, this leads to computationally challenging nonconvex optimization problems. Optimization methods that are in current use yield only locally optimal solutions, and it has hitherto been unclear whether these plans are close to the global optimum. We present an optimization framework to compute rigorous bounds on the maximum achievable normal tissue BED reduction for spatiotemporal plans. The approach is demonstrated on liver tumors, where the primary goal is to reduce mean liver BED without compromising any other treatment objective. The BED-based treatment plan optimization problems are formulated as quadratically constrained quadratic programming (QCQP) problems. First, a conventional, uniformly fractionated reference plan is computed using convex optimization. Then, a second, nonconvex, QCQP model is solved to local optimality to compute a spatiotemporally fractionated plan that minimizes mean liver BED, subject to the constraints that the plan is no worse than the reference plan with respect to all other planning goals. Finally, we derive a convex relaxation of the second model in the form of a semidefinite programming problem, which provides a rigorous lower bound on the lowest achievable mean liver BED. The method is presented on five cases with distinct geometries. The computed spatiotemporal plans achieve 12–35% mean liver BED reduction over the optimal uniformly fractionated plans. This reduction corresponds to 79–97% of the gap between the mean liver BED of the uniform reference plans and our lower bounds on the lowest achievable mean liver BED.
The results indicate that spatiotemporal treatments can achieve substantial reductions in normal tissue dose and BED, and that local optimization techniques provide high-quality plans that are close to realizing the maximum potential normal tissue dose reduction.}, number={1}, journal={PHYSICS IN MEDICINE AND BIOLOGY}, author={Gaddy, Melissa R. and Yildiz, Sercan and Unkelbach, Jan and Papp, David}, year={2018}, month={Jan} } @article{unkelbach_papp_gaddy_andratschke_hong_guckenberger_2018, title={PO-0900: Spatiotemporal fractionation schemes for liver stereotactic body radiotherapy}, volume={127}, ISSN={0167-8140}, url={http://dx.doi.org/10.1016/S0167-8140(18)31210-6}, DOI={10.1016/S0167-8140(18)31210-6}, journal={Radiotherapy and Oncology}, publisher={Elsevier BV}, author={Unkelbach, J. and Papp, D. and Gaddy, M. and Andratschke, N. and Hong, T. and Guckenberger, M.}, year={2018}, month={Apr}, pages={S479–S480} } @book{tóth_nagy_papp_2018, title={Reaction kinetics: Exercises, programs and theorems: Mathematica for deterministic and stochastic kinetics}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-85061518011&partnerID=MN8TOARS}, DOI={10.1007/978-1-4939-8643-9}, author={Tóth, J. and Nagy, A.L. and Papp, D.}, year={2018}, pages={1–469} } @article{papp_2017, title={Semi-infinite programming using high-degree polynomial interpolants and semidefinite programming}, volume={27}, ISSN={1095-7189}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-85032876902&partnerID=MN8TOARS}, DOI={10.1137/15m1053578}, abstractNote={In a common formulation of semi-infinite programs, the infinite constraint set is a requirement that a function parametrized by the decision variables is nonnegative over an interval. If this function is sufficiently closely approximable by a polynomial or a rational function, then the semi-infinite program can be reformulated as an equivalent semidefinite program.
Solving this semidefinite program is challenging if the polynomials involved are of high degree, due to numerical difficulties and bad scaling arising both from the polynomial approximations and from the semidefinite programming constraints derived from the sum-of-squares representation of nonnegative polynomials. We combine rational function approximation techniques and polynomial programming to overcome these numerical difficulties, using sum-of-squares interpolants. Specifically, it is shown that the conditioning of the reformulations using sum-of-squares interpolants does not deteriorate with increasing degrees, and problems involving sum-of-squares interpolants of hundreds of degrees can be handled without difficulty. The proposed reformulations are sufficiently well scaled that they can be solved easily with every commonly used semidefinite programming solver, such as SeDuMi, SDPT3, and CSDP. Motivating applications include convex optimization problems with semi-infinite constraints and semidefinite conic inequalities, such as those arising in the optimal design of experiments. Numerical results align with the theoretical predictions; in the problems considered, available memory was the only factor limiting the degrees of polynomials, to approximately 1000.}, number={3}, journal={SIAM JOURNAL ON OPTIMIZATION}, author={Papp, David}, year={2017}, pages={1858–1879} } @article{unkelbach_papp_gaddy_andratschke_hong_guckenberger_2017, title={Spatiotemporal fractionation schemes for liver stereotactic body radiotherapy}, volume={125}, ISSN={0167-8140}, url={http://dx.doi.org/10.1016/j.radonc.2017.09.003}, DOI={10.1016/j.radonc.2017.09.003}, abstractNote={Background and purpose: Dose prescription in stereotactic body radiotherapy (SBRT) for liver tumors is often limited by the mean liver dose. We explore the concept of spatiotemporal fractionation as an approach to facilitate further dose escalation in liver SBRT. Materials and methods: Spatiotemporal fractionation schemes aim at partial hypofractionation in the tumor along with near-uniform fractionation in normal tissues. This is achieved by delivering distinct dose distributions in different fractions, which are designed such that each fraction delivers a high single fraction dose to complementary parts of the tumor while creating a similar dose bath in the surrounding noninvolved liver. Thereby, higher biologically effective doses (BED) can be delivered to the tumor without increasing the mean BED in the liver. Planning of such treatments is performed by simultaneously optimizing multiple dose distributions based on their cumulative BED. We study this concept for five liver cancer patients with different tumor geometries. Results: Spatiotemporal fractionation presents a method of increasing the ratio of prescribed tumor BED to mean BED in the noninvolved liver by approximately 10–20%, compared to conventional SBRT using identical fractions. Conclusions: Spatiotemporal fractionation may reduce the risk of liver toxicity or facilitate dose escalation in liver SBRT in circumstances where the mean dose to the non-involved liver is the prescription-limiting factor.}, number={2}, journal={Radiotherapy and Oncology}, publisher={Elsevier BV}, author={Unkelbach, Jan and Papp, Dávid and Gaddy, Melissa R. and Andratschke, Nicolaus and Hong, Theodore and Guckenberger, Matthias}, year={2017}, month={Nov}, pages={357–364} } @inproceedings{papp_2017, title={Univariate polynomial optimization with sum-of-squares interpolants}, volume={213}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-85033453631&partnerID=MN8TOARS}, DOI={10.1007/978-3-319-66616-7_9}, abstractNote={One of the most common tools in polynomial optimization is the approximation of the cone of nonnegative polynomials with the cone of sum-of-squares polynomials. This leads to polynomial-time solvable approximations for many NP-hard optimization problems using semidefinite programming (SDP). While theoretically satisfactory, the translation of optimization problems involving sum-of-squares polynomials to SDPs is not always practical. First, in the common SDP formulation, the dual variables are semidefinite matrices whose condition numbers grow exponentially with the degree of the polynomials involved, which is detrimental for a floating-point implementation. Second, the SDP representation of sum-of-squares polynomials roughly squares the number of optimization variables, increasing the time and memory complexity of the solution algorithms by several orders of magnitude. In this paper we focus on the first, numerical, issue. We show that a reformulation of the sum-of-squares SDP using polynomial interpolants yields a substantial improvement over the standard formulation, and problems involving sum-of-squares interpolants of hundreds of degrees can be handled without difficulty by commonly used semidefinite programming solvers. Preliminary numerical results using semi-infinite optimization problems align with the theoretical predictions. In all problems considered, available memory is the only factor limiting the degrees of polynomials.}, booktitle={Springer Proceedings in Mathematics and Statistics}, author={Papp, D.}, year={2017}, pages={143–162} } @article{papp_2016, title={On the complexity of local search in unconstrained quadratic binary optimization}, volume={26}, ISSN={1095-7189}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-84976872183&partnerID=MN8TOARS}, DOI={10.1137/15m1047775}, abstractNote={We consider the problem of finding a local minimum of a binary quadratic function and show by an elementary construction that every descending local search algorithm takes exponential time in the worst case.}, number={2}, journal={SIAM JOURNAL ON OPTIMIZATION}, author={Papp, David}, year={2016}, pages={1257–1261} } @article{gaddy_papp_2016, title={Technical Note: Improving the VMERGE treatment planning algorithm for rotational radiotherapy}, volume={43}, ISSN={2473-4209}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-84975226392&partnerID=MN8TOARS}, DOI={10.1118/1.4953193}, abstractNote={Purpose: The authors revisit the VMERGE treatment planning algorithm by Craft et al. [“Multicriteria VMAT optimization,” Med. Phys.
39, 686–696 (2012)] for arc therapy planning and propose two changes to the method that are aimed at improving the achieved trade-off between treatment time and plan quality at little additional planning time cost, while retaining other desirable properties of the original algorithm. Methods: The original VMERGE algorithm first computes an “ideal,” high-quality but also highly time-consuming treatment plan that irradiates the patient from all possible angles in a fine angular grid with a highly modulated beam, and then makes this plan deliverable within practical treatment time by an iterative fluence map merging and sequencing algorithm. We propose two changes to this method. First, we regularize the ideal plan obtained in the first step by adding an explicit constraint on treatment time. Second, we propose a different merging criterion that consists of identifying and merging the adjacent maps whose combination results in the least degradation of radiation dose. Results: The effect of both suggested modifications is evaluated individually and jointly on clinical prostate and paraspinal cases. Details of the two cases are reported. Conclusions: In the authors’ computational study, they found that both proposed modifications, especially the regularization, yield noticeably better treatment plans, for the same treatment times, than what can be obtained using the original VMERGE method. The resulting plans match the quality of 20-beam step-and-shoot IMRT plans with a delivery time of approximately 2 min.}, number={7}, journal={MEDICAL PHYSICS}, author={Gaddy, Melissa R. and Papp, David}, year={2016}, month={Jul}, pages={4093–4097} } @article{papp_bortfeld_unkelbach_2015, title={A modular approach to intensity-modulated arc therapy optimization with noncoplanar trajectories}, volume={60}, ISSN={1361-6560}, url={http://dx.doi.org/10.1088/0031-9155/60/13/5179}, DOI={10.1088/0031-9155/60/13/5179}, abstractNote={Utilizing noncoplanar beam angles in volumetric modulated arc therapy (VMAT) has the potential to combine the benefits of arc therapy, such as short treatment times, with the benefits of noncoplanar intensity modulated radiotherapy (IMRT) plans, such as improved organ sparing. Recently, vendors introduced treatment machines that allow for simultaneous couch and gantry motion during beam delivery to make noncoplanar VMAT treatments possible. Our aim is to provide a reliable optimization method for noncoplanar isocentric arc therapy plan optimization. The proposed solution is modular in the sense that it can incorporate different existing beam angle selection and coplanar arc therapy optimization methods. Treatment planning is performed in three steps. First, a number of promising noncoplanar beam directions are selected using an iterative beam selection heuristic; these beams serve as anchor points of the arc therapy trajectory. In the second step, continuous gantry/couch angle trajectories are optimized using a simple combinatorial optimization model to define a beam trajectory that efficiently visits each of the anchor points. Treatment time is controlled by limiting the time the beam needs to trace the prescribed trajectory. In the third and final step, an optimal arc therapy plan is found along the prescribed beam trajectory. In principle any existing arc therapy optimization method could be incorporated into this step; for this work we use a sliding window VMAT algorithm. The approach is demonstrated using two particularly challenging cases.
The first one is a lung SBRT patient whose planning goals could not be satisfied with fewer than nine noncoplanar IMRT fields when the patient was treated in the clinic. The second one is a brain tumor patient, where the target volume overlaps with the optic nerves and the chiasm and is directly adjacent to the brainstem. Both cases illustrate that the large number of angles utilized by isocentric noncoplanar VMAT plans can help improve dose conformity, homogeneity, and organ sparing simultaneously using the same beam trajectory length and delivery time as a coplanar VMAT plan.}, number={13}, journal={PHYSICS IN MEDICINE AND BIOLOGY}, author={Papp, David and Bortfeld, Thomas and Unkelbach, Jan}, year={2015}, month={Jul}, pages={5179–5198} } @misc{unkelbach_bortfeld_craft_alber_bangert_bokrantz_chen_li_xing_men_et_al_2015, title={Optimization approaches to volumetric modulated arc therapy planning}, volume={42}, ISSN={2473-4209}, url={http://dx.doi.org/10.1118/1.4908224}, DOI={10.1118/1.4908224}, abstractNote={Volumetric modulated arc therapy (VMAT) has found widespread clinical application in recent years. A large number of treatment planning studies have evaluated the potential for VMAT for different disease sites based on the currently available commercial implementations of VMAT planning. In contrast, literature on the underlying mathematical optimization methods used in treatment planning is scarce. VMAT planning represents a challenging large scale optimization problem. In contrast to fluence map optimization in intensity-modulated radiotherapy planning for static beams, VMAT planning represents a nonconvex optimization problem. In this paper, the authors review the state-of-the-art in VMAT planning from an algorithmic perspective. Different approaches to VMAT optimization, including arc sequencing methods, extensions of direct aperture optimization, and direct optimization of leaf trajectories are reviewed. Their advantages and limitations are outlined and recommendations for improvements are discussed.}, number={3}, journal={MEDICAL PHYSICS}, author={Unkelbach, Jan and Bortfeld, Thomas and Craft, David and Alber, Markus and Bangert, Mark and Bokrantz, Rasmus and Chen, Danny and Li, Ruijiang and Xing, Lei and Men, Chunhua and et al.}, year={2015}, month={Mar}, pages={1367–1377} } @article{chen_mehrotra_papp_2015, title={Scenario generation for stochastic optimization problems via the sparse grid method}, volume={62}, ISSN={1573-2894}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-84948380676&partnerID=MN8TOARS}, DOI={10.1007/s10589-015-9751-7}, abstractNote={We study the use of sparse grids in the scenario generation (or discretization) problem in stochastic programming problems where the uncertainty is modeled using a continuous multivariate distribution. We show that, under a regularity assumption on the random function involved, the sequence of optimal objective function values of the sparse grid approximations converges to the true optimal objective function values as the number of scenarios increases. The rate of convergence is also established. We treat separately the special case when the underlying distribution is an affine transform of a product of univariate distributions, and show how the sparse grid method can be adapted to the distribution by the use of quadrature formulas tailored to the distribution.
We numerically compare the performance of the sparse grid method using different quadrature rules with classic quasi-Monte Carlo (QMC) methods, optimal rank-one lattice rules, and Monte Carlo (MC) scenario generation, using a series of utility maximization problems with up to 160 random variables. The results show that the sparse grid method is very efficient, especially if the integrand is sufficiently smooth. In such problems the sparse grid scenario generation method is found to need several orders of magnitude fewer scenarios than MC and QMC scenario generation to achieve the same accuracy. It is indicated that the method scales well with the dimension of the distribution—especially when the underlying distribution is an affine transform of a product of univariate distributions, in which case the method appears scalable to thousands of random variables.}, number={3}, journal={COMPUTATIONAL OPTIMIZATION AND APPLICATIONS}, author={Chen, Michael and Mehrotra, Sanjay and Papp, David}, year={2015}, month={Dec}, pages={669–692} } @article{unkelbach_papp_2015, title={The emergence of nonuniform spatiotemporal fractionation schemes within the standard BED model}, volume={42}, ISSN={0094-2405}, url={http://dx.doi.org/10.1118/1.4916684}, DOI={10.1118/1.4916684}, abstractNote={Purpose: Nonuniform spatiotemporal radiotherapy fractionation schemes, i.e., delivering distinct dose distributions in different fractions, can potentially improve the therapeutic ratio. This is possible if the dose distributions are designed such that similar doses are delivered to normal tissues (exploit the fractionation effect) while hypofractionating subregions of the tumor. In this paper, the authors develop methodology for treatment planning with nonuniform fractions and demonstrate this concept in the context of intensity-modulated proton therapy (IMPT). Methods: Treatment planning is performed by simultaneously optimizing (possibly distinct) IMPT dose distributions for multiple fractions. This is achieved using objective and constraint functions evaluated for the cumulative biologically equivalent dose (BED) delivered at the end of treatment. BED-based treatment planning formulations lead to nonconvex optimization problems, such that local gradient-based algorithms require adequate starting positions to find good local optima. To that end, the authors develop a combinatorial algorithm to initialize the pencil beam intensities. Results: The concept of nonuniform spatiotemporal fractionation schemes is demonstrated for a spinal metastasis patient treated in two fractions using stereotactic body radiation therapy. The patient is treated with posterior oblique beams with the kidneys being located in the entrance region of the beam.
It is shown that a nonuniform fractionation scheme that hypofractionates the central part of the tumor allows for a skin and kidney BED reduction of approximately 10%–20%. Conclusions: Nonuniform spatiotemporal fractionation schemes represent a novel approach to exploit fractionation effects that deserves further exploration for selected disease sites.}, number={5}, journal={Medical Physics}, publisher={Wiley}, author={Unkelbach, Jan and Papp, Dávid}, year={2015}, month={Apr}, pages={2234–2241} } @article{mehrotra_papp_2014, title={A Cutting Surface Algorithm for Semi-Infinite Convex Programming with an Application to Moment Robust Optimization}, volume={24}, ISSN={1052-6234, 1095-7189}, url={http://dx.doi.org/10.1137/130925013}, DOI={10.1137/130925013}, abstractNote={We present and analyze a central cutting surface algorithm for general semi-infinite convex optimization problems and use it to develop a novel algorithm for distributionally robust optimization problems in which the uncertainty set consists of probability distributions with given bounds on their moments. Moments of arbitrary order, as well as nonpolynomial moments, can be included in the formulation. We show that this gives rise to a hierarchy of optimization problems with decreasing levels of risk-aversion, with classic robust optimization at one end of the spectrum and stochastic programming at the other. Although our primary motivation is to solve distributionally robust optimization problems with moment uncertainty, the cutting surface method for general semi-infinite convex programs is also of independent interest. The proposed method is applicable to problems with nondifferentiable semi-infinite constraints indexed by an infinite dimensional index set. Examples comparing the cutting surface algorithm...}, number={4}, journal={SIAM Journal on Optimization}, publisher={Society for Industrial & Applied Mathematics (SIAM)}, author={Mehrotra, Sanjay and Papp, Dávid}, year={2014}, month={Jan}, pages={1670–1697} } @article{unkelbach_craft_hong_papp_ramakrishnan_salari_wolfgang_bortfeld_2014, title={Exploiting tumor shrinkage through temporal optimization of radiotherapy}, volume={59}, ISSN={0031-9155, 1361-6560}, url={http://dx.doi.org/10.1088/0031-9155/59/12/3059}, DOI={10.1088/0031-9155/59/12/3059}, abstractNote={In multi-stage radiotherapy, a patient is treated in several stages separated by weeks or months. This regimen has been motivated mostly by radiobiological considerations, but also provides an approach to reduce normal tissue dose by exploiting tumor shrinkage. The paper considers the optimal design of multi-stage treatments, motivated by the clinical management of large liver tumors for which normal liver dose constraints prohibit the administration of an ablative radiation dose in a single treatment. We introduce a dynamic tumor model that incorporates three factors: radiation induced cell kill, tumor shrinkage, and tumor cell repopulation. The design of multi-stage radiotherapy is formulated as a mathematical optimization problem in which the total dose to the normal tissue is minimized, subject to delivering the prescribed dose to the tumor. Based on the model, we gain insight into the optimal administration of radiation over time, i.e. the optimal treatment gaps and dose levels. We analyze treatments consisting of two stages in detail. The analysis confirms the intuition that the second stage should be delivered just before the tumor size reaches a minimum and repopulation overcompensates shrinking.
Furthermore, it was found that, for a large range of model parameters, approximately one-third of the dose should be delivered in the first stage. The projected benefit of multi-stage treatments in terms of normal tissue sparing depends on model assumptions. However, the model predicts large dose reductions by more than a factor of 2 for plausible model parameters. The analysis of the tumor model suggests that substantial reduction in normal tissue dose can be achieved by exploiting tumor shrinkage via an optimal design of multi-stage treatments. This suggests taking a fresh look at multi-stage radiotherapy for selected disease sites where substantial tumor regression translates into reduced target volumes.}, number={12}, journal={Physics in Medicine and Biology}, publisher={IOP Publishing}, author={Unkelbach, Jan and Craft, David and Hong, Theodore and Papp, Dávid and Ramakrishnan, Jagdish and Salari, Ehsan and Wolfgang, John and Bortfeld, Thomas}, year={2014}, month={May}, pages={3059–3079} } @article{craft_papp_unkelbach_2014, title={Plan averaging for multicriteria navigation of sliding window IMRT and VMAT}, volume={41}, ISSN={0094-2405}, url={http://dx.doi.org/10.1118/1.4859295}, DOI={10.1118/1.4859295}, abstractNote={Purpose: To describe a method for combining sliding window plans [intensity modulated radiation therapy (IMRT) or volumetric modulated arc therapy (VMAT)] for use in treatment plan averaging, which is needed for Pareto surface navigation based multicriteria treatment planning. Methods: The authors show that by taking an appropriately defined average of leaf trajectories of sliding window plans, the authors obtain a sliding window plan whose fluence map is the exact average of the fluence maps corresponding to the initial plans. In the case of static-beam IMRT, this also implies that the dose distribution of the averaged plan is the exact dosimetric average of the initial plans. In VMAT delivery, the dose distribution of the averaged plan is a close approximation of the dosimetric average of the initial plans. Results: The authors demonstrate the method on three Pareto optimal VMAT plans created for a demanding paraspinal case, where the tumor surrounds the spinal cord. The results show that the leaf averaged plans yield dose distributions that approximate the dosimetric averages of the precomputed Pareto optimal plans well. Conclusions: The proposed method enables the navigation of deliverable Pareto optimal plans directly, i.e., interactive multicriteria exploration of deliverable sliding window IMRT and VMAT plans, eliminating the need for a sequencing step after navigation and hence the dose degradation that is caused by such a sequencing step.}, number={2}, journal={Medical Physics}, publisher={Wiley}, author={Craft, David and Papp, Dávid and Unkelbach, Jan}, year={2014}, month={Jan}, pages={021709} } @article{papp_alizadeh_2014, title={Shape-Constrained Estimation Using Nonnegative Splines}, volume={23}, ISSN={1061-8600, 1537-2715}, url={http://dx.doi.org/10.1080/10618600.2012.707343}, DOI={10.1080/10618600.2012.707343}, abstractNote={We consider the problem of nonparametric estimation of unknown smooth functions in the presence of restrictions on the shape of the estimator and on its support using polynomial splines. We provide a general computational framework that treats these estimation problems in a unified manner, without the limitations of the existing methods.
Applications of our approach include computing optimal spline estimators for regression, density estimation, and arrival rate estimation problems in the presence of various shape constraints. Our approach can also handle multiple simultaneous shape constraints. The approach is based on a characterization of nonnegative polynomials that leads to semidefinite programming (SDP) and second-order cone programming (SOCP) formulations of the problems. These formulations extend and generalize a number of previous approaches in the literature, including those with piecewise linear and B-spline estimators. We also consider a simpler approach in which nonnegative splines are approximated by splines whose pieces are polynomials with nonnegative coefficients in a nonnegative basis. A condition is presented to test whether a given nonnegative basis gives rise to a spline cone that is dense in the space of nonnegative continuous functions. The optimization models formulated in the article are solvable with minimal running time using off-the-shelf software. We provide numerical illustrations for density estimation and regression problems. These examples show that the proposed approach requires minimal computational time, and that the estimators obtained using our approach often match and frequently outperform kernel methods and spline smoothing without shape constraints. Supplementary materials for this article are provided online.}, number={1}, journal={Journal of Computational and Graphical Statistics}, publisher={Informa UK Limited}, author={Papp, Dávid and Alizadeh, Farid}, year={2014}, month={Jan}, pages={211–231} } @article{craft_bangert_long_papp_unkelbach_2014, title={Shared data for intensity modulated radiation therapy (IMRT) optimization research: the CORT dataset}, volume={3}, ISSN={2047-217X}, url={http://dx.doi.org/10.1186/2047-217x-3-37}, DOI={10.1186/2047-217x-3-37}, abstractNote={Background: We provide common datasets (which we call the CORT dataset: common optimization for radiation therapy) that researchers can use when developing and contrasting radiation treatment planning optimization algorithms. The datasets allow researchers to make one-to-one comparisons of algorithms in order to solve various instances of the radiation therapy treatment planning problem in intensity modulated radiation therapy (IMRT), including beam angle optimization, volumetric modulated arc therapy, and direct aperture optimization.}, number={1}, journal={GigaScience}, publisher={Oxford University Press (OUP)}, author={Craft, David and Bangert, Mark and Long, Troy and Papp, Dávid and Unkelbach, Jan}, year={2014}, month={Dec} } @article{papp_unkelbach_2013, title={Direct leaf trajectory optimization for volumetric modulated arc therapy planning with sliding window delivery}, volume={41}, ISSN={0094-2405}, url={http://dx.doi.org/10.1118/1.4835435}, DOI={10.1118/1.4835435}, abstractNote={PURPOSE The authors propose a novel optimization model for volumetric modulated arc therapy (VMAT) planning that directly optimizes deliverable leaf trajectories in the treatment plan optimization problem and eliminates the need for a separate arc-sequencing step. METHODS In this model, a 360° arc is divided into a given number of arc segments in which the leaves move unidirectionally. This facilitates an algorithm that determines the optimal piecewise linear leaf trajectories for each arc segment, which are deliverable in a given treatment time.
Multileaf collimator constraints, including maximum leaf speed and interdigitation, are accounted for explicitly. The algorithm is customized to allow for VMAT delivery using constant gantry speed and dose rate; however, the algorithm generalizes to variable gantry speed if beneficial. RESULTS The authors demonstrate the method for three different tumor sites: a head-and-neck case, a prostate case, and a paraspinal case. The authors first obtain a reference plan for intensity modulated radiotherapy (IMRT) using fluence map optimization and 20 intensity-modulated fields in equally spaced beam directions, which is beyond the standard of care. Modeling the typical clinical setup for the treatment sites considered, IMRT plans using seven or nine beams are also computed. Subsequently, VMAT plans are optimized by dividing the 360° arc into 20 corresponding arc segments. Assuming typical machine parameters (a dose rate of 600 MU/min and a maximum leaf speed of 3 cm/s), it is demonstrated that the optimized VMAT plans with 2–3 min delivery time are of noticeably better quality than the 7–9 beam IMRT plans. The VMAT plan quality approaches the quality of the 20-beam IMRT benchmark plan for delivery times between 3 and 4 min. CONCLUSIONS The results indicate that high-quality treatments can be delivered in a single arc with 20 arc segments if sufficient time is allowed for modulation in each segment.}, number={1}, journal={Medical Physics}, publisher={Wiley}, author={Papp, Dávid and Unkelbach, Jan}, year={2013}, month={Dec}, pages={011701} } @article{alizadeh_papp_2013, title={Estimating arrival rate of nonhomogeneous Poisson processes with semidefinite programming}, volume={208}, ISSN={0254-5330 1572-9338}, url={http://dx.doi.org/10.1007/s10479-011-1020-2}, DOI={10.1007/s10479-011-1020-2}, number={1}, journal={Annals of Operations Research}, publisher={Springer Science and Business Media LLC}, author={Alizadeh, Farid and Papp, Dávid}, year={2013}, month={Jan}, pages={291–308} } @article{mehrotra_papp_2013, title={Generating Moment Matching Scenarios Using Optimization Techniques}, volume={23}, ISSN={1052-6234 1095-7189}, url={http://dx.doi.org/10.1137/110858082}, DOI={10.1137/110858082}, abstractNote={An optimization-based method is proposed to generate moment matching scenarios for numerical integration and its use in stochastic programming. The main advantage of the method is its flexibility: it can generate scenarios matching any prescribed set of moments of the underlying distribution rather than matching all moments up to a certain order, and the distribution can be defined over an arbitrary set. This allows for a reduction in the number of scenarios and allows the scenarios to be better tailored to the problem at hand. The method is based on a semi-infinite linear programming formulation of the problem that is shown to be solvable with polynomial iteration complexity. A practical column generation method is implemented. The column generation subproblems are polynomial optimization problems; however, they need not be solved to optimality. It is found that the columns in the column generation approach can be efficiently generated by random sampling.
The number of scenarios generated matches a lower...}, number={2}, journal={SIAM Journal on Optimization}, publisher={Society for Industrial & Applied Mathematics (SIAM)}, author={Mehrotra, Sanjay and Papp, Dávid}, year={2013}, month={Jan}, pages={963–999} } @article{papp_alizadeh_2013, title={Semidefinite Characterization of Sum-of-Squares Cones in Algebras}, volume={23}, ISSN={1052-6234 1095-7189}, url={http://dx.doi.org/10.1137/110843265}, DOI={10.1137/110843265}, abstractNote={We extend Nesterov's semidefinite programming characterization of squared functional systems, and Faybusovich's abstraction to bilinear symmetric maps, to cones of sum-of-squares elements in general abstract algebras. Using algebraic techniques such as isomorphism, linear isomorphism, tensor products, sums, and direct sums, we show that many concrete cones are in fact sum-of-squares cones with respect to some algebra and thus are representable by the cone of positive semidefinite matrices. We also consider nonnegativity with respect to a proper cone $\mathcal{K}$ and show that in some cases cones of $\mathcal{K}$-nonnegative functions are either sums of squares or at least semidefinite representable. For example, we show that some well-known Chebyshev systems, when extended to Euclidean Jordan algebras, induce cones that are semidefinite representable. Finally, we discuss some concrete examples and applications, including minimum ellipsoids enclosing given space curves, minimization of eigenvalues of polynomial...}, number={3}, journal={SIAM Journal on Optimization}, publisher={Society for Industrial & Applied Mathematics (SIAM)}, author={Papp, Dávid and Alizadeh, Farid}, year={2013}, month={Jan}, pages={1398–1423} } @book{mehrotra_papp_2012, title={Generating nested quadrature formulas for general weight functions with known moments}, author={Mehrotra, Sanjay and Papp, Dávid}, year={2012} } @book{collado_papp_2012, title={Network interdiction--models, applications, unexplored directions}, number={4-2012}, author={Collado, Ricardo A. and Papp, Dávid}, year={2012} } @article{papp_2012, title={Optimal Designs for Rational Function Regression}, volume={107}, ISSN={0162-1459 1537-274X}, url={http://dx.doi.org/10.1080/01621459.2012.656035}, DOI={10.1080/01621459.2012.656035}, abstractNote={We consider the problem of finding optimal nonsequential designs for a large class of regression models involving polynomials and rational functions with heteroscedastic noise, also given by a polynomial or rational weight function. Since the design weights can be found easily by existing methods once the support is known, we concentrate on determining the support of the optimal design. The proposed method treats D-, E-, A-, and $\Phi_p$-optimal designs in a unified manner, and generates a polynomial whose zeros are the support points of the optimal approximate design, generalizing a number of previously known results of the same flavor. The method is based on a mathematical optimization model that can incorporate various criteria of optimality and can be solved efficiently by well-established numerical optimization methods. In contrast to optimization-based methods previously proposed for the solution of similar design problems, our method also has a theoretical guarantee of algorithmic efficiency; in concordance with the theory, the actual running times of all numerical examples considered in the paper are negligible. The numerical stability of the method is demonstrated in an example involving high-degree polynomials.
As a corollary, an upper bound on the size of the support set of the minimally supported optimal designs is also found.}, number={497}, journal={Journal of the American Statistical Association}, publisher={Informa UK Limited}, author={Papp, Dávid}, year={2012}, month={Mar}, pages={400–411} } @article{nagy_papp_tóth_2012, title={ReactionKinetics—A Mathematica package with applications}, volume={83}, ISSN={0009-2509}, url={http://dx.doi.org/10.1016/j.ces.2012.01.039}, DOI={10.1016/j.ces.2012.01.039}, abstractNote={Requirements are formulated for a reaction kinetics program package to be useful to as wide a circle of users as possible, and they are illustrated with examples using ReactionKinetics, a Mathematica-based package currently being developed by the authors. Treating a realistic problem in any field of reaction kinetics raises a series of problems, both mathematical and computational; we also illustrate a number of these with examples using our package.}, journal={Chemical Engineering Science}, publisher={Elsevier BV}, author={Nagy, A. L. and Papp, Dávid and Tóth, J.}, year={2012}, month={Dec}, pages={12–23} } @article{rudolf_noyan_papp_alizadeh_2011, title={Bilinear optimality constraints for the cone of positive polynomials}, volume={129}, ISSN={0025-5610 1436-4646}, url={http://dx.doi.org/10.1007/s10107-011-0458-y}, DOI={10.1007/s10107-011-0458-y}, number={1}, journal={Mathematical Programming}, publisher={Springer Science and Business Media LLC}, author={Rudolf, Gábor and Noyan, Nilay and Papp, Dávid and Alizadeh, Farid}, year={2011}, month={May}, pages={5–31} } @book{papp_alizadeh_2011, title={Multivariate arrival rate estimation by sum-of-squares polynomial splines and decomposition}, author={Papp, Dávid and Alizadeh, Farid}, year={2011} } @article{collado_papp_ruszczyński_2011, title={Scenario decomposition of risk-averse multistage stochastic programming problems}, volume={200}, ISSN={0254-5330 1572-9338}, url={http://dx.doi.org/10.1007/s10479-011-0935-y}, DOI={10.1007/s10479-011-0935-y}, number={1}, journal={Annals of Operations Research}, publisher={Springer Science and Business Media LLC}, author={Collado, Ricardo A. and Papp, Dávid and Ruszczyński, Andrzej}, year={2011}, month={Aug}, pages={147–170} } @article{boros_gurvich_makino_papp_2010, title={Acyclic, or totally tight, two-person game forms: Characterization and main properties}, volume={310}, ISSN={0012-365X}, url={http://dx.doi.org/10.1016/j.disc.2009.11.009}, DOI={10.1016/j.disc.2009.11.009}, abstractNote={It is known that a two-person game form g is Nash-solvable if and only if it is tight. We strengthen the concept of tightness as follows: a game form is called totally tight if each of its 2×2 subforms is tight. (It is easy to show that in this case all, not only 2×2, subforms are tight.) We characterize totally tight game forms and derive from this characterization that they are tight, Nash-solvable, dominance-solvable, acyclic, and assignable.
In particular, total tightness and acyclicity are equivalent properties of two-person game forms.}, number={6-7}, journal={Discrete Mathematics}, publisher={Elsevier BV}, author={Boros, Endre and Gurvich, Vladimir and Makino, Kazuhisa and Papp, Dávid}, year={2010}, month={Apr}, pages={1135–1151} } @article{papp_vizvári_2005, title={Effective solution of linear Diophantine equation systems with an application in chemistry}, volume={39}, ISSN={0259-9791 1572-8897}, url={http://dx.doi.org/10.1007/s10910-005-9001-9}, DOI={10.1007/s10910-005-9001-9}, number={1}, journal={Journal of Mathematical Chemistry}, publisher={Springer Science and Business Media LLC}, author={Papp, Dávid and Vizvári, Béla}, year={2005}, month={Nov}, pages={15–31} }