@mastersthesis{Monniaux1, author = {David Monniaux}, title = {R\'eduction de r\'eseaux et factorisation}, school = {\'Ecole normale sup\'erieure de Lyon}, year = 1996, type = {rapport de stage de premi\`ere ann\'ee de magist\`ere}, note = {French}, dvi = {http://www-verimag.imag.fr/~monniaux/biblio/David_Monniaux_1.ps.gz} }
@mastersthesis{Monniaux2, author = {David Monniaux}, title = {M\'ethodes formelles et cryptographie}, school = {\'Ecole normale sup\'erieure de Lyon}, year = 1997, type = {rapport de stage de seconde ann\'ee de magist\`ere}, note = {French (introduction), English (scientific matters)}, dvi = {http://www-verimag.imag.fr/~monniaux/biblio/David_Monniaux_2.dvi.gz}, pdf = {http://www-verimag.imag.fr/~monniaux/biblio/David_Monniaux_2.pdf} }
@mastersthesis{Monniaux3, author = {David Monniaux}, title = {R\'ealisation m\'ecanis\'ee d'interpr\'eteurs abstraits}, school = {Universit\'e Paris VII}, year = 1998, type = {Rapport de {DEA}}, note = {French}, dvi = {http://www-verimag.imag.fr/~monniaux/biblio/David_Monniaux_3.dvi.gz}, pdf = {http://www-verimag.imag.fr/~monniaux/biblio/David_Monniaux_3.pdf} }
@inproceedings{Monniaux_CSFW12, author = {David Monniaux}, title = {Decision Procedures for the Analysis of Cryptographic Protocols by Logics of Belief}, booktitle = {12th Computer Security Foundations Workshop}, publisher = {IEEE}, year = 1999, pdf = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_CSFW12.pdf}, ps = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_CSFW12.ps.gz}, isbn = {0-7695-0201-6}, pages = {44--54}, annote = {Decision procedures for BAN-like logics of belief are not new. Mao and Boyd (IEEE CSFW, 1993) describe a tableau-based procedure. Kindred and Wing (USENIX Workshop on Electronic Commerce, 1996) describe a decision procedure that uses forward chaining and backward chaining. David's approach uses only forward chaining. Some automatic transformations of the proof system are used to make all rules suitable for forward chaining. David considered trying to use only backward chaining, but that seemed more difficult. Cathy Meadows asked how decidability of such proof systems relates to undecidability results such as that in the paper by Cervesato et al. (described next). David replied that the undecidability results apply to semantic models of authentication protocols, and there is no good such semantics for belief logics.}, abstract = {Belief-logic deductions are used in the analysis of cryptographic protocols. We show a new method to decide such logics. In addition to the familiar BAN logic, it is also applicable to the more advanced versions of protocol security logics, and GNY in particular; and it employs an efficient forward-chaining algorithm the completeness and termination of which are proved. Theoretic proofs, implementation decisions and results are discussed.}, entrysubtype = {intc}, doi = {10.1109/CSFW.1999.779761} }
@inproceedings{Monniaux_SAS99, author = {David Monniaux}, title = {Abstracting cryptographic protocols with tree automata}, booktitle = {Sixth International Static Analysis Symposium (SAS'99)}, publisher = {Springer Verlag}, year = 1999, series = {Lecture Notes in Computer Science}, number = 1694, pages = {149--163}, pdf = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_SAS99.pdf}, ps = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_SAS99.ps.gz}, abstract = {Cryptographic protocols have so far been analyzed for the most part by means of testing (which does not yield proofs of secrecy) and theorem proving (costly). We propose a new approach, based on abstract interpretation and using regular tree languages. The abstraction we use seems fine-grained enough to be able to certify some protocols. Both the concrete and abstract semantics of the protocol description language and implementation issues are discussed in the paper.}, entrysubtype = {intc} }
@inproceedings{Monniaux_SAS00, author = {David Monniaux}, title = {Abstract Interpretation of Probabilistic Semantics}, booktitle = {Seventh International Static Analysis Symposium (SAS'00)}, series = {Lecture Notes in Computer Science}, year = 2000, publisher = {Springer Verlag}, number = 1824, pages = {322--339}, pdf = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_SAS00.pdf}, ps = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_SAS00.ps.gz}, znote = {Extended version on the author's web site.}, abstract = {Following earlier models, we lift standard deterministic and nondeterministic semantics of imperative programs to probabilistic semantics. This semantics allows for random external inputs of known or unknown probability and random number generators. We then propose a method of analysis of programs according to this semantics, in the general framework of abstract interpretation. This method lifts an ``ordinary'' abstract lattice, for non-probabilistic programs, to one suitable for probabilistic programs. Our construction is highly generic. We discuss the influence of certain parameters on the precision of the analysis, basing ourselves on experimental results.}, entrysubtype = {intc}, doi = {10.1007/978-3-540-45099-3_17}, isbn = {978-3-540-67668-3} }
@inproceedings{Monniaux_POPL01, author = {David Monniaux}, title = {An Abstract {M}onte-{C}arlo Method for the Analysis of Probabilistic Programs (extended abstract)}, booktitle = {28th Symposium on Principles of Programming Languages (POPL '01)}, year = 2001, organization = {Association for Computing Machinery}, pages = {93--101}, ps = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_POPL01.ps.gz}, pdf = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_POPL01.pdf}, abstract = {We introduce a new method, a combination of random testing and abstract interpretation, for the analysis of programs featuring both probabilistic and non-probabilistic nondeterminism. After introducing ``ordinary'' testing, we show how to combine testing and abstract interpretation and give formulas linking the precision of the results to the number of iterations. We then discuss complexity and optimization issues and end with some experimental results.}, entrysubtype = {intc}, eprinttype = {arxiv}, eprint = {cs/0701195}, doi = {10.1145/360204.360211}, isbn = {1-58113-336-7} }
@inproceedings{Monniaux_ESOP01, author = {David Monniaux}, title = {Backwards abstract interpretation of probabilistic programs}, booktitle = {European Symposium on Programming Languages and Systems (ESOP '01)}, series = {Lecture Notes in Computer Science}, number = 2028, year = 2001, pages = {367--382}, publisher = {Springer Verlag}, pdf = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_ESOP01.pdf}, ps = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_ESOP01.ps.gz}, abstract = {We introduce an extension of the framework of backwards abstract interpretation to probabilistic programs. This extension is proved to be sound with respect to a semantics that we introduce and prove to be equivalent to the standard semantics of probabilistic programs. We then propose a generic construction lifting ordinary abstract interpretation lattices to probabilistic programs.}, entrysubtype = {intc}, doi = {10.1007/3-540-45309-1_24}, isbn = {978-3-540-41862-7} }
@inproceedings{Monniaux_SAS01, author = {David Monniaux}, title = {An Abstract Analysis of the Probabilistic Termination of Programs}, booktitle = {8th International Static Analysis Symposium (SAS'01)}, series = {Lecture Notes in Computer Science}, year = 2001, publisher = {Springer Verlag}, number = 2126, pages = {111--126}, pdf = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_SAS01.pdf}, ps = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_SAS01.ps.gz}, abstract = {It is often useful to introduce probabilistic behavior in programs, either because of the use of internal random generators (probabilistic algorithms), or because of some external devices (networks, physical sensors) with known statistics of behavior. Previous works on probabilistic abstract interpretation have addressed safety properties, but somehow neglected probabilistic termination. In this paper, we propose a method to automatically prove the probabilistic termination of programs using exponential bounds on the tail of the distribution. We apply this method to an example and give some directions as to how to implement it. We also show that this method can be applied to make unsound statistical methods on average running times sound.}, entrysubtype = {intc}, doi = {10.1007/3-540-47764-0_7}, isbn = {978-3-540-42314-0} }
@phdthesis{Monniaux_these, author = {David Monniaux}, title = {Analyse de programmes probabilistes par interpr\'etation abstraite}, school = {Universit\'e {P}aris {IX} {D}auphine}, year = 2001, type = {Th\`ese de doctorat}, note = {Extended summary in French; contents in English.}, pdf = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_these.pdf}, abstract = {The study of probabilistic programs is of considerable interest for the validation of networking protocols, embedded systems, or simply for compiling optimizations. It is also a difficult matter, due to the undecidability of properties on infinite-state deterministic programs, as well as the difficulties arising from probabilistic aspects. In this thesis, we propose a formal language for the specification of trace properties of probabilistic, nondeterministic transition systems, encompassing those that can be specified using deterministic B\"uchi automata. Those properties are in general undecidable on infinite processes. This language has both a concrete semantics in terms of sets of traces, as well as an abstract semantics in terms of measurable functions. We then apply abstract interpretation-based techniques to give upper bounds on the worst-case probability of the studied property. We propose an enhancement of this technique when the state space is partitioned --- for instance along the program points ---, allowing the use of faster iteration methods. We propose two abstract domains suitable for this analysis, one parameterized by an abstract domain suitable for nondeterministic (but not probabilistic) abstract interpretation, one modeling extended normal distributions. An alternative method to obtain such upper bounds is to apply forward abstract interpretation on measures. We propose two abstract domains suitable for this analysis, one parameterized by an abstract domain suitable for nondeterministic abstract interpretation, one modeling sub-exponential queues. This latter domain allows proving probabilistic termination of programs. The methods described so far are symbolic and do not make use of the statistical properties of probabilities. On the other hand, a well-known way to obtain information on probabilistic distributions is the Monte-Carlo method. We propose an abstract Monte-Carlo method featuring randomized abstract interpreters.}, entrysubtype = {thes}, oclc = {495477941}, eprinttype = {HAL}, eprint = {tel-00084287} }
@article{Monniaux_JTIT02, author = {David Monniaux}, editor = {Jean Goubault-Larrecq}, title = {Analysis of Cryptographic Protocols using Logics of Belief: an overview}, journal = {Journal of Telecommunications and Information Technology}, year = 2002, volume = 4, pages = {57--67}, pdf = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_JTIT02.pdf}, issn = {1509-4553}, abstract = {When designing a cryptographic protocol or explaining it, one often uses arguments such as ``since this message was signed by machine $B$, machine $A$ can be sure it came from $B$'' in informal proofs justifying how the protocol works. Since it is, in such informal proofs, often easy to overlook an essential assumption, such as a trust relation or the belief that a message is not a replay from a previous session, it seems desirable to write such proofs in a formal system. While such logics do not replace the recent techniques of automatic proofs of safety properties, they help in pointing out the weaknesses of the system. In this paper, we present briefly the BAN (Burrows -- Abadi -- Needham) formal system \cite{BAN,BAN_TOCS} as well as some derivatives. We show how to prove some properties of a simple protocol, as well as how to detect undesirable assumptions. We then explain how the manual search for proofs can be made automatic. Finally, we explain how the lack of proper semantics can be a bit worrying.}, entrysubtype = {intj} }
@incollection{BlanchetCousotEtAl02-NJ, author = {Bruno Blanchet and Patrick Cousot and Radhia Cousot and J\'er\^ome Feret and Laurent Mauborgne and Antoine Min\'e and David Monniaux and Xavier Rival}, title = {Design and Implementation of a Special-Purpose Static Program Analyzer for Safety-Critical Real-Time Embedded Software}, booktitle = {The Essence of Computation: Complexity, Analysis, Transformation}, editor = {T. Mogensen and D. A. Schmidt and I. H. Sudborough}, series = {Lecture Notes in Computer Science}, number = 2566, publisher = {Springer Verlag}, year = 2002, pages = {85--108}, entrysubtype = {chap}, pdf = {http://www-verimag.imag.fr/~monniaux/biblio/Static_analyzer_LNCS2566.pdf}, ps = {http://www-verimag.imag.fr/~monniaux/biblio/Static_analyzer_LNCS2566.ps.gz}, abstract = {We report on an experience in the design and implementation of a special-purpose static program analyzer for the verification of critical embedded real-time software.} }
@inproceedings{Monniaux_VMCAI03, author = {David Monniaux}, title = {Abstraction of expectation functions using Gaussian distributions}, booktitle = {Verification, Model Checking, and Abstract Interpretation (VMCAI)}, year = 2003, editor = {Zuck, Lenore D. and Attie, Paul C. and Cortesi, Agostino and Mukhopadhyay, Supratik}, number = 2575, series = {Lecture Notes in Computer Science}, pages = {161--173}, publisher = {Springer Verlag}, abstract = {We consider semantics of infinite-state programs, both probabilistic and nondeterministic, as expectation functions: for any set of states $A$, we associate to each program point a function mapping each state to its expectation of starting a trace reaching $A$. We then compute a safe upper approximation of these functions using abstract interpretation. This computation takes place in an abstract domain of extended Gaussian (normal) distributions.}, ps = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_VMCAI03.ps.gz}, pdf = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_VMCAI03.pdf}, entrysubtype = {intc}, doi = {10.1007/3-540-36384-X_15}, isbn = {978-3-540-00348-9} }
@inproceedings{BlanchetCousotEtAl_PLDI03, author = {Bruno Blanchet and Patrick Cousot and Radhia Cousot and J{\'e}r{\^o}me Feret and Laurent Mauborgne and Antoine Min{\'e} and David Monniaux and Xavier Rival}, title = {A Static Analyzer for Large Safety-Critical Software}, booktitle = {ACM SIGPLAN Conference on Programming language design and implementation (PLDI)}, pages = {196--207}, year = 2003, organization = {ACM}, pdf = {http://www-verimag.imag.fr/~monniaux/biblio/Static_analyzer_PLDI03.pdf}, ps = {http://www-verimag.imag.fr/~monniaux/biblio/Static_analyzer_PLDI03.ps.gz}, abstract = {We show that abstract interpretation-based static program analysis can be made efficient and precise enough to formally verify a class of properties for a family of large programs with few or no false alarms. This is achieved by refinement of a general purpose static analyzer and later adaptation to particular programs of the family by the end-user through parametrization. This is applied to the proof of soundness of data manipulation operations at the machine level for periodic synchronous safety critical embedded software. The main novelties are the design principle of static analyzers by refinement and adaptation through parametrization, the symbolic manipulation of expressions to improve the precision of abstract transfer functions, ellipsoid, and decision tree abstract domains, all with sound handling of rounding errors in floating point computations, widening strategies (with thresholds, delayed) and the automatic determination of the parameters (parametrized packing).}, entrysubtype = {intc}, eprinttype = {arxiv}, eprint = {cs/0701193}, doi = {10.1145/781131.781153} }
@article{Monniaux_SCP03, author = {David Monniaux}, title = {Abstracting Cryptographic Protocols with Tree Automata}, journal = {Science of Computer Programming}, year = 2003, volume = 47, number = {2--3}, pages = {177--202}, znote = {Journal version of \cite{Monniaux_SAS99}.}, doi = {10.1016/S0167-6423(02)00132-6}, pdf = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_SCP03.pdf}, ps = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_SCP03.ps.gz}, entrysubtype = {intj} }
@inproceedings{Monniaux_SAS03, author = {David Monniaux}, title = {Abstract interpretation of programs as {M}arkov decision processes}, booktitle = {Static Analysis Symposium (SAS '03)}, year = {2003}, series = {Lecture Notes in Computer Science}, number = {2694}, pages = {237--254}, publisher = {Springer Verlag}, pdf = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_SAS03.pdf}, ps = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_SAS03.ps.gz}, abstract = {We propose a formal language for the specification of trace properties of probabilistic, nondeterministic transition systems, encompassing the properties expressible in Linear Time Logic. Those formulas are in general undecidable on infinite deterministic transition systems and thus on infinite Markov decision processes. This language has both a semantics in terms of sets of traces, as well as another semantics in terms of measurable functions; we give and prove theorems linking the two semantics. We then apply abstract interpretation-based techniques to give upper bounds on the worst-case probability of the studied property. We propose an enhancement of this technique when the state space is partitioned --- for instance along the program points ---, allowing the use of faster iteration methods.}, entrysubtype = {intc}, doi = {10.1007/3-540-44898-5_13}, isbn = {978-3-540-40325-8} }
@article{Monniaux_SCP05, author = {David Monniaux}, title = {Abstract interpretation of programs as {M}arkov decision processes}, journal = {Science of Computer Programming}, year = {2005}, volume = 58, pages = {179--205}, month = oct, number = {1--2}, doi = {10.1016/j.scico.2005.02.008}, pdf = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_SCP05.pdf}, ps = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_SCP05.ps.gz}, entrysubtype = {intj}, znote = {Journal version of \cite{Monniaux_SAS03}.}, abstract = {We propose a formal language for the specification of trace properties of probabilistic, nondeterministic transition systems, encompassing the properties expressible in Linear Time Logic. Those formulas are in general undecidable on infinite deterministic transition systems and thus on infinite Markov decision processes. This language has both a semantics in terms of sets of traces, as well as another semantics in terms of measurable functions; we give and prove theorems linking the two semantics. We then apply abstract interpretation-based techniques to give upper bounds on the worst-case probability of the studied property. We propose an enhancement of this technique when the state space is partitioned ---~for instance along the program points~---, allowing the use of faster iteration methods.} }
@inproceedings{Monniaux_CAV05, author = {David Monniaux}, title = {Compositional analysis of floating-point linear numerical filters}, booktitle = {Computer-aided verification (CAV)}, year = 2005, series = {Lecture Notes in Computer Science}, number = 3576, publisher = {Springer Verlag}, pages = {199--212}, pdf = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_CAV05.pdf}, abstract = { Digital linear filters are used in a variety of applications (sound treatment, control/command, etc.), implemented in software, in hardware, or a combination thereof. For safety-critical applications, it is necessary to bound all variables and outputs of all filters. We give a compositional, effective abstraction for digital linear filters expressed as block diagrams, yielding sound, precise bounds for fixed-point or floating-point implementations of the filters.}, entrysubtype = {intc}, doi = {10.1007/11513988_21}, isbn = {978-3-540-27231-1} }
@inproceedings{ASTREE_ESOP05, author = {Patrick Cousot and Radhia Cousot and J\'er\^ome Feret and Laurent Mauborgne and Antoine Min\'e and David Monniaux and Xavier Rival}, title = {The {ASTR\'EE} Analyzer}, booktitle = {European symposium on programming (ESOP)}, pages = {21--30}, year = 2005, number = 3444, series = {Lecture Notes in Computer Science}, publisher = {Springer Verlag}, pdf = {http://www-verimag.imag.fr/~monniaux/biblio/Static_analyzer_ESOP05.pdf}, ps = {http://www-verimag.imag.fr/~monniaux/biblio/Static_analyzer_ESOP05.ps.gz}, abstract = {Astr\'ee is an abstract interpretation-based static program analyzer aiming at proving automatically the absence of run time errors in programs written in the C programming language. It has been applied with success to large embedded control-command safety critical real-time software generated automatically from syn\-chro\-nous specifications, producing a correctness proof for complex software without any false alarm in a few hours of computation.}, entrysubtype = {intc}, doi = {10.1007/978-3-540-31987-0_3}, isbn = {978-3-540-25435-5} }
@inproceedings{Monniaux_APLAS05, author = {David Monniaux}, title = {The parallel implementation of the {A}str\'ee static analyzer}, booktitle = {Programming Languages and Systems (APLAS)}, year = 2005, editor = {Kwangkeun Yi}, number = 3780, series = {Lecture Notes in Computer Science}, publisher = {Springer Verlag}, pdf = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_APLAS05.pdf}, ps = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_APLAS05.ps.gz}, abstract = {The Astr\'ee static analyzer is a specialized tool that can prove the absence of runtime errors, including arithmetic overflows, in large critical programs. Keeping analysis times reasonable for industrial use is one of the design objectives. In this paper, we discuss the parallel implementation of the analysis.}, entrysubtype = {intc}, eprinttype = {arxiv}, eprint = {cs/0701191}, doi = {10.1007/11575467_7}, isbn = {3-540-29735-9} }
@article{Monniaux_Soufron_Upgrade06, author = {David Monniaux and Jean-Baptiste Soufron}, title = {{DRM} as a Dangerous Alternative to Copyright Licences}, journal = {Upgrade}, year = 2006, volume = 7, number = 3, issn = {1684-5285}, pdf = {http://www.upgrade-cepis.org/issues/2006/3/up7-3Monniaux.pdf}, entrysubtype = {intj-shs} }
@inproceedings{Monniaux_SAS07, author = {David Monniaux}, title = {Optimal abstraction on real-valued programs}, booktitle = {Static analysis (SAS '07)}, pages = {104--120}, year = 2007, editor = {Gilberto Fil\'e and Hanne Riis Nielson}, number = 4634, series = {Lecture Notes in Computer Science}, publisher = {Springer Verlag}, abstract = {In this paper, we show that it is possible to abstract program fragments using real variables using formulas in the theory of real closed fields. This abstraction is compositional and modular. We first propose an exact abstraction for programs without loops. Given an abstract domain (in a wide class including intervals and octagons), we then show how to obtain an optimal abstraction of program fragments with respect to that domain. This abstraction allows computing optimal fixed points inside that abstract domain, without the need for a widening operator.}, ps = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_SAS07.ps.gz}, pdf = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_SAS07.pdf}, entrysubtype = {intc}, doi = {10.1007/978-3-540-74061-2_7}, isbn = {978-3-540-74060-5} }
@article{Monniaux_TOPLAS08, author = {David Monniaux}, title = {The pitfalls of verifying floating-point computations}, journal = {ACM Transactions on Programming Languages and Systems ({TOPLAS})}, year = 2008, month = may, publisher = {ACM}, issn = {0164-0925}, volume = 30, number = 3, pages = 12, abstract = {Current critical systems commonly use a lot of floating-point computations, and thus the testing or static analysis of programs containing floating-point operators has become a priority. However, correctly defining the semantics of common implementations of floating-point is tricky, because semantics may change with many factors beyond source-code level, such as choices made by compilers. We here give concrete examples of problems that can appear and solutions to implement in analysis software.}, url = {http://hal.archives-ouvertes.fr/hal-00128124/en/}, doi = {10.1145/1353445.1353446}, pdf = {http://hal.archives-ouvertes.fr/docs/00/28/14/29/PDF/floating-point-article.pdf}, entrysubtype = {intj}, eprinttype = {arxiv}, eprint = {cs/0701192} }
@inproceedings{ASTREE_TASE07, author = {Cousot, Patrick and Cousot, Radhia and Feret, J\'er\^ome and Min\'e, Antoine and Mauborgne, Laurent and Monniaux, David and Rival, Xavier}, title = {Varieties of Static Analyzers: A Comparison with {ASTR\'EE}}, booktitle = {Theoretical Aspects of Software Engineering (TASE '07)}, year = 2007, organization = {IEEE}, abstract = {We discuss the characteristic properties of ASTR\'EE, an automatic static analyzer for proving the absence of runtime errors in safety-critical real-time synchronous control/command C programs, and compare it with a variety of other program analysis tools.}, entrysubtype = {intc}, isbn = {978-0-7695-2856-4}, doi = {10.1109/TASE.2007.55} }
@inproceedings{Monniaux_EMSOFT07, author = {David Monniaux}, title = {Verification of Device Drivers and Intelligent Controllers: a Case Study}, year = 2007, booktitle = {EMSOFT}, editor = {Christoph Kirsch and Reinhard Wilhelm}, abstract = {The soundness of device drivers generally cannot be verified in isolation, but has to take into account the reactions of the hardware devices. In critical embedded systems, interfaces often were simple ``volatile'' variables, and the interface specification typically a list of bounds on these variables. Some newer systems use ``intelligent'' controllers that handle dynamic worklists in shared memory and perform direct memory accesses, all asynchronously from the main processor. Thus, it is impossible to truly verify the device driver without taking the intelligent device into account, because incorrect programming of the device can lead to dire consequences, such as memory zones being erased. We have successfully verified a device driver extracted from a critical industrial system, asynchronously combined with a model for a USB OHCI controller. This paper studies this case, as well as introduces a model and analysis techniques for this asynchronous composition.}, ps = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_EMSOFT07.ps.gz}, pdf = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_EMSOFT07.pdf}, pages = {30--36}, organization = {ACM \& IEEE}, isbn = {978-1-59593-825-1}, entrysubtype = {intc}, doi = {10.1145/1289927.1289937} }
@inproceedings{Monniaux_ASIAN06, author = {Patrick Cousot and Radhia Cousot and J\'er\^ome Feret and Laurent Mauborgne and Antoine Min\'e and David Monniaux and Xavier Rival}, title = {Combination of Abstractions in the {ASTR\'EE} Static Analyzer}, booktitle = {Advances in Computer Science --- ASIAN 2006. Secure Software and Related Issues}, pages = {272--300}, year = 2008, number = 4435, series = {Lecture Notes in Computer Science}, publisher = {Springer Verlag}, isbn = {978-3-540-77504-1}, entrysubtype = {intc}, doi = {10.1007/978-3-540-77505-8_23} }
@inproceedings{Monniaux_LPAR08, author = {David Monniaux}, title = {A Quantifier Elimination Algorithm for Linear Real Arithmetic}, booktitle = {LPAR (Logic for Programming Artificial Intelligence and Reasoning)}, year = 2008, series = {Lecture Notes in Computer Science}, publisher = {Springer Verlag}, pages = {243--257}, number = 5330, doi = {10.1007/978-3-540-89439-1_18}, isbn = {978-3-540-89439-1}, pdf = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_LPAR08.pdf}, abstract = {We propose a new quantifier elimination algorithm for the theory of linear real arithmetic. This algorithm uses as subroutines satisfiability modulo this theory and polyhedral projection; there are good algorithms and implementations for both of these. The quantifier elimination algorithm presented in the paper is compared, on examples arising from program analysis problems and on random examples, to several other implementations, all of which cannot solve some of the examples that our algorithm solves easily.}, entrysubtype = {intc}, eprinttype = {arxiv}, eprint = {0803.1575} }
@inproceedings{Monniaux_POPL09, author = {David Monniaux}, title = {Automatic Modular Abstractions for Linear Constraints}, booktitle = {POPL (Principles of programming languages)}, year = 2009, publisher = {ACM}, znote = {The version on the HAL/arXiv sites lacks the links in the bibliography, due to deficiencies in their LaTeX installation.}, pdf = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_POPL09.pdf}, abstract = {We propose a method for automatically generating abstract transformers for static analysis by abstract interpretation. The method focuses on linear constraints on programs operating on rational, real or floating-point variables and containing linear assignments and tests. In addition to loop-free code, the same method also applies for obtaining least fixed points as functions of the precondition, which permits the analysis of loops and recursive functions. Our algorithms are based on new quantifier elimination and symbolic manipulation techniques. Given the specification of an abstract domain, and a program block, our method automatically outputs an implementation of the corresponding abstract transformer. It is thus a form of program transformation. The motivation of our work is data-flow synchronous programming languages, used for building control-command embedded systems, but it also applies to imperative and functional programming.}, doi = {10.1145/1480881.1480899}, isbn = {978-1-60558-379-2}, pages = {140--151}, entrysubtype = {intc}, eprinttype = {arxiv}, eprint = {0811.0166} }
@inproceedings{Monniaux_CAV09, author = {David Monniaux}, title = {On using floating-point computations to help an exact linear arithmetic decision procedure}, booktitle = {Computer-aided verification (CAV)}, year = 2009, series = {Lecture Notes in Computer Science}, publisher = {Springer Verlag}, doi = {10.1007/978-3-642-02658-4_42}, number = 5643, pages = {570--583}, pdf = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_CAV09.pdf}, abstract = {We consider the decision problem for quantifier-free formulas whose atoms are linear inequalities interpreted over the reals or rationals. This problem may be decided using satisfiability modulo theory (SMT), using a mixture of a SAT solver and a simplex-based decision procedure for conjunctions. State-of-the-art SMT solvers use simplex implementations over rational numbers, which perform well for typical problems arising from model-checking and program analysis (sparse inequalities, small coefficients) but are slow for other applications (denser problems, larger coefficients). We propose a simple preprocessing phase that can be adapted to existing SMT solvers and that may be optionally triggered. Despite using floating-point computations, our method is sound and complete --- it merely affects efficiency. We implemented the method and provide benchmarks showing that this change brings a naive and slow decision procedure (``textbook simplex'' with rational numbers) up to the efficiency of recent SMT solvers, over test cases arising from model-checking, and makes it definitely faster than state-of-the-art SMT solvers on dense examples.}, entrysubtype = {intc}, isbn = {978-3-642-02657-7} }
@phdthesis{Monniaux_HDR, author = {David Monniaux}, title = {Analyse statique~: de la th\'eorie \`a la pratique}, school = {Universit\'e Joseph Fourier}, year = 2009, type = {Habilitation \`a diriger les recherches}, address = {Grenoble, France}, month = jul, pdf = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_HDR.pdf}, abstract = {Software operating critical systems (aircraft, nuclear power plants) should not fail --- whereas most computerised systems of daily life (personal computer, ticket vending machines, cell phone) fail from time to time. This is not a simple engineering problem: it is known, since the works of Turing and Cook, that proving that programs work correctly is intrinsically hard. In order to solve this problem, one needs methods that are, at the same time, efficient (moderate costs in time and memory), safe (all possible failures should be found), and precise (few warnings about nonexistent failures). In order to reach a satisfactory compromise between these goals, one can draw on fields as diverse as formal logic, numerical analysis or ``classical'' algorithmics. From 2002 to 2007 I participated in the development of the \soft{Astr\'ee} static analyser. This suggested to me a number of side projects, both theoretical and practical (use of formal proof techniques, analysis of numerical filters...). More recently, I became interested in modular analysis of numerical properties and in the applications to program analysis of constraint solving techniques (semidefinite programming, SAT and SAT modulo theory).}, entrysubtype = {hdr}, eprinttype = {HAL}, eprint = {tel-00397108} }
@article{Monniaux_HOSC09, author = {David Monniaux}, title = {A minimalistic look at widening operators}, journal = {Higher-Order and Symbolic Computation}, year = 2009, month = dec, doi = {10.1007/s10990-009-9046-8}, volume = 22, number = 2, pages = {145--154}, issn = {1388-3690}, oclc = {40529371}, pdf = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_HOSC09.pdf}, abstract = {We consider the problem of formalizing in higher-order logic the familiar notion of widening from abstract interpretation. It turns out that many axioms of widening (e.g. widening sequences are ascending) are not useful for proving correctness. After keeping only useful axioms, we give an equivalent characterization of widening as a lazily constructed well-founded tree. In type systems supporting dependent products and sums, this tree can be made to reflect the condition of correct termination of the widening sequence.}, entrysubtype = {intj} }
@inproceedings{Monniaux_CAV10, author = {David Monniaux}, title = {Quantifier elimination by lazy model enumeration}, booktitle = {Computer-aided verification (CAV)}, year = 2010, series = {Lecture Notes in Computer Science}, number = 6174, pages = {585--599}, publisher = {Springer Verlag}, doi = {10.1007/978-3-642-14295-6_51}, isbn = {3642142958}, isbn13 = {9783642142956}, oclc = {663093895}, pdf = {http://www-verimag.imag.fr/~monniaux/biblio/Monniaux_CAV10.pdf}, abstract = {We propose a quantifier elimination scheme based on nested lazy model enumeration through SMT-solving, and projections. This scheme may be applied to any logic that fulfills certain conditions; we illustrate it for linear real arithmetic. The quantifier elimination problem for linear real arithmetic is doubly exponential in the worst case, and so is our method. We have implemented it and benchmarked it against other methods from the literature.}, entrysubtype = {intc} }
@article{Monniaux_LMCS10, author = {David Monniaux}, title = {Automatic Modular Abstractions for Template Numerical Constraints}, journal = {Logical Methods in Computer Science}, year = 2010, month = jun, abstract = {We propose a method for automatically generating abstract transformers for static analysis by abstract interpretation. The method focuses on linear constraints on programs operating on rational, real or floating-point variables and containing linear assignments and tests. Given the specification of an abstract domain, and a program block, our method automatically outputs an implementation of the corresponding abstract transformer. It is thus a form of program transformation. In addition to loop-free code, the same method also applies for obtaining least fixed points as functions of the precondition, which permits the analysis of loops and recursive functions. The motivation of our work is data-flow synchronous programming languages, used for building control-command embedded systems, but it also applies to imperative and functional programming. Our algorithms are based on quantifier elimination and symbolic manipulation techniques over linear arithmetic formulas. We also give less general results for nonlinear constraints and nonlinear program constructs.}, pdf = {Monniaux_LMCS10.pdf}, zurl = {http://www.lmcs-online.org/ojs/viewarticle.php?id=563}, doi = {10.2168/LMCS-6(3:4)2010}, issn = {1860-5974}, entrysubtype = {intj}, eprinttype = {arxiv}, eprint = {1005.4844} }
@inproceedings{Gawlitza_Monniaux_ESOP11, author = {Thomas Gawlitza and David Monniaux}, title = {Improving Strategies via {SMT} Solving}, booktitle = {ESOP}, year = 2011, doi = {10.1007/978-3-642-19718-5_13}, pages = {236--255}, publisher = {Springer Verlag}, series = {Lecture Notes in Computer Science}, number = 6602, isbn = {978-3-642-19717-8}, editor = {Gilles Barthe}, pdf = {Gawlitza_Monniaux_ESOP11.pdf}, abstract = {We consider the problem of computing numerical invariants of programs by abstract interpretation. Our method eschews two traditional sources of imprecision: (i) the use of widening operators for enforcing convergence within a finite number of iterations (ii) the use of merge operations (often, convex hulls) at the merge points of the control flow graph. It instead computes the least inductive invariant expressible in the domain at a restricted set of program points, and analyzes the rest of the code en bloc. We emphasize that we compute this inductive invariant precisely. For that we extend the strategy improvement algorithm of [Gawlitza and Seidl, 2007]. If we applied their method directly, we would have to solve an exponentially sized system of abstract semantic equations, resulting in memory exhaustion. Instead, we keep the system implicit and discover strategy improvements using SAT modulo real linear arithmetic (SMT). For evaluating strategies we use linear programming. Our algorithm has low polynomial space complexity and performs for contrived examples in the worst case exponentially many strategy improvement steps; this is unsurprising, since we show that the associated abstract reachability problem is $\Pi^p_2$-complete.}, entrysubtype = {intc}, eprinttype = {arxiv}, eprint = {1101.2812} }
@inproceedings{Monniaux_Corbineau_ITP2011, author = {David Monniaux and Pierre Corbineau}, title = {On the Generation of {P}ositivstellensatz Witnesses in Degenerate Cases}, booktitle = {Interactive Theorem Proving (ITP)}, editor = {Van Eekelen, Marko and Geuvers, Herman and Schmaltz, Julien and Wiedijk, Freek}, year = 2011, month = aug, volume = 6898, pages = {249--264}, series = {Lecture Notes in Computer Science}, publisher = {Springer Verlag}, isbn = {978-3-642-22862-9}, pdf = {Monniaux_Corbineau_ITP11.pdf}, abstract = {One can reduce the problem of proving that a polynomial is nonnegative, or more generally of proving that a system of polynomial inequalities has no solutions, to finding polynomials that are sums of squares of polynomials and satisfy some linear equality (\tgerman{Positivstellensatz}). This produces a \emph{witness} for the desired property, from which it is reasonably easy to obtain a formal proof of the property suitable for a proof assistant such as Coq. The problem of finding a witness reduces to a feasibility problem in semidefinite programming, for which there exist numerical solvers. Unfortunately, this problem is in general not strictly feasible, meaning the solution can be a convex set with empty interior, in which case the numerical optimization method fails. Previously published methods thus assumed strict feasibility; we propose a workaround for this difficulty. We implemented our method and illustrate its use with examples, including extractions of proofs to Coq.}, entrysubtype = {intc}, eprinttype = {arxiv}, eprint = {1105.4421}, doi = {10.1007/978-3-642-22863-6_19} }
@inproceedings{Monniaux_Gonnord_SAS11, author = {David Monniaux and Laure Gonnord}, editor = {Eran Yahav}, title = {Using bounded model checking to focus fixpoint iterations}, booktitle = {Static analysis (SAS)}, year = 2011, pdf = {Monniaux_Gonnord_SAS2011.pdf}, pages = {369--385}, publisher = {Springer Verlag}, series = {Lecture Notes in Computer Science}, volume = 6887, abstract = {Two classical sources of imprecision in static analysis by abstract interpretation are widening and merge operations. Merge operations can be done away by distinguishing paths, as in trace partitioning, at the expense of enumerating an exponential number of paths. In this article, we describe how to avoid such systematic exploration by focusing on a single path at a time, designated by SMT-solving. Our method combines well with acceleration techniques, thus doing away with widenings as well in some cases. We illustrate it over the well-known domain of convex polyhedra.}, entrysubtype = {intc}, eprinttype = {arxiv}, eprint = {1106.2637}, doi = {10.1007/978-3-642-23702-7_27}, isbn = {978-3-642-23701-0} }
@inproceedings{Monniaux_LeGuen_NSAD11, author = {David Monniaux and Le Guen, Julien}, title = {Stratified Static Analysis Based on Variable Dependencies}, booktitle = {Third International Workshop on Numerical and Symbolic Abstract Domains}, year = 2011, pdf = {Monniaux_LeGuen_NSAD211.pdf}, abstract = {In static analysis by abstract interpretation, one often uses \emph{widening operators} in order to enforce convergence within finite time to an inductive invariant. Certain widening operators, including the classical one over finite polyhedra, exhibit an unintuitive behavior: analyzing the program over a subset of its variables may lead to a more precise result than analyzing the original program! In this article, we present simple workarounds for such behavior.}, entrysubtype = {intw}, eprinttype = {arxiv}, eprint = {1109.2405}, doi = {10.1016/j.entcs.2012.10.008} }
@inproceedings{Monniaux_Bodin_APLAS11, author = {David Monniaux and Martin Bodin}, title = {Modular Abstractions of Reactive Nodes using Disjunctive Invariants}, booktitle = {Programming Languages and Systems (APLAS)}, year = 2011, pdf = {Monniaux_Bodin_APLAS11.pdf}, abstract = {We wish to abstract nodes in a reactive programming language, such as Lustre, into nodes with a simpler control structure, with a bound on the number of control states. In order to do so, we compute disjunctive invariants in predicate abstraction, with a bounded number of disjuncts, then we abstract the node, each disjunct representing an abstract state. The computation of the disjunctive invariant is performed by a form of quantifier elimination expressed using SMT-solving. The same method can also be used to obtain disjunctive loop invariants.}, pages = {19--33}, doi = {10.1007/978-3-642-25318-8_5}, isbn = {978-3-642-25317-1}, entrysubtype = {intc}, eprinttype = {arxiv}, eprint = {1109.1905} }
@inproceedings{Caminha_Monniaux_PAAR2012, author = {Caminha Barbosa de Oliveira, Diego and Monniaux, David}, title = {Experiments on the feasibility of using a floating-point simplex in an {SMT} solver}, booktitle = {Workshop on Practical Aspects of Automated Reasoning (PAAR)}, year = 2012, entrysubtype = {intw}, abstract = {SMT solvers use simplex-based decision procedures to solve decision problems whose formulas are quantifier-free and atoms are linear constraints over the rationals. State-of-the-art SMT solvers use rational (exact) simplex implementations, which have shown good performance for typical software, hardware or protocol verification problems over the years. Yet, most other scientific and technical fields use (inexact) floating-point computations, which are deemed far more efficient than exact ones. It is therefore tempting to use a floating-point simplex implementation inside an SMT solver, though special precautions must be taken to avoid unsoundness. In this work, we describe experimental results, over common benchmarks (SMT-LIB), of the integration of a mature floating-point implementation of the simplex algorithm (GLPK) into an existing SMT solver (OpenSMT). We investigate whether commonly cited reasons for and against the use of floating-point truly apply to real cases from verification problems.}, pdf = {Caminha_Monniaux_PAAR2012.pdf}, issn = {2040-557X}, publisher = {Easychair}, series = {EPiC Series}, volume = {21}, editor = {Pascal Fontaine and Renate A. Schmidt and Stephan Schulz}, url = {http://www.easychair.org/publications/?page=797429640} }
@inproceedings{Phan_Bjorner_Monniaux_SMT2012, title = {Anatomy of Alternating Quantifier Satisfiability (Work in progress)}, author = {Phan, Anh-Dung and Bj{\o}rner, Nikolaj and Monniaux, David}, booktitle = {10th International Workshop on Satisfiability Modulo Theories (SMT)}, year = 2012, entrysubtype = {intw}, pdf = {http://www-verimag.imag.fr/~monniaux/biblio/Phan_Bjorner_Monniaux_SMT2012.pdf}, abstract = {We report on work in progress to generalize an algorithm recently introduced in for checking satisfiability of formulas with quantifier alternation. The algorithm uses two auxiliary procedures: a procedure for producing a candidate formula for quantifier elimination and a procedure for eliminating or partially eliminating quantifiers. We also apply the algorithm for Presburger Arithmetic formulas and evaluate it on formulas from a model checker for Duration Calculus. We report on experiments on different variants of the auxiliary procedures. So far, there is an edge to applying SMT-TEST proposed in [Monniaux, CAV2010], while we found that a simpler approach which just eliminates quantified variables per round is almost as good. Both approaches offer drastic improvements to applying default quantifier elimination.}, editor = {Pascal Fontaine and Amit Goel}, foreignauthor = yes }
@inproceedings{Henry_Monniaux_Moy_SAS2012, author = {Julien Henry and David Monniaux and Matthieu Moy}, title = {Succinct Representations for Abstract Interpretation}, subtitle = {Combined Analysis Algorithms and Experimental Evaluation}, booktitle = {Static analysis (SAS)}, year = 2012, series = {Lecture Notes in Computer Science}, volume = 7460, pages = {283--299}, publisher = {Springer Verlag}, eprint = {1206.4234}, eprinttype = {arxiv}, entrysubtype = {intc}, doi = {10.1007/978-3-642-33125-1_20}, pdf = {http://www-verimag.imag.fr/~monniaux/biblio/Henry_Monniaux_Moy_SAS2012.pdf}, abstract = {Abstract interpretation techniques can be made more precise by distinguishing paths inside loops, at the expense of possibly exponential complexity. SMT-solving techniques and sparse representations of paths and sets of paths avoid this pitfall. We improve previously proposed techniques for guided static analysis and the generation of disjunctive invariants by combining them with techniques for succinct representations of paths and symbolic representations for transitions based on static single assignment. Because of the non-monotonicity of the results of abstract interpretation with widening operators, it is difficult to conclude that some abstraction is more precise than another based on theoretical local precision results. We thus conducted extensive comparisons between our new techniques and previous ones, on a variety of open-source packages.} }
@inproceedings{Henry_Monniaux_Moy_TAPAS2012, author = {Julien Henry and David Monniaux and Matthieu Moy}, title = {{PAGAI}: a path sensitive static analyzer}, booktitle = {Tools for Automatic Program Analysis (TAPAS)}, year = 2012, editor = {Bertrand Jeannet}, abstract = {We describe the design and the implementation of PAGAI, a new static analyzer working over the LLVM compiler infrastructure, which computes inductive invariants on the numerical variables of the analyzed program. PAGAI implements various state-of-the-art algorithms combining abstract interpretation and decision procedures (SMT-solving), focusing on distinction of paths inside the control flow graph while avoiding systematic exponential enumerations. It is parametric in the abstract domain in use, the iteration algorithm, and the decision procedure. We compared the time and precision of various combinations of analysis algorithms and abstract domains, with extensive experiments both on personal benchmarks and widely available GNU programs.}, entrysubtype = {intw}, pdf = {http://www-verimag.imag.fr/~monniaux/biblio/Henry_Monniaux_Moy_TAPAS2012.pdf}, doi = {10.1016/j.entcs.2012.11.003}, eprint = {1207.3937}, eprinttype = {arXiv} }
@article{Gawlitza_Monniaux_LMCS12, author = {Thomas Gawlitza and David Monniaux}, title = {Invariant Generation through Strategy Iteration in Succinctly Represented Control Flow Graphs}, journal = {Logical Methods in Computer Science}, year = 2012, doi = {10.2168/LMCS-8(3:29)2012}, eprinttype = {arxiv}, eprint = {1209.0643}, month = sep, issn = {1860-5974}, entrysubtype = {intj}, team = {SYNC}, abstract = {We consider the problem of computing numerical invariants of programs, for instance bounds on the values of numerical program variables. More specifically, we study the problem of performing static analysis by abstract interpretation using template linear constraint domains. Such invariants can be obtained by Kleene iterations that are, in order to guarantee termination, accelerated by widening operators. In many cases, however, applying this form of extrapolation leads to invariants that are weaker than the strongest inductive invariant that can be expressed within the abstract domain in use. Another well-known source of imprecision of traditional abstract interpretation techniques stems from their use of join operators at merge nodes in the control flow graph. The mentioned weaknesses may prevent these methods from proving safety properties. The technique we develop in this article addresses both of these issues: contrary to Kleene iterations accelerated by widening operators, it is guaranteed to yield the strongest inductive invariant that can be expressed within the template linear constraint domain in use. It also eschews join operators by distinguishing all paths of loop-free code segments. Formally speaking, our technique computes the least fixpoint within a given template linear constraint domain of a transition relation that is succinctly expressed as an existentially quantified linear real arithmetic formula. In contrast to previously published techniques that rely on quantifier elimination, our algorithm is proved to have optimal complexity: we prove that the decision problem associated with our fixpoint problem is $\Pi^p_2$-complete. Our procedure mimics a $\Pi^p_2$ search.} }
@unpublished{Monniaux_ZTransform, author = {David Monniaux}, title = {Applying the {Z}-transform for the static analysis of floating-point numerical filters}, note = {Draft of a possible extended version of the CAV 2005 paper.}, eprinttype = {arxiv}, eprint = {0706.0252}, year = {2005} }
@article{Monniaux_Quadrature_2012, author = {Monniaux, David}, title = {Introduction \`{a} la calculabilit\'{e}}, journal = {Quadrature}, volume = 86, pages = {17--28}, year = 2012, entrysubtype = {vulg}, abstract = {Summary: Whether or not diophantine equations (that is, polynomial equations with integer coefficients and solutions) have solutions does not seem, a priori, to be related to bugs in computer programs. Computability theory establishes such a link: a classical result is that the problem of finding out whether a system of diophantine equations has a solution is the same as determining whether the execution of a computer program will eventually stop. This article introduces computability theory: the difficulty of defining the class of computable functions (primitive recursive functions are insufficient), Turing machines, the Church thesis, and classical results (Halting problem, Rice's theorem, etc.).}, keywords = {computable functions; turing; diophantine equations; turing machines}, classmath = {*97P20 ( ) 97F60 ( )}, issn = {1142-2785}, zbl = {1254.97018}, pdf = {Monniaux_Quadrature2012.pdf}, team = {SYNC} }
@inproceedings{Fouilhe_et_al_SAS2013, author = {Alexis Fouilh\'e and David Monniaux and Micha\"el P\'erin}, title = {Efficient Generation of Correctness Certificates for the Abstract Domain of Polyhedra}, booktitle = {Static analysis (SAS)}, year = 2013, pdf = {Fouilhe_et_al_SAS_2013.pdf}, eprinttype = {HAL}, eprint = {hal-00806990}, doi = {10.1007/978-3-642-38856-9_19}, entrysubtype = {intc}, isbn = {978-3-642-38855-2}, team = {SYNC,DCS}, abstract = {Polyhedra form an established abstract domain for inferring runtime properties of programs using abstract interpretation. Computations on them need to be certified for the whole static analysis results to be trusted. In this work, we look at how far we can get down the road of a posteriori verification to lower the overhead of certification of the abstract domain of polyhedra. We demonstrate methods for making the cost of inclusion certificate generation negligible. From a performance point of view, our single-representation, constraints-based implementation compares with state-of-the-art implementations.} }
@inproceedings{Braibant_Jourdan_Monniaux_ITP2013, author = {Thomas Braibant and Jacques-Henri Jourdan and David Monniaux}, title = {Implementing hash-consed data structures in {C}oq}, booktitle = {Interactive theorem proving (ITP)}, eprinttype = {HAL}, eprint = {hal-00816672}, doi = {10.1007/978-3-642-39634-2_36}, isbn = {978-3-642-39633-5}, publisher = {Springer Verlag}, entrysubtype = {intc}, series = {Lecture Notes in Computer Science}, volume = 7998, team = {SYNC}, year = 2013, abstract = {We report on three different approaches to use hash-consing in programs certified with the Coq system, using binary decision diagrams (BDD) as running example. The use cases include execution inside Coq, or execution of the extracted OCaml code. There are different trade-offs between faithful use of pristine extracted code, and code that is fine-tuned to make use of OCaml programming constructs not available in Coq. We discuss the possible consequences in terms of performances and guarantees.} }
@inproceedings{Henry_et_al_WCET_LCTES2014, author = {Julien Henry and Mihail Asavoae and David Monniaux and Claire Maiza}, title = {How to compute worst-case execution time by optimization modulo theory and a clever encoding of program semantics}, booktitle = {LCTES}, year = {2014}, pages = {43--52}, doi = {10.1145/2597809.2597817}, editor = {Youtao Zhang and Prasad Kulkarni}, publisher = {ACM}, isbn = {978-1-4503-2877-7}, eprinttype = {HAL}, eprint = {hal-00998138}, entrysubtype = {intc}, abstract = {In systems with hard real-time constraints, it is necessary to compute upper bounds on the worst-case execution time (WCET) of programs; the closer the bound to the real WCET, the better. This is especially the case of synchronous reactive control loops with a fixed clock; the WCET of the loop body must not exceed the clock period. We compute the WCET (or at least a close upper bound thereof) as the solution of an optimization modulo theory problem that takes into account the semantics of the program, in contrast to other methods that compute the longest path whether or not it is feasible according to these semantics. Optimization modulo theory extends satisfiability modulo theory (SMT) to maximization problems. Immediate encodings of WCET problems into SMT yield formulas intractable for all current production-grade solvers; this is inherent to the DPLL(T) approach to SMT implemented in these solvers. By conjoining some appropriate "cuts" to these formulas, we considerably reduce the computation time of the SMT-solver. We experimented with our approach on a variety of control programs, using the OTAWA analyzer both as baseline and as underlying microarchitectural analysis for our analysis, and show notable improvement on the WCET bound on a variety of benchmarks and control programs.}, url = {https://hal.archives-ouvertes.fr/hal-00998138} }
@article{Braibant_Jourdan_Monniaux_JAR2014, year = {2014}, month = jun, issn = {0168-7433}, journal = {Journal of Automated Reasoning}, doi = {10.1007/s10817-014-9306-0}, title = {Implementing and Reasoning About Hash-consed Data Structures in {C}oq}, publisher = {Springer}, keywords = {Coq; Hash-consing; Binary decision diagrams}, author = {Braibant, Thomas and Jourdan, Jacques-Henri and Monniaux, David}, pages = {1--34}, entrysubtype = {intj}, eprinttype = {HAL}, eprint = {hal-00816672}, abstract = {We report on four different approaches to implementing hash-consing in Coq programs. The use cases include execution inside Coq, or execution of the extracted OCaml code. We explore the different trade-offs between faithful use of pristine extracted code, and code that is fine-tuned to make use of OCaml programming constructs not available in Coq. We discuss the possible consequences in terms of performances and guarantees. We use the running example of binary decision diagrams and then demonstrate the generality of our solutions by applying them to other examples of hash-consed data structures.}, url = {https://hal.archives-ouvertes.fr/hal-00816672} }
@inproceedings{Monniaux_Schrammel_SAS2014, author = {David Monniaux and Peter Schrammel}, title = {Speeding Up Logico-Numerical Strategy Iteration}, booktitle = {Static Analysis - 21st International Symposium, {SAS} 2014, Munich, Germany, September 11-13, 2014. Proceedings}, year = {2014}, pages = {253--267}, doi = {10.1007/978-3-319-10936-7_16}, editor = {Markus M{\"{u}}ller{-}Olm and Helmut Seidl}, series = {Lecture Notes in Computer Science}, volume = {8723}, publisher = {Springer}, isbn = {978-3-319-10935-0}, timestamp = {Thu, 13 Nov 2014 04:44:25 +0100}, biburl = {http://dblp.uni-trier.de/rec/bib/conf/sas/2014}, bibsource = {dblp computer science bibliography, http://dblp.org}, entrysubtype = {intc}, eprinttype = {arXiv}, eprint = {1403.2319}, abstract = {We introduce an efficient combination of polyhedral analysis and predicate partitioning. Template polyhedral analysis abstracts numerical variables inside a program by one polyhedron per control location, with a priori fixed directions for the faces. The strongest inductive invariant in such an abstract domain may be computed by upward strategy iteration. If the transition relation includes disjunctions and existential quantifiers (a succinct representation for an exponential set of paths), this invariant can be computed by a combination of strategy iteration and satisfiability modulo theory (SMT) solving. Unfortunately, the above approaches lead to unacceptable space and time costs if applied to a program whose control states have been partitioned according to predicates. We therefore propose a modification of the strategy iteration algorithm where the strategies are stored succinctly, and the linear programs to be solved at each iteration step are simplified according to an equivalence relation. We have implemented the technique in a prototype tool and we demonstrate on a series of examples that the approach performs significantly better than previous strategy iteration techniques.}, url = {https://arxiv.org/abs/1403.2319}, foreignauthor = yes }
@inproceedings{Alberti_Monniaux_SAC-SVT2015, author = {Francesco Alberti and David Monniaux}, title = {Polyhedra to the rescue of array interpolants}, booktitle = {ACM Symposium on Applied Computing, software verification and testing track}, year = 2015, publisher = {ACM}, entrysubtype = {intc}, eprinttype = {HAL}, isbn = {978-1-4503-3196-8}, doi = {10.1145/2695664.2695784}, pages = {1745--1750}, eprint = {hal-01178600}, abstract = {We propose a new approach to the automated verification of the correctness of programs handling arrays. An abstract interpreter supplies auxiliary numeric invariants to an interpolation-based refinement procedure suited to array programs. Experiments show that this combination approach, implemented in an enhanced version of the Booster software model-checker, performs better than the pure interpolation-based approach, at no additional cost.}, zurl = {https://hal.archives-ouvertes.fr/hal-01178600}, foreignauthor = yes }
@inproceedings{Gonnord_Monniaux_Radanne_PLDI2015, author = {Laure Gonnord and David Monniaux and Gabriel Radanne }, title = {Synthesis of ranking functions using extremal counterexamples}, booktitle = {Programming Language Design and Implementation (PLDI)}, year = 2015, organization = {ACM}, isbn = {978-1-4503-3468-6}, pages = {608--618}, doi = {10.1145/2737924.2737976}, eprinttype = {HAL}, entrysubtype = {intc}, eprint = {hal-01144622}, abstract = {We present a complete method for synthesizing lexicographic linear ranking functions (and thus proving termination), supported by inductive invariants, in the case where the transition relation of the program includes disjunctions and existentials (large block encoding of control flow). Previous work would either synthesize a ranking function at every basic block head, not just loop headers, which reduces the scope of programs that may be proved to be terminating, or expand large block transitions including tests into (exponentially many) elementary transitions, prior to computing the ranking function, resulting in a very large global constraint system. In contrast, our algorithm incrementally refines a global linear constraint system according to extremal counterexamples: only constraints that exclude spurious solutions are included. Experiments with our tool Termite show marked performance and scalability improvements compared to other systems.}, url = {https://hal.archives-ouvertes.fr/hal-01144622v1} }
@inproceedings{Monniaux_Alberti_SAS2015, author = {David Monniaux and Francesco Alberti}, title = {A simple abstraction of arrays and maps by program translation}, booktitle = {Static analysis (SAS)}, year = 2015, series = {Lecture Notes in Computer Science}, volume = 9291, publisher = {Springer Verlag}, doi = {10.1007/978-3-662-48288-9}, isbn = {978-3-662-48287-2}, pages = {217--234}, eprinttype = {HAL}, eprint = {hal-01162795}, abstract = {We present an approach for the static analysis of programs handling arrays, with a Galois connection between the semantics of the array program and semantics of purely scalar operations. The simplest way to implement it is by automatic, syntactic transformation of the array program into a scalar program followed by analysis of the scalar program with any static analysis technique (abstract interpretation, acceleration, predicate abstraction, ...). The scalar invariants thus obtained are translated back onto the original program as universally quantified array invariants. We illustrate our approach on a variety of examples, leading to the ``Dutch flag'' algorithm.}, entrysubtype = {intc}, url = {https://hal.archives-ouvertes.fr/hal-01162795}, foreignauthor = yes }
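As an informal illustration of the cell-based translation sketched in the abstract above: the array is summarized by one symbolically chosen index x together with the content a_x at that index, and array accesses become conditional scalar operations. The OCaml fragment below is ours and only conveys the shape of the translation, not the paper's formal construction.

  (* the array a is abstracted, for one distinguished index x, by (x, a_x) *)
  type cell = { x : int; mutable a_x : int }

  (* a.(i) <- v   is translated to:   if i = x then a_x <- v *)
  let abstract_store (c : cell) (i : int) (v : int) : unit =
    if i = c.x then c.a_x <- v

  (* reading a.(i) yields a_x when i = x, and otherwise an arbitrary value,
     here supplied by a caller-provided oracle modeling nondeterminism *)
  let abstract_load (c : cell) (i : int) (unknown : unit -> int) : int =
    if i = c.x then c.a_x else unknown ()

Scalar invariants over (x, a_x) that hold for every choice of x then read back as universally quantified array invariants, as described in the abstract.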
@inproceedings{DBLP:conf/vmcai/KarpenkovMW16, author = {Egor George Karpenkov and David Monniaux and Philipp Wendler}, editor = {Barbara Jobstmann and K. Rustan M. Leino}, title = {Program Analysis with Local Policy Iteration}, booktitle = {Verification, Model Checking, and Abstract Interpretation (VMCAI)}, series = {Lecture Notes in Computer Science}, volume = {9583}, pages = {127--146}, publisher = {Springer Verlag}, year = {2016}, month = jan, doi = {10.1007/978-3-662-49122-5_6}, timestamp = {Tue, 05 Jan 2016 12:40:34 +0100}, biburl = {http://dblp.uni-trier.de/rec/bib/conf/vmcai/KarpenkovMW16}, bibsource = {dblp computer science bibliography, http://dblp.org}, abstract = {We present a new algorithm for deriving numerical invariants that combines the precision of max-policy iteration with the flexibility and scalability of conventional Kleene iterations. It is defined in the Configurable Program Analysis (CPA) framework, thus allowing inter-analysis communication. It uses adjustable-block encoding in order to traverse loop-free program sections, possibly containing branching, without introducing extra abstraction. Our technique operates over any template linear constraint domain, including the interval and octagon domains; templates can also be derived from the program source. The implementation is evaluated on a set of benchmarks from the Software Verification Competition (SV-Comp). It competes favorably with state-of-the-art analyzers.}, entrysubtype = {intc}, eprinttype = {arXiv}, eprint = {1509.03424}, zurl = {https://hal.archives-ouvertes.fr/hal-01255314}, foreignauthor = yes }
@inproceedings{DBLP:conf/vmcai/MarechalFKMP16, author = {Alexandre Mar{\'e}chal and Alexis Fouilh{\'e} and Tim King and David Monniaux and Micha{\"e}l P{\'e}rin}, editor = {Barbara Jobstmann and K. Rustan M. Leino}, title = {Polyhedral Approximation of Multivariate Polynomials Using Handelman's Theorem}, booktitle = {Verification, Model Checking, and Abstract Interpretation (VMCAI)}, series = {Lecture Notes in Computer Science}, volume = 9583, pages = {166--184}, publisher = {Springer Verlag}, year = 2016, month = jan, doi = {10.1007/978-3-662-49122-5_8}, timestamp = {Tue, 05 Jan 2016 12:40:34 +0100}, biburl = {http://dblp.uni-trier.de/rec/bib/conf/vmcai/MarechalFKMP16}, bibsource = {dblp computer science bibliography, http://dblp.org}, abstract = {Convex polyhedra are commonly used in the static analysis of programs to represent over-approximations of sets of reachable states of numerical program variables. When the analyzed programs contain nonlinear instructions, they do not directly map to standard polyhedral operations: some kind of linearization is needed. Convex polyhedra are also used in satisfiability modulo theory solvers which combine a propositional satisfiability solver with a fast emptiness check for polyhedra. Existing decision procedures become expensive when nonlinear constraints are involved: a fast procedure to ensure emptiness of systems of nonlinear constraints is needed. We present a new linearization algorithm based on Handelman's representation of positive polynomials. Given a polyhedron and a polynomial (in)equality, we compute a polyhedron enclosing their intersection as the solution of a parametric linear programming problem. To get a scalable algorithm, we provide several heuristics that guide the construction of the Handelman representation. To ensure the correctness of our polyhedral approximation, our OCaml implementation generates certificates verified by a checker certified in Coq.}, eprinttype = {HAL}, eprint = {hal-01223362}, entrysubtype = {intc}, zurl = {https://hal.archives-ouvertes.fr/hal-01223362} }
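A small worked instance, ours and not taken from the paper, of the kind of linearization that Handelman's representation enables: on the unit box defined by $x \ge 0$, $1-x \ge 0$, $y \ge 0$, $1-y \ge 0$, the degree-2 products of these constraints, namely $xy$, $x(1-y)$, $y(1-x)$ and $(1-x)(1-y)$, are all nonnegative on the box; expanding them yields the linear enclosure
  \[ \max(0,\; x+y-1) \;\le\; xy \;\le\; \min(x,\; y), \]
which a linear abstract domain or a linear-arithmetic solver can then use in place of the nonlinear term $xy$.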
@inproceedings{Monniaux_CASC2016, author = {David Monniaux}, title = {A Survey of Satisfiability Modulo Theory}, booktitle = {Computer Algebra in Scientific Computing (CASC)}, year = 2016, month = sep, volume = 9890, series = {Lecture Notes in Computer Science}, chapter = 26, entrysubtype = {intc}, publisher = {Springer Verlag}, eprinttype = {HAL}, eprint = {hal-01332051}, abstract = {Satisfiability modulo theory (SMT) consists in testing the satisfiability of first-order formulas over linear integer or real arithmetic, or other theories. In this survey, we explain the combination of propositional satisfiability and decision procedures for conjunctions known as DPLL(T), and the alternative ``natural domain'' approaches. We also cover quantifiers, Craig interpolants, polynomial arithmetic, and how SMT solvers are used in automated software analysis.}, zpdf = {https://arxiv.org/pdf/1606.04786v1}, zurl = {https://hal.archives-ouvertes.fr/hal-01332051}, doi = {10.1007/978-3-319-45641-6_26} }
@inproceedings{Monniaux_Gonnord_SAS2016, author = {David Monniaux and Laure Gonnord}, title = {Cell Morphing: From Array Programs to Array-Free {H}orn Clauses}, booktitle = {Static analysis}, year = 2016, month = sep, volume = 9837, series = {Lecture Notes in Computer Science}, publisher = {Springer Verlag}, chapter = 18, entrysubtype = {intc}, eprint = {hal-01206882}, eprinttype = {HAL}, pages = {361--382}, abstract = {Automatically verifying safety properties of programs is hard. Many approaches exist for verifying programs operating on Boolean and integer values (e.g. abstract interpretation, counterexample-guided abstraction refinement using interpolants), but transposing them to array properties has been fraught with difficulties. Our work addresses that issue with a powerful and flexible abstraction that morphs concrete array cells into a finite set of abstract ones. This abstraction is parametric both in precision and in the back-end analysis used. From our programs with arrays, we generate nonlinear Horn clauses over scalar variables only, in a common format with clear and unambiguous logical semantics, for which there exist several solvers. We thus avoid the use of solvers operating over arrays, which are still very immature. Experiments with our prototype VAPHOR show that this approach can prove automatically and without user annotations the functional correctness of several classical examples, including selection sort, bubble sort, insertion sort, as well as examples from literature on array analysis.}, zpdf = {https://hal.archives-ouvertes.fr/hal-01206882v3/document}, zurl = {https://hal.archives-ouvertes.fr/hal-01206882v3} }
@inproceedings{Karpenkov_Monniaux_HVC2016, author = {Egor George Karpenkov and David Monniaux}, title = {Formula Slicing: Inductive Invariants from Preconditions}, booktitle = {Hardware and Software: Verification and Testing (Haifa Verification Conference)}, year = 2016, editor = {Bloem, Roderick and Arbel, Eli}, isbn = {978-3-319-49051-9}, volume = 10028, series = {Lecture Notes in Computer Science}, month = nov, publisher = {Springer Verlag}, entrysubtype = {intc}, abstract = {We propose a ``formula slicing'' method for finding inductive invariants. It is based on the observation that many loops in the program affect only a small part of the memory, and many invariants which were valid before a loop are still valid after. Given a precondition of the loop, obtained from the preceding program fragment, we weaken it until it becomes inductive. The weakening procedure is guided by counterexamples-to-induction given by an SMT solver. Our algorithm applies to programs with arbitrary loop structure, and it computes the strongest invariant in an abstract domain of weakenings of preconditions. We call this algorithm ``formula slicing'', as it effectively performs ``slicing'' on formulas derived from symbolic execution. We evaluate our algorithm on the device driver benchmarks from the International Competition on Software Verification (SV-COMP), and we show that it is competitive with the state-of-the-art verification techniques.}, pdf = {https://arxiv.org/pdf/1609.09288v2} }
@inproceedings{Touzeau_Maiza_Monniaux_JRWRTC2016, author = {Valentin Touzeau and Claire Ma{\"i}za and David Monniaux}, title = {Model Checking of Cache for {WCET} Analysis Refinement}, booktitle = {10th Junior Researcher Workshop on Real-Time Computing}, year = 2016, entrysubtype = {intw}, url = {http://jrwrtc2016.gforge.inria.fr/docs/JRWRTC16_proceedings.pdf} }
@article{Gautron_Monniaux_2016, author = {Virginie Gautron and David Monniaux}, title = {De la surveillance secrète à la prédiction des risques : les dérives du fichage dans le champ de la lutte contre le terrorisme}, journal = {Archives de politique criminelle}, issuetitle = {Terrorismes}, year = 2016, publisher = {A. Pédone}, volume = 38, pages = {123--135}, eprint = {hal-01446359}, eprinttype = {HAL}, entrysubtype = {frj-shs}, isbn = {978-2-233-00808-4}, issn = {0242-5637}, url = {https://hal.archives-ouvertes.fr/hal-01446359} }
@proceedings{VMCAI_2017, editor = {Ahmed Bouajjani and David Monniaux}, title = {Verification, Model Checking, and Abstract Interpretation (VMCAI)}, publisher = {Springer Verlag}, year = 2017, month = jan, series = {Lecture Notes in Computer Science}, volume = 10145, isbn = {978-3-319-52233-3}, entrysubtype = {intc-chair}, doi = {10.1007/978-3-319-52234-0} }
@inproceedings{Touzeau_et_al_CAV2017, author = {Valentin Touzeau and Claire Ma{\"i}za and David Monniaux and Jan Reineke}, title = {Ascertaining Uncertainty for Efficient Exact Cache Analysis}, booktitle = {Computer-aided verification (CAV)}, year = 2017, editor = {Viktor Kuncak and Rupak Majumdar}, publisher = {Springer Verlag}, entrysubtype = {intc}, eprint = {hal-01592048}, eprinttype = {HAL}, zurl = {https://hal.archives-ouvertes.fr/hal-01592048}, foreignauthor = yes }
@inproceedings{Bakhirkin_Monniaux_SAS2017, author = {Alexey Bakhirkin and David Monniaux}, title = {Combining Forward and Backward Abstract Interpretation of {H}orn Clauses}, booktitle = {Static analysis (SAS)}, year = 2017, editor = {Francesco Ranzato}, series = {Lecture Notes in Computer Science}, volume = 10422, publisher = {Springer Verlag}, entrysubtype = {intc}, eprint = {hal-01551447v2}, eprinttype = {HAL}, zurl = {https://hal.archives-ouvertes.fr/hal-01551447v2/} }
@inproceedings{Marechal_Monniaux_Perin_SAS2017, author = {Alexandre Mar{\'e}chal and David Monniaux and Micha{\"e}l P{\'e}rin}, title = {Scalable Minimizing-Operators on Polyhedra via Parametric Linear Programming}, booktitle = {Static analysis (SAS)}, year = 2017, editor = {Francesco Ranzato}, series = {Lecture Notes in Computer Science}, volume = 10422, publisher = {Springer Verlag}, entrysubtype = {intc}, eprint = {hal-01555998}, eprinttype = {HAL}, zurl = {https://hal.archives-ouvertes.fr/hal-01555998} }
@article{Monniaux_Acta_Informatica_2018, title = {On the decidability of the existence of polyhedral invariants in transition systems}, author = {Monniaux, David}, journal = {Acta Informatica}, publisher = {Springer Verlag}, year = 2019, volume = 56, number = 4, pages = {385--389}, pdf = {https://hal.archives-ouvertes.fr/hal-01587125/file/article_Monniaux_computability_polyhedral_abstraction.pdf}, hal_id = {hal-01587125}, hal_version = {v2}, entrysubtype = {intj}, eprint = {1709.04382}, eprinttype = {arxiv}, date = {2018-05-18}, issn = {0001-5903}, doi = {10.1007/s00236-018-0324-y}, url = {https://hal.archives-ouvertes.fr/hal-01587125/} }
@inproceedings{Bakhirkin_Monniaux_SAS2018, author = {Alexey Bakhirkin and David Monniaux}, title = {Extending Constraint-Only Representation of Polyhedra with Boolean Constraints}, entrysubtype = {intc}, booktitle = {Static analysis (SAS)}, year = 2018, isbn = {978-3-319-99724-7}, zurl = {https://hal.archives-ouvertes.fr/hal-01841837}, eprinttype = {HAL}, eprint = {hal-01841837}, pages = {127--145} }
@article{Touzeau:2019:FEA:3302515.3290367, title = {Fast and Exact Analysis for LRU Caches}, author = {Touzeau, Valentin and Maiza, Claire and Monniaux, David and Reineke, Jan}, address = {New York, NY, USA}, journal = {Proc. ACM Program. Lang.}, month = jan, pages = {54:1--54:29}, publisher = {ACM}, entrysubtype = {intj}, volume = {3}, team = {PACSS, SYNC}, year = {2019}, doi = {10.1145/3290367}, issn = {2475-1421}, keywords = {Abstract Interpretation, Cache Analysis, LRU}, url = {https://hal.archives-ouvertes.fr/hal-01908648v2}, foreignauthor = yes }
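For orientation, the OCaml fragment below (ours, purely illustrative) models the concrete behaviour of a single k-way LRU cache set that such analyses reason about; the paper's contribution is the static analysis that classifies accesses as hits or misses without enumerating these concrete states.

  (* one cache set, most recently used block first; an access is a hit iff the
     block is already present, and at most k blocks are kept afterwards *)
  let lru_access (k : int) (set : int list) (b : int) : bool * int list =
    let hit = List.mem b set in
    let others = List.filter (fun x -> x <> b) set in
    let kept = List.filteri (fun i _ -> i < k - 1) others in  (* evict extras *)
    (hit, b :: kept)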
@inproceedings{Boulme_et_al_SYNASC2018, author = {Sylvain Boulm{\'e} and Alexandre Mar{\'e}chal and David Monniaux and Micha{\"e}l P{\'e}rin and Hang Yu}, title = {The Verified Polyhedron Library: an overview}, booktitle = {20th International Symposium on Symbolic and Numeric Algorithms for Scientific Computing (SYNASC 2018)}, year = 2019, editor = {Erika {\'A}brah{\'a}m and Viorel Negru and Dana Petcu and Daniela Zaharie and Tetsuo Ida and Tudor Jebelean and Stephen M. Watt}, pages = {9--17}, publisher = {IEEE Computer Society}, entrysubtype = {intc}, isbn = {978-1-7281-0625-0} }
@inproceedings{Coti_et_al_ICCS2019, author = {Camille Coti and David Monniaux and Hang Yu}, title = {Parallel Parametric Linear Programming Solving, and Application to Polyhedral Computations}, booktitle = {Computational Science – ICCS 2019}, year = 2019, volume = 11540, series = {Lecture Notes in Computer Science}, pages = {566--572}, month = jun, entrysubtype = {intc}, eprint = {1904.06079}, eprinttype = {arXiv}, doi = {10.1007/978-3-030-22750-0_52} }
@inproceedings{Yu_Monniaux_SAS2019, author = {Hang Yu and David Monniaux}, editor = {Bor{-}Yuh Evan Chang}, title = {An Efficient Parametric Linear Programming Solver and Application to Polyhedral Projection}, fbooktitle = {Static Analysis - 26th International Symposium, {SAS} 2019, Porto, Portugal, October 8-11, 2019, Proceedings}, booktitle = {Static analysis (SAS)}, series = {Lecture Notes in Computer Science}, volume = {11822}, pages = {203--224}, publisher = {Springer Verlag}, year = {2019}, entrysubtype = {intc}, zurl = {https://doi.org/10.1007/978-3-030-32304-2\_11}, doi = {10.1007/978-3-030-32304-2_11}, timestamp = {Mon, 07 Oct 2019 12:38:12 +0200}, biburl = {https://dblp.org/rec/bib/conf/sas/YuM19}, bibsource = {dblp computer science bibliography, https://dblp.org} }
@article{Monniaux:2019:CCA:3368192.3366018, author = {Monniaux, David and Touzeau, Valentin}, title = {On the Complexity of Cache Analysis for Different Replacement Policies}, shortjournal = {J. ACM}, journal = {Journal of the ACM}, issue_date = {November 2019}, volume = {66}, number = {6}, month = nov, year = {2019}, issn = {0004-5411}, pages = {41:1--41:22}, articleno = {41}, numpages = {22}, zurl = {http://doi.acm.org.ins2i.bib.cnrs.fr/10.1145/3366018}, doi = {10.1145/3366018}, acmid = {3366018}, publisher = {ACM}, address = {New York, NY, USA}, keywords = {Caches, NP-completeness, PSPACE-completeness, cache analysis, complexity, static analysis}, eprint = {1811.01740}, eprinttype = {arXiv}, entrysubtype = {intj} }
@article{Coti_Monniaux_Yu_2020, author = {Camille Coti and David Monniaux and Hang Yu}, title = {A task-based approach to parallel parametric linear programming solving, and application to polyhedral computations}, journal = {Concurrency and computation: practice and experience}, year = {2021}, volume = 33, issue = 6, pages = {e6050}, doi = {10.1002/cpe.6050}, eprint = {2009.14435}, eprinttype = {arXiv}, entrysubtype = {intj} }
@article{DBLP:journals/pacmpl/SixBM20, author = {Cyril Six and Sylvain Boulm{\'{e}} and David Monniaux}, title = {Certified and efficient instruction scheduling: application to interlocked {VLIW} processors}, journal = {Proc. {ACM} Program. Lang.}, volume = {4}, number = {{OOPSLA}}, pages = {129:1--129:29}, year = {2020}, zurl = {https://doi.org/10.1145/3428197}, doi = {10.1145/3428197}, timestamp = {Wed, 17 Feb 2021 08:54:06 +0100}, biburl = {https://dblp.org/rec/journals/pacmpl/SixBM20.bib}, bibsource = {dblp computer science bibliography, https://dblp.org}, eprinttype = {HAL}, eprint = {hal-02185883}, entrysubtype = {intj} }
@inproceedings{DBLP:conf/sas/BraineGM21, author = {Julien Braine and Laure Gonnord and David Monniaux}, title = {Data Abstraction: {A} General Framework to Handle Program Verification of Data Structures}, booktitle = {Static analysis (SAS)}, series = {Lecture Notes in Computer Science}, volume = {12913}, pages = {215--235}, publisher = {Springer}, year = {2021}, eprinttype = {HAL}, eprint = {hal-03321868}, doi = {10.1007/978-3-030-88806-0_11}, zurl = {https://inria.hal.science/hal-03321868}, entrysubtype = {intc} }
@inproceedings{DBLP:conf/lctrts/MonniauxS21, author = {David Monniaux and Cyril Six}, editor = {J{\"{o}}rg Henkel and Xu Liu}, title = {Simple, light, yet formally verified, global common subexpression elimination and loop-invariant code motion}, booktitle = {{LCTES} '21: 22nd {ACM} {SIGPLAN/SIGBED} International Conference on Languages, Compilers, and Tools for Embedded Systems, Virtual Event, Canada, 22 June, 2021}, pages = {85--96}, publisher = {{ACM}}, year = {2021}, zurl = {https://doi.org/10.1145/3461648.3463850}, doi = {10.1145/3461648.3463850}, timestamp = {Sat, 08 Jan 2022 02:24:39 +0100}, biburl = {https://dblp.org/rec/conf/lctrts/MonniauxS21.bib}, bibsource = {dblp computer science bibliography, https://dblp.org}, entrysubtype = {intc} }
@inproceedings{Monniaux_Boulme_ESOP22, author = {David Monniaux and Sylvain Boulm{\'{e}}}, title = {The Trusted Computing Base of the CompCert Verified Compiler}, booktitle = {European Symposium on Programming Languages and Systems (ESOP '22)}, series = {Lecture Notes in Computer Science}, doi = {10.1007/978-3-030-99336-8_8}, volume = {13240}, pages = {204--233}, publisher = {Springer}, year = {2022}, eprint = {hal-03541595v1}, eprinttype = {HAL}, entrysubtype = {intc} }
@article{Monniaux_FMSD22, title = {The complexity gap in the static analysis of cache accesses grows if procedure calls are added}, author = {Monniaux, David}, eprint = {hal-03545774}, eprinttype = {HAL}, journal = {Formal Methods in System Design}, publisher = {{Springer Verlag}}, year = {2022}, doi = {10.1007/s10703-022-00392-w}, zpdf = {https://hal.archives-ouvertes.fr/hal-03545774/file/Monniaux_stack_cache_complexity_article.pdf}, hal_id = {hal-03545774}, hal_version = {v1}, entrysubtype = {intj} }
@inproceedings{DBLP:conf/arith/MonniauxP22, author = {David Monniaux and Alice Pain}, title = {Formally verified 32- and 64-bit integer division using double-precision floating-point arithmetic}, booktitle = {29th {IEEE} Symposium on Computer Arithmetic, {ARITH} 2022, Lyon, France, September 12-14, 2022}, pages = {128--132}, publisher = {{IEEE}}, year = {2022}, zurl = {https://doi.org/10.1109/ARITH54963.2022.00032}, doi = {10.1109/ARITH54963.2022.00032}, timestamp = {Thu, 29 Dec 2022 15:06:50 +0100}, biburl = {https://dblp.org/rec/conf/arith/MonniauxP22.bib}, bibsource = {dblp computer science bibliography, https://dblp.org}, eprinttype = {HAL}, eprint = {hal-03722203}, entrysubtype = {intc} }
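The general idea can be conveyed by the rough OCaml sketch below, which is ours and not the paper's formally verified implementation: convert the operands to double (exact, since 32-bit integers fit in the 53-bit significand), divide, truncate, then correct the candidate quotient, which can be off by at most one.

  (* preconditions: 0 <= a < 2^32 and 0 < b < 2^32; OCaml native ints are
     63-bit, so the products in the correction step cannot overflow *)
  let udiv32 (a : int) (b : int) : int =
    let q = int_of_float (float_of_int a /. float_of_int b) in
    if q * b > a then q - 1                 (* candidate one too large *)
    else if (q + 1) * b <= a then q + 1     (* candidate one too small *)
    else q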
@techreport{braine:hal-01337140, title = {Verifying Programs with Arrays and Lists}, author = {Braine, Julien and Gonnord, Laure and Monniaux, David}, eprint = {hal-01337140}, eprinttype = {HAL}, type = {Internship report}, institution = {{ENS Lyon}}, year = {2016}, month = jun, keywords = {Abstract Interpretation ; Program Verification}, pdf = {https://hal.archives-ouvertes.fr/hal-01337140/file/rapport.pdf}, hal_id = {hal-01337140}, hal_version = {v1} }
@article{10.1145/3529507, author = {Monniaux, David and Six, Cyril}, title = {Formally Verified Loop-Invariant Code Motion and Assorted Optimizations}, year = {2022}, issue_date = {January 2023}, publisher = {Association for Computing Machinery}, address = {New York, NY, USA}, volume = {22}, number = {1}, issn = {1539-9087}, zurl = {https://doi.org/10.1145/3529507}, doi = {10.1145/3529507}, abstract = {We present an approach for implementing a formally certified loop-invariant code motion optimization by composing an unrolling pass and a formally certified yet efficient global subexpression elimination. This approach is lightweight: each pass comes with a simple and independent proof of correctness. Experiments show the approach significantly narrows the performance gap between the CompCert certified compiler and state-of-the-art optimizing compilers. Our static analysis employs an efficient yet verified hashed set structure, resulting in fast compilation.}, journal = {ACM Trans. Embed. Comput. Syst.}, month = dec, articleno = {3}, numpages = {27}, keywords = {CompCert, common subexpression elimination, Verified compilation}, entrysubtype = {intj}, eprint = {hal-03628646}, eprinttype = {HAL} }
@inproceedings{DBLP:conf/fmcad/VigourouxEMMP22, author = {Thomas Vigouroux and Cristian Ene and David Monniaux and Laurent Mounier and Marie{-}Laure Potet}, editor = {Alberto Griggio and Neha Rungta}, title = {BaxMC: a {CEGAR} approach to Max{\#}SAT}, booktitle = {22nd Formal Methods in Computer-Aided Design, {FMCAD} 2022, Trento, Italy, October 17-21, 2022}, pages = {170--178}, publisher = {{IEEE}}, year = {2022}, zurl = {https://doi.org/10.34727/2022/isbn.978-3-85448-053-2\_23}, doi = {10.34727/2022/ISBN.978-3-85448-053-2_23}, timestamp = {Mon, 13 Feb 2023 21:53:10 +0100}, biburl = {https://dblp.org/rec/conf/fmcad/VigourouxEMMP22.bib}, bibsource = {dblp computer science bibliography, https://dblp.org}, entrysubtype = {intc} }
@inproceedings{DBLP:conf/tap/MonniauxGBL23, author = {David Monniaux and L{\'{e}}o Gourdin and Sylvain Boulm{\'{e}} and Olivier Lebeltel}, editor = {Virgile Prevosto and Cristina Seceleanu}, title = {Testing a Formally Verified Compiler}, booktitle = {Tests and Proofs - 17th International Conference, {TAP} 2023, Leicester, UK, July 18-19, 2023, Proceedings}, series = {Lecture Notes in Computer Science}, volume = {14066}, pages = {40--48}, publisher = {Springer}, year = {2023}, zurl = {https://doi.org/10.1007/978-3-031-38828-6\_3}, doi = {10.1007/978-3-031-38828-6_3}, timestamp = {Fri, 18 Aug 2023 08:45:14 +0200}, biburl = {https://dblp.org/rec/conf/tap/MonniauxGBL23.bib}, bibsource = {dblp computer science bibliography, https://dblp.org}, entrysubtype = {intc}, eprint = {hal-04096390}, eprinttype = {HAL} }
@article{10.1145/3622799, author = {Gourdin, L\'{e}o and Bonneau, Benjamin and Boulm\'{e}, Sylvain and Monniaux, David and B\'{e}rard, Alexandre}, title = {Formally Verifying Optimizations with Block Simulations}, year = 2023, issue_date = {October 2023}, publisher = {Association for Computing Machinery}, address = {New York, NY, USA}, volume = 7, number = {OOPSLA2}, url = {https://doi.org/10.1145/3622799}, doi = {10.1145/3622799}, abstract = {CompCert (ACM Software System Award 2021) is the first industrial-strength compiler with a mechanically checked proof of correctness. Yet, CompCert remains a moderately optimizing C compiler. Indeed, some optimizations of “gcc -O1” such as Lazy Code Motion (LCM) or Strength Reduction (SR) were still missing: developing these efficient optimizations together with their formal proofs remained a challenge. Cyril Six et al. have developed efficient formally verified translation validators for certifying the results of superblock schedulers and peephole optimizations. We revisit and generalize their approach into a framework (integrated into CompCert) able to validate many more optimizations: an enhanced superblock scheduler, but also Dead Code Elimination (DCE), Constant Propagation (CP), and more noticeably, LCM and SR. In contrast to other approaches to translation validation, we co-design our untrusted optimizations and their validators. Our optimizations provide hints, in the forms of invariants or CFG morphisms, that help keep the formally verified validators both simple and efficient. Such designs seem applicable beyond CompCert.}, journal = {Proc. ACM Program. Lang.}, month = oct, articleno = 224, numpages = 30, keywords = {Translation validation, Symbolic execution, Formal verification of compiler optimizations, the Coq proof assistant}, entrysubtype = {intj}, eprint = {hal-04102940}, eprinttype = {HAL} }
@incollection{monniaux:hal-03857312, title = {Completeness in static analysis by abstract interpretation, a personal point of view}, author = {Monniaux, David}, zurl = {https://hal.science/hal-03857312}, booktitle = {{Challenges of Software Verification}}, editor = {Vincenzo Arceri and Agostino Cortesi and Pietro Ferrara and Martina Olliaro}, publisher = {{Springer Singapore}}, year = {2023}, month = jul, zpdf = {https://hal.science/hal-03857312v2/file/Monniaux_completeness_in_static_analysis_article.pdf}, hal_id = {hal-03857312}, hal_version = {v2}, doi = {10.1007/978-981-19-9601-6_6} }