From 6f79a4a921b9d842a973cffc9dcbaa7cebaf4d1e Mon Sep 17 00:00:00 2001
From: Tim Daly
Date: Fri, 9 Mar 2018 21:37:43 -0500
Subject: [PATCH] books/bookvolbib add references
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Goal: Proving Axiom Correct
\index{Myreen, Magnus O.}
\begin{chunk}{axiom.bib}
@article{Myre10,
author = "Myreen, Magnus O.",
title = {{Verified Just-in-Time Compiler on x86}},
journal = "ACM SIGPLAN Notices - POPL'10",
volume = "45",
number = "1",
year = "2010",
pages = "107--118",
abstract =
"This paper presents a method for creating formally correct
just-in-time (JIT) compilers. The tractability of our approach is
demonstrated through, what we believe is the first, verification of a
JIT compiler with respect to a realistic semantics of self-modifying
x86 machine code. Our semantics includes a model of the instruction
cache. Two versions of the verified JIT compiler are presented: one
generates all of the machine code at once, the other one is incremental
i.e. produces code on-demand. All proofs have been performed
inside the HOL4 theorem prover.",
paper = "Myre10.pdf",
keywords = "printed"
}
\end{chunk}
\index{Myreen, Magnus O.}
\index{Gordon, Michael J.C.}
\begin{chunk}{axiom.bib}
@article{Myre09,
author = "Myreen, Magnus O. and Gordon, Michael J.C.",
title = {{Verified LISP Implementations on ARM, x86 and PowerPC}},
journal = "LNCS",
volume = "5674",
pages = "359--374",
year = "2009",
abstract =
"This paper reports on a case study, which we believe is the
first to produce a formally verified end-to-end implementation of a
functional programming language running on commercial processors.
Interpreters for the core of McCarthy’s LISP 1.5 were implemented in
ARM, x86 and PowerPC machine code, and proved to correctly parse,
evaluate and print LISP s-expressions. The proof of evaluation
required working on top of verified implementations of memory
allocation and garbage collection. All proofs are mechanised in the
HOL4 theorem prover.",
paper = "Myre09.pdf",
keywords = "printed"
}
\end{chunk}
\index{Myreen, Magnus O.}
\index{Slind, Konrad}
\index{Gordon, Michael J.C.}
\begin{chunk}{axiom.bib}
@article{Myre09a,
author = "Myreen, Magnus O. and Slind, Konrad and Gordon, Michael J.C.",
title = {{Extensible Proof-Producing Compilation}},
journal = "LNCS",
volume = "5501",
pages = "2--16",
year = "2009",
abstract =
"This paper presents a compiler which produces machine code from
functions defined in the logic of a theorem prover, and at the same
time proves that the generated code executes the source functions.
Unlike previously published work on proof-producing compilation from a
theorem prover, our compiler provides broad support for user-defined
extensions, targets multiple carefully modelled commercial machine
languages, and does not require termination proofs for input
functions. As a case study, the compiler is used to construct verified
interpreters for a small LISP-like language. The compiler has been
implemented in the HOL4 theorem prover.",
paper = "Myre09a.pdf",
keywords = "printed"
}
\end{chunk}
\index{Domipheus}
\begin{chunk}{axiom.bib}
@misc{Domi18,
author = "Domipheus",
title = {{Designing a CPU in VHDL}},
link = "\url{http://labs.domipheus.com/blog/tpu-series-quick-links}",
year = "2018",
abstract = "A VHDL CPU"
}
\end{chunk}

books/axiom.bib  323 +++++++++++++++++++
books/bookvolbib.pamphlet  358 ++++++++++++++++++++++
changelog  2 +
patch  415 ++++++
src/axiomwebsite/patches.html  2 +
5 files changed, 486 insertions(+), 614 deletions(-)
diff --git a/books/axiom.bib b/books/axiom.bib
index 7daa0a6..41cd613 100644
--- a/books/axiom.bib
+++ b/books/axiom.bib
@@ 299,14 +299,14 @@
abstract =
"It is widely recognized that programming languages should oer
features to help structure programs. To achieve this goal, languages
 like Ada , Modula2 , object oriented languages, and functional
 languages have been developed. The structur ing techniques available
+ like Ada , Modula2 , objectoriented languages, and functional
+ languages have been developed. The structuring techniques available
so far (like modules, classes, parametric polymorphism) are still not
enough or not appropriate for some application areas. In symbolic
computation, in particular computer algebra, several problems occur
that are dicult to handle with any existing programming
language. Indeed, nearly all available computer algebra systems suer
 from the fact that the underlying pro gramming language imposes too
+ from the fact that the underlying programming language imposes too
many restrictions.
We propose to develop a language that combines the essential features
@@ 316,8 +316,8 @@
a general purpose programming language. The main innovation will be
the application of sophisticated type systems to the needs of computer
algebra systems. We will demonstrate the capabilities of the language
 by using it to implement a small computer algebra library. This im
 plementation will be compared against a straightforward Lisp
+ by using it to implement a small computer algebra library. This
+ implementation will be compared against a straightforward Lisp
implementation and against existing computer algebra systems. Our
development should have an impact both on the programming languages
world and on the computer algebra world.",
@@ 1767,7 +1767,7 @@ paper = "Brea89.pdf"
which aims to provide key infrastructure for symbolic computation
research. A primary outcome of the project is that we have developed
a new way of combining computer algebra systems using the Symbolic
 Computation Software Compos ability Protocol (SCSCP), in which both
+ Computation Software Composability Protocol (SCSCP), in which both
protocol messages and data are encoded in the OpenMath format. We
describe SCSCP middleware and APIs, outline some implementations for
various Computer Algebra Systems (CAS), and show how SCSCPcompliant
@@ 2054,7 +2054,7 @@ paper = "Brea89.pdf"
year = "1991",
pages = "7485",
abstract =
 "We present algorithms for unification and anti unification in the
+ "We present algorithms for unification and antiunification in the
Calculus of Constructions, where occurrences of free variables (the
variables subject to instantiation) are restricted to higherorder
patterns, a notion investigated for the simplytyped $\lambda$calculus
@@ 2083,11 +2083,11 @@ paper = "Brea89.pdf"
Among the basic views of types we find
the socalled descriptive systems, where types describe properties of
untyped logic programs, and prescriptive systems, where types are
 essential to the meaning of programs. A typical ap plication of
+ essential to the meaning of programs. A typical application of
descriptive types is the approximation of the meaning of a logic
program as a subset of the Herbrand universe on which a predicate
might be true. The value of prescriptive systems lies primarily in
 program devel opment, for example, through early detection of errors
+ program development, for example, through early detection of errors
in programs which manifest themselves as type inconsistencies, or as
added documentation for the intended and legal use of predicates.
@@ 2095,7 +2095,7 @@ paper = "Brea89.pdf"
and type reconstruction, respectively. Type inference is a form of
analysis of untyped logic programs, while type reconstruction attempts
to fill in some omitted type information in typed logic programs and
 generalizes the prob lem of type checking. Even though analogous
+ generalizes the problem of type checking. Even though analogous
problems arise in functional programming, algorithms addressing these
problems are quite different in our setting.
@@ 2606,8 +2606,8 @@ paper = "Brea89.pdf"
Each type equation is accompanied by a pair of conversion functions
that are (at least partial) inverses. We show that so long as the
equational theory satisfies a reasonably permissive syntactic
 constraint, the resulting type system admits a complete type infer
 ence algorithm that produces unique principal types. The main
+ constraint, the resulting type system admits a complete type
+ inference algorithm that produces unique principal types. The main
innovation required in type inference is the replacement of ordinary
free unification by unification in the userspecified equational
theory. The syntactic constraint ensures that the latter is unitary,
@@ 3741,7 +3741,7 @@ paper = "Brea89.pdf"
"A computer program to honestly plot curves y = f(x) must locate
maxima and minima in the domain of the graph. To do so it may have to
solve a classic problem in computation – global optimization.
 Reducing an easy problem to a hard one is usually not an ad vantage,
+ Reducing an easy problem to a hard one is usually not an advantage,
but in fact there is a route to solving both problems if the function
can be evaluated using interval arithmetic. Since some computer
algebra systems supply a version of interval arithmetic, it seems we
@@ 3753,7 +3753,7 @@ paper = "Brea89.pdf"
location of poles, the values at maxima or minima, or the behavior
of a curve at asymptotes) are misrepresented, By “mathematical” we
mean curves like those generally needed in scientific disciplines
 where functions are represented by composi tion of common
+ where functions are represented by composition of common
mathematical operations: rational operations ($+, –, *, /$),
exponential and log, trigonometric functions as well as continuous
and differentiable functions from applied mathematics.",
@@ 7050,7 +7050,7 @@ paper = "Brea89.pdf"
"We introduce simple object calculi that support method override and
object subsumption. We give an untyped calculus, typing rules, and
equational rules. We illustrate the expressiveness of our calculi and
 the pit falls that we avoid.",
+ the pitfalls that we avoid.",
paper = "Abad94a.pdf"
}
@@ 7108,15 +7108,15 @@ paper = "Brea89.pdf"
booktitle = "Proc. Types 2006: Conf. of the Types Project",
year = "2006",
abstract =
 "Proof assistants are complex applications whose develop ment has
+ "Proof assistants are complex applications whose development has
never been properly systematized or documented. This work is a
contribution in this direction, based on our experience with the
 devel opment of Matita: a new interactive theorem prover based—as
+ development of Matita: a new interactive theorem prover based—as
Coq—on the Calculus of Inductive Constructions (CIC). In particular,
we analyze its architecture focusing on the dependencies of its
components, how they implement the main functionalities, and their
degree of reusability. The work is a first attempt to provide a ground
 for a more direct com parison between different systems and to
+ for a more direct comparison between different systems and to
highlight the common functionalities, not only in view of
reusability but also to encourage a more systematic comparison of
different softwares and architectural solutions.",
@@ 7296,7 +7296,7 @@ paper = "Brea89.pdf"
algorithm is bidirectional: given a term in external syntax and a
type expected for the term, it propagates as much typing information
as possible towards the leaves of the term. Traditional
 monodirectional algorithms, instead, proceed in a bottom up way by
+ monodirectional algorithms, instead, proceed in a bottomup way by
inferring the type of a subterm and comparing (unifying) it with the
type expected by its context only at the end. We propose some novel
bidirectional rules for CIC that are particularly effective. Among
@@ 8194,7 +8194,7 @@ paper = "Brea89.pdf"
loop iterations. The talk will then describe our work to generate
firstorder properties of programs with unbounded data structures,
such as arrays. For doing so, we use saturationbased firstorder
 theorem proving and extend first order provers with support for
+ theorem proving and extend firstorder provers with support for
program analysis. Since program analysis requires reasoning in the
combination of firstorder theories of data structures, the talk
also discusses new features in firstorder theorem proving, such as
@@ 8391,10 +8391,10 @@ paper = "Brea89.pdf"
abstract =
"Lists are a pervasive data structure in functional programs. The
generality and simplicity of their structure makes them expensive.
 HindleyMllner type inference and partial evalua tion are all that is
+ HindleyMllner type inference and partial evaluation are all that is
needed to optimise this structure, yielding considerable improvements
in space and time consumption for some interesting programs. This
 framework is applica ble to many data types and their optimised
+ framework is applicable to many data types and their optimised
representations, such as lists and parallel implementations of bags,
or arrays and quadtrees.",
paper = "Hall94.pdf, printed"
@@ 9151,7 +9151,7 @@ paper = "Brea89.pdf"
year = "1997",
abstract =
"Proofs in the Nuprl system, an implementation of a constructive
 type theory, yield “correctbyconstruction” pro grams. In this
+ type theory, yield “correctbyconstruction” programs. In this
paper a new methodology is presented for extracting efficient and
readable programs from inductive proofs. The resulting extracted
programs are in a form suitable for use in hierarchical
@@ 9243,6 +9243,14 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@misc{Domi18,
+ author = "Domipheus",
+ title = {{Designing a CPU in VHDL}},
+ link = "\url{http://labs.domipheus.com/blog/tpuseriesquicklinks}",
+ year = "2018",
+ abstract = "A VHDL CPU"
+}
+
@article{Fill03,
author = "Filliatre, JeanChristophe",
title = {{Verification of NonFunctional Programs using Interpretations
@@ 9313,27 +9321,100 @@ paper = "Brea89.pdf"
keywords = "printed"
}
@article{Myre12,
 author = "Myreen, Magnus O.",
 title = {{Functional Programs: Conversions between Deep and Shallow
 Embeddings}},
+@book{Morg98,
+ author = "Morgan, Carroll",
+ title = {{Programming from Specifcations, 2nd Ed.}},
+ publisher = "Prentice Hall",
+ year = "1998",
+ link =
+"\url{http://www.cse.unsw.edu.au/~carrollm/ProgrammingFromSpecifications.pdf}",
+ paper = "Morg98.pdf"
+}
+
+@article{Myre14,
+ author = "Myreen, Magnus O. and Davis, Jared",
+ title = {{The Reflective Milawa Theorem Prover is Sound}},
+ journal = "LNAI",
+ pages = "421436",
+ year = "2014",
+ abstract =
+ "Milawa is a theorem prover styled after ACL2 but with a small kernel
+ and a powerful reflection mechanism. We have used the HOL4 theorem
+ prover to formalize the logic of Milawa, prove the logic sound, and
+ prove that the source code for the Milawa kernel (2,000 lines of Lisp)
+ is faithful to the logic. Going further, we have combined these
+ results with our previous verification of an x86 machinecode
+ implementation of a Lisp runtime. Our toplevel HOL4 theorem states
+ that when Milawa is run on top of our verified Lisp, it will only
+ print theorem statements that are semantically true. We believe that
+ this toplevel theorem is the most comprehensive formal evidence of a
+ theorem prover’s soundness to date.",
+ paper = "Myre14.pdf",
+ keywords = "printed"
+}
+
+@article{Myre09,
+ author = "Myreen, Magnus O. and Gordon, Michael J.C.",
+ title = {{Verified LISP Implementations on ARM, x86 and PowerPC}},
journal = "LNCS",
 volume = "7406",
 pages = "412417",
 year = "2012",
+ volume = "5674",
+ pages = "359374",
+ year = "2009",
abstract =
 "This paper presents a method which simplifies verification of deeply
 embedded functional programs. We present a technique by which
 proofcertified equations describing the effect of functional
 programs (shallow embeddings) can be automatically extracted from their
 operational semantics. Our method can be used in reverse, i.e. from
 shallow to deep embeddings, and thus for implementing certifying code
 synthesis: we have implemented a tool which maps HOL functions to
 equivalent Lisp functions, for which we have a verified Lisp runtime.
 A key benefit, in both directions, is that the verifier does not need
 to understand the operational semantics that gives meanings to the
 deep embeddings.",
 paper = "Myre12.pdf",
+ "This paper reports on a case study, which we believe is the first
+ to produce a formally verified endtoend implementation of a
+ functional programming language running on commercial
+ processors. Interpreters for the core of McCarthy’s LISP 1.5
+ were implemented in ARM, x86 and PowerPC machine code, and proved
+ to correctly parse, evaluate and print LISP sexpressions. The
+ proof of evaluation required working on top of verified
+ implementations of memory allocation and garbage collection. All
+ proofs are mechanised in the HOL4 theorem prover.",
+ paper = "Myre09.pdf",
+ keywords = "printed"
+}
+
+@article{Myre09a,
+ author = "Myreen, Magnus O. and Slind, Konrad and Gordon, Michael J.C.",
+ title = {{Extensible ProofProducing Compilation}},
+ journal = "LNCS",
+ volume = "5501",
+ pages = "216",
+ year = "2009",
+ abstract =
+ "This paper presents a compiler which produces machine code from
+ functions defined in the logic of a theorem prover, and at the same
+ time proves that the generated code executes the source functions.
+ Unlike previously published work on proofproducing compilation from a
+ theorem prover, our compiler provides broad support for userdefined
+ extensions, targets multiple carefully modelled commercial machine
+ languages, and does not require termination proofs for input
+ functions. As a case study, the compiler is used to construct verified
+ interpreters for a small LISPlike language. The compiler has been
+ implemented in the HOL4 theorem prover.",
+ paper = "Myre09a.pdf",
+ keywords = "printed"
+}
+
+@article{Myre10,
+ author = "Myreen, Magnus O.",
+ title = {{Verified JustInTime Compiler on x86}},
+ journal = "ACM SIGLAN Notices  POPL'10",
+ volume = "45",
+ number = "1",
+ year = "2010",
+ pages = "107118",
+ abstract =
+ "This paper presents a method for creating formally correct justin
+ time (JIT) compilers. The tractability of our approach is demonstrated
+ through, what we believe is the first, verification of a JIT
+ compiler with respect to a realistic semantics of selfmodifying x86
+ machine code. Our semantics includes a model of the instruction
+ cache. Two versions of the verified JIT compiler are presented: one
+ generates all of the machine code at once, the other one is incremental
+ i.e. produces code ondemand. All proofs have been performed
+ inside the HOL4 theorem prover.",
+ paper = "Myre10.pdf",
keywords = "printed"
}
@@ 9367,38 +9448,30 @@ paper = "Brea89.pdf"
keywords = "printed"
}
@article{Myre14,
 author = "Myreen, Magnus O. and Davis, Jared",
 title = {{The Reflective Milawa Theorem Prover is Sound}},
 journal = "LNAI",
 pages = "421436",
 year = "2014",
+@article{Myre12,
+ author = "Myreen, Magnus O.",
+ title = {{Functional Programs: Conversions between Deep and Shallow
+ Embeddings}},
+ journal = "LNCS",
+ volume = "7406",
+ pages = "412417",
+ year = "2012",
abstract =
 "Milawa is a theorem prover styled after ACL2 but with a small kernel
 and a powerful reflection mechanism. We have used the HOL4 theorem
 prover to formalize the logic of Milawa, prove the logic sound, and
 prove that the source code for the Milawa kernel (2,000 lines of Lisp)
 is faithful to the logic. Going further, we have combined these
 results with our previous verification of an x86 machinecode
 implementation of a Lisp runtime. Our toplevel HOL4 theorem states
 that when Milawa is run on top of our verified Lisp, it will only
 print theorem statements that are semantically true. We believe that
 this toplevel theorem is the most comprehensive formal evidence of a
 theorem prover’s soundness to date.",
 paper = "Myre14.pdf",
+ "This paper presents a method which simplifies verification of deeply
+ embedded functional programs. We present a technique by which
+ proofcertified equations describing the effect of functional
+ programs (shallow embeddings) can be automatically extracted from their
+ operational semantics. Our method can be used in reverse, i.e. from
+ shallow to deep embeddings, and thus for implementing certifying code
+ synthesis: we have implemented a tool which maps HOL functions to
+ equivalent Lisp functions, for which we have a verified Lisp runtime.
+ A key benefit, in both directions, is that the verifier does not need
+ to understand the operational semantics that gives meanings to the
+ deep embeddings.",
+ paper = "Myre12.pdf",
keywords = "printed"
}
@book{Morg98,
 author = "Morgan, Carroll",
 title = {{Programming from Specifcations, 2nd Ed.}},
 publisher = "Prentice Hall",
 year = "1998",
 link =
"\url{http://www.cse.unsw.edu.au/~carrollm/ProgrammingFromSpecifications.pdf}",
 paper = "Morg98.pdf"
}

@inproceedings{Talp92,
author = "Talpin, JeanPierre and Jouvelot, Pierre",
title = {{The Type and Effect Discipline}},
@@ 9972,7 +10045,7 @@ paper = "Brea89.pdf"
Statements received at one particular location from other sites fall
into two categories: with or without the qualification ``evidently
 impeccable'', a notion that is methodologi cally precise and
+ impeccable'', a notion that is methodologically precise and
sound. For statements having this quality assessment the evidence may
come from the other site or from the local site itself, but in both
cases it is verified locally. In cases where there is no evidence of
@@ 10591,7 +10664,7 @@ paper = "Brea89.pdf"
pages = "859882",
abstract =
"We define the ‘‘combinatorial part’’ of a Tarski formula in which
 equalities and inequalities are in factored or partially factored
+ equalities and inequalities are in factored or partiallyfactored
form. The combinatorial part of a formula contains only
‘‘monomial inequalities’’,which are sign conditions on monomials. We
give efficient algorithms for answering some basic questions about
@@ 10714,12 +10787,12 @@ paper = "Brea89.pdf"
Deduction in Natural Style}},
abstract =
"The Theorema project aims at integrating computation and deduction in
 a system that can be used by the working scientist for build ing and
 checking mathematical models, including the design and verica tion of
+ a system that can be used by the working scientist for building and
+ checking mathematical models, including the design and verication of
new algorithms. Currently, the system uses the rewrite engine of the
computer algebra system Mathematica for building and combining a
number of automatic/interactive provers (highorder predicatelogic,
 in duction for lists/tuples and natural numbers, etc.) in natural
+ induction for lists/tuples and natural numbers, etc.) in natural
deduction style and in natural language presentation. These provers
can be used for dening and proving properties of mathematical models
and algorithms, while a specially provided ``computing engine'' can
@@ 11776,8 +11849,8 @@ paper = "Brea89.pdf"
theories, which can be related to one another via inclusion and theory
interpretation. IMPS provides relatively large primitive inference
steps to facilitate human control of the deductive process and human
 comprehension of the resulting proofs. An initial theory library con
 taining over a thousand repeatable proofs covers significant portions
+ comprehension of the resulting proofs. An initial theory library
+ containing over a thousand repeatable proofs covers significant portions
of logic, algebra, and analysis and provides some support for modeling
applications in computer science.",
paper = "Farm93a.pdf"
@@ 12863,7 +12936,7 @@ paper = "Brea89.pdf"
case with computer algebra systems and typetheory based interactive
theorem provers such as Coq. We provide some custom instrumentation
inside Coq to support a computer algebra system (CAS) communication
 protocol known as SC SCP. We describe general aspects of viewing
+ protocol known as SCSCP. We describe general aspects of viewing
OpenMath terms produced by a CAS in the calculus of Coq, as well as
viewing pure Coq terms in a simpler type system that is behind OpenMath.",
paper = "Kome11.pdf",
@@ 12993,7 +13066,7 @@ paper = "Brea89.pdf"
publisher = "IEEE Computer Society Press",
year = "1991",
abstract =
 "The HOL system is a powerful tool for proving higher order formulae.
+ "The HOL system is a powerful tool for proving higherorder formulae.
However, proofs have to be performed interactively and only little
automation using tactics is possible. Even though interaction is
desirable to guide major and creative backward proof steps of complex
@@ 13111,7 +13184,7 @@ paper = "Brea89.pdf"
pages = "132",
abstract =
"Industry standard implementations of math.h claim (often without
 formal proof) tight bounds on floating point errors. We demonstrate a
+ formal proof) tight bounds on floatingpoint errors. We demonstrate a
novel static analysis that proves these bounds and verifies the
correctness of these implementations. Our key insight is a reduction
of this verification task to a set of mathematical optimization
@@ 13518,7 +13591,7 @@ paper = "Brea89.pdf"
link = "\url{http://ceurws.org/Vol1010/paper05.pdf}",
year = "2013",
abstract =
 "This research compares the author’s experience in program ming
+ "This research compares the author’s experience in programming
algebra in Haskell and in Agda (currently the former experience is
large, and the latter is small). There are discussed certain hopes and
doubts related to the dependently typed and verified programming of
@@ 13762,7 +13835,7 @@ paper = "Brea89.pdf"
and proofs. This involves inferring information that is often left
implicit in an ordinary mathematical text, and resolving ambiguities
in mathematical expressions. We refer to the process of passing from a
 quasiformal and partially specified expression to a completely
+ quasiformal and partiallyspecified expression to a completely
precise formal one as {\sl elaboration}. We describe an elaboration
algorithms for dependent type theory that has been implemented in the
Lean theorem prover. Lean's elaborator supports higherorder
@@ 15107,7 +15180,7 @@ paper = "Brea89.pdf"
One method is based on formulating real numbers as functions that map
rational tolerances to rational approximations. This approach, which
was developed by constructive mathematicians as a concrete
 formalization of the real numbers, has lead to a surpris ingly
+ formalization of the real numbers, has lead to a surprisingly
successful implementation. The second method formulates real numbers
as potentially infinite sequences of digits, evaluated on demand.
This approach has frequently been advocated by proponents of lazy
@@ 24959,7 +25032,7 @@ paper = "Brea89.pdf"
vibrant and stimulating environment.
After many years of development, a decision was made to rename
 Scratch pad II to Axiom and to release it as a product. Dick and
+ Scratchpad II to Axiom and to release it as a product. Dick and
Robert Sutor were the primary authors of the book {\sl Axiom: The
Scientific Computation System}. In the foreword of the book, written
by David and Gregory Chudnovsky, it is stated that ``The Scratchpad
@@ 25070,8 +25143,8 @@ paper = "Brea89.pdf"
theorem provers, though e±cient, were written in basic programming
languages and on primitive computers. Now there exist more powerful
and mature geometric theorem provers of which some have already been
 published as commercial software. On the other hand, building ef
 fective equations solvers is still at the experimental stage and
+ published as commercial software. On the other hand, building
+ effective equations solvers is still at the experimental stage and
remains for further research and development.",
paper = "Chouxx.pdf"
}
@@ 28023,7 +28096,7 @@ paper = "Brea89.pdf"
fulfill this condition.
Among the methods to split polynomial systems into smaller pieces
 probably the Groebner factor izer method attracted the most
+ probably the Groebner factorizer method attracted the most
theoretical attention, see Czapor ([4, 5]), Davenport ([6]), Melenk, M
̈oller and Neun ([16, 17]) and Gr ̈abe ([13, 14]). General purpose
Computer Algebra Systems (CAS) are well suited for such an approach,
@@ 34388,22 +34461,22 @@ paper = "Brea89.pdf"
provide an entry into the current research literature, but not to present
the most recent research results.
 The first half of the course (taught by me) will deal with basic math
 ematics and algorithms for computer algebra, primarily at the level of
+ The first half of the course (taught by me) will deal with basic
+ mathematics and algorithms for computer algebra, primarily at the level of
arithmetic and elementary abstract algebra, including an introduction
to GCDs and the solution of univariate polynomial equations. This
leads into the second half of the course (taught by Dr Jim Skea) on
 the more advanced problems of polynomial factorization, indefinite in
 tegration, multivariate polynomial equations, etc.
+ the more advanced problems of polynomial factorization, indefinite
+ integration, multivariate polynomial equations, etc.
The first week provides an introduction to the computing aspects
 of computer algebra, and contains almost no mathematics. It is in
 tended to show how the later theory can be implemented for practical
+ of computer algebra, and contains almost no mathematics. It is
+ intended to show how the later theory can be implemented for practical
computation. The second week provides a rapid but superficial survey
of the abstract algebra that is most important for computer algebra.
The next five weeks will build on this abstract basis to do some more
 concrete mathematics in more details, referring back to the basis es
 tablished in the first two weeks as necessary.
+ concrete mathematics in more details, referring back to the basis
+ established in the first two weeks as necessary.
At the end of each set of notes will be exercises, one (or more) of
which will be assessed.",
@@ 34566,7 +34639,7 @@ paper = "Brea89.pdf"
subject to a wide variety of physical forces and interactions is
exceedingly difficult. The construction of a single simulator capable
of dealing with all possible physical processes is completely
 impracti cal and, it seems to us, wrongheaded. Instead, we propose
+ impractical and, it seems to us, wrongheaded. Instead, we propose
to build custom simulators designed for a particular collection of
physical objects, where a particular set of physical phenomena are
involved. For such an approach to be practical, an environment must
@@ 35786,7 +35859,7 @@ paper = "Brea89.pdf"
pages = "157172",
abstract =
"We address the problem of representing mathematical structures in a
 proof assistant which: 1) is based on a type theory with depen dent
+ proof assistant which: 1) is based on a type theory with dependent
types, telescopes and a computational version of Leibniz equality; 2)
implements coercive subtyping, accepting multiple coherent paths
between type families; 3) implements a restricted form of higher order
@@ 35849,7 +35922,7 @@ paper = "Brea89.pdf"
pages = "3346",
abstract =
"We describe how the theory of Groebner bases, an important part
 of computational algebra, can be developed within Martin Lof’s
+ of computational algebra, can be developed within MartinLof’s
type theory. In particular, we aim for an integrated development
of the algorithms for computing Groebner bases: we want to prove,
constructively in type theory, the existence of Groebner bases and
@@ 36253,7 +36326,7 @@ paper = "Brea89.pdf"
year = "2010",
link = "\url{http://www.risc.jku.at/publications/download/risc_4181/synasc_postproceedings.pdf}",
abstract =
 "We present and illustrate a method for the gen eration of the
+ "We present and illustrate a method for the generation of the
termination conditions for nested loops with abrupt termination
statements. The conditions are (firstorder) formulae obtained by
certain transformations of the program text. The loops are treated
@@ 36330,16 +36403,16 @@ paper = "Brea89.pdf"
year = "1998",
pages = "1732",
abstract =
 "We consider geometric theorems that can be stated construc tively by
+ "We consider geometric theorems that can be stated constructively by
introducing points, while each newly introduced point may be
represented in terms of the previously constructed points using
Clifford algebraic operators. To prove a concrete theorem, one first
substitutes the expressions of the dependent points into the
 conclusion Clifford poly nomial to obtain an expression that involves
 only the free points and pa rameters. A termrewriting system is
+ conclusion Clifford polynomial to obtain an expression that involves
+ only the free points and parameters. A termrewriting system is
developed that can simplify such an expression to 0, and thus prove
the theorem. A large class of theorems can be proved effectively in
 this coordinatefree manner. This paper de scribes the method in
+ this coordinatefree manner. This paper describes the method in
detail and reports on our preliminary experiments.",
paper = "Fevr98.pdf"
}
@@ 36357,7 +36430,7 @@ paper = "Brea89.pdf"
functional translation of imperative programs, based on a combination
of the type and effect discipline and monads. Then an incomplete proof
of the specification is built in the Type Theory, whose gaps would
 corre spond to proof obligations. On sequential imperative programs,
+ correspond to proof obligations. On sequential imperative programs,
we get the same proof obligations as those given by FloydHoare
logic. Compared to the latter, our approach also includes functional
constructions in a straightforward way. This work has been
@@ 36516,7 +36589,7 @@ paper = "Brea89.pdf"
robust enough to support a hierarchy comprising a broad variety of
algebraic structures, from types with a choice operator to algebraically
closed fields. Interfaces for the structures enjoy the handiness of a
 classi cal setting, without requiring any axiom. Finally, we show how
+ classical setting, without requiring any axiom. Finally, we show how
externally extensible some of these instances are by discussing a
lemma seminal in defining the discrete logarithm, and a matrix
decomposition problem.",
@@ 36711,7 +36784,7 @@ paper = "Brea89.pdf"
topdown approach of structured programming. The deep requirement of
structured programming, however, is that programming should be based
on welldefined abstractions with clear meaning rather than on
 incidental characteris tics of computing machinery. This requirement
+ incidental characteristics of computing machinery. This requirement
can be met by object oriented programming and, in fact, object
oriented programs may have better structure than programs obtained by
functional decomposition.
@@ 36817,7 +36890,7 @@ paper = "Brea89.pdf"
"We reconsider the wellknown concept of Haskellstyle type classes
within the logical framework of Isabelle. So far, axiomatic type
classes in Isabelle merely account for the logical aspect as
 predicates over types, while the opera tional part is only a
+ predicates over types, while the operational part is only a
convention based on raw overloading. Our more elaborate approach to
constructive type classes provides a seamless integration with
Isabelle locales, which are able to manage both operations and logical
@@ 37035,8 +37108,8 @@ paper = "Brea89.pdf"
link = "\url{http://wwwsop.inria.fr/members/Evelyne.Hubert/publications/sncsp.pdf}",
abstract =
"This is the first in a series of two tutorial articles devoted to
 triangulation decomposition algorithms. The value of these notes
 resides in the uniform presen tation of triangulationdecomposition
+ triangulationdecomposition algorithms. The value of these notes
+ resides in the uniform presentation of triangulationdecomposition
of polynomial and differential radical ideals with detailed proofs of
all the presented results.We emphasize the study of the mathematical
objects manipulated by the algorithms and show their properties in
@@ 37081,7 +37154,7 @@ paper = "Brea89.pdf"
year = "1982",
abstract =
"Are design and use of computer algebra systems disjoint or
 complementary activi ties? Raising and answering this question are
+ complementary activities? Raising and answering this question are
equally controversial, since a clear distinction between languages
features and library facilities is hard to make. Instead of even
attempting to answer this rather academic question it is argued why it
@@ 37418,15 +37491,15 @@ paper = "Brea89.pdf"
As a starting point, we have defined
and formalized a syntax, semantics, type system and specification
 language for MiniMaple . For verification, we automatically trans
 late the (types and specification) annotated MiniMaple program into a
+ language for MiniMaple . For verification, we automatically
+ translate the (types and specification) annotated MiniMaple program into a
behaviorally equivalent program in the intermediate language Why3ML of
the verification tool Why3; from the translated program, Why3
generates verification conditions whose correctness can be proved by
various automated and interactive theorem provers (e.g. Z3 and Coq).
Furthermore, we have defined a denotational semantics of MiniMaple and
its specification language and proved the soundness of the translation
 with re spect to the operational semantics of Why3ML. Finally, we
+ with respect to the operational semantics of Why3ML. Finally, we
discuss the application of our verification framework to the Maple
package DifferenceDifferential developed at our institute to compute
bivariate differencedifferential dimension polynomials using relative
@@ 37904,7 +37977,7 @@ paper = "Brea89.pdf"
pages = "152168",
abstract =
"A notion of dependent coercion is introduced and studied in the
 context of depen dent type theories. It extends our earlier work on
+ context of dependent type theories. It extends our earlier work on
coercive subtyping into a uniform framework which increases the
expressive power with new applications. A dependent coercion
introduces a subtyping relation between a type and a family of types
@@ 38354,7 +38427,7 @@ paper = "Brea89.pdf"
then the system should be able to suggest possible next steps.
The key idea of Lucas Interpretation is to compute the steps of a
 calculation following a pro gram written in a novel CTPbased
+ calculation following a program written in a novel CTPbased
programming language, i.e. computation provides the next steps. User
guidance is generated by combining deduction and computation: the
latter is performed by a specific language interpreter, which works
@@ 38391,8 +38464,8 @@ paper = "Brea89.pdf"
levels from highschool to university.
Starting from prototypes of TPbased educational mathematics systems,
 conceptual founda tions are considered: TPbased software which
 implements reasoning as an essential part of math ematical thinking
+ conceptual foundations are considered: TPbased software which
+ implements reasoning as an essential part of mathematical thinking
technology. Educational objectives for the development of TPbased
systems are discussed and concluded with some predictions on possible
impact of TPbased educational mathematics assistants.
@@ 38543,7 +38616,7 @@ paper = "Brea89.pdf"
pages = "771775",
year = "1992",
abstract =
 "We exhibit a methodology for formulating and verifying meta theorems
+ "We exhibit a methodology for formulating and verifying metatheorems
about deductive systems in the Elf language, an implementation of the
LF Logical Framework with an operational semantics in the spirit of
logic programming. It is based on the mechanical verification of
@@ 38551,7 +38624,7 @@ paper = "Brea89.pdf"
reconstruction and schemachecking. The latter is justified by
induction principles for closed LF objects, which can be constructed
over a given signature. We illustrate our technique through several
 examples, the most extensive of which is an interpre tation of
+ examples, the most extensive of which is an interpretation of
classical logic in minimal logic through a continuationpassingstyle
transformation on proofs.",
paper = "Pfen92b.pdf"
@@ 38622,9 +38695,9 @@ paper = "Brea89.pdf"
link =
"\url{http://www.risc.jku.at/publications/download/risc_3913/PopJeb.pdf}",
abstract =
 "We present a method for verifying recursive functional pro grams. We
+ "We present a method for verifying recursive functional programs. We
define a Verification Condition Generator (VCG) which covers the most
 frequent types of recursive programs. These programs may op erate on
+ frequent types of recursive programs. These programs may operate on
arbitrary domains. Soundness and Completeness of the VCG are proven on
the meta level, and this provides a warranty that any system based on
our results will be sound.",
@@ 39075,7 +39148,7 @@ paper = "Brea89.pdf"
modules) that have rigorously specified but very abstract
interfaces. We focus on the combination of the functorbased
programming style with software engineering principles in large
 development projects. The gen erated target code is highly efficient
+ development projects. The generated target code is highly efficient
and can be easily embedded into foreign application environments.",
paper = "Schr00.pdf"
}
@@ 39235,7 +39308,7 @@ paper = "Brea89.pdf"
pages = "151185",
abstract =
"This article provides an overview of current work on universitylevel
 computer assisted instruction at Stanford University. Brief
+ computerassisted instruction at Stanford University. Brief
descriptions are given of the main areas of current interest. The
main emphasis is on the courses now being used: Introduction to Logic,
Axiomatic Set Theory, Old Church Slavonic, History of the Russian
@@ 39843,11 +39916,11 @@ paper = "Brea89.pdf"
proof and publication, to archival and sharing of data and code.
OpenDreamKit will be built out of a sustainable ecosystem of
 communitydeveloped open software, databases, and ser vices,
+ communitydeveloped open software, databases, and services,
including popular tools such as LINBOX, MPIR, SAGE (sagemath.org),
GAP, PARI/GP, LMFDB, and SINGULAR. We will extend the JUPYTER Notebook
environment to provide a flexible user interface. By improving and
 unifying existing build ing blocks, OpenDreamKit will maximise both
+ unifying existing building blocks, OpenDreamKit will maximise both
sustainability and impact, with beneficiaries extending to scientific
computing, physics, chemistry, biology and more, and including
researchers, teachers, and industrial practitioners.
diff git a/books/bookvolbib.pamphlet b/books/bookvolbib.pamphlet
index 683d337..dc68b61 100644
 a/books/bookvolbib.pamphlet
+++ b/books/bookvolbib.pamphlet
@@ 634,14 +634,14 @@ paragraph for those unfamiliar with the terms.
abstract =
"It is widely recognized that programming languages should oer
features to help structure programs. To achieve this goal, languages
 like Ada , Modula2 , object oriented languages, and functional
 languages have been developed. The structur ing techniques available
+ like Ada , Modula2 , objectoriented languages, and functional
+ languages have been developed. The structuring techniques available
so far (like modules, classes, parametric polymorphism) are still not
enough or not appropriate for some application areas. In symbolic
computation, in particular computer algebra, several problems occur
that are dicult to handle with any existing programming
language. Indeed, nearly all available computer algebra systems suer
 from the fact that the underlying pro gramming language imposes too
+ from the fact that the underlying programming language imposes too
many restrictions.
We propose to develop a language that combines the essential features
@@ 651,8 +651,8 @@ paragraph for those unfamiliar with the terms.
a general purpose programming language. The main innovation will be
the application of sophisticated type systems to the needs of computer
algebra systems. We will demonstrate the capabilities of the language
 by using it to implement a small computer algebra library. This im
 plementation will be compared against a straightforward Lisp
+ by using it to implement a small computer algebra library. This
+ implementation will be compared against a straightforward Lisp
implementation and against existing computer algebra systems. Our
development should have an impact both on the programming languages
world and on the computer algebra world.",
@@ 2551,7 +2551,7 @@ paper = "Brea89.pdf"
which aims to provide key infrastructure for symbolic computation
research. A primary outcome of the project is that we have developed
a new way of combining computer algebra systems using the Symbolic
 Computation Software Compos ability Protocol (SCSCP), in which both
+ Computation Software Composability Protocol (SCSCP), in which both
protocol messages and data are encoded in the OpenMath format. We
describe SCSCP middleware and APIs, outline some implementations for
various Computer Algebra Systems (CAS), and show how SCSCPcompliant
@@ 2944,7 +2944,7 @@ paper = "Brea89.pdf"
year = "1991",
pages = "7485",
abstract =
 "We present algorithms for unification and anti unification in the
+ "We present algorithms for unification and antiunification in the
Calculus of Constructions, where occurrences of free variables (the
variables subject to instantiation) are restricted to higherorder
patterns, a notion investigated for the simplytyped $\lambda$calculus
@@ 2977,11 +2977,11 @@ paper = "Brea89.pdf"
Among the basic views of types we find
the socalled descriptive systems, where types describe properties of
untyped logic programs, and prescriptive systems, where types are
 essential to the meaning of programs. A typical ap plication of
+ essential to the meaning of programs. A typical application of
descriptive types is the approximation of the meaning of a logic
program as a subset of the Herbrand universe on which a predicate
might be true. The value of prescriptive systems lies primarily in
 program devel opment, for example, through early detection of errors
+ program development, for example, through early detection of errors
in programs which manifest themselves as type inconsistencies, or as
added documentation for the intended and legal use of predicates.
@@ 2989,7 +2989,7 @@ paper = "Brea89.pdf"
and type reconstruction, respectively. Type inference is a form of
analysis of untyped logic programs, while type reconstruction attempts
to fill in some omitted type information in typed logic programs and
 generalizes the prob lem of type checking. Even though analogous
+ generalizes the problem of type checking. Even though analogous
problems arise in functional programming, algorithms addressing these
problems are quite different in our setting.
@@ 3634,8 +3634,8 @@ paper = "Brea89.pdf"
Each type equation is accompanied by a pair of conversion functions
that are (at least partial) inverses. We show that so long as the
equational theory satisfies a reasonably permissive syntactic
 constraint, the resulting type system admits a complete type infer
 ence algorithm that produces unique principal types. The main
+ constraint, the resulting type system admits a complete type
+ inference algorithm that produces unique principal types. The main
innovation required in type inference is the replacement of ordinary
free unification by unification in the userspecified equational
theory. The syntactic constraint ensures that the latter is unitary,
@@ 5270,7 +5270,7 @@ when shown in factored form.
"A computer program to honestly plot curves y = f(x) must locate
maxima and minima in the domain of the graph. To do so it may have to
solve a classic problem in computation – global optimization.
 Reducing an easy problem to a hard one is usually not an ad vantage,
+ Reducing an easy problem to a hard one is usually not an advantage,
but in fact there is a route to solving both problems if the function
can be evaluated using interval arithmetic. Since some computer
algebra systems supply a version of interval arithmetic, it seems we
@@ 5282,7 +5282,7 @@ when shown in factored form.
location of poles, the values at maxima or minima, or the behavior
of a curve at asymptotes) are misrepresented, By “mathematical” we
mean curves like those generally needed in scientific disciplines
 where functions are represented by composi tion of common
+ where functions are represented by composition of common
mathematical operations: rational operations ($+, –, *, /$),
exponential and log, trigonometric functions as well as continuous
and differentiable functions from applied mathematics.",
@@ 9571,7 +9571,7 @@ when shown in factored form.
"We introduce simple object calculi that support method override and
object subsumption. We give an untyped calculus, typing rules, and
equational rules. We illustrate the expressiveness of our calculi and
 the pit falls that we avoid.",
+ the pitfalls that we avoid.",
paper = "Abad94a.pdf"
}
@@ 9649,15 +9649,15 @@ when shown in factored form.
booktitle = "Proc. Types 2006: Conf. of the Types Project",
year = "2006",
abstract =
 "Proof assistants are complex applications whose develop ment has
+ "Proof assistants are complex applications whose development has
never been properly systematized or documented. This work is a
contribution in this direction, based on our experience with the
 devel opment of Matita: a new interactive theorem prover based—as
+ development of Matita: a new interactive theorem prover based—as
Coq—on the Calculus of Inductive Constructions (CIC). In particular,
we analyze its architecture focusing on the dependencies of its
components, how they implement the main functionalities, and their
degree of reusability. The work is a first attempt to provide a ground
 for a more direct com parison between different systems and to
+ for a more direct comparison between different systems and to
highlight the common functionalities, not only in view of
reusability but also to encourage a more systematic comparison of
different softwares and architectural solutions.",
@@ 9889,7 +9889,7 @@ when shown in factored form.
algorithm is bidirectional: given a term in external syntax and a
type expected for the term, it propagates as much typing information
as possible towards the leaves of the term. Traditional
 monodirectional algorithms, instead, proceed in a bottom up way by
+ monodirectional algorithms, instead, proceed in a bottomup way by
inferring the type of a subterm and comparing (unifying) it with the
type expected by its context only at the end. We propose some novel
bidirectional rules for CIC that are particularly effective. Among
@@ 10985,7 +10985,7 @@ when shown in factored form.
loop iterations. The talk will then describe our work to generate
firstorder properties of programs with unbounded data structures,
such as arrays. For doing so, we use saturationbased firstorder
 theorem proving and extend first order provers with support for
+ theorem proving and extend firstorder provers with support for
program analysis. Since program analysis requires reasoning in the
combination of firstorder theories of data structures, the talk
also discusses new features in firstorder theorem proving, such as
@@ 11225,10 +11225,10 @@ when shown in factored form.
abstract =
"Lists are a pervasive data structure in functional programs. The
generality and simplicity of their structure makes them expensive.
 HindleyMllner type inference and partial evalua tion are all that is
+ HindleyMllner type inference and partial evaluation are all that is
needed to optimise this structure, yielding considerable improvements
in space and time consumption for some interesting programs. This
 framework is applica ble to many data types and their optimised
+ framework is applicable to many data types and their optimised
representations, such as lists and parallel implementations of bags,
or arrays and quadtrees.",
paper = "Hall94.pdf, printed"
@@ 12163,7 +12163,7 @@ when shown in factored form.
year = "1997",
abstract =
"Proofs in the Nuprl system, an implementation of a constructive
 type theory, yield “correctbyconstruction” pro grams. In this
+ type theory, yield “correctbyconstruction” programs. In this
paper a new methodology is presented for extracting efficient and
readable programs from inductive proofs. The resulting extracted
programs are in a form suitable for use in hierarchical
@@ 12266,6 +12266,17 @@ when shown in factored form.
\end{chunk}
+\begin{chunk}{axiom.bib}
+@misc{Domi18,
+ author = "Domipheus",
+ title = {{Designing a CPU in VHDL}},
+ link = "\url{http://labs.domipheus.com/blog/tpu-series-quick-links}",
+ year = "2018",
+ abstract = "A VHDL CPU"
+}
+
+\end{chunk}
+
\index{Filliatre, JeanChristophe}
\begin{chunk}{axiom.bib}
@article{Fill03,
@@ 12349,29 +12360,122 @@ when shown in factored form.
\end{chunk}
+\index{Morgan, Carroll}
+\begin{chunk}{axiom.bib}
+@book{Morg98,
+ author = "Morgan, Carroll",
+ title = {{Programming from Specifications, 2nd Ed.}},
+ publisher = "Prentice Hall",
+ year = "1998",
+ link =
+"\url{http://www.cse.unsw.edu.au/~carrollm/ProgrammingFromSpecifications.pdf}",
+ paper = "Morg98.pdf"
+}
+
+\end{chunk}
+
\index{Myreen, Magnus O.}
+\index{Davis, Jared}
\begin{chunk}{axiom.bib}
@article{Myre12,
 author = "Myreen, Magnus O.",
 title = {{Functional Programs: Conversions between Deep and Shallow
 Embeddings}},
+@article{Myre14,
+ author = "Myreen, Magnus O. and Davis, Jared",
+ title = {{The Reflective Milawa Theorem Prover is Sound}},
+ journal = "LNAI",
+ pages = "421436",
+ year = "2014",
+ abstract =
+ "Milawa is a theorem prover styled after ACL2 but with a small kernel
+ and a powerful reflection mechanism. We have used the HOL4 theorem
+ prover to formalize the logic of Milawa, prove the logic sound, and
+ prove that the source code for the Milawa kernel (2,000 lines of Lisp)
+ is faithful to the logic. Going further, we have combined these
+ results with our previous verification of an x86 machinecode
+ implementation of a Lisp runtime. Our toplevel HOL4 theorem states
+ that when Milawa is run on top of our verified Lisp, it will only
+ print theorem statements that are semantically true. We believe that
+ this toplevel theorem is the most comprehensive formal evidence of a
+ theorem prover’s soundness to date.",
+ paper = "Myre14.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Myreen, Magnus O.}
+\index{Gordon, Michael J.C.}
+\begin{chunk}{axiom.bib}
+@article{Myre09,
+ author = "Myreen, Magnus O. and Gordon, Michael J.C.",
+ title = {{Verified LISP Implementations on ARM, x86 and PowerPC}},
journal = "LNCS",
 volume = "7406",
 pages = "412417",
 year = "2012",
+ volume = "5674",
+ pages = "359374",
+ year = "2009",
abstract =
 "This paper presents a method which simplifies verification of deeply
 embedded functional programs. We present a technique by which
 proofcertified equations describing the effect of functional
 programs (shallow embeddings) can be automatically extracted from their
 operational semantics. Our method can be used in reverse, i.e. from
 shallow to deep embeddings, and thus for implementing certifying code
 synthesis: we have implemented a tool which maps HOL functions to
 equivalent Lisp functions, for which we have a verified Lisp runtime.
 A key benefit, in both directions, is that the verifier does not need
 to understand the operational semantics that gives meanings to the
 deep embeddings.",
 paper = "Myre12.pdf",
+ "This paper reports on a case study, which we believe is the first
+ to produce a formally verified endtoend implementation of a
+ functional programming language running on commercial
+ processors. Interpreters for the core of McCarthy’s LISP 1.5
+ were implemented in ARM, x86 and PowerPC machine code, and proved
+ to correctly parse, evaluate and print LISP sexpressions. The
+ proof of evaluation required working on top of verified
+ implementations of memory allocation and garbage collection. All
+ proofs are mechanised in the HOL4 theorem prover.",
+ paper = "Myre09.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Myreen, Magnus O.}
+\index{Slind, Konrad}
+\index{Gordon, Michael J.C.}
+\begin{chunk}{axiom.bib}
+@article{Myre09a,
+ author = "Myreen, Magnus O. and Slind, Konrad and Gordon, Michael J.C.",
+ title = {{Extensible ProofProducing Compilation}},
+ journal = "LNCS",
+ volume = "5501",
+ pages = "216",
+ year = "2009",
+ abstract =
+ "This paper presents a compiler which produces machine code from
+ functions defined in the logic of a theorem prover, and at the same
+ time proves that the generated code executes the source functions.
+ Unlike previously published work on proofproducing compilation from a
+ theorem prover, our compiler provides broad support for userdefined
+ extensions, targets multiple carefully modelled commercial machine
+ languages, and does not require termination proofs for input
+ functions. As a case study, the compiler is used to construct verified
+ interpreters for a small LISPlike language. The compiler has been
+ implemented in the HOL4 theorem prover.",
+ paper = "Myre09a.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Myreen, Magnus O.}
+\begin{chunk}{axiom.bib}
+@article{Myre10,
+ author = "Myreen, Magnus O.",
+ title = {{Verified JustInTime Compiler on x86}},
+ journal = "ACM SIGPLAN Notices - POPL'10",
+ volume = "45",
+ number = "1",
+ year = "2010",
+ pages = "107118",
+ abstract =
+ "This paper presents a method for creating formally correct justin
+ time (JIT) compilers. The tractability of our approach is demonstrated
+ through, what we believe is the first, verification of a JIT
+ compiler with respect to a realistic semantics of selfmodifying x86
+ machine code. Our semantics includes a model of the instruction
+ cache. Two versions of the verified JIT compiler are presented: one
+ generates all of the machine code at once, the other one is incremental
+ i.e. produces code ondemand. All proofs have been performed
+ inside the HOL4 theorem prover.",
+ paper = "Myre10.pdf",
keywords = "printed"
}
@@ 12412,48 +12516,34 @@ when shown in factored form.
\end{chunk}

\index{Myreen, Magnus O.}
\index{Davis, Jared}
\begin{chunk}{axiom.bib}
@article{Myre14,
 author = "Myreen, Magnus O. and Davis, Jared",
 title = {{The Reflective Milawa Theorem Prover is Sound}},
 journal = "LNAI",
 pages = "421436",
 year = "2014",
+@article{Myre12,
+ author = "Myreen, Magnus O.",
+ title = {{Functional Programs: Conversions between Deep and Shallow
+ Embeddings}},
+ journal = "LNCS",
+ volume = "7406",
+ pages = "412417",
+ year = "2012",
abstract =
 "Milawa is a theorem prover styled after ACL2 but with a small kernel
 and a powerful reflection mechanism. We have used the HOL4 theorem
 prover to formalize the logic of Milawa, prove the logic sound, and
 prove that the source code for the Milawa kernel (2,000 lines of Lisp)
 is faithful to the logic. Going further, we have combined these
 results with our previous verification of an x86 machinecode
 implementation of a Lisp runtime. Our toplevel HOL4 theorem states
 that when Milawa is run on top of our verified Lisp, it will only
 print theorem statements that are semantically true. We believe that
 this toplevel theorem is the most comprehensive formal evidence of a
 theorem prover’s soundness to date.",
 paper = "Myre14.pdf",
+ "This paper presents a method which simplifies verification of deeply
+ embedded functional programs. We present a technique by which
+ proofcertified equations describing the effect of functional
+ programs (shallow embeddings) can be automatically extracted from their
+ operational semantics. Our method can be used in reverse, i.e. from
+ shallow to deep embeddings, and thus for implementing certifying code
+ synthesis: we have implemented a tool which maps HOL functions to
+ equivalent Lisp functions, for which we have a verified Lisp runtime.
+ A key benefit, in both directions, is that the verifier does not need
+ to understand the operational semantics that gives meanings to the
+ deep embeddings.",
+ paper = "Myre12.pdf",
keywords = "printed"
}
\end{chunk}
\index{Morgan, Carroll}
\begin{chunk}{axiom.bib}
@book{Morg98,
 author = "Morgan, Carroll",
 title = {{Programming from Specifcations, 2nd Ed.}},
 publisher = "Prentice Hall",
 year = "1998",
 link =
"\url{http://www.cse.unsw.edu.au/~carrollm/ProgrammingFromSpecifications.pdf}",
 paper = "Morg98.pdf"
}

\end{chunk}

\index{Talpin, JeanPierre}
\index{Jouvelot, Pierre}
\begin{chunk}{axiom.bib}
@@ 13195,7 +13285,7 @@ when shown in factored form.
Statements received at one particular location from other sites fall
into two categories: with or without the qualification ``evidently
 impeccable'', a notion that is methodologi cally precise and
+ impeccable'', a notion that is methodologically precise and
sound. For statements having this quality assessment the evidence may
come from the other site or from the local site itself, but in both
cases it is verified locally. In cases where there is no evidence of
@@ 13967,7 +14057,7 @@ when shown in factored form.
pages = "859882",
abstract =
"We define the ‘‘combinatorial part’’ of a Tarski formula in which
 equalities and inequalities are in factored or partially factored
+ equalities and inequalities are in factored or partiallyfactored
form. The combinatorial part of a formula contains only
‘‘monomial inequalities’’,which are sign conditions on monomials. We
give efficient algorithms for answering some basic questions about
@@ 14146,12 +14236,12 @@ when shown in factored form.
Deduction in Natural Style}},
abstract =
"The Theorema project aims at integrating computation and deduction in
 a system that can be used by the working scientist for build ing and
 checking mathematical models, including the design and verica tion of
+ a system that can be used by the working scientist for building and
+ checking mathematical models, including the design and verification of
new algorithms. Currently, the system uses the rewrite engine of the
computer algebra system Mathematica for building and combining a
number of automatic/interactive provers (highorder predicatelogic,
 in duction for lists/tuples and natural numbers, etc.) in natural
+ induction for lists/tuples and natural numbers, etc.) in natural
deduction style and in natural language presentation. These provers
can be used for dening and proving properties of mathematical models
and algorithms, while a specially provided ``computing engine'' can
@@ 15552,8 +15642,8 @@ when shown in factored form.
theories, which can be related to one another via inclusion and theory
interpretation. IMPS provides relatively large primitive inference
steps to facilitate human control of the deductive process and human
 comprehension of the resulting proofs. An initial theory library con
 taining over a thousand repeatable proofs covers significant portions
+ comprehension of the resulting proofs. An initial theory library
+ containing over a thousand repeatable proofs covers significant portions
of logic, algebra, and analysis and provides some support for modeling
applications in computer science.",
paper = "Farm93a.pdf"
@@ 16937,7 +17027,7 @@ when shown in factored form.
case with computer algebra systems and typetheory based interactive
theorem provers such as Coq. We provide some custom instrumentation
inside Coq to support a computer algebra system (CAS) communication
 protocol known as SC SCP. We describe general aspects of viewing
+ protocol known as SCSCP. We describe general aspects of viewing
OpenMath terms produced by a CAS in the calculus of Coq, as well as
viewing pure Coq terms in a simpler type system that is behind OpenMath.",
paper = "Kome11.pdf",
@@ 17105,7 +17195,7 @@ when shown in factored form.
publisher = "IEEE Computer Society Press",
year = "1991",
abstract =
 "The HOL system is a powerful tool for proving higher order formulae.
+ "The HOL system is a powerful tool for proving higherorder formulae.
However, proofs have to be performed interactively and only little
automation using tactics is possible. Even though interaction is
desirable to guide major and creative backward proof steps of complex
@@ 17256,7 +17346,7 @@ when shown in factored form.
pages = "132",
abstract =
"Industry standard implementations of math.h claim (often without
 formal proof) tight bounds on floating point errors. We demonstrate a
+ formal proof) tight bounds on floatingpoint errors. We demonstrate a
novel static analysis that proves these bounds and verifies the
correctness of these implementations. Our key insight is a reduction
of this verification task to a set of mathematical optimization
@@ 17764,7 +17854,7 @@ when shown in factored form.
link = "\url{http://ceurws.org/Vol1010/paper05.pdf}",
year = "2013",
abstract =
 "This research compares the author’s experience in program ming
+ "This research compares the author’s experience in programming
algebra in Haskell and in Agda (currently the former experience is
large, and the latter is small). There are discussed certain hopes and
doubts related to the dependently typed and verified programming of
@@ 18060,7 +18150,7 @@ when shown in factored form.
and proofs. This involves inferring information that is often left
implicit in an ordinary mathematical text, and resolving ambiguities
in mathematical expressions. We refer to the process of passing from a
 quasiformal and partially specified expression to a completely
+ quasiformal and partiallyspecified expression to a completely
precise formal one as {\sl elaboration}. We describe an elaboration
algorithms for dependent type theory that has been implemented in the
Lean theorem prover. Lean's elaborator supports higherorder
@@ 19772,7 +19862,7 @@ when shown in factored form.
One method is based on formulating real numbers as functions that map
rational tolerances to rational approximations. This approach, which
was developed by constructive mathematicians as a concrete
 formalization of the real numbers, has lead to a surpris ingly
+ formalization of the real numbers, has lead to a surprisingly
successful implementation. The second method formulates real numbers
as potentially infinite sequences of digits, evaluated on demand.
This approach has frequently been advocated by proponents of lazy
@@ 22358,7 +22448,7 @@ Comm. Math. Helv., Vol 18 pp 283308, (1946)
abstract = "
The general goal of this thesis is to investigate and develop computer
algebra tools for the simplification resp. evaluation of definite
 integrals. One way of finding the value of a def inite integral is
+ integrals. One way of finding the value of a definite integral is
via the evaluation of an antiderivative of the integrand. In the
nineteenth century Joseph Liouville was among the first who analyzed
the structure of elementary antiderivatives of elementary functions
@@ 22375,7 +22465,7 @@ Comm. Math. Helv., Vol 18 pp 283308, (1946)
differentiating under the integral sign or creative telescoping).
The main result of this thesis extends the results mentioned above to
 a complete algo rithm for parametric elementary integration for a
+ a complete algorithm for parametric elementary integration for a
certain class of integrands covering a majority of the special
functions appearing in practice such as orthogonal polynomials,
polylogarithms, Bessel functions, etc. A general framework is provided
@@ 22388,7 +22478,7 @@ Comm. Math. Helv., Vol 18 pp 283308, (1946)
types of integrands are treated.
As subproblems of the integration algorithm one also has to find
 solutions of linear or dinary differential equations of a certain
+ solutions of linear ordinary differential equations of a certain
type. Some contributions are also made to solve those problems in our
setting, where the results directly dealing with systems of
differential equations have been joint work with Moulay A. Barkatou.
@@ 34245,7 +34335,7 @@ Universit{\"a}t Karsruhe, Karlsruhe, Germany 1994
vibrant and stimulating environment.
After many years of development, a decision was made to rename
 Scratch pad II to Axiom and to release it as a product. Dick and
+ Scratchpad II to Axiom and to release it as a product. Dick and
Robert Sutor were the primary authors of the book {\sl Axiom: The
Scientific Computation System}. In the foreword of the book, written
by David and Gregory Chudnovsky, it is stated that ``The Scratchpad
@@ 34406,8 +34496,8 @@ Universit{\"a}t Karsruhe, Karlsruhe, Germany 1994
theorem provers, though e±cient, were written in basic programming
languages and on primitive computers. Now there exist more powerful
and mature geometric theorem provers of which some have already been
 published as commercial software. On the other hand, building ef
 fective equations solvers is still at the experimental stage and
+ published as commercial software. On the other hand, building
+ effective equations solvers is still at the experimental stage and
remains for further research and development.",
paper = "Chouxx.pdf"
}
@@ 39232,7 +39322,7 @@ Schenectady, NY, USA, 1984
fulfill this condition.
Among the methods to split polynomial systems into smaller pieces
 probably the Groebner factor izer method attracted the most
+ probably the Groebner factorizer method attracted the most
theoretical attention, see Czapor ([4, 5]), Davenport ([6]), Melenk, M
̈oller and Neun ([16, 17]) and Gr ̈abe ([13, 14]). General purpose
Computer Algebra Systems (CAS) are well suited for such an approach,
@@ 49690,22 +49780,22 @@ LCCN QA76.7.S54 v22:7 SIGPLAN Notices, vol 22, no 7 (July 1987)
provide an entry into the current research literature, but not to present
the most recent research results.
 The first half of the course (taught by me) will deal with basic math
 ematics and algorithms for computer algebra, primarily at the level of
+ The first half of the course (taught by me) will deal with basic
+ mathematics and algorithms for computer algebra, primarily at the level of
arithmetic and elementary abstract algebra, including an introduction
to GCDs and the solution of univariate polynomial equations. This
leads into the second half of the course (taught by Dr Jim Skea) on
 the more advanced problems of polynomial factorization, indefinite in
 tegration, multivariate polynomial equations, etc.
+ the more advanced problems of polynomial factorization, indefinite
+ integration, multivariate polynomial equations, etc.
The first week provides an introduction to the computing aspects
 of computer algebra, and contains almost no mathematics. It is in
 tended to show how the later theory can be implemented for practical
+ of computer algebra, and contains almost no mathematics. It is
+ intended to show how the later theory can be implemented for practical
computation. The second week provides a rapid but superficial survey
of the abstract algebra that is most important for computer algebra.
The next five weeks will build on this abstract basis to do some more
 concrete mathematics in more details, referring back to the basis es
 tablished in the first two weeks as necessary.
+ concrete mathematics in more details, referring back to the basis
+ established in the first two weeks as necessary.
At the end of each set of notes will be exercises, one (or more) of
which will be assessed.",
@@ 49957,7 +50047,7 @@ Oxford University Press (2000) ISBN0195125169
subject to a wide variety of physical forces and interactions is
exceedingly difficult. The construction of a single simulator capable
of dealing with all possible physical processes is completely
 impracti cal and, it seems to us, wrongheaded. Instead, we propose
+ impractical and, it seems to us, wrongheaded. Instead, we propose
to build custom simulators designed for a particular collection of
physical objects, where a particular set of physical phenomena are
involved. For such an approach to be practical, an environment must
@@ 51930,7 +52020,7 @@ Rocky Mountain J. Math. 14 119139. (1984)
pages = "157172",
abstract =
"We address the problem of representing mathematical structures in a
 proof assistant which: 1) is based on a type theory with depen dent
+ proof assistant which: 1) is based on a type theory with dependent
types, telescopes and a computational version of Leibniz equality; 2)
implements coercive subtyping, accepting multiple coherent paths
between type families; 3) implements a restricted form of higher order
@@ 52036,7 +52126,7 @@ A.K Peters, Natick, MA. (2003) ISBN 1568811349
pages = "3346",
abstract =
"We describe how the theory of Groebner bases, an important part
 of computational algebra, can be developed within Martin Lof’s
+ of computational algebra, can be developed within MartinLof’s
type theory. In particular, we aim for an integrated development
of the algorithms for computing Groebner bases: we want to prove,
constructively in type theory, the existence of Groebner bases and
@@ 52887,7 +52977,7 @@ A.E.R.E. Report R.8730. HMSO. (1977)
year = "2010",
link = "\url{http://www.risc.jku.at/publications/download/risc_4181/synasc_postproceedings.pdf}",
abstract =
 "We present and illustrate a method for the gen eration of the
+ "We present and illustrate a method for the generation of the
termination conditions for nested loops with abrupt termination
statements. The conditions are (firstorder) formulae obtained by
certain transformations of the program text. The loops are treated
@@ 52983,16 +53073,16 @@ A.E.R.E. Report R.8730. HMSO. (1977)
year = "1998",
pages = "1732",
abstract =
 "We consider geometric theorems that can be stated construc tively by
+ "We consider geometric theorems that can be stated constructively by
introducing points, while each newly introduced point may be
represented in terms of the previously constructed points using
Clifford algebraic operators. To prove a concrete theorem, one first
substitutes the expressions of the dependent points into the
 conclusion Clifford poly nomial to obtain an expression that involves
 only the free points and pa rameters. A termrewriting system is
+ conclusion Clifford polynomial to obtain an expression that involves
+ only the free points and parameters. A termrewriting system is
developed that can simplify such an expression to 0, and thus prove
the theorem. A large class of theorems can be proved effectively in
 this coordinatefree manner. This paper de scribes the method in
+ this coordinatefree manner. This paper describes the method in
detail and reports on our preliminary experiments.",
paper = "Fevr98.pdf"
}
@@ 53014,7 +53104,7 @@ A.E.R.E. Report R.8730. HMSO. (1977)
functional translation of imperative programs, based on a combination
of the type and effect discipline and monads. Then an incomplete proof
of the specification is built in the Type Theory, whose gaps would
 corre spond to proof obligations. On sequential imperative programs,
+ correspond to proof obligations. On sequential imperative programs,
we get the same proof obligations as those given by FloydHoare
logic. Compared to the latter, our approach also includes functional
constructions in a straightforward way. This work has been
@@ 53316,7 +53406,7 @@ Invent. Math., vol. 121, 1995, pp. 211222.
robust enough to support a hierarchy comprising a broad variety of
algebraic structures, from types with a choice operator to algebraically
closed fields. Interfaces for the structures enjoy the handiness of a
 classi cal setting, without requiring any axiom. Finally, we show how
+ classical setting, without requiring any axiom. Finally, we show how
externally extensible some of these instances are by discussing a
lemma seminal in defining the discrete logarithm, and a matrix
decomposition problem.",
@@ 53866,7 +53956,7 @@ Grove, IL, USA and Oxford, UK, 1992
topdown approach of structured programming. The deep requirement of
structured programming, however, is that programming should be based
on welldefined abstractions with clear meaning rather than on
 incidental characteris tics of computing machinery. This requirement
+ incidental characteristics of computing machinery. This requirement
can be met by object oriented programming and, in fact, object
oriented programs may have better structure than programs obtained by
functional decomposition.
@@ 54032,7 +54122,7 @@ Grove, IL, USA and Oxford, UK, 1992
"We reconsider the wellknown concept of Haskellstyle type classes
within the logical framework of Isabelle. So far, axiomatic type
classes in Isabelle merely account for the logical aspect as
 predicates over types, while the opera tional part is only a
+ predicates over types, while the operational part is only a
convention based on raw overloading. Our more elaborate approach to
constructive type classes provides a seamless integration with
Isabelle locales, which are able to manage both operations and logical
@@ 54415,8 +54505,8 @@ IEEE Comput. Soc. Press, pp. 678687.
link = "\url{http://wwwsop.inria.fr/members/Evelyne.Hubert/publications/sncsp.pdf}",
abstract =
"This is the first in a series of two tutorial articles devoted to
 triangulation decomposition algorithms. The value of these notes
 resides in the uniform presen tation of triangulationdecomposition
+ triangulationdecomposition algorithms. The value of these notes
+ resides in the uniform presentation of triangulationdecomposition
of polynomial and differential radical ideals with detailed proofs of
all the presented results.We emphasize the study of the mathematical
objects manipulated by the algorithms and show their properties in
@@ 54469,7 +54559,7 @@ IEEE Comput. Soc. Press, pp. 678687.
year = "1982",
abstract =
"Are design and use of computer algebra systems disjoint or
 complementary activi ties? Raising and answering this question are
+ complementary activities? Raising and answering this question are
equally controversial, since a clear distinction between languages
features and library facilities is hard to make. Instead of even
attempting to answer this rather academic question it is argued why it
@@ 54986,15 +55076,15 @@ Springer Verlag Heidelberg, 1989, ISBN 0387969802
As a starting point, we have defined
and formalized a syntax, semantics, type system and specification
 language for MiniMaple . For verification, we automatically trans
 late the (types and specification) annotated MiniMaple program into a
+ language for MiniMaple . For verification, we automatically
+ translate the (types and specification) annotated MiniMaple program into a
behaviorally equivalent program in the intermediate language Why3ML of
the verification tool Why3; from the translated program, Why3
generates verification conditions whose correctness can be proved by
various automated and interactive theorem provers (e.g. Z3 and Coq).
Furthermore, we have defined a denotational semantics of MiniMaple and
its specification language and proved the soundness of the translation
 with re spect to the operational semantics of Why3ML. Finally, we
+ with respect to the operational semantics of Why3ML. Finally, we
discuss the application of our verification framework to the Maple
package DifferenceDifferential developed at our institute to compute
bivariate differencedifferential dimension polynomials using relative
@@ 55766,7 +55856,7 @@ AddisonWesley (March 1979) ISBN 0201144611
pages = "152168",
abstract =
"A notion of dependent coercion is introduced and studied in the
 context of depen dent type theories. It extends our earlier work on
+ context of dependent type theories. It extends our earlier work on
coercive subtyping into a uniform framework which increases the
expressive power with new applications. A dependent coercion
introduces a subtyping relation between a type and a family of types
@@ 56491,7 +56581,7 @@ Journal of the ACM, Vol. 25, No. 2, April 1978, pp. 271282
then the system should be able to suggest possible next steps.
The key idea of Lucas Interpretation is to compute the steps of a
 calculation following a pro gram written in a novel CTPbased
+ calculation following a program written in a novel CTPbased
programming language, i.e. computation provides the next steps. User
guidance is generated by combining deduction and computation: the
latter is performed by a specific language interpreter, which works
@@ 56532,8 +56622,8 @@ Journal of the ACM, Vol. 25, No. 2, April 1978, pp. 271282
levels from highschool to university.
Starting from prototypes of TPbased educational mathematics systems,
 conceptual founda tions are considered: TPbased software which
 implements reasoning as an essential part of math ematical thinking
+ conceptual foundations are considered: TPbased software which
+ implements reasoning as an essential part of mathematical thinking
technology. Educational objectives for the development of TPbased
systems are discussed and concluded with some predictions on possible
impact of TPbased educational mathematics assistants.
@@ 56888,7 +56978,7 @@ J. Inst. Maths Applics. 8 1635. (1971)
pages = "771775",
year = "1992",
abstract =
 "We exhibit a methodology for formulating and verifying meta theorems
+ "We exhibit a methodology for formulating and verifying metatheorems
about deductive systems in the Elf language, an implementation of the
LF Logical Framework with an operational semantics in the spirit of
logic programming. It is based on the mechanical verification of
@@ 56896,7 +56986,7 @@ J. Inst. Maths Applics. 8 1635. (1971)
reconstruction and schemachecking. The latter is justified by
induction principles for closed LF objects, which can be constructed
over a given signature. We illustrate our technique through several
 examples, the most extensive of which is an interpre tation of
+ examples, the most extensive of which is an interpretation of
classical logic in minimal logic through a continuationpassingstyle
transformation on proofs.",
paper = "Pfen92b.pdf"
@@ 57053,9 +57143,9 @@ Acta Math. 68 (1937) 145254.
link =
"\url{http://www.risc.jku.at/publications/download/risc_3913/PopJeb.pdf}",
abstract =
 "We present a method for verifying recursive functional pro grams. We
+ "We present a method for verifying recursive functional programs. We
define a Verification Condition Generator (VCG) which covers the most
 frequent types of recursive programs. These programs may op erate on
+ frequent types of recursive programs. These programs may operate on
arbitrary domains. Soundness and Completeness of the VCG are proven on
the meta level, and this provides a warranty that any system based on
our results will be sound.",
@@ 57871,7 +57961,7 @@ Num. Math. 16 205223. (1970)
modules) that have rigorously specified but very abstract
interfaces. We focus on the combination of the functorbased
programming style with software engineering principles in large
 development projects. The gen erated target code is highly efficient
+ development projects. The generated target code is highly efficient
and can be easily embedded into foreign application environments.",
paper = "Schr00.pdf"
}
@@ 58220,7 +58310,7 @@ PrenticeHall 1971
pages = "151185",
abstract =
"This article provides an overview of current work on universitylevel
 computer assisted instruction at Stanford University. Brief
+ computerassisted instruction at Stanford University. Brief
descriptions are given of the main areas of current interest. The
main emphasis is on the courses now being used: Introduction to Logic,
Axiomatic Set Theory, Old Church Slavonic, History of the Russian
@@ 59283,11 +59373,11 @@ Math. Tables Aids Comput. 10 9196. (1956)
proof and publication, to archival and sharing of data and code.
OpenDreamKit will be built out of a sustainable ecosystem of
 communitydeveloped open software, databases, and ser vices,
+ communitydeveloped open software, databases, and services,
including popular tools such as LINBOX, MPIR, SAGE (sagemath.org),
GAP, PARI/GP, LMFDB, and SINGULAR. We will extend the JUPYTER Notebook
environment to provide a flexible user interface. By improving and
 unifying existing build ing blocks, OpenDreamKit will maximise both
+ unifying existing building blocks, OpenDreamKit will maximise both
sustainability and impact, with beneficiaries extending to scientific
computing, physics, chemistry, biology and more, and including
researchers, teachers, and industrial practitioners.
diff --git a/changelog b/changelog
index 0414540..7baf53f 100644
--- a/changelog
+++ b/changelog
@@ -1,3 +1,5 @@
+20180309 tpd src/axiomwebsite/patches.html 20180309.02.tpd.patch
+20180309 tpd books/bookvolbib add references
20180309 tpd src/axiomwebsite/patches.html 20180309.01.tpd.patch
20180309 tpd books/bookvolbib add references
20180304 tpd src/axiomwebsite/patches.html 20180304.01.tpd.patch
diff --git a/patch b/patch
index 7e31a35..780bf5f 100644
--- a/patch
+++ b/patch
@@ -2,387 +2,92 @@ books/bookvolbib add references
Goal: Proving Axiom Correct
\index{Harper, Robert}
\begin{chunk}{axiom.bib}
@phdthesis{Harp85,
 author = "Harper, Robert",
 title = {{Aspects of the Implementation of Type Theory}},
 school = "Cornell University",
 year = "1985",
 comment = "TR 85675",
 abstract =
 "This thesis is about building an automated programming logic. For our
 purposes an automated programming logic consists of
 \begin{itemize}
 \item A formal system for reasoning about programs
 \item A proof development environment which includes, at least, an
 editor for the construction of proofs in the logic
 \item Mechanized decision methods to assist in the proof development
 process
 \item A library mechanism for managing collections of theorems
 \end{itemize}",
 paper = "Harp85.pdf"
}

\end{chunk}

\index{Blanqui, Frederic}
\index{Jouannaud, Jean-Pierre}
\index{Okada, Mitsuhiro}
\begin{chunk}{axiom.bib}
@inproceedings{Blan99,
 author = "Blanqui, Frederic and Jouannaud, Jean-Pierre and Okada, Mitsuhiro",
 title = {{The Calculus of Algebraic Constructions}},
 booktitle = "Rewriting Techniques and Applications RTA99",
 year = "1999",
 publisher = "LNCS 1631",
 link = "\url{https://hal.inria.fr/inria00105545v1/document}",
 abstract =
 "This paper is concerned with the foundations of the Calculus of
 Algebraic Constructions (CAC), an extension of the Calculus of
 Constructions by inductive data types. CAC generalizes inductive
 types equipped with higherorder primitive recursion, by providing
 definitions of functions by pattern-matching which capture recursor
 definitions for arbitrary nondependent and nonpolymorphic inductive
 types satisfying a strictly positivity condition. CAC also
 generalizes the firstorder framework of abstract data types by
 providing dependent types and higherorder rewrite rules.",
 paper = "Blan99.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Blanqui, Frederic}
\begin{chunk}{axiom.bib}
@article{Blan05,
 author = "Blanqui, Frederic",
 title = {{Inductive Types in the Calculus of Algebraic Constructions}},
 journal = "Fundamenta Informaticae",
 volume = "65",
 number = "12",
 pages = "6186",
 year = "2005",
 abstract =
 "In a previous work, we proved that an important part of the Calculus
 of Inductive Constructions (CIC), the basis of the Coq proof
 assistant, can be seen as a Calculus of Algebraic Constructions
 (CAC), an extension of the Calculus of Constructions with functions
 and predicates defined by higherorder rewrite rules. In this
 paper, we prove that almost all CIC can be seen as a CAC, and that it
 can be further extended with nonstrictly positive types and
 inductiverecursive types together with nonfree constructors and
 patternmatching on defined symbols.",
 paper = "Blan05.pdf"
}

\end{chunk}

\index{Floyd, Robert W.}
\begin{chunk}{axiom.bib}
@inproceedings{Floy67,
 author = "Floyd, Robert W.",
 title = {{Assigning Meanings to Programs}},
 booktitle = "Proc. Symp. in Applied Mathematics",
 year = "1967",
 pages = "1932",
 publisher = "American Mathematical Society",
 paper = "Floy67.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Back, R.J.R}
\begin{chunk}{axiom.bib}
@article{Back81,
 author = "Back, R.J.R",
 title = {{On Correct Refinement of Programs}},
 journal = "J. Computer and System Sciences",
 volume = "23",
 number = "1",
 pages = "4968",
 year = "1981",
 abstract =
 "The stepwise refinement technique is studied from a mathematical
 point of view. A relation of correct refinement between programs is
 defined, based on the principle that refinement steps should be
 correctness preserving. Refinement between programs will therefore
 depend on the criterion of program correctness used. The application
 of the refinement relation in showing the soundness of different
 techniques for refining programs is discussed. Special attention is
 given to the use of abstraction in program construction. Refinement
 with respect to partial and total correctness will be studied in more
 detail, both for deterministic and nondeterministic programs. The
 relationship between these refinement relations and the approximation
 relation of fixpoint semantics will be studied, as well as the
 connection with the predicate transformers used in program
 verification.",
 paper = "Back81.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Morgan, Carroll}
\begin{chunk}{axiom.bib}
@book{Morg98,
 author = "Morgan, Carroll",
 title = {{Programming from Specifications, 2nd Ed.}},
 publisher = "Prentice Hall",
 year = "1998",
 link =
"\url{http://www.cse.unsw.edu.au/~carrollm/ProgrammingFromSpecifications.pdf}",
 paper = "Morg98.pdf"
}

\end{chunk}

\index{Filliatre, JeanChristophe}
\begin{chunk}{axiom.bib}
@phdthesis{Fill99,
 author = "Filliatre, JeanChristophe",
 title = {{Preuve de programmes imp\'eratifs en th\'eorie des types}},
 school = {{Universit\'e ParisSud}},
 year = "1999",
 link = "\url{}",
 paper = "Fill99.pdf"
}

\end{chunk}

\index{Caldwell, James L.}
\begin{chunk}{axiom.bib}
@inproceedings{Cald97,
 author = "Caldwell, James L.",
 title = {{Moving proofsasprograms into practice}},
 booktitle = "Automated Software Engineering",
 publisher = "IEEE",
 year = "1997",
 abstract =
 "Proofs in the Nuprl system, an implementation of a constructive
 type theory, yield “correctbyconstruction” pro grams. In this
 paper a new methodology is presented for extracting efficient and
 readable programs from inductive proofs. The resulting extracted
 programs are in a form suitable for use in hierarchical
 verifications in that they are amenable to clean partial evaluation
 via extensions to the Nuprl rewrite system. The method is based on two
 elements: specifications written with careful use of the Nuprl
 settype to restrict the extracts to strictly computational content;
 and on proofs that use induction tactics that generate extracts
 using familiar fixedpoint combinators of the untyped lambda
 calculus. In this paper the methodology is described and its
 application is illustrated by example.",
 paper = "Cald97.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Talpin, JeanPierre}
\index{Jouvelot, Pierre}
\begin{chunk}{axiom.bib}
@inproceedings{Talp92,
 author = "Talpin, JeanPierre and Jouvelot, Pierre",
 title = {{The Type and Effect Discipline}},
 booktitle = "Conf. on Logic in Computer Science",
 publisher = "Computer Science Press",
 year = "1992",
 abstract =
 "The {\sl type and effect discipline} is a new framework for
 reconstructing the principal type and the minimal effect of
 expressions in implicitly typed polymorphic functional languages that
 support imperative constructs. The type and effect discipline
 outperforms other polymorphic type systems. Just as types abstract
 collections of concrete values, {\sl effects} denote imperative
 operations on regions. {\sl Regions} abstract sets of possibly aliased
 memory locations.

 Effects are used to control type generalization in the presence of
 imperative constructs while regions delimit observable
 sideeffects. The observable effects of an expression range over the
 regions that are free in its type environment and its type; effects
 related to local data structures can be discarded during type
 reconstruction. The type of an expression can be generalized with
 respect to the variables that are not free in the type environment or
 in the observable effect.",
 paper = "Talp92.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Davis, Jared Curran}
\begin{chunk}{axiom.bib}
@phdthesis{Davi09,
 author = "Davis, Jared Curran",
 title = {{A SelfVerifying Theorem Prover}},
 school = "University of Texas at Austin",
 year = "2009",
 abstract =
 "Programs have precise semantics, so we can use mathematical proof to
 establish their properties. These proofs are often too large to
 validate with the usual “social process” of mathematics, so instead we
 create and check them with theoremproving software. This software
 must be advanced enough to make the proof process tractable, but this
 very sophistication casts doubt upon the whole enterprise: who
 verifies the verifier?

 We begin with a simple proof checker, Level 1, that only accepts
 proofs composed of the most primitive steps, like Instantiation and
 Cut. This program is so straightforward the ordinary, social process
 can establish its soundness and the consistency of the logical
 theory it implements (so we know theorems are “always true”).

 Next, we develop a series of increasingly capable proof checkers,
 Level 2, Level 3, etc. Each new proof checker accepts new kinds of
 proof steps which were not accepted in the previous levels. By taking
 advantage of these new proof steps, higherlevel proofs can be
 written more concisely than lowerlevel proofs, and can take less time
 to construct and check. Our highestlevel proof checker, Level 11, can
 be thought of as a simplified version of the ACL2 or NQTHM theorem
 provers. One contribution of this work is to show how such systems can
 be verified.

 To establish that the Level 11 proof checker can be trusted, we first
 use it, without trusting it, to prove the fidelity of every Level n to
 Level 1: whenever Level n accepts a proof of some $\phi$, there exists a
 Level 1 proof of $\phi$. We then mechanically translate the Level 11 proof
 for each Level n into a Level n − 1 proof—that is, we create a Level 1
 proof of Level 2’s fidelity, a Level 2 proof of Level 3’s fidelity,
 and so on. This layering shows that each level can be trusted, and
 allows us to manage the sizes of these proofs.

 In this way, our system proves its own fidelity, and trusting Level 11
 only requires us to trust Level 1.",
 paper = "Davi09.pdf"
}

\end{chunk}

\index{Myreen, Magnus O.}
\index{Davis, Jared}
\begin{chunk}{axiom.bib}
@article{Myre14,
 author = "Myreen, Magnus O. and Davis, Jared",
 title = {{The Reflective Milawa Theorem Prover is Sound}},
 journal = "LNAI",
 pages = "421436",
 year = "2014",
+@article{Myre10,
+ author = "Myreen, Magnus O.",
+ title = {{Verified JustInTime Compiler on x86}},
+ journal = "ACM SIGPLAN Notices - POPL'10",
+ volume = "45",
+ number = "1",
+ year = "2010",
+ pages = "107118",
abstract =
 "Milawa is a theorem prover styled after ACL2 but with a small kernel
 and a powerful reflection mechanism. We have used the HOL4 theorem
 prover to formalize the logic of Milawa, prove the logic sound, and
 prove that the source code for the Milawa kernel (2,000 lines of Lisp)
 is faithful to the logic. Going further, we have combined these
 results with our previous verification of an x86 machinecode
 implementation of a Lisp runtime. Our toplevel HOL4 theorem states
 that when Milawa is run on top of our verified Lisp, it will only
 print theorem statements that are semantically true. We believe that
 this toplevel theorem is the most comprehensive formal evidence of a
 theorem prover’s soundness to date.".
 paper = "Myre14.pdf",
+ "This paper presents a method for creating formally correct justin
+ time (JIT) compilers. The tractability of our approach is demonstrated
+ through, what we believe is the first, verification of a JIT
+ compiler with respect to a realistic semantics of selfmodifying x86
+ machine code. Our semantics includes a model of the instruction
+ cache. Two versions of the verified JIT compiler are presented: one
+ generates all of the machine code at once, the other one is incremental
+ i.e. produces code ondemand. All proofs have been performed
+ inside the HOL4 theorem prover.",
+ paper = "Myre10.pdf",
keywords = "printed"
}
\end{chunk}
\index{Davis, Jared}
\index{Myreen, Magnus O.}
+\index{Gordon, Michael J.C.}
\begin{chunk}{axiom.bib}
@article{Davi15,
 author = "Davis, Jared and Myreen, Magnus O.",
 title = {{The Reflective Milawa Theorem Prover is Sound}},
 journal = "J. Automated Reasoning",
 volume = "55",
 number = "2",
 pages = "117--183",
 year = "2015",
+@article{Myre09,
+ author = "Myreen, Magnus O. and Gordon, Michael J.C.",
+ title = {{Verified LISP Implementations on ARM, x86 and PowerPC}},
+ journal = "LNCS",
+ volume = "5674",
+ pages = "359--374",
+ year = "2009",
abstract =
 "This paper presents, we believe, the most comprehensive evidence of a
 theorem prover’s soundness to date. Our subject is the Milawa theorem
 prover. We present evidence of its soundness down to the machine
 code. Milawa is a theorem prover styled after NQTHM and ACL2. It is
 based on an idealised version of ACL2’s computational logic and
 provides the user with high-level tactics similar to ACL2’s. In
 contrast to NQTHM and ACL2, Milawa has a small kernel that is somewhat
 like an LCF-style system. We explain how the Milawa theorem prover is
 constructed as a sequence of reflective extensions from its
 kernel. The kernel establishes the soundness of these extensions
 during Milawa’s bootstrapping process. Going deeper, we explain how
 we have shown that the Milawa kernel is sound using the HOL4 theorem
 prover. In HOL4, we have formalized its logic, proved the logic sound,
 and proved that the source code for the Milawa kernel (1,700 lines of
 Lisp) faithfully implements this logic. Going even further, we have
 combined these results with the x86 machine-code level verification of
 the Lisp runtime Jitawa. Our top-level theorem states that Milawa can
 never claim to prove anything that is false when it is run on this
 Lisp runtime.",
 paper = "Davi15.pdf",
+ "This paper reports on a case study, which we believe is the
+ first to produce a formally verified end-to-end implementation of a func-
+ tional programming language running on commercial processors. Inter-
+ preters for the core of McCarthy’s LISP 1.5 were implemented in ARM,
+ x86 and PowerPC machine code, and proved to correctly parse, evaluate
+ and print LISP s-expressions. The proof of evaluation required working
+ on top of verified implementations of memory allocation and garbage
+ collection. All proofs are mechanised in the HOL4 theorem prover.",
+ paper = "Myre09.pdf",
keywords = "printed"
}
\end{chunk}
\index{Myreen, Magnus O.}
+\index{Slind, Konrad}
+\index{Gordon, Michael J.C.}
\begin{chunk}{axiom.bib}
@article{Myre12,
 author = "Myreen, Magnus O.",
 title = {{Functional Programs: Conversions between Deep and Shallow
 Embeddings}},
+@article{Myre09a,
+ author = "Myreen, Magnus O. and Slind, Konrad and Gordon, Michael J.C.",
+ title = {{Extensible ProofProducing Compilation}},
journal = "LNCS",
 volume = "7406",
 pages = "412--417",
 year = "2012",
+ volume = "5501",
+ pages = "2--16",
+ year = "2009",
abstract =
 "This paper presents a method which simplifies verification of deeply
 embedded functional programs. We present a technique by which
 proofcertified equations describing the effect of functional
 programs (shallow embeddings) can be automatically extracted from their
 operational semantics. Our method can be used in reverse, i.e. from
 shallow to deep embeddings, and thus for implementing certifying code
 synthesis: we have implemented a tool which maps HOL functions to
 equivalent Lisp functions, for which we have a verified Lisp runtime.
 A key benefit, in both directions, is that the verifier does not need
 to understand the operational semantics that gives meanings to the
 deep embeddings.",
 paper = "Myre12.pdf",
+ "This paper presents a compiler which produces machine code from
+ functions defined in the logic of a theorem prover, and at the same
+ time proves that the generated code executes the source functions.
+ Unlike previously published work on proof-producing compilation from a
+ theorem prover, our compiler provides broad support for user-defined
+ extensions, targets multiple carefully modelled commercial machine
+ languages, and does not require termination proofs for input
+ functions. As a case study, the compiler is used to construct verified
+ interpreters for a small LISPlike language. The compiler has been
+ implemented in the HOL4 theorem prover.",
+ paper = "Myre09a.pdf",
keywords = "printed"
}
\end{chunk}
\index{Myreen, Magnus O.}
\index{Davis, Jared}
\begin{chunk}{axiom.bib}
@article{Myre11,
 author = "Myreen, Magnus O. and Davis, Jared",
 title = {{A Verified Runtime for a Verified Theorem Prover}},
 journal = "LNCS",
 volume = "6898",
 pages = "265--280",
 year = "2011",
 abstract =
 "Theorem provers, such as ACL2, HOL, Isabelle and Coq, rely on the
 correctness of runtime systems for programming languages like ML,
 OCaml or Common Lisp. These runtime systems are complex and critical
 to the integrity of the theorem provers.

 In this paper, we present a new Lisp runtime which has been formally
 verified and can run the Milawa theorem prover. Our runtime consists
 of 7,500 lines of machine code and is able to complete a 4 gigabyte
 Milawa proof effort. When our runtime is used to carry out Milawa
 proofs, less unverified code must be trusted than with any other
 theorem prover.

 Our runtime includes a just-in-time compiler, a copying garbage collector,
 a parser and a printer, all of which are HOL4-verified down to
 the concrete x86 code. We make heavy use of our previously developed
 tools for machine-code verification. This work demonstrates that our
 approach to machine-code verification scales to non-trivial
 applications.",
 paper = "Myre11.pdf",
 keywords = "printed"
+@misc{Domi18,
+ author = "Domipheus",
+ title = {{Designing a CPU in VHDL}},
+ link = "\url{http://labs.domipheus.com/blog/tpu-series-quick-links}",
+ year = "2018",
+ abstract = "A VHDL CPU"
}
\end{chunk}
diff --git a/src/axiom-website/patches.html b/src/axiom-website/patches.html
index 20f6c5a..810c053 100644
--- a/src/axiom-website/patches.html
+++ b/src/axiom-website/patches.html
@@ -5918,6 +5918,8 @@ books/bookvol10.1 add new chapter
proposal
20180309.01.tpd.patch
books/bookvolbib add references
+20180309.02.tpd.patch
+books/bookvolbib add references
-- 
1.9.1