-rw-r--r--  .circleci/config.yml | 2
-rw-r--r--  .gitignore | 6
-rw-r--r--  .gitlab-ci.yml | 34
-rw-r--r--  .travis.yml | 9
-rw-r--r--  CHANGES | 6
-rw-r--r--  CONTRIBUTING.md | 3
-rw-r--r--  Makefile | 14
-rw-r--r--  Makefile.build | 24
-rw-r--r--  Makefile.common | 4
-rw-r--r--  checker/indtypes.ml | 12
-rw-r--r--  checker/inductive.ml | 10
-rw-r--r--  checker/modops.ml | 22
-rw-r--r--  checker/subtyping.ml | 4
-rw-r--r--  configure.ml | 19
-rw-r--r--  default.nix | 98
-rw-r--r--  dev/README | 50
-rw-r--r--  dev/README.md | 46
-rw-r--r--  dev/ci/ci-common.sh | 2
-rw-r--r--  dev/ci/docker/bionic_coq/Dockerfile | 10
-rw-r--r--  dev/ci/user-overlays/07746-cleanup-unused-various.sh | 6
-rw-r--r--  dev/ci/user-overlays/07820-mattam82-hints-constants.sh | 6
-rw-r--r--  dev/ci/user-overlays/07863-rm-sorts-contents.sh | 6
-rw-r--r--  dev/ci/user-overlays/07902-ppedrot-camlp5-parser.sh | 8
-rw-r--r--  dev/ci/user-overlays/07906-univs-of-constr.sh | 9
-rw-r--r--  dev/ci/user-overlays/7080-herbelin-master+swapping-modules-constr-context.sh | 7
-rw-r--r--  dev/ci/user-overlays/README.md | 2
-rw-r--r--  dev/doc/README.md | 77
-rw-r--r--  dev/doc/changes.md | 103
-rw-r--r--  dev/doc/setup.txt | 269
-rw-r--r--  dev/doc/translate.txt | 495
-rw-r--r--  doc/sphinx/language/cic.rst | 231
-rw-r--r--  doc/sphinx/language/gallina-extensions.rst | 14
-rw-r--r--  doc/sphinx/language/gallina-specification-language.rst | 27
-rw-r--r--  doc/sphinx/proof-engine/tactics.rst | 18
-rw-r--r--  doc/sphinx/user-extensions/syntax-extensions.rst | 206
-rw-r--r--  engine/evarutil.ml | 4
-rw-r--r--  engine/evarutil.mli | 4
-rw-r--r--  engine/evd.ml | 6
-rw-r--r--  engine/evd.mli | 4
-rw-r--r--  engine/uState.ml | 2
-rw-r--r--  engine/univGen.ml | 4
-rw-r--r--  engine/univGen.mli | 2
-rw-r--r--  engine/univSubst.ml | 12
-rw-r--r--  engine/univSubst.mli | 2
-rw-r--r--  engine/universes.mli | 4
-rw-r--r--  grammar/coqpp_ast.mli | 95
-rw-r--r--  grammar/coqpp_lex.mll | 167
-rw-r--r--  grammar/coqpp_main.ml | 349
-rw-r--r--  grammar/coqpp_parse.mly | 256
-rw-r--r--  grammar/q_util.mlp | 2
-rw-r--r--  grammar/tacextend.mlp | 9
-rw-r--r--  ide/idetop.ml | 5
-rw-r--r--  kernel/cooking.ml | 2
-rw-r--r--  kernel/cooking.mli | 3
-rw-r--r--  kernel/indtypes.ml | 15
-rw-r--r--  kernel/inductive.ml | 10
-rw-r--r--  kernel/modops.ml | 4
-rw-r--r--  kernel/nativecode.ml | 14
-rw-r--r--  kernel/nativecode.mli | 2
-rw-r--r--  kernel/nativelibrary.ml | 2
-rw-r--r--  kernel/subtyping.ml | 4
-rw-r--r--  kernel/term_typing.ml | 6
-rw-r--r--  lib/cWarnings.ml | 6
-rw-r--r--  lib/envars.ml | 2
-rw-r--r--  lib/util.ml | 4
-rw-r--r--  library/libobject.ml | 16
-rw-r--r--  library/library.ml | 36
-rw-r--r--  parsing/extend.ml | 2
-rw-r--r--  parsing/g_constr.mlg (renamed from parsing/g_constr.ml4) | 351
-rw-r--r--  parsing/g_prim.mlg (renamed from parsing/g_prim.ml4) | 69
-rw-r--r--  parsing/pcoq.ml | 11
-rw-r--r--  parsing/pcoq.mli | 3
-rw-r--r--  plugins/btauto/g_btauto.mlg (renamed from plugins/btauto/g_btauto.ml4) | 6
-rw-r--r--  plugins/cc/g_congruence.mlg (renamed from plugins/cc/g_congruence.ml4) | 14
-rw-r--r--  plugins/fourier/g_fourier.mlg (renamed from plugins/fourier/g_fourier.ml4) | 6
-rw-r--r--  plugins/funind/functional_principles_types.ml | 11
-rw-r--r--  plugins/funind/glob_term_to_relation.ml | 2
-rw-r--r--  plugins/ltac/coretactics.mlg (renamed from plugins/ltac/coretactics.ml4) | 184
-rw-r--r--  plugins/ltac/extratactics.ml4 | 2
-rw-r--r--  plugins/ltac/g_eqdecide.mlg (renamed from plugins/ltac/g_eqdecide.ml4) | 8
-rw-r--r--  plugins/ltac/g_tactic.mlg (renamed from plugins/ltac/g_tactic.ml4) | 452
-rw-r--r--  plugins/ltac/pptactic.ml | 51
-rw-r--r--  plugins/ltac/rewrite.ml | 2
-rw-r--r--  plugins/ltac/taccoerce.ml | 23
-rw-r--r--  plugins/ltac/taccoerce.mli | 14
-rw-r--r--  plugins/ltac/tacentries.ml | 34
-rw-r--r--  plugins/ltac/tacentries.mli | 4
-rw-r--r--  plugins/ltac/tacinterp.ml | 14
-rw-r--r--  plugins/micromega/g_micromega.mlg (renamed from plugins/micromega/g_micromega.ml4) | 38
-rw-r--r--  plugins/nsatz/g_nsatz.mlg (renamed from plugins/nsatz/g_nsatz.ml4) | 6
-rw-r--r--  plugins/omega/coq_omega.ml | 10
-rw-r--r--  plugins/omega/g_omega.mlg (renamed from plugins/omega/g_omega.ml4) | 9
-rw-r--r--  plugins/quote/g_quote.mlg (renamed from plugins/quote/g_quote.ml4) | 16
-rw-r--r--  plugins/romega/g_romega.mlg (renamed from plugins/romega/g_romega.ml4) | 12
-rw-r--r--  plugins/rtauto/g_rtauto.mlg (renamed from plugins/rtauto/g_rtauto.ml4) | 5
-rw-r--r--  plugins/syntax/n_syntax.ml | 81
-rw-r--r--  plugins/syntax/n_syntax_plugin.mlpack | 1
-rw-r--r--  plugins/syntax/positive_syntax.ml | 101
-rw-r--r--  plugins/syntax/positive_syntax_plugin.mlpack | 1
-rw-r--r--  plugins/syntax/z_syntax.ml | 128
-rw-r--r--  pretyping/glob_ops.ml | 4
-rw-r--r--  pretyping/indrec.ml | 9
-rw-r--r--  pretyping/nativenorm.ml | 39
-rw-r--r--  pretyping/typing.ml | 2
-rw-r--r--  pretyping/vnorm.ml | 37
-rw-r--r--  printing/pputils.ml | 2
-rw-r--r--  proofs/pfedit.mli | 3
-rw-r--r--  shell.nix | 4
-rw-r--r--  tactics/elimschemes.ml | 2
-rw-r--r--  tactics/eqschemes.ml | 6
-rw-r--r--  tactics/hints.ml | 54
-rw-r--r--  tactics/hints.mli | 9
-rw-r--r--  tactics/inv.ml | 2
-rw-r--r--  tactics/leminv.ml | 2
-rw-r--r--  test-suite/bugs/closed/7712.v | 4
-rw-r--r--  test-suite/bugs/closed/8004.v | 47
-rw-r--r--  test-suite/coq-makefile/timing/precomputed-time-tests/001-correct-diff-sorting-order/time-of-build-both.log.expected | 2
-rw-r--r--  test-suite/coq-makefile/timing/precomputed-time-tests/002-single-file-sorting/time-of-build-pretty.log.expected | 2
-rw-r--r--  test-suite/output/Notations3.out | 5
-rw-r--r--  test-suite/output/Notations3.v | 14
-rw-r--r--  test-suite/success/Hints.v | 30
-rw-r--r--  test-suite/success/uniform_inductive_parameters.v | 13
-rw-r--r--  theories/Init/Logic.v | 7
-rw-r--r--  theories/Numbers/BinNums.v | 2
-rw-r--r--  tools/CoqMakefile.in | 16
-rw-r--r--  tools/TimeFileMaker.py | 23
-rw-r--r--  tools/coq_makefile.ml | 4
-rw-r--r--  tools/coqdoc/index.ml | 2
-rw-r--r--  tools/coqdoc/index.mli | 2
-rw-r--r--  tools/coqdoc/output.ml | 4
-rwxr-xr-x  tools/make-both-single-timing-files.py | 2
-rwxr-xr-x  tools/make-both-time-files.py | 2
-rwxr-xr-x  tools/make-one-time-file.py | 2
-rw-r--r--  toplevel/g_toplevel.mlg (renamed from toplevel/g_toplevel.ml4) | 19
-rw-r--r--  vernac/classes.ml | 2
-rw-r--r--  vernac/comInductive.ml | 49
-rw-r--r--  vernac/comInductive.mli | 7
-rw-r--r--  vernac/egramcoq.ml | 7
-rw-r--r--  vernac/g_proofs.ml4 | 131
-rw-r--r--  vernac/g_proofs.mlg | 139
-rw-r--r--  vernac/g_vernac.ml4 | 1156
-rw-r--r--  vernac/g_vernac.mlg | 1173
-rw-r--r--  vernac/metasyntax.ml | 30
-rw-r--r--  vernac/misctypes.ml | 8
-rw-r--r--  vernac/ppvernac.ml | 5
-rw-r--r--  vernac/vernacentries.ml | 20
-rw-r--r--  vernac/vernacexpr.ml | 7
147 files changed, 4233 insertions, 3461 deletions
diff --git a/.circleci/config.yml b/.circleci/config.yml
index 03aeed2eb..950674b34 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -11,7 +11,7 @@ defaults:
- image: $CI_REGISTRY_IMAGE:$CACHEKEY
environment: &envvars
- CACHEKEY: "bionic_coq-V2018-06-13-V1"
+ CACHEKEY: "bionic_coq-V2018-07-02-V4"
CI_REGISTRY_IMAGE: registry.gitlab.com/coq/coq
version: 2
diff --git a/.gitignore b/.gitignore
index 6adbc9fb2..1eee3f94f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -115,6 +115,7 @@ dev/ocamldoc/*.css
# .mll files
+grammar/coqpp_lex.ml
dev/ocamlweb-doc/lex.ml
ide/coq_lex.ml
ide/config_lexer.ml
@@ -126,6 +127,11 @@ tools/ocamllibdep.ml
tools/coqdoc/cpretty.ml
ide/protocol/xml_lexer.ml
+# .mly files
+
+grammar/coqpp_parse.ml
+grammar/coqpp_parse.mli
+
# .ml4 / .mlp files
g_*.ml
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 4e159ebdc..11614bc38 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -9,7 +9,7 @@ stages:
variables:
# Format: $IMAGE-V$DATE [Cache is not used as of today but kept here
# for reference]
- CACHEKEY: "bionic_coq-V2018-06-13-V1"
+ CACHEKEY: "bionic_coq-V2018-07-02-V4"
IMAGE: "$CI_REGISTRY_IMAGE:$CACHEKEY"
# By default, jobs run in the base switch; override to select another switch
OPAM_SWITCH: "base"
@@ -45,6 +45,9 @@ before_script:
- opam list
- opam config list
+after_script:
+ - echo "The build completed normally (not a runner failure)."
+
################ GITLAB CACHING ######################
# - use artifacts between jobs #
######################################################
@@ -233,6 +236,35 @@ windows32:
variables:
ARCH: "32"
+pkg:nix:
+ image: nixorg/nix:latest # Minimal NixOS image which doesn't even contain git
+ stage: test
+ variables:
+ # By default we use coq.cachix.org as an extra substituter but this can be overridden
+ EXTRA_SUBSTITUTERS: https://coq.cachix.org
+ EXTRA_PUBLIC_KEYS: coq.cachix.org-1:Jgt0DwGAUo+wpxCM52k2V+E0hLoOzFPzvg94F65agtI=
+ # The following variables should not be overridden
+ GIT_STRATEGY: none
+ CACHIX_PUBLIC_KEY: cachix.cachix.org-1:eWNHQldwUO7G2VkjpnjDbWwy4KQ/HNxht7H4SSoMckM=
+ NIXOS_PUBLIC_KEY: cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=
+
+ dependencies: [] # We don't need to download build artifacts
+ before_script: [] # We don't want to use the shared 'before_script'
+ script:
+ # Use current worktree as tmpdir to allow exporting artifacts in case of failure
+ - export TMPDIR=$PWD
+ # Install Cachix as documented at https://github.com/cachix/cachix
+ - nix-env -if https://github.com/cachix/cachix/tarball/master --substituters https://cachix.cachix.org --trusted-public-keys "$CACHIX_PUBLIC_KEY"
+ # We build an expression rather than a direct URL to not be dependent on
+ # the URL location; we are forced to put the public key of cache.nixos.org
+ # because there is no --extra-trusted-public-key option.
+ - nix-build -E "import (fetchTarball $CI_PROJECT_URL/-/archive/$CI_COMMIT_SHA.tar.gz) {}" -K --extra-substituters "$EXTRA_SUBSTITUTERS" --trusted-public-keys "$NIXOS_PUBLIC_KEY $EXTRA_PUBLIC_KEYS" | if [ ! -z "$CACHIX_SIGNING_KEY" ]; then cachix push coq; fi
+ artifacts:
+ name: "$CI_JOB_NAME.logs"
+ when: on_failure
+ paths:
+ - nix-build-coq.drv-0/*/test-suite/logs
+
warnings:base:
<<: *warnings-template
diff --git a/.travis.yml b/.travis.yml
index 627334690..42c41249d 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -67,9 +67,6 @@ matrix:
- TEST_TARGET="validate" COMPILER="${COMPILER_BE}+flambda" CAMLP5_VER="${CAMLP5_VER_BE}" NATIVE_COMP="no" EXTRA_CONF="-flambda-opts -O3" FINDLIB_VER="${FINDLIB_VER_BE}"
- if: NOT (type = pull_request)
env:
- - TEST_TARGET="ci-bignums"
- - if: NOT (type = pull_request)
- env:
- TEST_TARGET="ci-color"
- if: NOT (type = pull_request)
env:
@@ -85,9 +82,6 @@ matrix:
- TEST_TARGET="ci-equations"
- if: NOT (type = pull_request)
env:
- - TEST_TARGET="ci-fcsl-pcm"
- - if: NOT (type = pull_request)
- env:
- TEST_TARGET="ci-fiat-parsers"
- if: NOT (type = pull_request)
env:
@@ -100,9 +94,6 @@ matrix:
- TEST_TARGET="ci-ltac2"
- if: NOT (type = pull_request)
env:
- - TEST_TARGET="ci-math-classes"
- - if: NOT (type = pull_request)
- env:
- TEST_TARGET="ci-mtac2"
- if: NOT (type = pull_request)
env:
diff --git a/CHANGES b/CHANGES
index 75f4df06a..3ec53fad2 100644
--- a/CHANGES
+++ b/CHANGES
@@ -52,6 +52,12 @@ Vernacular Commands
- Nested proofs may be enabled through the option `Nested Proofs Allowed`.
By default, they are disabled and produce an error. The deprecation
warning which used to occur when using nested proofs has been removed.
+- Added option Uniform Inductive Parameters which abstracts over parameters
+ before typechecking constructors, allowing one to write, for example,
+ `Inductive list (A : Type) := nil : list | cons : A -> list -> list.`
+- New Set Hint Variables/Constants Opaque/Transparent commands for setting
+ globally the opacity flag of variables and constants in hint databases,
+ overwriting the opacity set of the hint database.
Coq binaries and process model
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 2dffd2019..9bd3d0b7c 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -20,6 +20,9 @@ If you want to minimize your bug (or help minimize someone else's) for more extr
## Pull requests
+**Beginner's guide to hacking Coq: [`dev/doc/README.md`](dev/doc/README.md)** \
+**Development information and tools: [`dev/README.md`](dev/README.md)**
+
If you want to contribute a bug fix or feature yourself, pull requests on the [GitHub repository](https://github.com/coq/coq) are the way to contribute directly to the Coq implementation. We recommend you create a fork of the repository on GitHub and push your changes to a new "topic branch" in that fork. From there you can follow the [GitHub pull request documentation](https://help.github.com/articles/about-pull-requests/) to get your changes reviewed and pulled into the Coq source repository.
Documentation for getting started with the Coq sources is located in various
diff --git a/Makefile b/Makefile
index 0bfac2c38..df5caf0b7 100644
--- a/Makefile
+++ b/Makefile
@@ -74,9 +74,11 @@ endef
## Files in the source tree
LEXFILES := $(call find, '*.mll')
+YACCFILES := $(call find, '*.mly')
export MLLIBFILES := $(call find, '*.mllib')
export MLPACKFILES := $(call find, '*.mlpack')
export ML4FILES := $(call find, '*.ml4')
+export MLGFILES := $(call find, '*.mlg')
export CFILES := $(call findindir, 'kernel/byterun', '*.c')
export MERLINFILES := $(call find, '.merlin')
@@ -90,7 +92,8 @@ EXISTINGMLI := $(call find, '*.mli')
## Files that will be generated
GENML4FILES:= $(ML4FILES:.ml4=.ml)
-export GENMLFILES:=$(LEXFILES:.mll=.ml) kernel/copcodes.ml
+GENMLGFILES:= $(MLGFILES:.mlg=.ml)
+export GENMLFILES:=$(LEXFILES:.mll=.ml) $(YACCFILES:.mly=.ml) $(GENMLGFILES) kernel/copcodes.ml
export GENHFILES:=kernel/byterun/coq_jumptbl.h
export GENFILES:=$(GENMLFILES) $(GENMLIFILES) $(GENHFILES)
@@ -100,7 +103,7 @@ export GENFILES:=$(GENMLFILES) $(GENMLIFILES) $(GENHFILES)
## More complex file lists
-export MLSTATICFILES := $(filter-out $(GENMLFILES) $(GENML4FILES), $(EXISTINGML))
+export MLSTATICFILES := $(filter-out $(GENMLFILES) $(GENML4FILES) $(GENMLGFILES), $(EXISTINGML))
export MLIFILES := $(sort $(GENMLIFILES) $(EXISTINGMLI))
include Makefile.common
@@ -153,7 +156,10 @@ endif
MAKE_OPTS := --warn-undefined-variable --no-builtin-rules
-submake: alienclean
+bin:
+ mkdir bin
+
+submake: alienclean | bin
$(MAKE) $(MAKE_OPTS) -f Makefile.build $(MAKECMDGOALS)
noconfig:
@@ -223,7 +229,7 @@ clean-ide:
rm -rf $(COQIDEAPP)
ml4clean:
- rm -f $(GENML4FILES)
+ rm -f $(GENML4FILES) $(GENMLGFILES)
depclean:
find . $(FIND_SKIP_DIRS) '(' -name '*.d' ')' -print | xargs rm -f
diff --git a/Makefile.build b/Makefile.build
index 0bc48dcf7..5fd3bd873 100644
--- a/Makefile.build
+++ b/Makefile.build
@@ -350,6 +350,7 @@ kernel/copcodes.ml: kernel/byterun/coq_instruct.h
GRAMBASEDEPS := grammar/q_util.cmi
GRAMCMO := grammar/q_util.cmo \
grammar/argextend.cmo grammar/tacextend.cmo grammar/vernacextend.cmo
+COQPPCMO := grammar/coqpp_parse.cmo grammar/coqpp_lex.cmo
grammar/argextend.cmo : $(GRAMBASEDEPS)
grammar/q_util.cmo : $(GRAMBASEDEPS)
@@ -357,6 +358,10 @@ grammar/tacextend.cmo : $(GRAMBASEDEPS) grammar/argextend.cmo
grammar/vernacextend.cmo : $(GRAMBASEDEPS) grammar/tacextend.cmo \
grammar/argextend.cmo
+grammar/coqpp_parse.cmi: grammar/coqpp_ast.cmi
+grammar/coqpp_parse.cmo: grammar/coqpp_ast.cmi grammar/coqpp_parse.cmi
+grammar/coqpp_lex.cmo: grammar/coqpp_ast.cmi grammar/coqpp_parse.cmo
+
## Ocaml compiler with the right options and -I for grammar
GRAMC := $(OCAMLFIND) ocamlc $(CAMLFLAGS) $(CAMLDEBUG) $(USERFLAGS) \
@@ -367,11 +372,15 @@ GRAMC := $(OCAMLFIND) ocamlc $(CAMLFLAGS) $(CAMLDEBUG) $(USERFLAGS) \
grammar/grammar.cma : $(GRAMCMO)
$(SHOW)'Testing $@'
@touch grammar/test.mlp
- $(HIDE)$(GRAMC) -pp '$(CAMLP5O) -I $(MYCAMLP5LIB) $^ -impl' -impl grammar/test.mlp -o grammar/test
+ $(HIDE)$(GRAMC) -pp '$(CAMLP5O) $^ -impl' -impl grammar/test.mlp -o grammar/test
@rm -f grammar/test.* grammar/test
$(SHOW)'OCAMLC -a $@'
$(HIDE)$(GRAMC) $^ -linkall -a -o $@
+grammar/coqpp: $(COQPPCMO) grammar/coqpp_main.ml
+ $(SHOW)'OCAMLC -a $@'
+ $(HIDE)$(GRAMC) $^ -linkall -o $@
+
## Support of Camlp5 and Camlp5
COMPATCMO:=
@@ -384,6 +393,10 @@ grammar/%.cmo: grammar/%.mlp | $(COMPATCMO)
$(SHOW)'OCAMLC -c -pp $<'
$(HIDE)$(GRAMC) -c -pp '$(GRAMPP)' -impl $<
+grammar/%.cmo: grammar/%.ml | $(COMPATCMO)
+ $(SHOW)'OCAMLC -c -pp $<'
+ $(HIDE)$(GRAMC) -c $<
+
grammar/%.cmi: grammar/%.mli
$(SHOW)'OCAMLC -c $<'
$(HIDE)$(GRAMC) -c $<
@@ -755,11 +768,18 @@ plugins/%.cmx: plugins/%.ml
$(SHOW)'OCAMLLEX $<'
$(HIDE)$(OCAMLLEX) -o $@ "$*.mll"
-%.ml: %.ml4 $(CAMLP5DEPS)
+%.ml %.mli: %.mly
+ $(SHOW)'OCAMLYACC $<'
+ $(HIDE)$(OCAMLYACC) --strict "$*.mly"
+
+%.ml: %.ml4 $(CAMLP5DEPS) grammar/coqpp
$(SHOW)'CAMLP5O $<'
$(HIDE)$(CAMLP5O) -I $(MYCAMLP5LIB) $(PR_O) \
$(CAMLP5DEPS) $(CAMLP5USE) $(CAMLP5COMPAT) -impl $< -o $@
+%.ml: %.mlg grammar/coqpp
+ $(SHOW)'COQPP $<'
+ $(HIDE)grammar/coqpp $<
###########################################################################
# Dependencies of ML code
diff --git a/Makefile.common b/Makefile.common
index 1506bfcad..bb6302a98 100644
--- a/Makefile.common
+++ b/Makefile.common
@@ -143,8 +143,8 @@ BTAUTOCMO:=plugins/btauto/btauto_plugin.cmo
RTAUTOCMO:=plugins/rtauto/rtauto_plugin.cmo
NATSYNTAXCMO:=plugins/syntax/nat_syntax_plugin.cmo
OTHERSYNTAXCMO:=$(addprefix plugins/syntax/, \
- z_syntax_plugin.cmo \
- r_syntax_plugin.cmo \
+ positive_syntax_plugin.cmo n_syntax_plugin.cmo \
+ z_syntax_plugin.cmo r_syntax_plugin.cmo \
int31_syntax_plugin.cmo \
ascii_syntax_plugin.cmo \
string_syntax_plugin.cmo )
diff --git a/checker/indtypes.ml b/checker/indtypes.ml
index bcb71fe55..8f11e01c3 100644
--- a/checker/indtypes.ml
+++ b/checker/indtypes.ml
@@ -221,7 +221,7 @@ let allowed_sorts issmall isunit s =
-let compute_elim_sorts env_ar params mib arity lc =
+let compute_elim_sorts env_ar params arity lc =
let inst = extended_rel_list 0 params in
let env_params = push_rel_context params env_ar in
let lc = Array.map
@@ -239,7 +239,7 @@ let compute_elim_sorts env_ar params mib arity lc =
allowed_sorts small unit s
-let typecheck_one_inductive env params mib mip =
+let typecheck_one_inductive env params mip =
(* mind_typename and mind_consnames not checked *)
(* mind_reloc_tbl, mind_nb_constant, mind_nb_args not checked (VM) *)
(* mind_arity_ctxt, mind_arity, mind_nrealargs DONE (typecheck_arity) *)
@@ -256,7 +256,7 @@ let typecheck_one_inductive env params mib mip =
Array.iter2 check_cons_args mip.mind_nf_lc mip.mind_consnrealdecls;
(* mind_kelim: checked by positivity criterion ? *)
let sorts =
- compute_elim_sorts env params mib mip.mind_arity mip.mind_nf_lc in
+ compute_elim_sorts env params mip.mind_arity mip.mind_nf_lc in
let reject_sort s = not (List.mem_f family_equal s sorts) in
if List.exists reject_sort mip.mind_kelim then
failwith "elimination not allowed";
@@ -355,7 +355,7 @@ let lambda_implicit_lift n a =
(* This removes global parameters of the inductive types in lc (for
nested inductive types only ) *)
-let abstract_mind_lc env ntyps npars lc =
+let abstract_mind_lc ntyps npars lc =
if npars = 0 then
lc
else
@@ -448,7 +448,7 @@ let check_positivity_one (env, _,ntypes,_ as ienv) hyps nrecp (_,i as ind) indlc
let auxntyp = mib.mind_ntypes in
if auxntyp <> 1 then raise (IllFormedInd (LocalNonPos n));
(* The nested inductive type with parameters removed *)
- let auxlcvect = abstract_mind_lc env auxntyp auxnpar mip.mind_nf_lc in
+ let auxlcvect = abstract_mind_lc auxntyp auxnpar mip.mind_nf_lc in
(* Extends the environment with a variable corresponding to
the inductive def *)
let (env',_,_,_ as ienv') = ienv_push_inductive ienv ((mi,u),lpar) in
@@ -625,7 +625,7 @@ let check_inductive env kn mib =
(* - check arities *)
let env_ar = typecheck_arity env0 params mib.mind_packets in
(* - check constructor types *)
- Array.iter (typecheck_one_inductive env_ar params mib) mib.mind_packets;
+ Array.iter (typecheck_one_inductive env_ar params) mib.mind_packets;
(* check the inferred subtyping relation *)
let () =
match mib.mind_universes with
diff --git a/checker/inductive.ml b/checker/inductive.ml
index b1edec556..d15380643 100644
--- a/checker/inductive.ml
+++ b/checker/inductive.ml
@@ -797,7 +797,7 @@ let rec subterm_specif renv stack t =
| Lambda (x,a,b) ->
assert (l=[]);
- let spec,stack' = extract_stack renv a stack in
+ let spec,stack' = extract_stack stack in
subterm_specif (push_var renv (x,a,spec)) stack' b
(* Metas and evars are considered OK *)
@@ -813,7 +813,7 @@ and stack_element_specif = function
|SClosure (h_renv,h) -> lazy_subterm_specif h_renv [] h
|SArg x -> x
-and extract_stack renv a = function
+and extract_stack = function
| [] -> Lazy.from_val Not_subterm , []
| h::t -> stack_element_specif h, t
@@ -845,7 +845,7 @@ let error_illegal_rec_call renv fx (arg_renv,arg) =
let error_partial_apply renv fx =
raise (FixGuardError (renv.env,NotEnoughArgumentsForFixCall fx))
-let filter_stack_domain env ci p stack =
+let filter_stack_domain env p stack =
let absctx, ar = dest_lam_assum env p in
(* Optimization: if the predicate is not dependent, no restriction is needed
and we avoid building the recargs tree. *)
@@ -925,7 +925,7 @@ let check_one_fix renv recpos trees def =
let case_spec = branches_specif renv
(lazy_subterm_specif renv [] c_0) ci in
let stack' = push_stack_closures renv l stack in
- let stack' = filter_stack_domain renv.env ci p stack' in
+ let stack' = filter_stack_domain renv.env p stack' in
Array.iteri (fun k br' ->
let stack_br = push_stack_args case_spec.(k) stack' in
check_rec_call renv stack_br br') lrest
@@ -968,7 +968,7 @@ let check_one_fix renv recpos trees def =
| Lambda (x,a,b) ->
assert (l = []);
check_rec_call renv [] a ;
- let spec, stack' = extract_stack renv a stack in
+ let spec, stack' = extract_stack stack in
check_rec_call (push_var renv (x,a,spec)) stack' b
| Prod (x,a,b) ->
diff --git a/checker/modops.ml b/checker/modops.ml
index c7ad0977a..b92d7bbf1 100644
--- a/checker/modops.ml
+++ b/checker/modops.ml
@@ -80,7 +80,7 @@ and add_module mb env =
let add_module_type mp mtb env = add_module (module_body_of_type mp mtb) env
-let strengthen_const mp_from l cb resolver =
+let strengthen_const mp_from l cb =
match cb.const_body with
| Def _ -> cb
| _ ->
@@ -104,34 +104,34 @@ and strengthen_body : 'a. (_ -> 'a) -> _ -> _ -> 'a generic_module_body -> 'a ge
match mb.mod_type with
| MoreFunctor _ -> mb
| NoFunctor sign ->
- let resolve_out,sign_out = strengthen_sig mp_from sign mp_to mb.mod_delta
+ let resolve_out,sign_out = strengthen_sig mp_from sign mp_to
in
{ mb with
mod_expr = mk_expr mp_to;
mod_type = NoFunctor sign_out;
mod_delta = resolve_out }
-and strengthen_sig mp_from sign mp_to resolver =
+and strengthen_sig mp_from sign mp_to =
match sign with
| [] -> empty_delta_resolver,[]
| (l,SFBconst cb) :: rest ->
- let item' = l,SFBconst (strengthen_const mp_from l cb resolver) in
- let resolve_out,rest' = strengthen_sig mp_from rest mp_to resolver in
+ let item' = l,SFBconst (strengthen_const mp_from l cb) in
+ let resolve_out,rest' = strengthen_sig mp_from rest mp_to in
resolve_out,item'::rest'
| (_,SFBmind _ as item):: rest ->
- let resolve_out,rest' = strengthen_sig mp_from rest mp_to resolver in
+ let resolve_out,rest' = strengthen_sig mp_from rest mp_to in
resolve_out,item::rest'
| (l,SFBmodule mb) :: rest ->
let mp_from' = MPdot (mp_from,l) in
let mp_to' = MPdot(mp_to,l) in
let mb_out = strengthen_mod mp_from' mp_to' mb in
let item' = l,SFBmodule (mb_out) in
- let resolve_out,rest' = strengthen_sig mp_from rest mp_to resolver in
+ let resolve_out,rest' = strengthen_sig mp_from rest mp_to in
resolve_out (*add_delta_resolver resolve_out mb.mod_delta*),
- item':: rest'
- | (l,SFBmodtype mty as item) :: rest ->
- let resolve_out,rest' = strengthen_sig mp_from rest mp_to resolver in
- resolve_out,item::rest'
+ item':: rest'
+ | (_,SFBmodtype _ as item) :: rest ->
+ let resolve_out,rest' = strengthen_sig mp_from rest mp_to in
+ resolve_out,item::rest'
let strengthen mtb mp =
strengthen_body ignore mtb.mod_mp mp mtb
diff --git a/checker/subtyping.ml b/checker/subtyping.ml
index 6d0d6f6c6..3f7f84470 100644
--- a/checker/subtyping.ml
+++ b/checker/subtyping.ml
@@ -217,7 +217,7 @@ let check_inductive env mp1 l info1 mib2 spec2 subst1 subst2=
let _ = Array.map2_i check_cons_types mib1.mind_packets mib2.mind_packets
in ()
-let check_constant env mp1 l info1 cb2 spec2 subst1 subst2 =
+let check_constant env l info1 cb2 spec2 subst1 subst2 =
let error () = error_not_match l spec2 in
let check_conv f = check_conv_error error f in
let check_type env t1 t2 = check_conv conv_leq env t1 t2 in
@@ -281,7 +281,7 @@ and check_signatures env mp1 sig1 sig2 subst1 subst2 =
let check_one_body (l,spec2) =
match spec2 with
| SFBconst cb2 ->
- check_constant env mp1 l (get_obj mp1 map1 l)
+ check_constant env l (get_obj mp1 map1 l)
cb2 spec2 subst1 subst2
| SFBmind mib2 ->
check_inductive env mp1 l (get_obj mp1 map1 l)
diff --git a/configure.ml b/configure.ml
index b5d5a2419..b264a2652 100644
--- a/configure.ml
+++ b/configure.ml
@@ -235,12 +235,6 @@ let get_date () =
let short_date, full_date = get_date ()
-
-(** Create the bin/ directory if non-existent *)
-
-let _ = if not (dir_exists "bin") then Unix.mkdir "bin" 0o755
-
-
(** * Command-line parsing *)
type ide = Opt | Byte | No
@@ -461,14 +455,19 @@ let _ = parse_args ()
type camlexec =
{ mutable find : string;
mutable top : string;
- mutable lex : string; }
+ mutable lex : string;
+ mutable yacc : string;
+ }
let camlexec =
{ find = "ocamlfind";
top = "ocaml";
- lex = "ocamllex"; }
+ lex = "ocamllex";
+ yacc = "ocamlyacc";
+ }
let reset_caml_lex c o = c.lex <- o
+let reset_caml_yacc c o = c.yacc <- o
let reset_caml_top c o = c.top <- o
let reset_caml_find c o = c.find <- o
@@ -580,6 +579,9 @@ let camlbin, caml_version, camllib, findlib_version =
if is_executable (camlbin / "ocamllex")
then reset_caml_lex camlexec (camlbin / "ocamllex") in
let () =
+ if is_executable (camlbin / "ocamlyacc")
+ then reset_caml_yacc camlexec (camlbin / "ocamlyacc") in
+ let () =
if is_executable (camlbin / "ocaml")
then reset_caml_top camlexec (camlbin / "ocaml") in
camlbin, caml_version, camllib, findlib_version
@@ -1280,6 +1282,7 @@ let write_makefile f =
pr "OCAML=%S\n" camlexec.top;
pr "OCAMLFIND=%S\n" camlexec.find;
pr "OCAMLLEX=%S\n" camlexec.lex;
+ pr "OCAMLYACC=%S\n" camlexec.yacc;
pr "# The best compiler: native (=opt) or bytecode (=byte)\n";
pr "BEST=%s\n\n" best_compiler;
pr "# Ocaml version number\n";
diff --git a/default.nix b/default.nix
index 91d963604..d9317bcca 100644
--- a/default.nix
+++ b/default.nix
@@ -9,23 +9,30 @@
# nix-shell supports the --arg option (see Nix doc) that allows you for
# instance to do this:
-# $ nix-shell --arg ocamlPackages "(import <nixpkgs> {}).ocamlPackages_latest" --arg buildIde false
+# $ nix-shell --arg ocamlPackages "(import <nixpkgs> {}).ocaml-ng.ocamlPackages_4_05" --arg buildIde false
# You can also compile Coq and "install" it by running:
# $ make clean # (only needed if you have left-over compilation files)
# $ nix-build
# at the root of the Coq repository.
# nix-build also supports the --arg option, so you will be able to do:
-# $ nix-build --arg doCheck false
+# $ nix-build --arg doInstallCheck false
# if you want to speed up things by not running the test-suite.
# Once the build is finished, you will find, in the current directory,
# a symlink to where Coq was installed.
-{ pkgs ? (import <nixpkgs> {})
+{ pkgs ?
+ (import (fetchTarball {
+ url = "https://github.com/NixOS/nixpkgs/archive/060a98e9f4ad879492e48d63e887b0b6db26299e.tar.gz";
+ sha256 = "1lzvp3md0hf6kp2bvc6dbzh40navlyd51qlns9wbkz6lqk3lgf6j";
+ }) {})
, ocamlPackages ? pkgs.ocaml-ng.ocamlPackages_4_06
, buildIde ? true
, buildDoc ? true
-, doCheck ? true
+, doInstallCheck ? true
+, shell ? false
+ # We don't use lib.inNixShell because that would also apply
+ # when in a nix-shell of some package depending on this one.
}:
with pkgs;
@@ -36,63 +43,64 @@ stdenv.mkDerivation rec {
name = "coq";
buildInputs = [
-
- # Coq dependencies
hostname
- ] ++ (with ocamlPackages; [
- ocaml
- findlib
- camlp5_strict
- num
-
- ]) ++ (if buildIde then [
-
- # CoqIDE dependencies
- ocamlPackages.lablgtk
-
- ] else []) ++ (if buildDoc then [
-
+ python2 time # coq-makefile timing tools
+ ]
+ ++ (with ocamlPackages; [ ocaml findlib camlp5_strict num ])
+ ++ optional buildIde ocamlPackages.lablgtk
+ ++ optionals buildDoc [
# Sphinx doc dependencies
pkgconfig (python3.withPackages
(ps: [ ps.sphinx ps.sphinx_rtd_theme ps.pexpect ps.beautifulsoup4
ps.antlr4-python3-runtime ps.sphinxcontrib-bibtex ]))
- antlr4
-
- ] else []) ++ (if doCheck then
-
+ antlr4
+ ]
+ ++ optionals doInstallCheck (
# Test-suite dependencies
# ncurses is required to build an OCaml REPL
optional (!versionAtLeast ocaml.version "4.07") ncurses
- ++ [
- python
- rsync
- which
- ocamlPackages.ounit
-
- ] else []) ++ (if lib.inNixShell then [
- ocamlPackages.merlin
- ocamlPackages.ocpIndent
-
- # Dependencies of the merging script
- jq
- curl
- git
- gnupg
-
- ] else []);
+ ++ [ ocamlPackages.ounit rsync which ]
+ )
+ ++ optionals shell (
+ [ jq curl git gnupg ] # Dependencies of the merging script
+ ++ (with ocamlPackages; [ merlin ocp-indent ocp-index ]) # Dev tools
+ );
src =
- if lib.inNixShell then null
+ if shell then null
else
with builtins; filterSource
- (path: _: !elem (baseNameOf path) [".git" "result" "bin"]) ./.;
+ (path: _:
+ !elem (baseNameOf path) [".git" "result" "bin" "_build_ci"]) ./.;
prefixKey = "-prefix ";
- buildFlags = [ "world" ] ++ optional buildDoc "doc-html";
+ buildFlags = [ "world" "byte" ] ++ optional buildDoc "doc-html";
+
+ installTargets =
+ [ "install" "install-byte" ] ++ optional buildDoc "install-doc-html";
+
+ inherit doInstallCheck;
+
+ preInstallCheck = ''
+ patchShebangs tools/
+ patchShebangs test-suite/
+ '';
+
+ installCheckTarget = [ "check" ];
- installTargets = [ "install" ] ++ optional buildDoc "install-doc-html";
+ passthru = { inherit ocamlPackages; };
- inherit doCheck;
+ meta = {
+ description = "Coq proof assistant";
+ longDescription = ''
+ Coq is a formal proof management system. It provides a formal language
+ to write mathematical definitions, executable algorithms and theorems
+ together with an environment for semi-interactive development of
+ machine-checked proofs.
+ '';
+ homepage = http://coq.inria.fr;
+ license = licenses.lgpl21;
+ };
}
diff --git a/dev/README b/dev/README
deleted file mode 100644
index 453f85f0d..000000000
--- a/dev/README
+++ /dev/null
@@ -1,50 +0,0 @@
-This directory contains information and tools to help develop the
- Coq system
- ======================
-
-
-Debugging and profiling (in current directory - see doc/debugging.txt)
------------------------
-
-ocamldebug-coq: to launch ocaml debugger (generated by the configure script)
-
-db: to install pretty-printers from ocaml debugger
-base_db: to install raw pretty-printers from ocaml debugger
-
-include: to install pretty-printers from ocaml toplevel (use with the coq Drop command)
-base_include: to install raw pretty-printers from ocaml toplevel
-
-vm_printers.ml, top_printers.ml: ML pretty-printers for debugging
-
-
-Miscellaneous information about the code (directory doc)
------------------------------------------
-
-changes.md: (partial) per-version summary of the evolution of Coq ML source
-style.txt: a few style recommendations for writing Coq ML files
-debugging.md: help for debugging or profiling
-universes.txt: help for debugging universes
-translate.txt: help for using coq translator
-extensions.txt: some help about TACTIC EXTEND
-
-header: standard header for Coq ML files
-perf-analysis: analysis of perfs measured on the compilation of user contribs
-cic.dtd: official dtd of the calc. of ind. constr. for im/ex-portation
-
-
-Documentation of ML interfaces using ocamldoc (directory ocamldoc/html)
-----------------------------------------
-"make mli-doc" in coq root directory.
-
-
-Other development tools (directory tools)
------------------------
-
-coqdev.el: helper customizations for everyday Coq development, eg
- making `compile' work in subdirectories
-
-objects.el: various development utilities at emacs level
-
-anomaly-traces-parser.el: a .emacs-ready elisp snippet to parse
- location of Anomaly backtraces and jump to them conveniently from
- the Emacs *compilation* output.
diff --git a/dev/README.md b/dev/README.md
new file mode 100644
index 000000000..4642aaf06
--- /dev/null
+++ b/dev/README.md
@@ -0,0 +1,46 @@
+# This directory contains information and tools to help develop the Coq system
+
+
+## Debugging and profiling (`dev/`)
+**More info on debugging: [`doc/debugging.md`](doc/debugging.md)**
+
+| File | Description |
+| ---- | ----------- |
+| dev/ocamldebug-coq | To launch ocaml debugger (generated by the configure script) |
+| dev/db | To install pretty-printers from ocaml debugger |
+| dev/base_db | To install raw pretty-printers from ocaml debugger |
+| dev/include | To install pretty-printers from ocaml toplevel (use with the coq Drop command) |
+| dev/base_include | To install raw pretty-printers from ocaml toplevel |
+| dev/vm_printers.ml, top_printers.ml | ML pretty-printers for debugging |
+
+
+## Miscellaneous information about the code (`dev/doc`)
+**Beginner's guide to hacking Coq: [`dev/doc/README.md`](doc/README.md)**
+
+| File | Description |
+| ---- | ----------- |
+| [`dev/doc/changes.md`](doc/changes.md) | (partial) Per-version summary of the evolution of Coq ML source |
+| [`dev/doc/style.txt`](doc/style.txt) | A few style recommendations for writing Coq ML files |
+| [`dev/doc/debugging.md`](doc/debugging.md) | Help for debugging or profiling |
+| [`dev/doc/universes.txt`](doc/universes.txt) | Help for debugging universes |
+| [`dev/doc/extensions.txt`](doc/extensions.txt) | Some help about TACTIC EXTEND |
+| [`dev/doc/perf-analysis`](doc/perf-analysis)| Analysis of perfs measured on the compilation of user contribs |
+| [`dev/doc/cic.dtd`](doc/cic.dtd) | Official dtd of the calc. of ind. constr. for im/ex-portation |
+| [`dev/doc/econstr.md`](doc/econstr.md) | Describes `Econstr`, implementation of treatment of `evar` in the engine |
+| [`dev/doc/primproj.md`](doc/primproj.md) | Describes primitive projections |
+| [`dev/doc/proof-engine.md`](doc/proof-engine.md) | Tutorial on new proof engine |
+| [`dev/doc/xml-protocol.md`](doc/xml-protocol.md) | XML protocol that coqtop and IDEs use to communicate |
+| [`dev/doc/MERGING.md`](doc/MERGING.md) | How pull requests should be merged into `master` |
+| [`dev/doc/release-process.md`](doc/release-process.md) | Process of creating a new Coq release |
+
+
+## Documentation of ML interfaces using ocamldoc (`dev/ocamldoc/html`)
+`make mli-doc` in coq root directory.
+
+
+## Other development tools (`dev/tools`)
+
+| File | Description |
+| ---- | ----------- |
+| [`dev/tools/coqdev.el`](tools/coqdev.el) | Helper customizations for everyday Coq development, e.g. making `compile` work in subdirectories |
+| [`dev/tools/objects.el`](tools/objects.el) | Various development utilities at emacs level |
diff --git a/dev/ci/ci-common.sh b/dev/ci/ci-common.sh
index 85df249d3..a68cd0933 100644
--- a/dev/ci/ci-common.sh
+++ b/dev/ci/ci-common.sh
@@ -69,7 +69,7 @@ git_checkout()
if [ ! -d .git ] ; then git clone "${_DEPTH[@]}" "${_URL}" . ; fi && \
echo "Checking out ${_DEST}" && \
git fetch "${_URL}" "${_BRANCH}" && \
- git checkout "${_COMMIT}" && \
+ git -c advice.detachedHead=false checkout "${_COMMIT}" && \
echo "${_DEST}: $(git log -1 --format='%s | %H | %cd | %aN')" )
}
diff --git a/dev/ci/docker/bionic_coq/Dockerfile b/dev/ci/docker/bionic_coq/Dockerfile
index 52f851917..a4ee6a379 100644
--- a/dev/ci/docker/bionic_coq/Dockerfile
+++ b/dev/ci/docker/bionic_coq/Dockerfile
@@ -1,4 +1,4 @@
-# CACHEKEY: "bionic_coq-V2018-06-13-V1"
+# CACHEKEY: "bionic_coq-V2018-07-02-V4"
# ^^ Update when modifying this file.
FROM ubuntu:bionic
@@ -6,10 +6,12 @@ LABEL maintainer="e@x80.org"
ENV DEBIAN_FRONTEND="noninteractive"
-RUN apt-get update -qq && apt-get install -y -qq m4 wget time gcc-multilib opam \
+RUN apt-get update -qq && apt-get install --no-install-recommends -y -qq \
+ m4 automake autoconf time wget rsync git gcc-multilib opam \
libgtk2.0-dev libgtksourceview2.0-dev \
- texlive-latex-extra texlive-fonts-recommended texlive-science \
- python3-sphinx python3-pexpect python3-sphinx-rtd-theme python3-bs4 python3-sphinxcontrib.bibtex python3-pip
+ texlive-latex-extra texlive-fonts-recommended texlive-science tipa \
+ python3-sphinx python3-pexpect python3-sphinx-rtd-theme python3-bs4 python3-sphinxcontrib.bibtex \
+ python3-setuptools python3-wheel python3-pip
RUN pip3 install antlr4-python3-runtime
diff --git a/dev/ci/user-overlays/07746-cleanup-unused-various.sh b/dev/ci/user-overlays/07746-cleanup-unused-various.sh
new file mode 100644
index 000000000..8688b0d53
--- /dev/null
+++ b/dev/ci/user-overlays/07746-cleanup-unused-various.sh
@@ -0,0 +1,6 @@
+#!/bin/sh
+
+if [ "$CI_PULL_REQUEST" = "7746" ] || [ "$CI_BRANCH" = "cleanup-unused-various" ]; then
+ Equations_CI_BRANCH="adapt-unused"
+ Equations_CI_GITURL="https://github.com/SkySkimmer/Coq-Equations.git"
+fi
diff --git a/dev/ci/user-overlays/07820-mattam82-hints-constants.sh b/dev/ci/user-overlays/07820-mattam82-hints-constants.sh
new file mode 100644
index 000000000..2ae86ae22
--- /dev/null
+++ b/dev/ci/user-overlays/07820-mattam82-hints-constants.sh
@@ -0,0 +1,6 @@
+_OVERLAY_BRANCH=hints-variables-overlay
+
+if [ "$CI_PULL_REQUEST" = "7820" ] || [ "$CI_BRANCH" = "_OVERLAY_BRANCH" ]; then
+
+ Equations_CI_BRANCH="$_OVERLAY_BRANCH"
+fi
diff --git a/dev/ci/user-overlays/07863-rm-sorts-contents.sh b/dev/ci/user-overlays/07863-rm-sorts-contents.sh
deleted file mode 100644
index 374a61484..000000000
--- a/dev/ci/user-overlays/07863-rm-sorts-contents.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/sh
-
-if [ "$CI_PULL_REQUEST" = "7863" ] || [ "$CI_BRANCH" = "rm-sorts-contents" ]; then
- Elpi_CI_BRANCH=fix-sorts-contents
- Elpi_CI_GITURL=https://github.com/SkySkimmer/coq-elpi.git
-fi
diff --git a/dev/ci/user-overlays/07902-ppedrot-camlp5-parser.sh b/dev/ci/user-overlays/07902-ppedrot-camlp5-parser.sh
new file mode 100644
index 000000000..735153ebd
--- /dev/null
+++ b/dev/ci/user-overlays/07902-ppedrot-camlp5-parser.sh
@@ -0,0 +1,8 @@
+_OVERLAY_BRANCH=camlp5-parser
+
+if [ "$CI_PULL_REQUEST" = "7902" ] || [ "$CI_BRANCH" = "$_OVERLAY_BRANCH" ]; then
+
+ ltac2_CI_BRANCH="$_OVERLAY_BRANCH"
+ ltac2_CI_GITURL=https://github.com/ppedrot/ltac2
+
+fi
diff --git a/dev/ci/user-overlays/07906-univs-of-constr.sh b/dev/ci/user-overlays/07906-univs-of-constr.sh
deleted file mode 100644
index 071665087..000000000
--- a/dev/ci/user-overlays/07906-univs-of-constr.sh
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/bin/sh
-
-if [ "$CI_PULL_REQUEST" = "7906" ] || [ "$CI_BRANCH" = "univs-of-constr-noseff" ]; then
- Equations_CI_BRANCH=fix-univs-of-constr
- Equations_CI_GITURL=https://github.com/SkySkimmer/Coq-Equations.git
-
- Elpi_CI_BRANCH=fix-universes-of-constr
- Elpi_CI_GITURL=https://github.com/SkySkimmer/coq-elpi.git
-fi
diff --git a/dev/ci/user-overlays/7080-herbelin-master+swapping-modules-constr-context.sh b/dev/ci/user-overlays/7080-herbelin-master+swapping-modules-constr-context.sh
deleted file mode 100644
index b4d3d7e67..000000000
--- a/dev/ci/user-overlays/7080-herbelin-master+swapping-modules-constr-context.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-if [ "$CI_PULL_REQUEST" = "7080" ] || [ "$CI_BRANCH" = "master+swapping-modules-constr-context" ]; then
- Equations_CI_BRANCH=master+adapting-coq-pr7080
- Equations_CI_GITURL=https://github.com/herbelin/Coq-Equations.git
-
- Elpi_CI_BRANCH=coq-master+adapting-coq-pr7080
- Elpi_CI_GITURL=https://github.com/herbelin/coq-elpi.git
-fi
diff --git a/dev/ci/user-overlays/README.md b/dev/ci/user-overlays/README.md
index 11e4d9ae0..d38d1b06d 100644
--- a/dev/ci/user-overlays/README.md
+++ b/dev/ci/user-overlays/README.md
@@ -9,6 +9,8 @@ testing is possible. It redefines some variables from
[`ci-basic-overlay.sh`](../ci-basic-overlay.sh):
give the name of your branch using a `_CI_BRANCH` variable and the location of
your fork using a `_CI_GITURL` variable.
+The `_CI_GITURL` variable should be the URL of the repository without a
+trailing `.git`.
Moreover, the file contains very simple logic to test the pull request number
or branch name and apply it only in this case.
diff --git a/dev/doc/README.md b/dev/doc/README.md
new file mode 100644
index 000000000..1a4e40f68
--- /dev/null
+++ b/dev/doc/README.md
@@ -0,0 +1,77 @@
+# Beginner's guide to hacking Coq
+
+## Getting dependencies
+
+Assuming one is running Ubuntu (if not, replace `apt` with the package manager of choice)
+
+```
+$ sudo apt-get install make opam git
+
+# At the time of writing, <latest-ocaml-version> is 4.06.1.
+# The latest version number is available at: https://ocaml.org/releases/
+
+$ opam init --comp <latest-ocaml-version>
+
+# Then follow the advice displayed at the end on how to update your
+# ~/.bashrc and ~/.ocamlinit files.
+
+$ source ~/.bashrc
+$ opam install camlp5
+
+# needed if you want to build "coqide" target
+
+$ sudo apt-get install liblablgtksourceview2-ocaml-dev \
+ libgtk2.0-dev libgtksourceview2.0-dev
+$ opam install lablgtk
+```
+
+## Building `coqtop`
+The general workflow is to first setup a development environment with
+the correct `configure` settings, then hacking on Coq, make-ing, and testing.
+
+
+This document will use `$JOBS` to refer to the number of parallel jobs one
+is willing to have with `make`.
+
+
+```
+$ git clone https://github.com/coq/coq.git
+$ cd coq
+$ ./configure -profile devel
+$ make -j $JOBS # Make once for `merlin` (the autocompletion tool)
+
+<hack>
+
+$ make -j $JOBS states # builds just enough to run coqtop
+$ bin/coqtop -compile <test_file_name.v>
+<goto hack until stuff works>
+
+<run test-suite>
+```
+
+To learn how to run the test suite, you can read
+[`test-suite/README.md`](../../test-suite/README.md).
+
+## Coq functions of interest
+- `Coqtop.start`: This function is the main entry point of coqtop.
+- `Coqtop.parse_args`: This function is responsible for parsing command-line arguments.
+- `Coqloop.loop`: This function implements the read-eval-print loop.
+- `Vernacentries.interp`: This function is called to execute the Vernacular command the user has typed.
+  It dispatches control to specific functions handling the different Vernacular commands.
+- `Vernacentries.vernac_check_may_eval`: This function handles the `Check ...` command.
+
+
+## Development environment + tooling
+- [`Merlin`](https://github.com/ocaml/merlin) for autocomplete.
+- [Wiki pages on tooling containing `emacs`, `vim`, and `git` information](https://github.com/coq/coq/wiki/DevelSetup)
+
+## A note about rlwrap
+
+When using `rlwrap coqtop` make sure the version of `rlwrap` is at least
+`0.42`, otherwise you will get
+
+```
+rlwrap: error: Couldn't read completions from /usr/share/rlwrap/completions/coqtop: No such file or directory
+```
+
+If this happens either update or use an alternate readline wrapper like `ledit`.
diff --git a/dev/doc/changes.md b/dev/doc/changes.md
index cbb95625f..6d5023405 100644
--- a/dev/doc/changes.md
+++ b/dev/doc/changes.md
@@ -82,11 +82,114 @@ Vernacular commands
`tactics/`. In all cases adapting is a matter of changing the module
name.
+Primitive number parsers
+
+- For better modularity, the primitive parsers for positive, N and Z
+ have been split over three files (plugins/syntax/positive_syntax.ml,
+ plugins/syntax/n_syntax.ml, plugins/syntax/z_syntax.ml).
+
### Unit testing
The test suite now allows writing unit tests against OCaml code in the Coq
code base. Those unit tests create a dependency on the OUnit test framework.
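As a rough, hypothetical illustration (not part of this patch), such a unit test is ordinary OCaml code driven by OUnit; the sketch below assumes the `ounit` package is available and checks a trivial `List.rev` property in place of a real test against Coq's OCaml API:

```
open OUnit2

(* Trivial stand-in assertion; a real test would exercise a Coq OCaml API instead. *)
let test_rev _ctxt =
  assert_equal ~printer:(String.concat ";") ["c"; "b"; "a"] (List.rev ["a"; "b"; "c"])

(* Collect the tests into a suite and let OUnit run them. *)
let () = run_test_tt_main ("example" >::: [ "rev" >:: test_rev ])
```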
+### Transitioning away from Camlp5
+
+In an effort to reduce dependency on camlp5, the use of several grammar macros
+is discouraged. Coq is now shipped with its own preprocessor, called coqpp,
+which serves the same purpose as camlp5.
+
+To perform the transition to coqpp macros, one first needs to change the
+extension of a macro file from `.ml4` to `.mlg`. Not all camlp5 macros are
+handled yet.
+
+Due to parsing constraints, the syntax of the macros is slightly different, but
+updating the source code is mostly a matter of straightforward
+search-and-replace. The main differences are summarized below.
+
+#### OCaml code
+
+Every piece of toplevel OCaml code needs to be wrapped into braces.
+
+For instance, code of the form
+```
+let myval = 0
+```
+should be turned into
+```
+{
+let myval = 0
+}
+```
+
+#### TACTIC EXTEND
+
+Steps to perform:
+- replace the brackets enclosing OCaml code in actions with braces
+- if not there yet, add a leading `|` to the first rule
+
+For instance, code of the form:
+```
+TACTIC EXTEND my_tac
+ [ "tac1" int_or_var(i) tactic(t) ] -> [ mytac1 ist i t ]
+| [ "tac2" tactic(t) ] -> [ mytac2 t ]
+END
+```
+should be turned into
+```
+TACTIC EXTEND my_tac
+| [ "tac1" int_or_var(i) tactic(t) ] -> { mytac1 ist i t }
+| [ "tac2" tactic(t) ] -> { mytac2 t }
+END
+```
+
+#### VERNAC EXTEND
+
+Not handled yet.
+
+#### ARGUMENT EXTEND
+
+Not handled yet.
+
+#### GEXTEND
+
+Most plugin writers do not need this low-level interface, but for the sake of
+completeness we document it.
+
+Steps to perform are:
+- replace GEXTEND with GRAMMAR EXTEND
+- wrap every occurrence of OCaml code in actions into braces { }
+
+For instance, code of the form
+```
+GEXTEND Gram
+ GLOBAL: my_entry;
+
+my_entry:
+[ [ x = bar; y = qux -> do_something x y
+ | "("; z = LIST0 my_entry; ")" -> do_other_thing z
+] ];
+END
+```
+should be turned into
+```
+GRAMMAR EXTEND Gram
+ GLOBAL: my_entry;
+
+my_entry:
+[ [ x = bar; y = qux -> { do_something x y }
+ | "("; z = LIST0 my_entry; ")" -> { do_other_thing z }
+] ];
+END
+```
+
+Caveats:
+- Omitting `GLOBAL` entries means that all entries are local, whereas camlp5
+ treats this as a shorthand for making all entries global. Solution: always
+ define a `GLOBAL` section.
+- No complex patterns are allowed in token naming. Solution: match on the token
+ value inside the OCaml quotation.
+
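For the last caveat above, a purely hypothetical sketch (not taken from this patch; `my_entry`, `do_foo` and `do_bar` are placeholder names, and the exact token syntax may vary) is to bind the token and dispatch on its value inside the OCaml action:

```
GRAMMAR EXTEND Gram
  GLOBAL: my_entry;

my_entry:
[ [ s = IDENT -> {
      (* Dispatch on the token value in plain OCaml instead of a pattern in the rule. *)
      match s with
      | "foo" -> do_foo ()
      | "bar" -> do_bar ()
      | _ -> CErrors.user_err (Pp.str "expected foo or bar") }
] ];
END
```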
## Changes between Coq 8.7 and Coq 8.8
### Bug tracker
diff --git a/dev/doc/setup.txt b/dev/doc/setup.txt
deleted file mode 100644
index c48c2d5d1..000000000
--- a/dev/doc/setup.txt
+++ /dev/null
@@ -1,269 +0,0 @@
-This document provides detailed guidance on how to:
-- compile Coq
-- take advantage of Merlin in Emacs
-- enable auto-completion for Ocaml source-code
-- use ocamldebug in Emacs for debugging coqtop
-The instructions were tested with Debian 8.3 (Jessie).
-
-The procedure is somewhat tedious, but the final results are (still) worth the effort.
-
-How to compile Coq
-------------------
-
-Getting build dependencies:
-
- sudo apt-get install make opam git
- opam init --comp 4.02.3
- # Then follow the advice displayed at the end as how to update your ~/.bashrc and ~/.ocamlinit files.
-
- source ~/.bashrc
-
- # needed if you want to build "coqtop" target
- opam install camlp5
-
- # needed if you want to build "coqide" target
- sudo apt-get install liblablgtksourceview2-ocaml-dev libgtk2.0-dev libgtksourceview2.0-dev
- opam install lablgtk
-
- # needed if you want to build "doc" target
- sudo apt-get install texlive-latex-recommended texlive-fonts-extra texlive-math-extra \
- hevea texlive-latex-extra latex-xcolor
-
-Cloning Coq:
-
- # Go to the directory where you want to clone Coq's source-code. E.g.:
- cd ~/git
-
- git clone https://github.com/coq/coq.git
-
-Building coqtop:
-
- cd ~/git/coq
- git checkout trunk
- make distclean
- ./configure -profile devel
- make clean
- make -j4 coqide printers
-
-The "-profile devel" enables all options recommended for developers (like
-warnings, support for Merlin, etc). Moreover Coq is configured so that
-it can be run without installing it (i.e. from the current directory).
-
-Once the compilation is over check if
-- bin/coqtop
-- bin/coqide
-behave as expected.
-
-
-A note about rlwrap
--------------------
-
-When using "rlwrap coqtop" make sure the version of rlwrap is at least
-0.42, otherwise you will get
-
- rlwrap: error: Couldn't read completions from /usr/share/rlwrap/completions/coqtop: No such file or directory
-
-If this happens either update or use an alternate readline wrapper like "ledit".
-
-
-How to install and configure Merlin (for Emacs)
------------------------------------------------
-
- sudo apt-get install emacs
-
- opam install tuareg
- # Follow the advice displayed at the end as how to update your ~/.emacs file.
-
- opam install merlin
- # Follow the advice displayed at the end as how to update your ~/.emacs file.
-
-Then add this:
-
- (push "~/.opam/4.02.3/share/emacs/site-lisp" load-path) ; directory containing merlin.el
- (setq merlin-command "~/.opam/4.02.3/bin/ocamlmerlin") ; needed only if ocamlmerlin not already in your PATH
- (autoload 'merlin-mode "merlin" "Merlin mode" t)
- (add-hook 'tuareg-mode-hook 'merlin-mode)
- (add-hook 'caml-mode-hook 'merlin-mode)
- (load "~/.opam/4.02.3/share/emacs/site-lisp/tuareg-site-file")
-
- ;; Do not use TABs. These confuse Merlin.
- (setq-default indent-tabs-mode nil)
-
-to your ~/.emacs file.
-
-Further Emacs configuration when we start it for the first time.
-
-Try to open some *.ml file in Emacs, e.g.:
-
- cd ~/git/coq
- emacs toplevel/coqtop.ml &
-
-Emacs display the following strange message:
-
- The local variables list in ~/git/coq
- contains values that may be safe (*).
-
- Do you want to apply it?
-
-Just press "!", i.e. "apply the local variable list, and permanently mark these values (\*) as safe."
-
-Emacs then shows two windows:
-- one window that shows the contents of the "toplevel/coqtop.ml" file
-- and the other window that shows greetings for new Emacs users.
-
-If you do not want to see the second window next time you start Emacs, just check "Never show it again" and click on "Dismiss this startup screen."
-
-The default key-bindings are described here:
-
- https://github.com/the-lambda-church/merlin/wiki/emacs-from-scratch
-
-If you want, you can customize them by replacing the following lines:
-
- (define-key merlin-map (kbd "C-c C-x") 'merlin-error-next)
- (define-key merlin-map (kbd "C-c C-l") 'merlin-locate)
- (define-key merlin-map (kbd "C-c &") 'merlin-pop-stack)
- (define-key merlin-map (kbd "C-c C-t") 'merlin-type-enclosing)
-
-in the file "~/.opam/4.02.3/share/emacs/site-lisp/merlin.el" with what you want.
-In the text below we assume that you changed the origin key-bindings in the following way:
-
- (define-key merlin-map (kbd "C-n") 'merlin-error-next)
- (define-key merlin-map (kbd "C-l") 'merlin-locate)
- (define-key merlin-map (kbd "C-b") 'merlin-pop-stack)
- (define-key merlin-map (kbd "C-t") 'merlin-type-enclosing)
-
-Now, when you press <Ctrl+L>, Merlin will show the definition of the symbol in a separate window.
-If you prefer to jump to the definition within the same window, do this:
-
- <Alt+X> customize-group <ENTER> merlin <ENTER>
-
- Merlin Locate In New Window
-
- Value Menu
-
- Never Open In New Window
-
- State
-
- Set For Future Sessions
-
-Testing (Merlin):
-
- cd ~/git/coq
- emacs toplevel/coqtop.ml &
-
-Go to the end of the file where you will see the "start" function.
-
-Go to a line where "init_toplevel" function is called.
-
-If you want to jump to the position where that function or datatype under the cursor is defined, press <Ctrl+L>.
-
-If you want to jump back, type: <Ctrl+B>
-
-If you want to learn the type of the value at current cursor's position, type: <Ctrl+T>
-
-
-Enabling auto-completion in emacs
----------------------------------
-
-In Emacs, type: <Alt+M> list-packages <ENTER>
-
-In the list that is displayed, click on "company".
-
-A new window appears where just click on "Install" and then answer "Yes".
-
-These lines:
-
- (package-initialize)
- (require 'company)
- ; Make company aware of merlin
- (add-to-list 'company-backends 'merlin-company-backend)
- ; Enable company on merlin managed buffers
- (add-hook 'merlin-mode-hook 'company-mode)
- (global-set-key [C-tab] 'company-complete)
-
-then need to be added to your "~/.emacs" file.
-
-Next time when you start emacs and partially type some identifier,
-emacs will offer the corresponding completions.
-Auto-completion can also be manually invoked by typing <Ctrl+TAB>.
-Description of various other shortcuts is here.
-
- http://company-mode.github.io/
-
-
-Getting along with ocamldebug
------------------------------
-
-The default ocamldebug key-bindings are described here.
-
- http://caml.inria.fr/pub/docs/manual-ocaml/debugger.html#sec369
-
-If you want, you can customize them by putting the following commands:
-
- (global-set-key (kbd "<f5>") 'ocamldebug-break)
- (global-set-key (kbd "<f6>") 'ocamldebug-run)
- (global-set-key (kbd "<f7>") 'ocamldebug-next)
- (global-set-key (kbd "<f8>") 'ocamldebug-step)
- (global-set-key (kbd "<f9>") 'ocamldebug-finish)
- (global-set-key (kbd "<f10>") 'ocamldebug-print)
- (global-set-key (kbd "<f12>") 'camldebug)
-
-to your "~/.emacs" file.
-
-Let us try whether ocamldebug in Emacs works for us.
-(If necessary, re-)compile coqtop:
-
- cd ~/git/coq
- make -j4 coqide printers
-
-open Emacs:
-
- emacs toplevel/coqtop.ml &
-
-and type:
-
- <F12> ../bin/coqtop.byte <ENTER> ../dev/ocamldebug-coq <ENTER>
-
-As a result, a new window is open at the bottom where you should see:
-
- (ocd)
-
-i.e. an ocamldebug shell.
-
- 1. Switch to the window that contains the "coqtop.ml" file.
- 2. Go to the end of the file.
- 3. Find the definition of the "start" function.
- 4. Go to the "let" keyword that is at the beginning of the first line.
- 5. By pressing <F5> you set a breakpoint to the cursor's position.
- 6. By pressing <F6> you start the bin/coqtop process.
- 7. Then you can:
- - step over function calls: <F7>
- - step into function calls: <F8>
- - or finish execution of the current function until it returns: <F9>.
-
-Other ocamldebug commands, can be typed to the window that holds the ocamldebug shell.
-
-The points at which the execution of Ocaml program can stop are defined here:
-
- http://caml.inria.fr/pub/docs/manual-ocaml/debugger.html#sec350
-
-
-Installing printers to ocamldebug
----------------------------------
-
-There is a pretty comprehensive set of printers defined for many common data types.
-You can load them by switching to the window holding the "ocamldebug" shell and typing:
-
- (ocd) source "../dev/db"
-
-
-Some of the functions were you might want to set a breakpoint and see what happens next
----------------------------------------------------------------------------------------
-
-- Coqtop.start : This function is the main entry point of coqtop.
-- Coqtop.parse_args : This function is responsible for parsing command-line arguments.
-- Coqloop.loop : This function implements the read-eval-print loop.
-- Vernacentries.interp : This function is called to execute the Vernacular command user have typed.\
- It dispatches the control to specific functions handling different Vernacular command.
-- Vernacentries.vernac_check_may_eval : This function handles the "Check ..." command.
diff --git a/dev/doc/translate.txt b/dev/doc/translate.txt
deleted file mode 100644
index 5b372c96c..000000000
--- a/dev/doc/translate.txt
+++ /dev/null
@@ -1,495 +0,0 @@
-
- How to use the translator
- =========================
-
- (temporary version to be included in the official
- TeX document describing the translator)
-
-The translator is a smart, robust and powerful tool to improve the
-readibility of your script. The current document describes the
-possibilities of the translator.
-
-In case of problem recompiling the translated files, don't waste time
-to modify the translated file by hand, read first the following
-document telling on how to modify the original files to get a smooth
-uniform safe translation. All 60000 lines of Coq lines on our
-user-contributions server have been translated without any change
-afterwards, and 0,5 % of the lines of the original files (mainly
-notations) had to be modified beforehand to get this result.
-
-Table of contents
------------------
-
-I) Implicit Arguments
- 1) Strict Implicit Arguments
- 2) Implicit Arguments in standard library
-
-II) Notations
- 1) Translating a V7 notation as it was
- 2) Translating a V7 notation which conflicts with the new syntax
- a) Associativity conflicts
- b) Conflicts with other notations
- b1) A notation hides another notation
- b2) A notation conflicts with the V8 grammar
- b3) My notation is already defined at another level
- c) How to use V8only with Distfix ?
- d) Can I overload a notation in V8, e.g. use "*" and "+" ?
- 3) Using the translator to have simplest notations
- 4) Setting the translator to automatically use new notations that
- wasn't used in old syntax
- 5) Defining a construction and its notation simultaneously
-
-III) Various pitfalls
- 1) New keywords
- 2) Old "Case" and "Match"
- 3) Change of definition or theorem names
- 4) Change of tactic names
-
----------------------------------------------------------------------
-
-I) Implicit Arguments
- ------------------
-
-1) Strict Implicit Arguments
-
- "Set Implicit Arguments" changes its meaning in V8: the default is
-to turn implicit only the arguments that are _strictly_ implicit (or
-rigid), i.e. that remains inferable whatever the other arguments
-are. E.g "x" inferable from "P x" is not strictly inferable since it
-can disappears if "P" is instanciated by a term which erase "x".
-
- To respect the old semantics, the default behaviour of the
-translator is to replace each occurrence "Set Implicit Arguments" by
-
- Set Implicit Arguments.
- Unset Strict Implicits.
-
-  However, you may wish to adopt the new semantics of "Set Implicit
-Arguments" (for instance because you think that the choice of
-arguments it sets implicit is more "natural" for you). In this case,
-add the option -strict-implicit to the translator.
-
- Warning: Changing the number of implicit arguments can break the
-notations. Then use the V8only modifier of Notations.
-
-2) Implicit Arguments in standard library
-
-  The main definitions of the standard library now have implicit
-arguments. These arguments are dropped in the translated files. This
-can exceptionally be a source of incompatibilities that have to be
-solved by hand (it typically happens for polymorphic functions applied
-to "nil" or "None").
-
-II) Notations
- ---------
-
- Grammar (on constr) and Syntax are no longer supported. Replace them by
-Notation before translation.
-
- Precedence levels are now from 0 to 200. In V8, the precedence and
-associativity of an operator cannot be redefined. Typical levels are
-(refer to the chapter on notations in the Reference Manual for the
-full list):
-
- <-> : 95 (no associativity)
- -> : 90 (right associativity)
- \/ : 85 (right associativity)
- /\ : 80 (right associativity)
- ~ : 75 (right associativity)
- =, <, >, <=, >=, <> : 70 (no associativity)
- +, - : 50 (left associativity)
- *, / : 40 (left associativity)
- ^ : 30 (right associativity)
-
-1) Translating a V7 notation as it was
-
- By default, the translator keeps the associativity given in V7 while
-the levels are mapped according to the following table:
-
- the V7 levels [ 0; 1; 2; 3; 4; 5; 6; 7; 8; 9; 10]
- are resp. mapped in V8 to [ 0; 20; 30; 40; 50; 70; 80; 85; 90; 95; 100]
- with predefined assoc [ No; L; R; L; L; No; R; R; R; No; L]
-
-  If this is OK for you, simply apply the translator.
-
-2) Translating a V7 notation which conflicts with the new syntax
-
-a) Associativity conflict
-
- Since the associativity of the levels obtained by translating a V7
-level (as shown in the table above) cannot be changed, you have to choose
-another level with a compatible associativity.
-
- You can choose any level between 0 and 200, knowing that the
-standard operators are already set at the levels shown on the list
-above.
-
-Example 1: Assume you have a notation
-
-Infix NONA 2 "=_S" my_setoid_eq.
-
-By default, the translator moves it to level 30 which is right
-associative, hence a conflict with the expected no associativity.
-
-To solve the problem, just add the "V8only" modifier to reset the
-level and enforce the associativity as follows:
-
-Infix NONA 2 "=_S" my_setoid_eq V8only (at level 70, no associativity).
-
-The translator now knows that it has to translate "=_S" at level 70
-with no associativity.
-
-Rem: 70 is the "natural" level for relations, hence the choice of 70
-here, but any other level accepting no associativity would have been
-OK.
-
-Example 2: Assume you have a notation
-
-Infix RIGHTA 1 "o" my_comp.
-
-By default, the translator moves it to level 20 which is left
-associative, hence a conflict with the expected right associativity.
-
-To solve the problem, just add the "V8only" modifier to reset the
-level and enforce the associativity as follows:
-
-Infix RIGHTA 1 "o" my_comp V8only (at level 20, right associativity).
-
-The translator now knows that it has to translate "o" at level 20
-which has the correct "right associativity".
-
-Rem: We assumed here that the user wants a strong precedence for
-composition, so that, say, "f o g + h" is parsed as "(f o g) + h".
-To get "o" binding less tightly than the arithmetical operators, an
-appropriate level would have been close to, but below, 70, e.g. 65.
-
-b) Conflicts with other notations
-
-Since the new syntax comes with new keywords and new predefined
-symbols, new conflicts can occur. Again, you can use the option V8only
-to inform the translator of the new syntax to use.
-
-b1) A notation hides another notation
-
-Rem: use Print Grammar constr in V8 to diagnose the overlap and see the
-section on factorization in the chapter on notations of the Reference
-Manual for hints on how to factorize.
-
-Example:
-
-Notation "{ x }" := (my_embedding x) (at level 1).
-
-overlaps in V8 with notation "{ x : A & P }" at level 0 and with x at
-level 99. The conflicts can be solved by left-factorizing the notation
-as follows:
-
-Notation "{ x }" := (my_embedding x) (at level 1)
- V8only (at level 0, x at level 99).
-
-b2) A notation conflicts with the V8 grammar.
-
-Again, use the V8only modifier to tell the translator to automatically
-handle the new syntax.
-
-Example:
-
-Infix 3 "@" app.
-
-Since "@" is used in the new syntax for deactivating the implicit
-arguments, another symbol has to be used, e.g. "@@". This is done via
-the V8only option as follows:
-
-Infix 3 "@" app V8only "@@" (at level 40, left associativity).
-
-or, alternatively by
-
-Notation "x @ y" := (app x y) (at level 3, left associativity)
- V8only "x @@ y" (at level 40, left associativity).
-
-b3) My notation is already defined at another level (or with another
-associativity)
-
-In V8, the level and associativity of a given notation can no longer
-be changed. Then, either you adopt the standard reserved levels and
-associativity for this notation (as given on the list above) or you
-change your notation.
-
-- To change the notation, follow the directions in section b2.
-
-- To adopt the standard level, just use V8only without any argument.
-
-Example.
-
-Infix 6 "*" my_mult.
-
-is not accepted as such in V8. Write
-
-Infix 6 "*" my_mult V8only.
-
-to tell the translator to use "*" at the reserved level (i.e. 40 with
-left associativity). Even better, use interpretation scopes (look at
-the Reference Manual).
-
-c) How to use V8only with Distfix ?
-
-You can't; use Notation instead of Distfix.
-
-d) Can I overload a notation in V8, e.g. use "*" and "+" for my own
-algebraic operations ?
-
-Yes, using interpretation scopes (see the corresponding chapter in the
-Reference Manual).
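-
-For instance (a sketch reusing the hypothetical "my_mult" from above):
-
-  Infix "*" := my_mult : my_scope.
-  Open Scope my_scope.
-
-Since "*" is already reserved, its level and associativity need not be
-repeated; the interpretation in use is selected by opening "my_scope".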
-
-3) Using the translator to have simplest notations
-
-Thanks to the new syntax, * now has the expected left associativity,
-and the symbols <, >, <= and >= are now available.
-
-Thanks to the interpretation scopes, you can overload the
-interpretation of these operators with the default interpretation
-provided in Coq.
-
-This may be a motivation to use the translator to automatically change
-the notations while switching to the new syntax.
-
-See sections b) and d) above for examples.
-
-4) Setting the translator to automatically use new notations that
-weren't used in the old syntax
-
-Thanks to the "Notation" mechanism, defining symbolic notations is
-simpler than in the previous versions of Coq.
-
-Thanks to the new syntax and interpretation scopes, new symbols and
-overloading are available.
-
-This may be a motivation for using the translator to automatically change
-the notations while switching to the new syntax.
-
-For that, use the commands V8Notation and V8Infix.
-
-Examples:
-
-V8Infix "==>" my_relation (at level 65, right associativity).
-
-tells the translator to write an infix "==>" instead of my_relation in
-the translated files.
-
-V8Infix ">=" my_ge.
-
-tells the translator to write an infix ">=" instead of my_ge in the
-translated files and that the level and associativity are the standard
-ones (as defined in the chart above).
-
-V8Infix ">=" my_ge : my_scope.
-
-tells the translator to write an infix ">=" instead of my_ge in the
-translated files, that the level and associativity are the standard
-ones (as defined in the chart above), but only if scope my_scope is
-open or if a delimiting key is available for "my_scope" (see the
-Reference Manual).
-
-5) Defining a construction and its notation simultaneously
-
-This is permitted by the new syntax. See the Reference Manual for an
-explanation. The translator is not fully able to handle this...
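-
-For instance, one can write (using the "as" clause shown at the end of
-this document):
-
-  Inductive and (A B:Prop) : Prop as "A /\ B" := conj (a:A) (b:B).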
-
-III) Various pitfalls
- ----------------
-
-1) New keywords
-
- The following identifiers are new keywords
-
- "forall"; "fun"; "match"; "fix"; "cofix"; "for"; "if"; "then";
- "else"; "return"; "mod"; "at"; "let"; "_"; ".("
-
-  The translator automatically adds a "_" to names clashing with a
-keyword, except for file names. Hence users may need to rename the
-files whose names clash with a keyword.
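-
-For instance (a sketch), a V7 definition such as
-
-  Definition at := True.
-
-is renamed by the translator to
-
-  Definition at_ := True.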
-
- Remark: "in"; "with"; "end"; "as"; "Prop"; "Set"; "Type"
- were already keywords
-
-2) Old "Case" and "Match"
-
- "Case" and "Match" are normally automatically translated into
-  "match" or into "match" and "fix", but sometimes the translation fails. It
-  typically fails when the Case or Match is an argument of a tactic whose
- typing context is unknown because of a preceding Intro/Intros, as e.g. in
-
- Intros; Exists [m:nat](<quasiterm>Case m of t [p:nat](f m) end)
-
- The solution is then to replace the invocation of the sequence of
-  tactics by separate invocations of the elementary tactics, as follows
-
- Intros. Exists [m:nat](<quasiterm>Case m of t [p:nat](f m) end)
- ^^^
-
-3) Change of definition or theorem names
-
-  The type "entier" from fast_integer.v is renamed into "N" by the
-translator. As a consequence, user-defined objects with the same name
-"N" are systematically qualified even though it may not be necessary.
-The same applies to the names "GREATER", "EQUAL", "LESS", etc. [COMPLETE
-LIST TO GIVE].
-
-4) Change of tactics names
-
-  Since tactic names are now lowercase, they can clash with
-user-defined tactic definitions. To work around this, clashing names
-are renamed by adding an extra "_" to them.
-
-======================================================================
-Main examples for new syntax
-----------------------------
-
-1) Constructions
-
-  Applicative terms no longer need to be surrounded by parentheses,
-as e.g. in
-
- "x = f y -> S x = S (f y)"
-
-
- Product is written
-
- "forall x y : T, U"
- "forall x y, U"
- "forall (x y : T) z (v w : V), U"
- etc.
-
- Abstraction is written
-
- "fun x y : T, U"
- "fun x y, U"
- "fun (x y : T) z (v w : V), U"
- etc.
-
- Pattern-matching is written
-
- "match x with c1 x1 x2 => t | c2 y as z => u end"
- "match v1, v2 with c1 x1 x2, _ => t | c2 y, d z => u end"
- "match v1 as y in le _ n, v2 as z in I p q return P n y p q z with
- c1 x1 x2, _ => t | c2 y, d z => u end"
-
- The last example is the new form of what was written
-
- "<[n;y:(le ? n);p;q;z:(I p q)](P n y p q z)>Cases v1 v2 of
- (c1 x1 x2) _ => t | (c2 y) (d z) => u end"
-
-  Pattern-matching on a type with one constructor and no dependencies
-of the arguments in the resulting type can be written
-
- "let (x,y,z) as u return P u := t in v"
-
- Local fixpoints are written
-
- "fix f (n m:nat) z (x : X) {struct m} : nat := ...
- with ..."
-
- and "struct" tells which argument is structurally decreasing.
-
- Explicitation of implicit arguments is written
-
- "f @1:=u v @3:=w t"
- "@f u v w t"
-
-2) Tactics
-
-  The main change is that tactic names are now lowercase. Besides
-this, the following renamings are applied:
-
- "NewDestruct" -> "destruct"
- "NewInduction" -> "induction"
- "Induction" -> "simple induction"
- "Destruct" -> "simple destruct"
-
-  For tactics with occurrences, the occurrences now come after, and
-  repeated uses are separated by commas, as in
-
- "Pattern 1 3 c d 4 e" -> "pattern c at 3 1, d, e at 4"
- "Unfold 1 3 f 4 g" -> "unfold f at 1 3, g at 4"
- "Simpl 1 3 e" -> "simpl e at 1 3"
-
-3) Tactic language
-
-  Definitions are now introduced with the keyword "Ltac" (instead of
-"Tactic"/"Meta" "Definition") and are implicitly recursive
-("Recursive" is no longer used).
-
- The new rule for distinguishing terms from ltac expressions is:
-
- Write "ltac:" in front of any tactic in argument position and
- "constr:" in front of any construction in head position
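-
-For instance (a sketch in present-day V8 syntax, where both prefixes
-take parentheses; "run" is a hypothetical Ltac definition):
-
-  Ltac run t := t.
-
-  Goal True.
-    run ltac:(exact I).                (* a tactic in argument position *)
-  Qed.
-
-  Goal True.
-    let c := constr:(I) in exact c.    (* a term in head position *)
-  Qed.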
-
-4) Vernacular language
-
-a) Assumptions
-
- The syntax for commands is mainly unchanged. Declaration of
-assumptions is now done as follows
-
- Variable m : t.
- Variables m n p : t.
- Variables (m n : t) (u v : s) (w : r).
-
-b) Definitions
-
- Definitions are done as follows
-
- Definition f m n : t := ... .
- Definition f m n := ... .
- Definition f m n := ... : t.
- Definition f (m n : u) : t := ... .
- Definition f (m n : u) := ... : t.
- Definition f (m n : u) := ... .
- Definition f a b (p q : v) r s (m n : t) : t := ... .
- Definition f a b (p q : v) r s (m n : t) := ... .
- Definition f a b (p q : v) r s (m n : t) := ... : t.
-
-c) Fixpoints
-
- Fixpoints are done this way
-
- Fixpoint f x (y : t) z a (b c : u) {struct z} : v := ... with ... .
- Fixpoint f x : v := ... .
- Fixpoint f (x : t) : v := ... .
-
- It is possible to give a concrete notation to a fixpoint as follows
-
- Fixpoint plus (n m:nat) {struct n} : nat as "n + m" :=
- match n with
- | O => m
- | S p => S (p + m)
- end.
-
-d) Inductive types
-
- The syntax for inductive types is as follows
-
- Inductive t (a b : u) (d : e) : v :=
- c1 : w1 | c2 : w2 | ... .
-
- Inductive t (a b : u) (d : e) : v :=
- c1 (x y : t) : w1 | c2 (z : r) : w2 | ... .
-
- As seen in the last example, arguments of the constructors can be
-given before the colon. If the type itself is omitted (allowed only in
-case the inductive type has no real arguments), this yields an
-ML-style notation as follows
-
- Inductive nat : Set := O | S (n:nat).
- Inductive bool : Set := true | false.
-
- It is even possible to define a syntax at the same time, as follows:
-
- Inductive or (A B:Prop) : Prop as "A \/ B":=
- | or_introl (a:A) : A \/ B
- | or_intror (b:B) : A \/ B.
-
- Inductive and (A B:Prop) : Prop as "A /\ B" := conj (a:A) (b:B).
-
diff --git a/doc/sphinx/language/cic.rst b/doc/sphinx/language/cic.rst
index f6bab0267..b01a4ef0f 100644
--- a/doc/sphinx/language/cic.rst
+++ b/doc/sphinx/language/cic.rst
@@ -721,67 +721,68 @@ called the *context of parameters*. Furthermore, we must have that
each :math:`T` in :math:`(t:T)∈Γ_I` can be written as: :math:`∀Γ_P,∀Γ_{\mathit{Arr}(t)}, S` where
:math:`Γ_{\mathit{Arr}(t)}` is called the *Arity* of the inductive type t and :math:`S` is called
the sort of the inductive type t (not to be confused with :math:`\Sort` which is the set of sorts).
-** Examples** The declaration for parameterized lists is:
-
-.. math::
- \ind{1}{[\List:\Set→\Set]}{\left[\begin{array}{rcl}
- \Nil & : & \forall A:\Set,\List~A \\
- \cons & : & \forall A:\Set, A→ \List~A→ \List~A
- \end{array}
- \right]}
-
-which corresponds to the result of the |Coq| declaration:
.. example::
- .. coqtop:: in
-
- Inductive list (A:Set) : Set :=
- | nil : list A
- | cons : A -> list A -> list A.
+ The declaration for parameterized lists is:
-The declaration for a mutual inductive definition of tree and forest
-is:
+ .. math::
+ \ind{1}{[\List:\Set→\Set]}{\left[\begin{array}{rcl}
+ \Nil & : & \forall A:\Set,\List~A \\
+ \cons & : & \forall A:\Set, A→ \List~A→ \List~A
+ \end{array}
+ \right]}
-.. math::
- \ind{~}{\left[\begin{array}{rcl}\tree&:&\Set\\\forest&:&\Set\end{array}\right]}
- {\left[\begin{array}{rcl}
- \node &:& \forest → \tree\\
- \emptyf &:& \forest\\
- \consf &:& \tree → \forest → \forest\\
- \end{array}\right]}
+ which corresponds to the result of the |Coq| declaration:
-which corresponds to the result of the |Coq| declaration:
+ .. coqtop:: in
+
+ Inductive list (A:Set) : Set :=
+ | nil : list A
+ | cons : A -> list A -> list A.
.. example::
- .. coqtop:: in
+ The declaration for a mutual inductive definition of tree and forest
+ is:
- Inductive tree : Set :=
- | node : forest -> tree
- with forest : Set :=
- | emptyf : forest
- | consf : tree -> forest -> forest.
+ .. math::
+ \ind{~}{\left[\begin{array}{rcl}\tree&:&\Set\\\forest&:&\Set\end{array}\right]}
+ {\left[\begin{array}{rcl}
+ \node &:& \forest → \tree\\
+ \emptyf &:& \forest\\
+ \consf &:& \tree → \forest → \forest\\
+ \end{array}\right]}
-The declaration for a mutual inductive definition of even and odd is:
+ which corresponds to the result of the |Coq| declaration:
-.. math::
- \ind{1}{\left[\begin{array}{rcl}\even&:&\nat → \Prop \\
- \odd&:&\nat → \Prop \end{array}\right]}
- {\left[\begin{array}{rcl}
- \evenO &:& \even~0\\
- \evenS &:& \forall n, \odd~n -> \even~(\kw{S}~n)\\
- \oddS &:& \forall n, \even~n -> \odd~(\kw{S}~n)
- \end{array}\right]}
+ .. coqtop:: in
-which corresponds to the result of the |Coq| declaration:
+ Inductive tree : Set :=
+ | node : forest -> tree
+ with forest : Set :=
+ | emptyf : forest
+ | consf : tree -> forest -> forest.
.. example::
- .. coqtop:: in
+ The declaration for a mutual inductive definition of even and odd is:
+
+ .. math::
+ \ind{1}{\left[\begin{array}{rcl}\even&:&\nat → \Prop \\
+ \odd&:&\nat → \Prop \end{array}\right]}
+ {\left[\begin{array}{rcl}
+ \evenO &:& \even~0\\
+ \evenS &:& \forall n, \odd~n -> \even~(\kw{S}~n)\\
+ \oddS &:& \forall n, \even~n -> \odd~(\kw{S}~n)
+ \end{array}\right]}
+
+ which corresponds to the result of the |Coq| declaration:
- Inductive even : nat -> prop :=
- | even_O : even 0
- | even_S : forall n, odd n -> even (S n)
- with odd : nat -> prop :=
- | odd_S : forall n, even n -> odd (S n).
+ .. coqtop:: in
+
+ Inductive even : nat -> prop :=
+ | even_O : even 0
+ | even_S : forall n, odd n -> even (S n)
+ with odd : nat -> prop :=
+ | odd_S : forall n, even n -> odd (S n).
@@ -838,8 +839,8 @@ rules, we need a few definitions:
Arity of a given sort
+++++++++++++++++++++
-A type :math:`T` is an *arity of sort s* if it converts to the sort s or to a
-product :math:`∀ x:T,U` with :math:`U` an arity of sort s.
+A type :math:`T` is an *arity of sort* :math:`s` if it converts to the sort :math:`s` or to a
+product :math:`∀ x:T,U` with :math:`U` an arity of sort :math:`s`.
.. example::
@@ -850,7 +851,7 @@ product :math:`∀ x:T,U` with :math:`U` an arity of sort s.
Arity
+++++
A type :math:`T` is an *arity* if there is a :math:`s∈ \Sort` such that :math:`T` is an arity of
-sort s.
+sort :math:`s`.
.. example::
@@ -921,29 +922,29 @@ condition* for a constant :math:`X` in the following cases:
For instance, if one considers the following variant of a tree type
branching over the natural numbers:
- .. coqtop:: in
+ .. coqtop:: in
- Inductive nattree (A:Type) : Type :=
- | leaf : nattree A
- | node : A -> (nat -> nattree A) -> nattree A.
- End TreeExample.
-
- Then every instantiated constructor of ``nattree A`` satisfies the nested positivity
- condition for ``nattree``:
+ Inductive nattree (A:Type) : Type :=
+ | leaf : nattree A
+ | node : A -> (nat -> nattree A) -> nattree A.
+ End TreeExample.
+
+ Then every instantiated constructor of ``nattree A`` satisfies the nested positivity
+ condition for ``nattree``:
- + Type ``nattree A`` of constructor ``leaf`` satisfies the positivity condition for
- ``nattree`` because ``nattree`` does not appear in any (real) arguments of the
- type of that constructor (primarily because ``nattree`` does not have any (real)
- arguments) ... (bullet 1)
+ + Type ``nattree A`` of constructor ``leaf`` satisfies the positivity condition for
+ ``nattree`` because ``nattree`` does not appear in any (real) arguments of the
+ type of that constructor (primarily because ``nattree`` does not have any (real)
+ arguments) ... (bullet 1)
- + Type ``A → (nat → nattree A) → nattree A`` of constructor ``node`` satisfies the
- positivity condition for ``nattree`` because:
+ + Type ``A → (nat → nattree A) → nattree A`` of constructor ``node`` satisfies the
+ positivity condition for ``nattree`` because:
- - ``nattree`` occurs only strictly positively in ``A`` ... (bullet 3)
+ - ``nattree`` occurs only strictly positively in ``A`` ... (bullet 3)
- - ``nattree`` occurs only strictly positively in ``nat → nattree A`` ... (bullet 3 + 2)
+ - ``nattree`` occurs only strictly positively in ``nat → nattree A`` ... (bullet 3 + 2)
- - ``nattree`` satisfies the positivity condition for ``nattree A`` ... (bullet 1)
+ - ``nattree`` satisfies the positivity condition for ``nattree A`` ... (bullet 1)
.. _Correctness-rules:
@@ -978,35 +979,33 @@ provided that the following side conditions hold:
One can remark that there is a constraint between the sort of the
arity of the inductive type and the sort of the type of its
constructors which will always be satisfied for the impredicative
-sortProp but may fail to define inductive definition on sort Set and
+sort :math:`\Prop` but may fail to define inductive definitions on sort :math:`\Set` and
generate constraints between universes for inductive definitions in
the Type hierarchy.
-**Examples**. It is well known that the existential quantifier can be encoded as an
-inductive definition. The following declaration introduces the second-
-order existential quantifier :math:`∃ X.P(X)`.
-
.. example::
+ It is well known that the existential quantifier can be encoded as an
+ inductive definition. The following declaration introduces the second-
+ order existential quantifier :math:`∃ X.P(X)`.
+
.. coqtop:: in
-
+
Inductive exProp (P:Prop->Prop) : Prop :=
| exP_intro : forall X:Prop, P X -> exProp P.
-The same definition on Set is not allowed and fails:
+ The same definition on :math:`\Set` is not allowed and fails:
-.. example::
.. coqtop:: all
Fail Inductive exSet (P:Set->Prop) : Set :=
exS_intro : forall X:Set, P X -> exSet P.
-It is possible to declare the same inductive definition in the
-universe Type. The exType inductive definition has type
-:math:`(\Type(i)→\Prop)→\Type(j)` with the constraint that the parameter :math:`X` of :math:`\kw{exT_intro}`
-has type :math:`\Type(k)` with :math:`k<j` and :math:`k≤ i`.
+ It is possible to declare the same inductive definition in the
+ universe :math:`\Type`. The :g:`exType` inductive definition has type
+ :math:`(\Type(i)→\Prop)→\Type(j)` with the constraint that the parameter :math:`X` of :math:`\kw{exT_intro}`
+ has type :math:`\Type(k)` with :math:`k<j` and :math:`k≤ i`.
-.. example::
.. coqtop:: all
Inductive exType (P:Type->Prop) : Type :=
@@ -1019,9 +1018,9 @@ has type :math:`\Type(k)` with :math:`k<j` and :math:`k≤ i`.
Template polymorphism
+++++++++++++++++++++
-Inductive types declared in Type are polymorphic over their arguments
-in Type. If :math:`A` is an arity of some sort and s is a sort, we write :math:`A_{/s}`
-for the arity obtained from :math:`A` by replacing its sort with s.
+Inductive types declared in :math:`\Type` are polymorphic over their arguments
+in :math:`\Type`. If :math:`A` is an arity of some sort and :math:`s` is a sort, we write :math:`A_{/s}`
+for the arity obtained from :math:`A` by replacing its sort with :math:`s`.
Especially, if :math:`A` is well-typed in some global environment and local
context, then :math:`A_{/s}` is typable by typability of all products in the
Calculus of Inductive Constructions. The following typing rule is
@@ -1382,7 +1381,7 @@ this type.
[I:Prop|I→ s]
A *singleton definition* has only one constructor and all the
-arguments of this constructor have type Prop. In that case, there is a
+arguments of this constructor have type :math:`\Prop`. In that case, there is a
canonical way to interpret the informative extraction on an object in
that type, such that the elimination on any sort :math:`s` is legal. Typical
examples are the conjunction of non-informative propositions and the
@@ -1421,45 +1420,45 @@ corresponding to the :math:`c:C` constructor.
We write :math:`\{c\}^P` for :math:`\{c:C\}^P` with :math:`C` the type of :math:`c`.
-**Example.**
-The following term in concrete syntax::
+.. example::
+ The following term in concrete syntax::
- match t as l return P' with
- | nil _ => t1
- | cons _ hd tl => t2
- end
+ match t as l return P' with
+ | nil _ => t1
+ | cons _ hd tl => t2
+ end
-can be represented in abstract syntax as
+ can be represented in abstract syntax as
-.. math::
- \case(t,P,f 1 | f 2 )
+ .. math::
+ \case(t,P,f 1 | f 2 )
-where
+ where
-.. math::
- \begin{eqnarray*}
- P & = & \lambda~l~.~P^\prime\\
- f_1 & = & t_1\\
- f_2 & = & \lambda~(hd:\nat)~.~\lambda~(tl:\List~\nat)~.~t_2
- \end{eqnarray*}
+ .. math::
+ \begin{eqnarray*}
+ P & = & \lambda~l~.~P^\prime\\
+ f_1 & = & t_1\\
+ f_2 & = & \lambda~(hd:\nat)~.~\lambda~(tl:\List~\nat)~.~t_2
+ \end{eqnarray*}
-According to the definition:
+ According to the definition:
-.. math::
- \{(\kw{nil}~\nat)\}^P ≡ \{(\kw{nil}~\nat) : (\List~\nat)\}^P ≡ (P~(\kw{nil}~\nat))
+ .. math::
+ \{(\kw{nil}~\nat)\}^P ≡ \{(\kw{nil}~\nat) : (\List~\nat)\}^P ≡ (P~(\kw{nil}~\nat))
-.. math::
-
- \begin{array}{rl}
- \{(\kw{cons}~\nat)\}^P & ≡\{(\kw{cons}~\nat) : (\nat→\List~\nat→\List~\nat)\}^P \\
- & ≡∀ n:\nat, \{(\kw{cons}~\nat~n) : \List~\nat→\List~\nat)\}^P \\
- & ≡∀ n:\nat, ∀ l:\List~\nat, \{(\kw{cons}~\nat~n~l) : \List~\nat)\}^P \\
- & ≡∀ n:\nat, ∀ l:\List~\nat,(P~(\kw{cons}~\nat~n~l)).
- \end{array}
+ .. math::
+
+ \begin{array}{rl}
+ \{(\kw{cons}~\nat)\}^P & ≡\{(\kw{cons}~\nat) : (\nat→\List~\nat→\List~\nat)\}^P \\
+ & ≡∀ n:\nat, \{(\kw{cons}~\nat~n) : \List~\nat→\List~\nat)\}^P \\
+ & ≡∀ n:\nat, ∀ l:\List~\nat, \{(\kw{cons}~\nat~n~l) : \List~\nat)\}^P \\
+ & ≡∀ n:\nat, ∀ l:\List~\nat,(P~(\kw{cons}~\nat~n~l)).
+ \end{array}
-Given some :math:`P` then :math:`\{(\kw{nil}~\nat)\}^P` represents the expected type of :math:`f_1` ,
-and :math:`\{(\kw{cons}~\nat)\}^P` represents the expected type of :math:`f_2`.
+ Given some :math:`P` then :math:`\{(\kw{nil}~\nat)\}^P` represents the expected type of :math:`f_1` ,
+ and :math:`\{(\kw{cons}~\nat)\}^P` represents the expected type of :math:`f_2`.
.. _Typing-rule:
@@ -1819,7 +1818,7 @@ while it will type-check, if one uses instead the `coqtop`
``-impredicative-set`` option..
The major change in the theory concerns the rule for product formation
-in the sort Set, which is extended to a domain in any sort:
+in the sort :math:`\Set`, which is extended to a domain in any sort:
.. inference:: ProdImp
@@ -1832,11 +1831,11 @@ in the sort Set, which is extended to a domain in any sort:
This extension has consequences on the inductive definitions which are
allowed. In the impredicative system, one can build so-called *large
inductive definitions* like the example of second-order existential
-quantifier (exSet).
+quantifier (:g:`exSet`).
There should be restrictions on the eliminations which can be
performed on such definitions. The eliminations rules in the
-impredicative system for sort Set become:
+impredicative system for sort :math:`\Set` become:
diff --git a/doc/sphinx/language/gallina-extensions.rst b/doc/sphinx/language/gallina-extensions.rst
index 8fbd2ea4e..509ac92f8 100644
--- a/doc/sphinx/language/gallina-extensions.rst
+++ b/doc/sphinx/language/gallina-extensions.rst
@@ -1349,7 +1349,7 @@ folder ``path/fOO/Bar`` maps to ``Lib.fOO.Bar``. Subdirectories corresponding
to invalid |Coq| identifiers are skipped, and, by convention,
subdirectories named ``CVS`` or ``_darcs`` are skipped too.
-Thanks to this mechanism, .vo files are made available through the
+Thanks to this mechanism, ``.vo`` files are made available through the
logical name of the folder they are in, extended with their own
basename. For example, the name associated to the file
``path/fOO/Bar/File.vo`` is ``Lib.fOO.Bar.File``. The same caveat applies for
@@ -1360,17 +1360,17 @@ wrong loadpath afterwards.
Some folders have a special status and are automatically put in the
path. |Coq| commands associate automatically a logical path to files in
the repository trees rooted at the directory from where the command is
-launched, coqlib/user-contrib/, the directories listed in the
-`$COQPATH`, `${XDG_DATA_HOME}/coq/` and `${XDG_DATA_DIRS}/coq/`
-environment variables (see`http://standards.freedesktop.org/basedir-
-spec/basedir-spec-latest.html`_) with the same physical-to-logical
-translation and with an empty logical prefix.
+launched, ``coqlib/user-contrib/``, the directories listed in the
+``$COQPATH``, ``${XDG_DATA_HOME}/coq/`` and ``${XDG_DATA_DIRS}/coq/``
+environment variables (see `XDG base directory specification
+<http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_)
+with the same physical-to-logical translation and with an empty logical prefix.
The command line option ``-R`` is a variant of ``-Q`` which has the strictly
same behavior regarding loadpaths, but which also makes the
corresponding ``.vo`` files available through their short names in a way
not unlike the ``Import`` command (see :ref:`here <import_qualid>`). For instance, ``-R`` `path` ``Lib``
-associates to the ``filepath/fOO/Bar/File.vo`` the logical name
+associates to the file `path`\ ``/fOO/Bar/File.vo`` the logical name
``Lib.fOO.Bar.File``, but allows this file to be accessed through the
short names ``fOO.Bar.File,Bar.File`` and ``File``. If several files with
identical base name are present in different subdirectories of a
diff --git a/doc/sphinx/language/gallina-specification-language.rst b/doc/sphinx/language/gallina-specification-language.rst
index c26ae2a93..e13625286 100644
--- a/doc/sphinx/language/gallina-specification-language.rst
+++ b/doc/sphinx/language/gallina-specification-language.rst
@@ -38,6 +38,7 @@ At the end, the notation “``[entry sep … sep entry]``” stands for a
possibly empty sequence of expressions parsed by the “``entry``” entry,
separated by the literal “``sep``”.
+.. _lexical-conventions:
Lexical conventions
===================
@@ -905,6 +906,32 @@ Parametrized inductive types
sort for the inductive definition and will produce a less convenient
rule for case elimination.
+.. opt:: Uniform Inductive Parameters
+
+ When this option is set (it is off by default),
+ inductive definitions are abstracted over their parameters
+ before typechecking the constructors, which allows writing:
+
+ .. coqtop:: all undo
+
+ Set Uniform Inductive Parameters.
+ Inductive list3 (A:Set) : Set :=
+ | nil3 : list3
+ | cons3 : A -> list3 -> list3.
+
+ This behavior is essentially equivalent to starting a new section
+ and using :cmd:`Context` to give the uniform parameters, like so
+ (cf. :ref:`section-mechanism`):
+
+ .. coqtop:: all undo
+
+ Section list3.
+ Context (A:Set).
+ Inductive list3 : Set :=
+ | nil3 : list3
+ | cons3 : A -> list3 -> list3.
+ End list3.
+
.. seealso::
Section :ref:`inductive-definitions` and the :tacn:`induction` tactic.
diff --git a/doc/sphinx/proof-engine/tactics.rst b/doc/sphinx/proof-engine/tactics.rst
index d0a0d568e..45667b099 100644
--- a/doc/sphinx/proof-engine/tactics.rst
+++ b/doc/sphinx/proof-engine/tactics.rst
@@ -3422,18 +3422,24 @@ The general command to add a hint to some databases :n:`{+ @ident}` is
Adds each :n:`Hint Unfold @ident`.
- .. cmdv:: Hint %( Transparent %| Opaque %) @qualid
- :name: Hint ( Transparent | Opaque )
+ .. cmdv:: Hint Transparent {+ @qualid}
+ Hint Opaque {+ @qualid}
+ :name: Hint Transparent; Hint Opaque
- This adds a transparency hint to the database, making :n:`@qualid` a
- transparent or opaque constant during resolution. This information is used
+ This adds transparency hints to the database, making each :n:`@qualid`
+ a transparent or opaque constant during resolution. This information is used
during unification of the goal with any lemma in the database and inside the
discrimination network to relax or constrain it in the case of discriminated
databases.
- .. cmdv:: Hint %( Transparent %| Opaque %) {+ @ident}
+ .. cmdv:: Hint Variables %( Transparent %| Opaque %)
+ Hint Constants %( Transparent %| Opaque %)
+ :name: Hint Variables; Hint Constants
- Declares each :n:`@ident` as a transparent or opaque constant.
+ This sets the transparency flag used during unification of
+ hints in the database for all constants or all variables,
+ overwriting the existing settings of opacity. It is advised
+ to use this just after a :cmd:`Create HintDb` command.
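+
+ For instance (a sketch; ``my_db`` is a hypothetical database name):
+
+ .. coqtop:: in
+
+    Create HintDb my_db.
+    Hint Constants Opaque : my_db.
+    Hint Variables Transparent : my_db.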
.. cmdv:: Hint Extern @num {? @pattern} => @tactic
:name: Hint Extern
diff --git a/doc/sphinx/user-extensions/syntax-extensions.rst b/doc/sphinx/user-extensions/syntax-extensions.rst
index 3b95a37ed..dcefa293b 100644
--- a/doc/sphinx/user-extensions/syntax-extensions.rst
+++ b/doc/sphinx/user-extensions/syntax-extensions.rst
@@ -10,15 +10,14 @@ parses and prints objects, i.e. the translations between the concrete
and internal representations of terms and commands.
The main commands to provide custom symbolic notations for terms are
-``Notation`` and ``Infix``. They are described in section :ref:`Notations`. There is also a
-variant of ``Notation`` which does not modify the parser. This provides with a
-form of abbreviation and it is described in Section :ref:`Abbreviations`. It is
+:cmd:`Notation` and :cmd:`Infix`; they will be described in the
+:ref:`next section <Notations>`. There is also a
+variant of :cmd:`Notation` which does not modify the parser; this provides a
+form of :ref:`abbreviation <Abbreviations>`. It is
sometimes expected that the same symbolic notation has different meanings in
-different contexts. To achieve this form of overloading, |Coq| offers a notion
-of interpretation scope. This is described in Section :ref:`Scopes`.
-
-The main command to provide custom notations for tactics is ``Tactic Notation``.
-It is described in Section :ref:`TacticNotation`.
+different contexts; to achieve this form of overloading, |Coq| offers a notion
+of :ref:`interpretation scopes <Scopes>`.
+The main command to provide custom notations for tactics is :cmd:`Tactic Notation`.
.. coqtop:: none
@@ -44,7 +43,7 @@ logical conjunction (and). Such a notation is declared by
Notation "A /\ B" := (and A B).
-The expression :g:`(and A B)` is the abbreviated term and the string ``"A /\ B"``
+The expression :g:`(and A B)` is the abbreviated term and the string :g:`"A /\ B"`
(called a *notation*) tells how it is symbolically written.
A notation is always surrounded by double quotes (except when the
@@ -66,15 +65,15 @@ example.
A notation binds a syntactic expression to a term. Unless the parser
and pretty-printer of Coq already know how to deal with the syntactic
-expression (see 12.1.7), explicit precedences and associativity rules
-have to be given.
+expression (see :ref:`ReservingNotations`), explicit precedences and
+associativity rules have to be given.
.. note::
The right-hand side of a notation is interpreted at the time the notation is
- given. In particular, disambiguation of constants, implicit arguments (see
- Section :ref:`ImplicitArguments`), coercions (see Section :ref:`Coercions`),
- etc. are resolved at the time of the declaration of the notation.
+ given. In particular, disambiguation of constants, :ref:`implicit arguments
+ <ImplicitArguments>`, :ref:`coercions <Coercions>`, etc. are resolved at the
+ time of the declaration of the notation.
Precedences and associativity
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -106,13 +105,13 @@ is 100, for example 85 for disjunction and 80 for conjunction [#and_or_levels]_.
Similarly, an associativity is needed to decide whether :g:`True /\ False /\ False`
defaults to :g:`True /\ (False /\ False)` (right associativity) or to
:g:`(True /\ False) /\ False` (left associativity). We may even consider that the
-expression is not well- formed and that parentheses are mandatory (this is a “no
+expression is not well-formed and that parentheses are mandatory (this is a “no
associativity”) [#no_associativity]_. We do not know of a special convention of
the associativity of disjunction and conjunction, so let us apply for instance a
right associativity (which is the choice of Coq).
Precedence levels and associativity rules of notations have to be
-given between parentheses in a list of modifiers that the ``Notation``
+given between parentheses in a list of modifiers that the :cmd:`Notation`
command understands. Here is how the previous examples refine.
.. coqtop:: in
@@ -122,9 +121,10 @@ command understands. Here is how the previous examples refine.
By default, a notation is considered non associative, but the
precedence level is mandatory (except for special cases whose level is
-canonical). The level is either a number or the phrase `next level`
-whose meaning is obvious. The list of levels already assigned is on
-Figure 3.1.
+canonical). The level is either a number or the phrase ``next level``
+whose meaning is obvious.
+Some :ref:`associativities are predefined <init-notations>` in the
+``Notations`` module.
.. TODO I don't find it obvious -- CPC
@@ -162,7 +162,7 @@ One can also define notations for binders.
In the last case though, there is a conflict with the notation for
type casts. The notation for types casts, as shown by the command :cmd:`Print
Grammar constr` is at level 100. To avoid ``x : A`` being parsed as a type cast,
-it is necessary to put x at a level below 100, typically 99. Hence, a correct
+it is necessary to put ``x`` at a level below 100, typically 99. Hence, a correct
definition is the following:
.. coqtop:: all
@@ -176,7 +176,7 @@ Simple factorization rules
~~~~~~~~~~~~~~~~~~~~~~~~~~
Coq extensible parsing is performed by *Camlp5* which is essentially a LL1
-parser: it decides which notation to parse by looking tokens from left to right.
+parser: it decides which notation to parse by looking at tokens from left to right.
Hence, some care has to be taken not to hide already existing rules by new
rules. Some simple left factorization work has to be done. Here is an example.
@@ -186,11 +186,11 @@ rules. Some simple left factorization work has to be done. Here is an example.
Notation "x < y < z" := (x < y /\ y < z) (at level 70).
In order to factorize the left part of the rules, the subexpression
-referred by y has to be at the same level in both rules. However the
-default behavior puts y at the next level below 70 in the first rule
-(no associativity is the default), and at the level 200 in the second
-rule (level 200 is the default for inner expressions). To fix this, we
-need to force the parsing level of y, as follows.
+referred by ``y`` has to be at the same level in both rules. However the
+default behavior puts ``y`` at the next level below 70 in the first rule
+(``no associativity`` is the default), and at the level 200 in the second
+rule (``level 200`` is the default for inner expressions). To fix this, we
+need to force the parsing level of ``y``, as follows.
.. coqtop:: all
@@ -198,9 +198,9 @@ need to force the parsing level of y, as follows.
Notation "x < y < z" := (x < y /\ y < z) (at level 70, y at next level).
For the sake of factorization with Coq predefined rules, simple rules
-have to be observed for notations starting with a symbol: e.g. rules
-starting with “{” or “(” should be put at level 0. The list of Coq
-predefined notations can be found in Chapter :ref:`thecoqlibrary`.
+have to be observed for notations starting with a symbol, e.g., rules
+starting with “\ ``{``\ ” or “\ ``(``\ ” should be put at level 0. The list
+of Coq predefined notations can be found in the chapter on :ref:`thecoqlibrary`.
.. cmd:: Print Grammar constr.
@@ -215,7 +215,7 @@ predefined notations can be found in Chapter :ref:`thecoqlibrary`.
Displaying symbolic notations
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-The command ``Notation`` has an effect both on the Coq parser and on the
+The command :cmd:`Notation` has an effect both on the Coq parser and on the
Coq printer. For example:
.. coqtop:: all
@@ -301,7 +301,7 @@ at the time of use of the notation.
.. note:: Sometimes, a notation is expected only for the parser. To do
so, the option ``only parsing`` is allowed in the list of modifiers
- of ``Notation``. Conversely, the ``only printing`` modifier can be
+ of :cmd:`Notation`. Conversely, the ``only printing`` modifier can be
used to declare that a notation should only be used for printing and
should not declare a parsing rule. In particular, such notations do
not modify the parser.
@@ -309,7 +309,7 @@ at the time of use of the notation.
The Infix command
~~~~~~~~~~~~~~~~~~
-The ``Infix`` command is a shortening for declaring notations of infix
+The :cmd:`Infix` command is a shortening for declaring notations of infix
symbols.
.. cmd:: Infix "@symbol" := @term ({+, @modifier}).
@@ -324,6 +324,8 @@ symbols.
Infix "/\" := and (at level 80, right associativity).
+.. _ReservingNotations:
+
Reserving notations
~~~~~~~~~~~~~~~~~~~
@@ -341,7 +343,7 @@ state of Coq.
Reserving a notation is also useful for simultaneously defining an
inductive type or a recursive constant and a notation for it.
-.. note:: The notations mentioned on Figure 3.1 are reserved. Hence
+.. note:: The notations mentioned in the module :ref:`init-notations` are reserved. Hence
their precedence and associativity cannot be changed.
Simultaneous definition of terms and notations
@@ -391,7 +393,7 @@ Locating notations
To know to which notations a given symbol belongs to, use the :cmd:`Locate`
command. You can call it on any (composite) symbol surrounded by double quotes.
To locate a particular notation, use a string where the variables of the
-notation are replaced by “_” and where possible single quotes inserted around
+notation are replaced by “``_``” and where possible single quotes inserted around
identifiers or tokens starting with a single quote are dropped.
.. coqtop:: all
@@ -404,7 +406,7 @@ Notations and binders
Notations can include binders. This section lists
different ways to deal with binders. For further examples, see also
-Section :ref:`RecursiveNotationsWithBinders`.
+:ref:`RecursiveNotationsWithBinders`.
Binders bound in the notation and parsed as identifiers
+++++++++++++++++++++++++++++++++++++++++++++++++++++++
@@ -490,7 +492,7 @@ the following:
This is so because the grammar also contains rules starting with :g:`{}` and
followed by a term, such as the rule for the notation :g:`{ A } + { B }` for the
-constant :g:`sumbool` (see Section :ref:`specification`).
+constant :g:`sumbool` (see :ref:`specification`).
Then, in the rule, ``x ident`` is replaced by ``x at level 99 as ident`` meaning
that ``x`` is parsed as a term at level 99 (as done in the notation for
@@ -545,7 +547,7 @@ the next command fails because p does not bind in the instance of n.
Notation "[> a , .. , b <]" :=
(cons a .. (cons b nil) .., cons b .. (cons a nil) ..).
-.. _RecursiveNotationsWithBinders:
+.. _RecursiveNotations:
Notations with recursive patterns
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -563,7 +565,7 @@ confused with the three-dots notation “``…``” used in this manual to denot
a sequence of arbitrary size.
On the left-hand side, the part “``x s .. s y``” of the notation parses
-any number of time (but at least one time) a sequence of expressions
+any number of times (but at least one time) a sequence of expressions
separated by the sequence of tokens ``s`` (in the example, ``s`` is just “``;``”).
The right-hand side must contain a subterm of the form either
@@ -572,7 +574,7 @@ called the *iterator* of the recursive notation is an arbitrary expression with
distinguished placeholders and where :math:`t` is called the *terminating
expression* of the recursive notation. In the example, we choose the names
:math:`x` and :math:`y` but in practice they can of course be chosen
-arbitrarily. Not atht the placeholder :math:`[~]_I` has to occur only once but
+arbitrarily. Note that the placeholder :math:`[~]_I` has to occur only once but
:math:`[~]_E` can occur several times.
Parsing the notation produces a list of expressions which are used to
@@ -595,9 +597,10 @@ and the terminating expression is ``nil``. Here are other examples:
(t at level 39).
Notations with recursive patterns can be reserved like standard
-notations, they can also be declared within interpretation scopes (see
-section 12.2).
+notations, they can also be declared within
+:ref:`interpretation scopes <Scopes>`.
+.. _RecursiveNotationsWithBinders:
Notations with recursive patterns involving binders
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -611,7 +614,8 @@ is:
(ex (fun x => .. (ex (fun y => p)) ..))
(at level 200, x binder, y binder, right associativity).
-The principle is the same as in Section 12.1.12 except that in the iterator
+The principle is the same as in :ref:`RecursiveNotations`
+except that in the iterator
:math:`φ([~]_E , [~]_I)`, the placeholder :math:`[~]_E` can also occur in
position of the binding variable of a ``fun`` or a ``forall``.
@@ -620,8 +624,8 @@ binders, ``x`` and ``y`` must be marked as ``binder`` in the list of modifiers
of the notation. The binders of the parsed sequence are used to fill the
occurrences of the first placeholder of the iterating pattern which is
repeatedly nested as many times as the number of binders generated. If ever the
-generalization operator ``'`` (see Section 2.7.19) is used in the binding list,
-the added binders are taken into account too.
+generalization operator ``'`` (see :ref:`implicit-generalization`) is
+used in the binding list, the added binders are taken into account too.
Binders parsing exist in two flavors. If ``x`` and ``y`` are marked as binder,
then a sequence such as :g:`a b c : T` will be accepted and interpreted as
@@ -678,7 +682,7 @@ position of :g:`x`:
In addition to ``global``, one can restrict the syntax of a
sub-expression by using the entry names ``ident`` or ``pattern``
-already seen in Section :ref:`NotationsWithBinders`, even when the
+already seen in :ref:`NotationsWithBinders`, even when the
corresponding expression is not used as a binder in the right-hand
side. E.g.:
@@ -690,10 +694,14 @@ side. E.g.:
Summary
~~~~~~~
-**Syntax of notations**
+.. _NotationSyntax:
+
+Syntax of notations
++++++++++++++++++++
The different syntactic variants of the command Notation are given on the
-following figure. The optional :token:`scope` is described in the Section 12.2.
+following figure. The optional :production:`scope` is described in
+:ref:`Scopes`.
.. productionlist:: coq
notation : [Local] Notation `string` := `term` [`modifiers`] [: `scope`].
@@ -743,14 +751,15 @@ following figure. The optional :token:`scope` is described in the Section 12.2.
given to some notation, say ``"{ y } & { z }"`` in fact applies to the
underlying ``"{ x }"``\-free rule which is ``"y & z"``).
-**Persistence of notations**
+Persistence of notations
+++++++++++++++++++++++++
Notations do not survive the end of sections.
.. cmd:: Local Notation @notation
Notations survive modules unless the command ``Local Notation`` is used instead
- of ``Notation``.
+ of :cmd:`Notation`.
.. _Scopes:
@@ -762,13 +771,13 @@ interpretation. Interpretation scopes provide a weak, purely
syntactical form of notations overloading: the same notation, for
instance the infix symbol ``+`` can be used to denote distinct
definitions of the additive operator. Depending on which interpretation
-scopes is currently open, the interpretation is different.
+scopes are currently open, the interpretation is different.
Interpretation scopes can include an interpretation for numerals and
strings. However, this is only made possible at the Objective Caml
level.
-See Figure 12.1 for the syntax of notations including the possibility
-to declare them in a given scope. Here is a typical example which
+See :ref:`above <NotationSyntax>` for the syntax of notations including the
+possibility to declare them in a given scope. Here is a typical example which
declares the notation for conjunction in the scope ``type_scope``.
.. coqdoc::
@@ -892,27 +901,27 @@ Binding arguments of a constant to an interpretation scope
turn to be themselves arguments of a reference are interpreted
accordingly to the arguments scopes bound to this reference.
-.. cmdv:: Arguments @qualid : clear scopes
+ .. cmdv:: Arguments @qualid : clear scopes
- Arguments scopes can be cleared with :n:`Arguments @qualid : clear scopes`.
+ Arguments scopes can be cleared with :n:`Arguments @qualid : clear scopes`.
-.. cmdv:: Arguments @qualid {+ @name%scope} : extra scopes
+ .. cmdv:: Arguments @qualid {+ @name%scope} : extra scopes
- Defines extra argument scopes, to be used in case of coercion to Funclass
- (see Chapter :ref:`implicitcoercions`) or with a computed type.
+ Defines extra argument scopes, to be used in case of coercion to ``Funclass``
+ (see the :ref:`implicitcoercions` chapter) or with a computed type.
-.. cmdv:: Global Arguments @qualid {+ @name%@scope}
+ .. cmdv:: Global Arguments @qualid {+ @name%@scope}
- This behaves like :n:`Arguments qualid {+ @name%@scope}` but survives when a
- section is closed instead of stopping working at section closing. Without the
- ``Global`` modifier, the effect of the command stops when the section it belongs
- to ends.
+ This behaves like :n:`Arguments qualid {+ @name%@scope}` but survives when a
+ section is closed instead of stopping working at section closing. Without the
+ ``Global`` modifier, the effect of the command stops when the section it belongs
+ to ends.
-.. cmdv:: Local Arguments @qualid {+ @name%@scope}
+ .. cmdv:: Local Arguments @qualid {+ @name%@scope}
- This behaves like :n:`Arguments @qualid {+ @name%@scope}` but does not
- survive modules and files. Without the ``Local`` modifier, the effect of the
- command is visible from within other modules or files.
+ This behaves like :n:`Arguments @qualid {+ @name%@scope}` but does not
+ survive modules and files. Without the ``Local`` modifier, the effect of the
+ command is visible from within other modules or files.
.. seealso::
@@ -947,18 +956,18 @@ Binding types of arguments to an interpretation scope
When an interpretation scope is naturally associated to a type (e.g. the
scope of operations on the natural numbers), it may be convenient to bind it
- to this type. When a scope ``scope`` is bound to a type type, any new function
- defined later on gets its arguments of type type interpreted by default in
+ to this type. When a scope ``scope`` is bound to a type ``type``, any new function
+ defined later on gets its arguments of type ``type`` interpreted by default in
scope scope (this default behavior can however be overwritten by explicitly
- using the command ``Arguments``).
+ using the command :cmd:`Arguments`).
Whether the argument of a function has some type ``type`` is determined
- statically. For instance, if f is a polymorphic function of type :g:`forall
- X:Type, X -> X` and type :g:`t` is bound to a scope ``scope``, then :g:`a` of
- type :g:`t` in :g:`f t a` is not recognized as an argument to be interpreted
- in scope ``scope``.
+ statically. For instance, if ``f`` is a polymorphic function of type
+ :g:`forall X:Type, X -> X` and type :g:`t` is bound to a scope ``scope``,
+ then :g:`a` of type :g:`t` in :g:`f t a` is not recognized as an argument to
+ be interpreted in scope ``scope``.
- More generally, any coercion :n:`@class` (see Chapter :ref:`implicitcoercions`)
+ More generally, any coercion :n:`@class` (see the :ref:`implicitcoercions` chapter)
can be bound to an interpretation scope. The command to do it is
:n:`Bind Scope @scope with @class`
@@ -980,12 +989,6 @@ Binding types of arguments to an interpretation scope
.. note:: The scopes ``type_scope`` and ``function_scope`` also have a local
effect on interpretation. See the next section.
-.. seealso::
-
- :cmd:`About`
- The command to show the scopes bound to the arguments of a
- function is described in Section 2.
-
The ``type_scope`` interpretation scope
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -996,7 +999,7 @@ scope which is temporarily activated each time a subterm of an expression is
expected to be a type. It is delimited by the key ``type``, and bound to the
coercion class ``Sortclass``. It is also used in certain situations where an
expression is statically known to be a type, including the conclusion and the
-type of hypotheses within an Ltac goal match (see Section
+type of hypotheses within an Ltac goal match (see
:ref:`ltac-match-goal`), the statement of a theorem, the type of a definition,
the type of a binder, the domain and codomain of implication, the codomain of
products, and more generally any type argument of a declared or defined
@@ -1017,8 +1020,8 @@ Interpretation scopes used in the standard library of Coq
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We give an overview of the scopes used in the standard library of Coq.
-For a complete list of notations in each scope, use the commands Print
-Scopes or Print Scope scope.
+For a complete list of notations in each scope, use the commands :cmd:`Print
+Scopes` or :cmd:`Print Scope`.
``type_scope``
This scope includes infix * for product types and infix + for sum types. It
@@ -1084,7 +1087,7 @@ Scopes or Print Scope scope.
``string_scope``
This scope includes notation for strings as elements of the type string.
Special characters and escaping follow Coq conventions on strings (see
- Section 1.1). Especially, there is no convention to visualize non
+ :ref:`lexical-conventions`). Especially, there is no convention to visualize non
printable characters of a string. The file :file:`String.v` shows an example
that contains quotes, a newline and a beep (i.e. the ASCII character
of code 7).
@@ -1093,8 +1096,8 @@ Scopes or Print Scope scope.
This scope includes interpretation for all strings of the form ``"c"``
where :g:`c` is an ASCII character, or of the form ``"nnn"`` where nnn is
a three-digits number (possibly with leading 0's), or of the form
- ``""""``. Their respective denotations are the ASCII code of c, the
- decimal ASCII code nnn, or the ascii code of the character ``"`` (i.e.
+ ``""""``. Their respective denotations are the ASCII code of :g:`c`, the
+ decimal ASCII code ``nnn``, or the ascii code of the character ``"`` (i.e.
the ASCII code 34), all of them being represented in the type :g:`ascii`.
@@ -1109,25 +1112,26 @@ Displaying informations about scopes
by the same notation in a more recently open scope are not displayed.
Hence each notation is displayed only once.
-.. cmdv:: Print Visibility scope
-
- This displays the current stack of notations in scopes and lonely
- notations assuming that scope is pushed on top of the stack. This is
- useful to know how a subterm locally occurring in the scope ofscope is
- interpreted.
-
-.. cmdv:: Print Scope scope
+ .. cmdv:: Print Visibility @scope
- This displays all the notations defined in interpretation scopescope.
- It also displays the delimiting key if any and the class to which the
- scope is bound, if any.
+ This displays the current stack of notations in scopes and lonely
+ notations assuming that :token:`scope` is pushed on top of the stack. This is
+ useful to know how a subterm locally occurring in the scope :token:`scope` is
+ interpreted.
-.. cmdv:: Print Scopes
+.. cmd:: Print Scopes
This displays all the notations, delimiting keys and corresponding
class of all the existing interpretation scopes. It also displays the
lonely notations.
+ .. cmdv:: Print Scope @scope
+ :name: Print Scope
+
+ This displays all the notations defined in interpretation scope :token:`scope`.
+ It also displays the delimiting key if any and the class to which the
+ scope is bound, if any.
+
.. _Abbreviations:
Abbreviations
@@ -1324,7 +1328,7 @@ tactic language. Tactic notations obey the following syntax:
.. [#and_or_levels] which are the levels effectively chosen in the current
implementation of Coq
-.. [#no_associativity] Coq accepts notations declared as no associative but the parser on
+.. [#no_associativity] Coq accepts notations declared with ``no associativity`` but the parser on
which Coq is built, namely Camlp4, currently does not implement the
- no-associativity and replaces it by a left associativity; hence it is
- the same for Coq: no-associativity is in fact left associativity
+ ``no associativity`` and replaces it by a ``left associativity``; hence it is
+ the same for Coq: ``no associativity`` is in fact ``left associativity``.
diff --git a/engine/evarutil.ml b/engine/evarutil.ml
index 0c044f20d..b77bf55d8 100644
--- a/engine/evarutil.ml
+++ b/engine/evarutil.ml
@@ -495,12 +495,12 @@ let e_new_type_evar env evdref ?src ?filter ?naming ?principal ?hypnaming rigid
evdref := evd;
c
-let new_Type ?(rigid=Evd.univ_flexible) env evd =
+let new_Type ?(rigid=Evd.univ_flexible) evd =
let open EConstr in
let (evd, s) = new_sort_variable rigid evd in
(evd, mkSort s)
-let e_new_Type ?(rigid=Evd.univ_flexible) env evdref =
+let e_new_Type ?(rigid=Evd.univ_flexible) evdref =
let evd', s = new_sort_variable rigid !evdref in
evdref := evd'; EConstr.mkSort s
diff --git a/engine/evarutil.mli b/engine/evarutil.mli
index 135aa73fc..0ad323ac4 100644
--- a/engine/evarutil.mli
+++ b/engine/evarutil.mli
@@ -63,7 +63,7 @@ val new_type_evar :
env -> evar_map -> rigid ->
evar_map * (constr * Sorts.t)
-val new_Type : ?rigid:rigid -> env -> evar_map -> evar_map * constr
+val new_Type : ?rigid:rigid -> evar_map -> evar_map * constr
(** Polymorphic constants *)
@@ -287,7 +287,7 @@ val e_new_type_evar : env -> evar_map ref ->
?principal:bool -> ?hypnaming:naming_mode -> rigid -> constr * Sorts.t
[@@ocaml.deprecated "Use [Evarutil.new_type_evar]"]
-val e_new_Type : ?rigid:rigid -> env -> evar_map ref -> constr
+val e_new_Type : ?rigid:rigid -> evar_map ref -> constr
[@@ocaml.deprecated "Use [Evarutil.new_Type]"]
val e_new_global : evar_map ref -> GlobRef.t -> constr
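Since the dropped `env` argument was never used, callers only shrink by one argument. A minimal sketch of an updated call site under the new signature above (the wrapper function itself is hypothetical):

(* Hypothetical helper: allocate a fresh Type sort with the default
   flexibility, using the new env-free signature of Evarutil.new_Type. *)
let with_fresh_type sigma =
  let (sigma, ty) = Evarutil.new_Type sigma in
  (* before this patch the call read: Evarutil.new_Type env sigma *)
  (sigma, ty)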
diff --git a/engine/evd.ml b/engine/evd.ml
index 761ae7983..d1c7fef73 100644
--- a/engine/evd.ml
+++ b/engine/evd.ml
@@ -805,8 +805,8 @@ let make_flexible_variable evd ~algebraic u =
(* Operations on constants *)
(****************************************)
-let fresh_sort_in_family ?loc ?(rigid=univ_flexible) env evd s =
- with_context_set ?loc rigid evd (UnivGen.fresh_sort_in_family env s)
+let fresh_sort_in_family ?loc ?(rigid=univ_flexible) evd s =
+ with_context_set ?loc rigid evd (UnivGen.fresh_sort_in_family s)
let fresh_constant_instance ?loc env evd c =
with_context_set ?loc univ_flexible evd (UnivGen.fresh_constant_instance env c)
@@ -820,8 +820,6 @@ let fresh_constructor_instance ?loc env evd c =
let fresh_global ?loc ?(rigid=univ_flexible) ?names env evd gr =
with_context_set ?loc rigid evd (UnivGen.fresh_global_instance ?names env gr)
-let whd_sort_variable evd t = t
-
let is_sort_variable evd s = UState.is_sort_variable evd.universes s
let is_flexible_level evd l =
diff --git a/engine/evd.mli b/engine/evd.mli
index d638bb2d3..db2bd4eed 100644
--- a/engine/evd.mli
+++ b/engine/evd.mli
@@ -340,8 +340,6 @@ val shelve_on_future_goals : Evar.t list -> future_goals -> future_goals
Evar maps also keep track of the universe constraints defined at a given
point. This section defines the relevant manipulation functions. *)
-val whd_sort_variable : evar_map -> econstr -> econstr
-
exception UniversesDiffer
val add_universe_constraints : evar_map -> UnivProblem.Set.t -> evar_map
@@ -598,7 +596,7 @@ val update_sigma_env : evar_map -> env -> evar_map
(** Polymorphic universes *)
-val fresh_sort_in_family : ?loc:Loc.t -> ?rigid:rigid -> env -> evar_map -> Sorts.family -> evar_map * Sorts.t
+val fresh_sort_in_family : ?loc:Loc.t -> ?rigid:rigid -> evar_map -> Sorts.family -> evar_map * Sorts.t
val fresh_constant_instance : ?loc:Loc.t -> env -> evar_map -> Constant.t -> evar_map * pconstant
val fresh_inductive_instance : ?loc:Loc.t -> env -> evar_map -> inductive -> evar_map * pinductive
val fresh_constructor_instance : ?loc:Loc.t -> env -> evar_map -> constructor -> evar_map * pconstructor
diff --git a/engine/uState.ml b/engine/uState.ml
index 81ab3dd66..0791e4c27 100644
--- a/engine/uState.ml
+++ b/engine/uState.ml
@@ -583,7 +583,7 @@ let refresh_constraints univs (ctx, cstrs) =
in ((ctx, cstrs'), univs')
let normalize_variables uctx =
- let normalized_variables, undef, def, subst =
+ let normalized_variables, def, subst =
UnivSubst.normalize_univ_variables uctx.uctx_univ_variables
in
let ctx_local = subst_univs_context_with_def def (Univ.make_subst subst) uctx.uctx_local in
diff --git a/engine/univGen.ml b/engine/univGen.ml
index 796a1bcc1..b07d4848f 100644
--- a/engine/univGen.ml
+++ b/engine/univGen.ml
@@ -215,7 +215,7 @@ let type_of_reference env r =
let type_of_global t = type_of_reference (Global.env ()) t
-let fresh_sort_in_family env = function
+let fresh_sort_in_family = function
| InProp -> Sorts.prop, ContextSet.empty
| InSet -> Sorts.set, ContextSet.empty
| InType ->
@@ -223,7 +223,7 @@ let fresh_sort_in_family env = function
Type (Univ.Universe.make u), ContextSet.singleton u
let new_sort_in_family sf =
- fst (fresh_sort_in_family (Global.env ()) sf)
+ fst (fresh_sort_in_family sf)
let extend_context (a, ctx) (ctx') =
(a, ContextSet.union ctx ctx')
diff --git a/engine/univGen.mli b/engine/univGen.mli
index 8169dbda4..439424934 100644
--- a/engine/univGen.mli
+++ b/engine/univGen.mli
@@ -39,7 +39,7 @@ val fresh_instance_from_context : AUContext.t ->
val fresh_instance_from : AUContext.t -> Instance.t option ->
Instance.t in_universe_context_set
-val fresh_sort_in_family : env -> Sorts.family ->
+val fresh_sort_in_family : Sorts.family ->
Sorts.t in_universe_context_set
val fresh_constant_instance : env -> Constant.t ->
pconstant in_universe_context_set
diff --git a/engine/univSubst.ml b/engine/univSubst.ml
index 6a433d9fb..2f59a3fa8 100644
--- a/engine/univSubst.ml
+++ b/engine/univSubst.ml
@@ -162,13 +162,13 @@ let subst_opt_univs_constr s =
let normalize_univ_variables ctx =
let ctx = normalize_opt_subst ctx in
- let undef, def, subst =
- Univ.LMap.fold (fun u v (undef, def, subst) ->
+ let def, subst =
+ Univ.LMap.fold (fun u v (def, subst) ->
match v with
- | None -> (Univ.LSet.add u undef, def, subst)
- | Some b -> (undef, Univ.LSet.add u def, Univ.LMap.add u b subst))
- ctx (Univ.LSet.empty, Univ.LSet.empty, Univ.LMap.empty)
- in ctx, undef, def, subst
+ | None -> (def, subst)
+ | Some b -> (Univ.LSet.add u def, Univ.LMap.add u b subst))
+ ctx (Univ.LSet.empty, Univ.LMap.empty)
+ in ctx, def, subst
let pr_universe_body = function
| None -> mt ()
diff --git a/engine/univSubst.mli b/engine/univSubst.mli
index 26e8d1db9..e76d25333 100644
--- a/engine/univSubst.mli
+++ b/engine/univSubst.mli
@@ -23,7 +23,7 @@ val make_opt_subst : universe_opt_subst -> universe_subst_fn
val subst_opt_univs_constr : universe_opt_subst -> constr -> constr
val normalize_univ_variables : universe_opt_subst ->
- universe_opt_subst * LSet.t * LSet.t * universe_subst
+ universe_opt_subst * LSet.t * universe_subst
val normalize_univ_variable :
find:(Level.t -> Universe.t) ->
diff --git a/engine/universes.mli b/engine/universes.mli
index 29673de1e..ad937471e 100644
--- a/engine/universes.mli
+++ b/engine/universes.mli
@@ -86,7 +86,7 @@ val fresh_instance_from : AUContext.t -> Instance.t option ->
Instance.t in_universe_context_set
[@@ocaml.deprecated "Use [UnivGen.fresh_instance_from]"]
-val fresh_sort_in_family : env -> Sorts.family ->
+val fresh_sort_in_family : Sorts.family ->
Sorts.t in_universe_context_set
[@@ocaml.deprecated "Use [UnivGen.fresh_sort_in_family]"]
@@ -154,7 +154,7 @@ val subst_opt_univs_constr : universe_opt_subst -> constr -> constr
[@@ocaml.deprecated "Use [UnivSubst.subst_opt_univs_constr]"]
val normalize_univ_variables : universe_opt_subst ->
- universe_opt_subst * LSet.t * LSet.t * universe_subst
+ universe_opt_subst * LSet.t * universe_subst
[@@ocaml.deprecated "Use [UnivSubst.normalize_univ_variables]"]
val normalize_univ_variable :
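The return type of `normalize_univ_variables` loses the set of still-undefined variables, which no caller consumed; only the defined set and the substitution remain. A standalone sketch of the new fold shape, written over a plain string map so it runs outside the Coq sources (names are illustrative):

module SMap = Map.Make (String)
module SSet = Set.Make (String)

(* Variables bound to [Some b] feed the defined set and the substitution;
   [None] entries are simply skipped instead of being collected. *)
let normalize_vars (ctx : string option SMap.t) : SSet.t * string SMap.t =
  SMap.fold
    (fun u v (def, subst) ->
       match v with
       | None -> (def, subst)
       | Some b -> (SSet.add u def, SMap.add u b subst))
    ctx (SSet.empty, SMap.empty)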
diff --git a/grammar/coqpp_ast.mli b/grammar/coqpp_ast.mli
new file mode 100644
index 000000000..39b4d2ab3
--- /dev/null
+++ b/grammar/coqpp_ast.mli
@@ -0,0 +1,95 @@
+(************************************************************************)
+(* v * The Coq Proof Assistant / The Coq Development Team *)
+(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2017 *)
+(* \VV/ **************************************************************)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(************************************************************************)
+
+type loc = {
+ loc_start : Lexing.position;
+ loc_end : Lexing.position;
+}
+
+type code = { code : string }
+
+type user_symbol =
+| Ulist1 of user_symbol
+| Ulist1sep of user_symbol * string
+| Ulist0 of user_symbol
+| Ulist0sep of user_symbol * string
+| Uopt of user_symbol
+| Uentry of string
+| Uentryl of string * int
+
+type token_data =
+| TokNone
+| TokName of string
+
+type ext_token =
+| ExtTerminal of string
+| ExtNonTerminal of user_symbol * token_data
+
+type tactic_rule = {
+ tac_toks : ext_token list;
+ tac_body : code;
+}
+
+type level = string
+
+type position =
+| First
+| Last
+| Before of level
+| After of level
+| Level of level
+
+type assoc =
+| LeftA
+| RightA
+| NonA
+
+type gram_symbol =
+| GSymbString of string
+| GSymbQualid of string * level option
+| GSymbParen of gram_symbol list
+| GSymbProd of gram_prod list
+
+and gram_prod = {
+ gprod_symbs : (string option * gram_symbol list) list;
+ gprod_body : code;
+}
+
+type gram_rule = {
+ grule_label : string option;
+ grule_assoc : assoc option;
+ grule_prods : gram_prod list;
+}
+
+type grammar_entry = {
+ gentry_name : string;
+ gentry_pos : position option;
+ gentry_rules : gram_rule list;
+}
+
+type grammar_ext = {
+ gramext_name : string;
+ gramext_globals : string list;
+ gramext_entries : grammar_entry list;
+}
+
+type tactic_ext = {
+ tacext_name : string;
+ tacext_level : int option;
+ tacext_rules : tactic_rule list;
+}
+
+type node =
+| Code of code
+| Comment of string
+| DeclarePlugin of string
+| GramExt of grammar_ext
+| VernacExt
+| TacticExt of tactic_ext
+
+type t = node list
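To make the new AST concrete, the value below is roughly what the parser produces for a one-rule TACTIC EXTEND block; the tactic name, argument and body string are made up for illustration.

open Coqpp_ast

(* Corresponds to:  TACTIC EXTEND my_tac
                    | [ "my_tac" constr(c) ] -> { body }
                    END *)
let example : tactic_ext =
  { tacext_name = "my_tac";
    tacext_level = None;
    tacext_rules =
      [ { tac_toks =
            [ ExtTerminal "my_tac";
              ExtNonTerminal (Uentry "constr", TokName "c") ];
          tac_body = { code = "body" } } ] }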
diff --git a/grammar/coqpp_lex.mll b/grammar/coqpp_lex.mll
new file mode 100644
index 000000000..6c6562c20
--- /dev/null
+++ b/grammar/coqpp_lex.mll
@@ -0,0 +1,167 @@
+(************************************************************************)
+(* v * The Coq Proof Assistant / The Coq Development Team *)
+(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2017 *)
+(* \VV/ **************************************************************)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(************************************************************************)
+
+{
+
+open Lexing
+open Coqpp_parse
+
+type mode =
+| OCaml
+| Extend
+
+exception Lex_error of Coqpp_ast.loc * string
+
+let loc lexbuf = {
+ Coqpp_ast.loc_start = lexeme_start_p lexbuf;
+ Coqpp_ast.loc_end = lexeme_end_p lexbuf;
+}
+
+let newline lexbuf =
+ let pos = lexbuf.lex_curr_p in
+ let pos = { pos with pos_lnum = pos.pos_lnum + 1; pos_bol = pos.pos_cnum } in
+ lexbuf.lex_curr_p <- pos
+
+let num_comments = ref 0
+let num_braces = ref 0
+
+let mode () = if !num_braces = 0 then Extend else OCaml
+
+let comment_buf = Buffer.create 512
+let ocaml_buf = Buffer.create 512
+let string_buf = Buffer.create 512
+
+let lex_error lexbuf msg =
+ raise (Lex_error (loc lexbuf, msg))
+
+let lex_unexpected_eof lexbuf where =
+ lex_error lexbuf (Printf.sprintf "Unexpected end of file in %s" where)
+
+let start_comment _ =
+ let () = incr num_comments in
+ Buffer.add_string comment_buf "(*"
+
+let end_comment lexbuf =
+ let () = decr num_comments in
+ let () = Buffer.add_string comment_buf "*)" in
+ if !num_comments < 0 then lex_error lexbuf "Unexpected end of comment"
+ else if !num_comments = 0 then
+ let s = Buffer.contents comment_buf in
+ let () = Buffer.reset comment_buf in
+ Some (COMMENT s)
+ else
+ None
+
+let start_ocaml _ =
+ let () = match mode () with
+ | OCaml -> Buffer.add_string ocaml_buf "{"
+ | Extend -> ()
+ in
+ incr num_braces
+
+let end_ocaml lexbuf =
+ let () = decr num_braces in
+ if !num_braces < 0 then lex_error lexbuf "Unexpected end of OCaml code"
+ else if !num_braces = 0 then
+ let s = Buffer.contents ocaml_buf in
+ let () = Buffer.reset ocaml_buf in
+ Some (CODE { Coqpp_ast.code = s })
+ else
+ let () = Buffer.add_string ocaml_buf "}" in
+ None
+
+}
+
+let letter = ['a'-'z' 'A'-'Z']
+let letterlike = ['_' 'a'-'z' 'A'-'Z']
+let alphanum = ['_' 'a'-'z' 'A'-'Z' '0'-'9' '\'']
+let ident = letterlike alphanum*
+let qualid = ident ('.' ident)*
+let space = [' ' '\t' '\r']
+let number = [ '0'-'9' ]
+
+rule extend = parse
+| "(*" { start_comment (); comment lexbuf }
+| "{" { start_ocaml (); ocaml lexbuf }
+| "GRAMMAR" { GRAMMAR }
+| "VERNAC" { VERNAC }
+| "TACTIC" { TACTIC }
+| "EXTEND" { EXTEND }
+| "END" { END }
+| "DECLARE" { DECLARE }
+| "PLUGIN" { PLUGIN }
+(** Camlp5 specific keywords *)
+| "GLOBAL" { GLOBAL }
+| "FIRST" { FIRST }
+| "LAST" { LAST }
+| "BEFORE" { BEFORE }
+| "AFTER" { AFTER }
+| "LEVEL" { LEVEL }
+| "LEFTA" { LEFTA }
+| "RIGHTA" { RIGHTA }
+| "NONA" { NONA }
+(** Standard *)
+| ident { IDENT (Lexing.lexeme lexbuf) }
+| qualid { QUALID (Lexing.lexeme lexbuf) }
+| number { INT (int_of_string (Lexing.lexeme lexbuf)) }
+| space { extend lexbuf }
+| '\"' { string lexbuf }
+| '\n' { newline lexbuf; extend lexbuf }
+| '[' { LBRACKET }
+| ']' { RBRACKET }
+| '|' { PIPE }
+| "->" { ARROW }
+| ',' { COMMA }
+| ':' { COLON }
+| ';' { SEMICOLON }
+| '(' { LPAREN }
+| ')' { RPAREN }
+| '=' { EQUAL }
+| _ { lex_error lexbuf "syntax error" }
+| eof { EOF }
+
+and ocaml = parse
+| "{" { start_ocaml (); ocaml lexbuf }
+| "}" { match end_ocaml lexbuf with Some tk -> tk | None -> ocaml lexbuf }
+| '\n' { newline lexbuf; Buffer.add_char ocaml_buf '\n'; ocaml lexbuf }
+| '\"' { Buffer.add_char ocaml_buf '\"'; ocaml_string lexbuf }
+| (_ as c) { Buffer.add_char ocaml_buf c; ocaml lexbuf }
+| eof { lex_unexpected_eof lexbuf "OCaml code" }
+
+and comment = parse
+| "*)" { match end_comment lexbuf with Some _ -> extend lexbuf | None -> comment lexbuf }
+| "(*" { start_comment lexbuf; comment lexbuf }
+| '\n' { newline lexbuf; Buffer.add_char comment_buf '\n'; comment lexbuf }
+| (_ as c) { Buffer.add_char comment_buf c; comment lexbuf }
+| eof { lex_unexpected_eof lexbuf "comment" }
+
+and string = parse
+| '\"'
+ {
+ let s = Buffer.contents string_buf in
+ let () = Buffer.reset string_buf in
+ STRING s
+ }
+| "\\\"" { Buffer.add_char string_buf '\"'; string lexbuf }
+| '\n' { newline lexbuf; Buffer.add_char string_buf '\n'; string lexbuf }
+| (_ as c) { Buffer.add_char string_buf c; string lexbuf }
+| eof { lex_unexpected_eof lexbuf "string" }
+
+and ocaml_string = parse
+| "\\\"" { Buffer.add_string ocaml_buf "\\\""; ocaml_string lexbuf }
+| '\"' { Buffer.add_char ocaml_buf '\"'; ocaml lexbuf }
+| (_ as c) { Buffer.add_char ocaml_buf c; ocaml_string lexbuf }
+| eof { lex_unexpected_eof lexbuf "OCaml string" }
+
+{
+
+let token lexbuf = match mode () with
+| OCaml -> ocaml lexbuf
+| Extend -> extend lexbuf
+
+}
diff --git a/grammar/coqpp_main.ml b/grammar/coqpp_main.ml
new file mode 100644
index 000000000..e76a1e9ed
--- /dev/null
+++ b/grammar/coqpp_main.ml
@@ -0,0 +1,349 @@
+(************************************************************************)
+(* v * The Coq Proof Assistant / The Coq Development Team *)
+(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2017 *)
+(* \VV/ **************************************************************)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(************************************************************************)
+
+open Lexing
+open Coqpp_ast
+open Format
+
+let fatal msg =
+ let () = Format.eprintf "Error: %s@\n%!" msg in
+ exit 1
+
+let pr_loc loc =
+ let file = loc.loc_start.pos_fname in
+ let line = loc.loc_start.pos_lnum in
+ let bpos = loc.loc_start.pos_cnum - loc.loc_start.pos_bol in
+ let epos = loc.loc_end.pos_cnum - loc.loc_start.pos_bol in
+ Printf.sprintf "File \"%s\", line %d, characters %d-%d:" file line bpos epos
+
+let parse_file f =
+ let chan = open_in f in
+ let lexbuf = Lexing.from_channel chan in
+ let () = lexbuf.lex_curr_p <- { lexbuf.lex_curr_p with pos_fname = f } in
+ let ans =
+ try Coqpp_parse.file Coqpp_lex.token lexbuf
+ with
+ | Coqpp_lex.Lex_error (loc, msg) ->
+ let () = close_in chan in
+ let () = Printf.eprintf "%s\n%!" (pr_loc loc) in
+ fatal msg
+ | Parsing.Parse_error ->
+ let () = close_in chan in
+ let loc = Coqpp_lex.loc lexbuf in
+ let () = Printf.eprintf "%s\n%!" (pr_loc loc) in
+ fatal "syntax error"
+ in
+ let () = close_in chan in
+ ans
+
+module StringSet = Set.Make(String)
+
+let string_split s =
+ let len = String.length s in
+ let rec split n =
+ try
+ let pos = String.index_from s n '.' in
+ let dir = String.sub s n (pos-n) in
+ dir :: split (succ pos)
+ with
+ | Not_found -> [String.sub s n (len-n)]
+ in
+ if len == 0 then [] else split 0
+
+let plugin_name = "__coq_plugin_name"
+
+module GramExt :
+sig
+
+val print_ast : Format.formatter -> grammar_ext -> unit
+
+end =
+struct
+
+let is_uident s = match s.[0] with
+| 'A'..'Z' -> true
+| _ -> false
+
+let is_qualified = is_uident
+
+let get_local_entries ext =
+ let global = StringSet.of_list ext.gramext_globals in
+ let map e = e.gentry_name in
+ let entries = List.map map ext.gramext_entries in
+ let local = List.filter (fun e -> not (is_qualified e || StringSet.mem e global)) entries in
+ let rec uniquize seen = function
+ | [] -> []
+ | id :: rem ->
+ let rem = uniquize (StringSet.add id seen) rem in
+ if StringSet.mem id seen then rem else id :: rem
+ in
+ uniquize StringSet.empty local
+
+let print_local fmt ext =
+ let locals = get_local_entries ext in
+ match locals with
+ | [] -> ()
+ | e :: locals ->
+ let mk_e fmt e = fprintf fmt "%s.Entry.create \"%s\"" ext.gramext_name e in
+ let () = fprintf fmt "@[<hv 2>let %s =@ @[%a@]@]@ " e mk_e e in
+ let iter e = fprintf fmt "@[<hv 2>and %s =@ @[%a@]@]@ " e mk_e e in
+ let () = List.iter iter locals in
+ fprintf fmt "in@ "
+
+let print_string fmt s = fprintf fmt "\"%s\"" s
+
+let print_list fmt pr l =
+ let rec prl fmt = function
+ | [] -> ()
+ | [x] -> fprintf fmt "%a" pr x
+ | x :: l -> fprintf fmt "%a;@ %a" pr x prl l
+ in
+ fprintf fmt "@[<hv>[%a]@]" prl l
+
+let print_opt fmt pr = function
+| None -> fprintf fmt "None"
+| Some x -> fprintf fmt "Some@ (%a)" pr x
+
+let print_position fmt pos = match pos with
+| First -> fprintf fmt "Extend.First"
+| Last -> fprintf fmt "Extend.Last"
+| Before s -> fprintf fmt "Extend.Before@ \"%s\"" s
+| After s -> fprintf fmt "Extend.After@ \"%s\"" s
+| Level s -> fprintf fmt "Extend.Level@ \"%s\"" s
+
+let print_assoc fmt = function
+| LeftA -> fprintf fmt "Extend.LeftA"
+| RightA -> fprintf fmt "Extend.RightA"
+| NonA -> fprintf fmt "Extend.NonA"
+
+type symb =
+| SymbToken of string * string option
+| SymbEntry of string * string option
+| SymbSelf
+| SymbNext
+| SymbList0 of symb * symb option
+| SymbList1 of symb * symb option
+| SymbOpt of symb
+| SymbRules of ((string option * symb) list * code) list
+
+let is_token s = match string_split s with
+| [s] -> is_uident s
+| _ -> false
+
+let rec parse_tokens = function
+| [GSymbString s] -> SymbToken ("", Some s)
+| [GSymbQualid ("SELF", None)] -> SymbSelf
+| [GSymbQualid ("NEXT", None)] -> SymbNext
+| [GSymbQualid ("LIST0", None); tkn] ->
+ SymbList0 (parse_token tkn, None)
+| [GSymbQualid ("LIST1", None); tkn] ->
+ SymbList1 (parse_token tkn, None)
+| [GSymbQualid ("LIST0", None); tkn; GSymbQualid ("SEP", None); tkn'] ->
+ SymbList0 (parse_token tkn, Some (parse_token tkn'))
+| [GSymbQualid ("LIST1", None); tkn; GSymbQualid ("SEP", None); tkn'] ->
+ SymbList1 (parse_token tkn, Some (parse_token tkn'))
+| [GSymbQualid ("OPT", None); tkn] ->
+ SymbOpt (parse_token tkn)
+| [GSymbQualid (e, None)] when is_token e -> SymbToken (e, None)
+| [GSymbQualid (e, None); GSymbString s] when is_token e -> SymbToken (e, Some s)
+| [GSymbQualid (e, lvl)] when not (is_token e) -> SymbEntry (e, lvl)
+| [GSymbParen tkns] -> parse_tokens tkns
+| [GSymbProd prds] ->
+ let map p =
+ let map (pat, tkns) = (pat, parse_tokens tkns) in
+ (List.map map p.gprod_symbs, p.gprod_body)
+ in
+ SymbRules (List.map map prds)
+| t ->
+ let rec db_token = function
+ | GSymbString s -> Printf.sprintf "\"%s\"" s
+ | GSymbQualid (s, _) -> Printf.sprintf "%s" s
+ | GSymbParen s -> Printf.sprintf "(%s)" (db_tokens s)
+ | GSymbProd _ -> Printf.sprintf "[...]"
+ and db_tokens tkns =
+ let s = List.map db_token tkns in
+ String.concat " " s
+ in
+ fatal (Printf.sprintf "Invalid token: %s" (db_tokens t))
+
+and parse_token tkn = parse_tokens [tkn]
+
+let print_fun fmt (vars, body) =
+ let vars = List.rev vars in
+ let iter = function
+ | None -> fprintf fmt "_@ "
+ | Some id -> fprintf fmt "%s@ " id
+ in
+ let () = fprintf fmt "fun@ " in
+ let () = List.iter iter vars in
+ (** FIXME: use Coq locations in the macros *)
+ let () = fprintf fmt "loc ->@ @[%s@]" body.code in
+ ()
+
+(** Meta-program instead of calling Tok.of_pattern here because otherwise
+ violates value restriction *)
+let print_tok fmt = function
+| "", s -> fprintf fmt "Tok.KEYWORD %a" print_string s
+| "IDENT", s -> fprintf fmt "Tok.IDENT %a" print_string s
+| "PATTERNIDENT", s -> fprintf fmt "Tok.PATTERNIDENT %a" print_string s
+| "FIELD", s -> fprintf fmt "Tok.FIELD %a" print_string s
+| "INT", s -> fprintf fmt "Tok.INT %a" print_string s
+| "STRING", s -> fprintf fmt "Tok.STRING %a" print_string s
+| "LEFTQMARK", _ -> fprintf fmt "Tok.LEFTQMARK"
+| "BULLET", s -> fprintf fmt "Tok.BULLET %a" print_string s
+| "EOI", _ -> fprintf fmt "Tok.EOI"
+| _ -> failwith "Tok.of_pattern: not a constructor"
+
+let rec print_prod fmt p =
+ let (vars, tkns) = List.split p.gprod_symbs in
+ let f = (vars, p.gprod_body) in
+ let tkn = List.rev_map parse_tokens tkns in
+ fprintf fmt "@[Extend.Rule@ (@[%a@],@ @[(%a)@])@]" print_symbols tkn print_fun f
+
+and print_symbols fmt = function
+| [] -> fprintf fmt "Extend.Stop"
+| tkn :: tkns ->
+ fprintf fmt "Extend.Next @[(%a,@ %a)@]" print_symbols tkns print_symbol tkn
+
+and print_symbol fmt tkn = match tkn with
+| SymbToken (t, s) ->
+ let s = match s with None -> "" | Some s -> s in
+ fprintf fmt "(Extend.Atoken (%a))" print_tok (t, s)
+| SymbEntry (e, None) ->
+ fprintf fmt "(Extend.Aentry %s)" e
+| SymbEntry (e, Some l) ->
+ fprintf fmt "(Extend.Aentryl (%s, %a))" e print_string l
+| SymbSelf ->
+ fprintf fmt "Extend.Aself"
+| SymbNext ->
+ fprintf fmt "Extend.Anext"
+| SymbList0 (s, None) ->
+ fprintf fmt "(Extend.Alist0 %a)" print_symbol s
+| SymbList0 (s, Some sep) ->
+ fprintf fmt "(Extend.Alist0sep (%a, %a))" print_symbol s print_symbol sep
+| SymbList1 (s, None) ->
+ fprintf fmt "(Extend.Alist1 %a)" print_symbol s
+| SymbList1 (s, Some sep) ->
+ fprintf fmt "(Extend.Alist1sep (%a, %a))" print_symbol s print_symbol sep
+| SymbOpt s ->
+ fprintf fmt "(Extend.Aopt %a)" print_symbol s
+| SymbRules rules ->
+ let pr fmt (r, body) =
+ let (vars, tkn) = List.split r in
+ let tkn = List.rev tkn in
+ fprintf fmt "Extend.Rules @[({ Extend.norec_rule = %a },@ (%a))@]" print_symbols tkn print_fun (vars, body)
+ in
+ let pr fmt rules = print_list fmt pr rules in
+ fprintf fmt "(Extend.Arules %a)" pr (List.rev rules)
+
+let print_rule fmt r =
+ let pr_lvl fmt lvl = print_opt fmt print_string lvl in
+ let pr_asc fmt asc = print_opt fmt print_assoc asc in
+ let pr_prd fmt prd = print_list fmt print_prod prd in
+ fprintf fmt "@[(%a,@ %a,@ %a)@]" pr_lvl r.grule_label pr_asc r.grule_assoc pr_prd (List.rev r.grule_prods)
+
+let print_entry fmt gram e =
+ let print_position_opt fmt pos = print_opt fmt print_position pos in
+ let print_rules fmt rules = print_list fmt print_rule rules in
+ fprintf fmt "let () =@ @[%s.gram_extend@ %s@ @[(%a, %a)@]@]@ in@ "
+ gram e.gentry_name print_position_opt e.gentry_pos print_rules e.gentry_rules
+
+let print_ast fmt ext =
+ let () = fprintf fmt "let _ = @[" in
+ let () = fprintf fmt "@[<v>%a@]" print_local ext in
+ let () = List.iter (fun e -> print_entry fmt ext.gramext_name e) ext.gramext_entries in
+ let () = fprintf fmt "()@]@\n" in
+ ()
+
+end
+
+module TacticExt :
+sig
+
+val print_ast : Format.formatter -> tactic_ext -> unit
+
+end =
+struct
+
+let rec print_symbol fmt = function
+| Ulist1 s ->
+ fprintf fmt "@[Extend.TUlist1 (%a)@]" print_symbol s
+| Ulist1sep (s, sep) ->
+ fprintf fmt "@[Extend.TUlist1sep (%a, \"%s\")@]" print_symbol s sep
+| Ulist0 s ->
+ fprintf fmt "@[Extend.TUlist0 (%a)@]" print_symbol s
+| Ulist0sep (s, sep) ->
+ fprintf fmt "@[Extend.TUlist0sep (%a, \"%s\")@]" print_symbol s sep
+| Uopt s ->
+ fprintf fmt "@[Extend.TUopt (%a)@]" print_symbol s
+| Uentry e ->
+ fprintf fmt "@[Extend.TUentry (Genarg.get_arg_tag wit_%s)@]" e
+| Uentryl (e, l) ->
+ assert (e = "tactic");
+ fprintf fmt "@[Extend.TUentryl (Genarg.get_arg_tag wit_%s, %i)@]" e l
+
+let rec print_clause fmt = function
+| [] -> fprintf fmt "@[TyNil@]"
+| ExtTerminal s :: cl -> fprintf fmt "@[TyIdent (\"%s\", %a)@]" s print_clause cl
+| ExtNonTerminal (g, TokNone) :: cl ->
+ fprintf fmt "@[TyAnonArg (%a, %a)@]"
+ print_symbol g print_clause cl
+| ExtNonTerminal (g, TokName id) :: cl ->
+ fprintf fmt "@[TyArg (%a, \"%s\", %a)@]"
+ print_symbol g id print_clause cl
+
+let rec print_binders fmt = function
+| [] -> fprintf fmt "ist@ "
+| (ExtTerminal _ | ExtNonTerminal (_, TokNone)) :: rem -> print_binders fmt rem
+| (ExtNonTerminal (_, TokName id)) :: rem ->
+ fprintf fmt "%s@ %a" id print_binders rem
+
+let print_rule fmt r =
+ fprintf fmt "@[TyML (%a, @[fun %a -> %s@])@]"
+ print_clause r.tac_toks print_binders r.tac_toks r.tac_body.code
+
+let rec print_rules fmt = function
+| [] -> ()
+| [r] -> fprintf fmt "(%a)@\n" print_rule r
+| r :: rem -> fprintf fmt "(%a);@\n%a" print_rule r print_rules rem
+
+let print_rules fmt rules =
+ fprintf fmt "Tacentries.([@[<v>%a@]])" print_rules rules
+
+let print_ast fmt ext =
+ let pr fmt () =
+ let level = match ext.tacext_level with None -> 0 | Some i -> i in
+ fprintf fmt "Tacentries.tactic_extend %s \"%s\" ~level:%i %a"
+ plugin_name ext.tacext_name level print_rules ext.tacext_rules
+ in
+ let () = fprintf fmt "let () = @[%a@]\n" pr () in
+ ()
+
+end
+
+let pr_ast fmt = function
+| Code s -> fprintf fmt "%s@\n" s.code
+| Comment s -> fprintf fmt "%s@\n" s
+| DeclarePlugin name -> fprintf fmt "let %s = \"%s\"@\n" plugin_name name
+| GramExt gram -> fprintf fmt "%a@\n" GramExt.print_ast gram
+| VernacExt -> fprintf fmt "VERNACEXT@\n"
+| TacticExt tac -> fprintf fmt "%a@\n" TacticExt.print_ast tac
+
+let () =
+ let () =
+ if Array.length Sys.argv <> 2 then fatal "Expected exactly one command line argument"
+ in
+ let file = Sys.argv.(1) in
+ let output = Filename.chop_extension file ^ ".ml" in
+ let ast = parse_file file in
+ let chan = open_out output in
+ let fmt = formatter_of_out_channel chan in
+ let iter ast = Format.fprintf fmt "@[%a@]%!" pr_ast ast in
+ let () = List.iter iter ast in
+ let () = close_out chan in
+ exit 0
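As a quick sanity check on the helpers above, the following assertions hold if appended inside coqpp_main.ml (string_split cuts a qualified name on dots):

let () =
  assert (string_split "Prim.reference" = ["Prim"; "reference"]);
  assert (string_split "constr" = ["constr"]);
  assert (string_split "" = [])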
diff --git a/grammar/coqpp_parse.mly b/grammar/coqpp_parse.mly
new file mode 100644
index 000000000..baafd633c
--- /dev/null
+++ b/grammar/coqpp_parse.mly
@@ -0,0 +1,256 @@
+/************************************************************************/
+/* v * The Coq Proof Assistant / The Coq Development Team */
+/* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2017 */
+/* \VV/ **************************************************************/
+/* // * This file is distributed under the terms of the */
+/* * GNU Lesser General Public License Version 2.1 */
+/************************************************************************/
+
+%{
+
+open Coqpp_ast
+
+let starts s pat =
+ let len = String.length s in
+ let patlen = String.length pat in
+ if patlen <= len && String.sub s 0 patlen = pat then
+ Some (String.sub s patlen (len - patlen))
+ else None
+
+let ends s pat =
+ let len = String.length s in
+ let patlen = String.length pat in
+ if patlen <= len && String.sub s (len - patlen) patlen = pat then
+ Some (String.sub s 0 (len - patlen))
+ else None
+
+let between s pat1 pat2 = match starts s pat1 with
+| None -> None
+| Some s -> ends s pat2
+
+let without_sep k sep r =
+ if sep <> "" then raise Parsing.Parse_error else k r
+
+let parse_user_entry s sep =
+ let table = [
+ "ne_", "_list", without_sep (fun r -> Ulist1 r);
+ "ne_", "_list_sep", (fun sep r -> Ulist1sep (r, sep));
+ "", "_list", without_sep (fun r -> Ulist0 r);
+ "", "_list_sep", (fun sep r -> Ulist0sep (r, sep));
+ "", "_opt", without_sep (fun r -> Uopt r);
+ ] in
+ let rec parse s sep = function
+ | [] ->
+ let () = without_sep ignore sep () in
+ begin match starts s "tactic" with
+ | Some ("0"|"1"|"2"|"3"|"4"|"5" as n) -> Uentryl ("tactic", int_of_string n)
+ | Some _ | None -> Uentry s
+ end
+ | (pat1, pat2, k) :: rem ->
+ match between s pat1 pat2 with
+ | None -> parse s sep rem
+ | Some s ->
+ let r = parse s "" table in
+ k sep r
+ in
+ parse s sep table
+
+%}
+
+%token <Coqpp_ast.code> CODE
+%token <string> COMMENT
+%token <string> IDENT QUALID
+%token <string> STRING
+%token <int> INT
+%token VERNAC TACTIC GRAMMAR EXTEND END DECLARE PLUGIN
+%token LBRACKET RBRACKET PIPE ARROW COMMA EQUAL
+%token LPAREN RPAREN COLON SEMICOLON
+%token GLOBAL FIRST LAST BEFORE AFTER LEVEL LEFTA RIGHTA NONA
+%token EOF
+
+%type <Coqpp_ast.t> file
+%start file
+
+%%
+
+file:
+| nodes EOF
+ { $1 }
+;
+
+nodes:
+|
+ { [] }
+| node nodes
+ { $1 :: $2 }
+;
+
+node:
+| CODE { Code $1 }
+| COMMENT { Comment $1 }
+| declare_plugin { $1 }
+| grammar_extend { $1 }
+| vernac_extend { $1 }
+| tactic_extend { $1 }
+;
+
+declare_plugin:
+| DECLARE PLUGIN STRING { DeclarePlugin $3 }
+;
+
+grammar_extend:
+| GRAMMAR EXTEND qualid_or_ident globals gram_entries END
+ { GramExt { gramext_name = $3; gramext_globals = $4; gramext_entries = $5 } }
+;
+
+vernac_extend:
+| VERNAC EXTEND IDENT END { VernacExt }
+;
+
+tactic_extend:
+| TACTIC EXTEND IDENT tactic_level tactic_rules END
+ { TacticExt { tacext_name = $3; tacext_level = $4; tacext_rules = $5 } }
+;
+
+tactic_level:
+| { None }
+| LEVEL INT { Some $2 }
+;
+
+tactic_rules:
+| tactic_rule { [$1] }
+| tactic_rule tactic_rules { $1 :: $2 }
+;
+
+tactic_rule:
+| PIPE LBRACKET ext_tokens RBRACKET ARROW CODE
+ { { tac_toks = $3; tac_body = $6 } }
+;
+
+ext_tokens:
+| { [] }
+| ext_token ext_tokens { $1 :: $2 }
+;
+
+ext_token:
+| STRING { ExtTerminal $1 }
+| IDENT {
+ let e = parse_user_entry $1 "" in
+ ExtNonTerminal (e, TokNone)
+ }
+| IDENT LPAREN IDENT RPAREN {
+ let e = parse_user_entry $1 "" in
+ ExtNonTerminal (e, TokName $3)
+ }
+| IDENT LPAREN IDENT COMMA STRING RPAREN {
+ let e = parse_user_entry $1 $5 in
+ ExtNonTerminal (e, TokName $3)
+}
+;
+
+qualid_or_ident:
+| QUALID { $1 }
+| IDENT { $1 }
+;
+
+globals:
+| { [] }
+| GLOBAL COLON idents SEMICOLON { $3 }
+;
+
+idents:
+| { [] }
+| qualid_or_ident idents { $1 :: $2 }
+;
+
+gram_entries:
+| { [] }
+| gram_entry gram_entries { $1 :: $2 }
+;
+
+gram_entry:
+| qualid_or_ident COLON position_opt LBRACKET levels RBRACKET SEMICOLON
+ { { gentry_name = $1; gentry_pos = $3; gentry_rules = $5; } }
+;
+
+position_opt:
+| { None }
+| position { Some $1 }
+;
+
+position:
+| FIRST { First }
+| LAST { Last }
+| BEFORE STRING { Before $2 }
+| AFTER STRING { After $2 }
+| LEVEL STRING { Level $2 }
+;
+
+string_opt:
+| { None }
+| STRING { Some $1 }
+;
+
+assoc_opt:
+| { None }
+| assoc { Some $1 }
+;
+
+assoc:
+| LEFTA { LeftA }
+| RIGHTA { RightA }
+| NONA { NonA }
+;
+
+levels:
+| level { [$1] }
+| level PIPE levels { $1 :: $3 }
+;
+
+level:
+| string_opt assoc_opt LBRACKET rules_opt RBRACKET
+ { { grule_label = $1; grule_assoc = $2; grule_prods = $4; } }
+;
+
+rules_opt:
+| { [] }
+| rules { $1 }
+;
+
+rules:
+| rule { [$1] }
+| rule PIPE rules { $1 :: $3 }
+;
+
+rule:
+| symbols_opt ARROW CODE
+ { { gprod_symbs = $1; gprod_body = $3; } }
+;
+
+symbols_opt:
+| { [] }
+| symbols { $1 }
+;
+
+symbols:
+| symbol { [$1] }
+| symbol SEMICOLON symbols { $1 :: $3 }
+;
+
+symbol:
+| IDENT EQUAL gram_tokens { (Some $1, $3) }
+| gram_tokens { (None, $1) }
+;
+
+gram_token:
+| qualid_or_ident { GSymbQualid ($1, None) }
+| qualid_or_ident LEVEL STRING { GSymbQualid ($1, Some $3) }
+| LPAREN gram_tokens RPAREN { GSymbParen $2 }
+| LBRACKET rules RBRACKET { GSymbProd $2 }
+| STRING { GSymbString $1 }
+;
+
+gram_tokens:
+| gram_token { [$1] }
+| gram_token gram_tokens { $1 :: $2 }
+;
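The parse_user_entry helper above maps the naming conventions of extension rules onto user_symbol values. A few expected results, valid where the helper is in scope (it is local to the generated parser):

let () =
  assert (parse_user_entry "ne_constr_list" "" = Ulist1 (Uentry "constr"));
  assert (parse_user_entry "constr_list_sep" "," = Ulist0sep (Uentry "constr", ","));
  assert (parse_user_entry "constr_opt" "" = Uopt (Uentry "constr"));
  assert (parse_user_entry "ident" "" = Uentry "ident")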
diff --git a/grammar/q_util.mlp b/grammar/q_util.mlp
index 6cdd2ec19..0b8d7fda7 100644
--- a/grammar/q_util.mlp
+++ b/grammar/q_util.mlp
@@ -75,7 +75,7 @@ let rec mlexpr_of_prod_entry_key f = function
(** Keep in sync with Pcoq! *)
assert (e = "tactic");
if l = 5 then <:expr< Extend.Aentry Pltac.binder_tactic >>
- else <:expr< Extend.Aentryl (Pltac.tactic_expr) $mlexpr_of_int l$ >>
+ else <:expr< Extend.Aentryl (Pltac.tactic_expr) $mlexpr_of_string (string_of_int l)$ >>
let rec type_of_user_symbol = function
| Ulist1 s | Ulist1sep (s, _) | Ulist0 s | Ulist0sep (s, _) ->
diff --git a/grammar/tacextend.mlp b/grammar/tacextend.mlp
index 525be6432..cea0bed59 100644
--- a/grammar/tacextend.mlp
+++ b/grammar/tacextend.mlp
@@ -15,11 +15,6 @@ open Argextend
let plugin_name = <:expr< __coq_plugin_name >>
-let mlexpr_of_ident id =
- (** Workaround for badly-designed generic arguments lacking a closure *)
- let id = "$" ^ id in
- <:expr< Names.Id.of_string_soft $str:id$ >>
-
let rec mlexpr_of_symbol = function
| Ulist1 s -> <:expr< Extend.TUlist1 $mlexpr_of_symbol s$ >>
| Ulist1sep (s,sep) -> <:expr< Extend.TUlist1sep $mlexpr_of_symbol s$ $str:sep$ >>
@@ -38,9 +33,9 @@ let rec mlexpr_of_clause = function
| [] -> <:expr< TyNil >>
| ExtTerminal s :: cl -> <:expr< TyIdent($str:s$, $mlexpr_of_clause cl$) >>
| ExtNonTerminal(g,None) :: cl ->
- <:expr< TyAnonArg(Loc.tag($mlexpr_of_symbol g$), $mlexpr_of_clause cl$) >>
+ <:expr< TyAnonArg($mlexpr_of_symbol g$, $mlexpr_of_clause cl$) >>
| ExtNonTerminal(g,Some id) :: cl ->
- <:expr< TyArg(Loc.tag($mlexpr_of_symbol g$, $mlexpr_of_ident id$), $mlexpr_of_clause cl$) >>
+ <:expr< TyArg($mlexpr_of_symbol g$, $mlexpr_of_string id$, $mlexpr_of_clause cl$) >>
let rec binders_of_clause e = function
| [] -> <:expr< fun ist -> $e$ >>
diff --git a/ide/idetop.ml b/ide/idetop.ml
index 7abbf239b..6fb004061 100644
--- a/ide/idetop.ml
+++ b/ide/idetop.ml
@@ -151,7 +151,7 @@ let hyp_next_tac sigma env decl =
("inversion clear "^id_s), ("inversion_clear "^id_s^".")
]
-let concl_next_tac sigma concl =
+let concl_next_tac =
let expand s = (s,s^".") in
List.map expand ([
"intro";
@@ -230,10 +230,9 @@ let hints () =
| [] -> None
| g :: _ ->
let env = Goal.V82.env sigma g in
- let hint_goal = concl_next_tac sigma g in
let get_hint_hyp env d accu = hyp_next_tac sigma env d :: accu in
let hint_hyps = List.rev (Environ.fold_named_context get_hint_hyp env ~init: []) in
- Some (hint_hyps, hint_goal)
+ Some (hint_hyps, concl_next_tac)
with Proof_global.NoCurrentProof -> None
diff --git a/kernel/cooking.ml b/kernel/cooking.ml
index 521a7d890..094609b96 100644
--- a/kernel/cooking.ml
+++ b/kernel/cooking.ml
@@ -204,7 +204,7 @@ let lift_univs cb subst auctx0 =
let auctx' = Univ.subst_univs_level_abstract_universe_context (Univ.make_instance_subst subst) auctx in
subst, (Polymorphic_const (AUContext.union auctx0 auctx'))
-let cook_constant ~hcons env { from = cb; info } =
+let cook_constant ~hcons { from = cb; info } =
let { Opaqueproof.modlist; abstract } = info in
let cache = RefTable.create 13 in
let abstract, usubst, abs_ctx = abstract in
diff --git a/kernel/cooking.mli b/kernel/cooking.mli
index 4c254d2ea..6ebe691b8 100644
--- a/kernel/cooking.mli
+++ b/kernel/cooking.mli
@@ -10,7 +10,6 @@
open Constr
open Declarations
-open Environ
(** {6 Cooking the constants. } *)
@@ -26,7 +25,7 @@ type result = {
cook_context : Constr.named_context option;
}
-val cook_constant : hcons:bool -> env -> recipe -> result
+val cook_constant : hcons:bool -> recipe -> result
val cook_constr : Opaqueproof.cooking_info -> constr -> constr
(** {6 Utility functions used in module [Discharge]. } *)
diff --git a/kernel/indtypes.ml b/kernel/indtypes.ml
index 61b71df31..5d45c2c1a 100644
--- a/kernel/indtypes.ml
+++ b/kernel/indtypes.ml
@@ -120,16 +120,6 @@ let mind_check_names mie =
(* Typing the arities and constructor types *)
-(* An inductive definition is a "unit" if it has only one constructor
- and that all arguments expected by this constructor are
- logical, this is the case for equality, conjunction of logical properties
-*)
-let is_unit constrsinfos =
- match constrsinfos with (* One info = One constructor *)
- | [level] -> is_type0m_univ level
- | [] -> (* type without constructors *) true
- | _ -> false
-
let infos_and_sort env t =
let rec aux env t max =
let t = whd_all env t in
@@ -174,10 +164,9 @@ let infer_constructor_packet env_ar_par params lc =
let lc'' = Array.map (fun j -> Term.it_mkProd_or_LetIn j.utj_val params) jlc in
(* compute the max of the sorts of the products of the constructors types *)
let levels = List.map (infos_and_sort env_ar_par) lc in
- let isunit = is_unit levels in
let min = if Array.length jlc > 1 then Universe.type0 else Universe.type0m in
let level = List.fold_left (fun max l -> Universe.sup max l) min levels in
- (lc'', (isunit, level))
+ (lc'', level)
(* If indices matter *)
let cumulate_arity_large_levels env sign =
@@ -354,7 +343,7 @@ let typecheck_inductive env mie =
(* Compute/check the sorts of the inductive types *)
let inds =
- Array.map (fun ((id,full_arity,sign,expltype,def_level,inf_level),cn,lc,(is_unit,clev)) ->
+ Array.map (fun ((id,full_arity,sign,expltype,def_level,inf_level),cn,lc,clev) ->
let infu =
(** Inferred level, with parameters and constructors. *)
match inf_level with
diff --git a/kernel/inductive.ml b/kernel/inductive.ml
index 584c1af03..88b00600e 100644
--- a/kernel/inductive.ml
+++ b/kernel/inductive.ml
@@ -785,7 +785,7 @@ let rec subterm_specif renv stack t =
| Lambda (x,a,b) ->
let () = assert (List.is_empty l) in
- let spec,stack' = extract_stack renv a stack in
+ let spec,stack' = extract_stack stack in
subterm_specif (push_var renv (x,a,spec)) stack' b
(* Metas and evars are considered OK *)
@@ -817,7 +817,7 @@ and stack_element_specif = function
|SClosure (h_renv,h) -> lazy_subterm_specif h_renv [] h
|SArg x -> x
-and extract_stack renv a = function
+and extract_stack = function
| [] -> Lazy.from_val Not_subterm , []
| h::t -> stack_element_specif h, t
@@ -848,7 +848,7 @@ let error_illegal_rec_call renv fx (arg_renv,arg) =
let error_partial_apply renv fx =
raise (FixGuardError (renv.env,NotEnoughArgumentsForFixCall fx))
-let filter_stack_domain env ci p stack =
+let filter_stack_domain env p stack =
let absctx, ar = dest_lam_assum env p in
(* Optimization: if the predicate is not dependent, no restriction is needed
and we avoid building the recargs tree. *)
@@ -933,7 +933,7 @@ let check_one_fix renv recpos trees def =
let case_spec = branches_specif renv
(lazy_subterm_specif renv [] c_0) ci in
let stack' = push_stack_closures renv l stack in
- let stack' = filter_stack_domain renv.env ci p stack' in
+ let stack' = filter_stack_domain renv.env p stack' in
Array.iteri (fun k br' ->
let stack_br = push_stack_args case_spec.(k) stack' in
check_rec_call renv stack_br br') lrest
@@ -976,7 +976,7 @@ let check_one_fix renv recpos trees def =
| Lambda (x,a,b) ->
let () = assert (List.is_empty l) in
check_rec_call renv [] a ;
- let spec, stack' = extract_stack renv a stack in
+ let spec, stack' = extract_stack stack in
check_rec_call (push_var renv (x,a,spec)) stack' b
| Prod (x,a,b) ->
diff --git a/kernel/modops.ml b/kernel/modops.ml
index 02bab581a..98a997311 100644
--- a/kernel/modops.ml
+++ b/kernel/modops.ml
@@ -265,7 +265,7 @@ let subst_structure subst = subst_structure subst do_delta_codom
(* spiwack: here comes the function which takes care of importing
the retroknowledge declared in the library *)
(* lclrk : retroknowledge_action list, rkaction : retroknowledge action *)
-let add_retroknowledge mp =
+let add_retroknowledge =
let perform rkaction env = match rkaction with
| Retroknowledge.RKRegister (f, e) when (isConst e || isInd e) ->
Environ.register env f e
@@ -309,7 +309,7 @@ and add_module mb linkinfo env =
let env = Environ.shallow_add_module mb env in
match mb.mod_type with
|NoFunctor struc ->
- add_retroknowledge mp mb.mod_retroknowledge
+ add_retroknowledge mb.mod_retroknowledge
(add_structure mp struc mb.mod_delta linkinfo env)
|MoreFunctor _ -> env
diff --git a/kernel/nativecode.ml b/kernel/nativecode.ml
index 1748e98a4..39f7de942 100644
--- a/kernel/nativecode.ml
+++ b/kernel/nativecode.ml
@@ -1649,15 +1649,15 @@ let pp_mllam fmt l =
and pp_letrec fmt defs =
let len = Array.length defs in
- let pp_one_rec i (fn, argsn, body) =
+ let pp_one_rec (fn, argsn, body) =
Format.fprintf fmt "%a%a =@\n %a"
pp_lname fn
pp_ldecls argsn pp_mllam body in
Format.fprintf fmt "@[let rec ";
- pp_one_rec 0 defs.(0);
+ pp_one_rec defs.(0);
for i = 1 to len - 1 do
Format.fprintf fmt "@\nand ";
- pp_one_rec i defs.(i)
+ pp_one_rec defs.(i)
done;
and pp_blam fmt l =
@@ -1941,7 +1941,7 @@ let is_code_loaded ~interactive name =
let param_name = Name (Id.of_string "params")
let arg_name = Name (Id.of_string "arg")
-let compile_mind prefix ~interactive mb mind stack =
+let compile_mind mb mind stack =
let u = Declareops.inductive_polymorphic_context mb in
(** Generate data for every block *)
let f i stack ob =
@@ -2020,7 +2020,7 @@ let compile_mind_deps env prefix ~interactive
then init
else
let comp_stack =
- compile_mind prefix ~interactive mib mind comp_stack
+ compile_mind mib mind comp_stack
in
let name =
if interactive then LinkedInteractive prefix
@@ -2092,9 +2092,9 @@ let compile_constant_field env prefix con acc cb =
in
gl@acc
-let compile_mind_field prefix mp l acc mb =
+let compile_mind_field mp l acc mb =
let mind = MutInd.make2 mp l in
- compile_mind prefix ~interactive:false mb mind acc
+ compile_mind mb mind acc
let mk_open s = Gopen s
diff --git a/kernel/nativecode.mli b/kernel/nativecode.mli
index 684983a87..96efa7faa 100644
--- a/kernel/nativecode.mli
+++ b/kernel/nativecode.mli
@@ -67,7 +67,7 @@ val register_native_file : string -> unit
val compile_constant_field : env -> string -> Constant.t ->
global list -> constant_body -> global list
-val compile_mind_field : string -> ModPath.t -> Label.t ->
+val compile_mind_field : ModPath.t -> Label.t ->
global list -> mutual_inductive_body -> global list
val mk_conv_code : env -> evars -> string -> constr -> constr -> linkable_code
diff --git a/kernel/nativelibrary.ml b/kernel/nativelibrary.ml
index 8bff43632..edce9367f 100644
--- a/kernel/nativelibrary.ml
+++ b/kernel/nativelibrary.ml
@@ -37,7 +37,7 @@ and translate_field prefix mp env acc (l,x) =
let id = mb.mind_packets.(0).mind_typename in
let msg = Printf.sprintf "Compiling inductive %s..." (Id.to_string id) in
Feedback.msg_debug (Pp.str msg));
- compile_mind_field prefix mp l acc mb
+ compile_mind_field mp l acc mb
| SFBmodule md ->
let mp = md.mod_mp in
(if !Flags.debug then
diff --git a/kernel/subtyping.ml b/kernel/subtyping.ml
index 1e58f5c24..74042f9e0 100644
--- a/kernel/subtyping.ml
+++ b/kernel/subtyping.ml
@@ -224,7 +224,7 @@ let check_inductive cst env mp1 l info1 mp2 mib2 spec2 subst1 subst2 reso1 reso2
cst
-let check_constant cst env mp1 l info1 cb2 spec2 subst1 subst2 =
+let check_constant cst env l info1 cb2 spec2 subst1 subst2 =
let error why = error_signature_mismatch l spec2 why in
let check_conv cst poly f = check_conv_error error cst poly f in
let check_type poly cst env t1 t2 =
@@ -292,7 +292,7 @@ and check_signatures cst env mp1 sig1 mp2 sig2 subst1 subst2 reso1 reso2=
let check_one_body cst (l,spec2) =
match spec2 with
| SFBconst cb2 ->
- check_constant cst env mp1 l (get_obj mp1 map1 l)
+ check_constant cst env l (get_obj mp1 map1 l)
cb2 spec2 subst1 subst2
| SFBmind mib2 ->
check_inductive cst env mp1 l (get_obj mp1 map1 l)
diff --git a/kernel/term_typing.ml b/kernel/term_typing.ml
index bad449731..1f7ee145a 100644
--- a/kernel/term_typing.ml
+++ b/kernel/term_typing.ml
@@ -378,7 +378,7 @@ let build_constant_declaration kn env result =
str "Proof using " ++ declared_vars ++ fnl () ++
str "to" ++ fnl () ++
str "Proof using " ++ inferred_vars) in
- let sort evn l =
+ let sort l =
List.filter (fun decl ->
let id = NamedDecl.get_id decl in
List.exists (NamedDecl.get_id %> Names.Id.equal id) l)
@@ -411,7 +411,7 @@ let build_constant_declaration kn env result =
[], def (* Empty section context: no need to check *)
| Some declared ->
(* We use the declared set and chain a check of correctness *)
- sort env declared,
+ sort declared,
match def with
| Undef _ as x -> x (* nothing to check *)
| Def cs as x ->
@@ -554,7 +554,7 @@ let translate_recipe env kn r =
be useless. It is detected by the dirpath of the constant being empty. *)
let (_, dir, _) = Constant.repr3 kn in
let hcons = DirPath.is_empty dir in
- build_constant_declaration kn env (Cooking.cook_constant ~hcons env r)
+ build_constant_declaration kn env (Cooking.cook_constant ~hcons r)
let translate_local_def env id centry =
let open Cooking in
diff --git a/lib/cWarnings.ml b/lib/cWarnings.ml
index fda25a0a6..0cf989e49 100644
--- a/lib/cWarnings.ml
+++ b/lib/cWarnings.ml
@@ -120,7 +120,7 @@ let uniquize_flags_rev flags =
(** [normalize_flags] removes redundant warnings. Unknown warnings are kept
because they may be declared in a plugin that will be linked later. *)
-let normalize_flags ~silent warnings =
+let normalize_flags warnings =
let warnings = cut_before_all_rev warnings in
uniquize_flags_rev warnings
@@ -130,7 +130,7 @@ let normalize_flags_string s =
if is_none_keyword s then s
else
let flags = flags_of_string s in
- let flags = normalize_flags ~silent:false flags in
+ let flags = normalize_flags flags in
string_of_flags flags
let parse_warnings items =
@@ -146,7 +146,7 @@ let parse_flags s =
else begin
Flags.make_warn true;
let flags = flags_of_string s in
- let flags = normalize_flags ~silent:true flags in
+ let flags = normalize_flags flags in
parse_warnings flags;
string_of_flags flags
end
diff --git a/lib/envars.ml b/lib/envars.ml
index be82bfe9b..3ee0c7106 100644
--- a/lib/envars.ml
+++ b/lib/envars.ml
@@ -110,6 +110,7 @@ let check_file_else ~dir ~file oth =
if Sys.file_exists (path / file) then path else oth ()
let guess_coqlib fail =
+ getenv_else "COQLIB" (fun () ->
let prelude = "theories/Init/Prelude.vo" in
check_file_else ~dir:Coq_config.coqlibsuffix ~file:prelude
(fun () ->
@@ -117,6 +118,7 @@ let guess_coqlib fail =
then Coq_config.coqlib
else
fail "cannot guess a path for Coq libraries; please use -coqlib option")
+ )
(** coqlib is now computed once during coqtop initialization *)
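With this change an explicit $COQLIB takes precedence over both the relative `coqlibsuffix` and the configured `Coq_config.coqlib`. A self-contained sketch of the lookup order; `getenv_else` mirrors the helper used in this file and the paths are placeholders:

let getenv_else var fallback =
  try Sys.getenv var with Not_found -> fallback ()

(* Placeholder default: the real code checks coqlibsuffix first, then
   Coq_config.coqlib, and otherwise fails with a hint about -coqlib. *)
let guess_coqlib () =
  getenv_else "COQLIB" (fun () ->
    let default = "/usr/local/lib/coq" in
    if Sys.file_exists (Filename.concat default "theories/Init/Prelude.vo")
    then default
    else failwith "cannot guess a path for Coq libraries; please use -coqlib option")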
diff --git a/lib/util.ml b/lib/util.ml
index 7d7d380b2..38d73d345 100644
--- a/lib/util.ml
+++ b/lib/util.ml
@@ -38,8 +38,8 @@ let is_blank = function
module Empty =
struct
- type t
- let abort (x : t) = assert false
+ type t = { abort : 'a. 'a }
+ let abort (x : t) = x.abort
end
(* Strings *)
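The new definition keeps `Empty.t` uninhabited while letting `abort` be a total projection instead of an `assert false`: building a value of the record would require a single field usable at every type. A self-contained illustration:

module Empty = struct
  type t = { abort : 'a. 'a }   (* no closed value of this type exists *)
  let abort (x : t) = x.abort
end

(* Ex falso: an Empty.t can be eliminated at any result type. *)
let elim_empty (x : Empty.t) : int = Empty.abort x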
diff --git a/library/libobject.ml b/library/libobject.ml
index c5cd01525..79a3fed1b 100644
--- a/library/libobject.ml
+++ b/library/libobject.ml
@@ -98,7 +98,7 @@ let declare_object_full odecl = declare_object_full odecl
(* this function describes how the cache, load, open, and export functions
are triggered. *)
-let apply_dyn_fun deflt f lobj =
+let apply_dyn_fun f lobj =
let tag = object_tag lobj in
let dodecl =
try Hashtbl.find cache_tab tag
@@ -107,24 +107,24 @@ let apply_dyn_fun deflt f lobj =
f dodecl
let cache_object ((_,lobj) as node) =
- apply_dyn_fun () (fun d -> d.dyn_cache_function node) lobj
+ apply_dyn_fun (fun d -> d.dyn_cache_function node) lobj
let load_object i ((_,lobj) as node) =
- apply_dyn_fun () (fun d -> d.dyn_load_function i node) lobj
+ apply_dyn_fun (fun d -> d.dyn_load_function i node) lobj
let open_object i ((_,lobj) as node) =
- apply_dyn_fun () (fun d -> d.dyn_open_function i node) lobj
+ apply_dyn_fun (fun d -> d.dyn_open_function i node) lobj
let subst_object ((_,lobj) as node) =
- apply_dyn_fun lobj (fun d -> d.dyn_subst_function node) lobj
+ apply_dyn_fun (fun d -> d.dyn_subst_function node) lobj
let classify_object lobj =
- apply_dyn_fun Dispose (fun d -> d.dyn_classify_function lobj) lobj
+ apply_dyn_fun (fun d -> d.dyn_classify_function lobj) lobj
let discharge_object ((_,lobj) as node) =
- apply_dyn_fun None (fun d -> d.dyn_discharge_function node) lobj
+ apply_dyn_fun (fun d -> d.dyn_discharge_function node) lobj
let rebuild_object lobj =
- apply_dyn_fun lobj (fun d -> d.dyn_rebuild_function lobj) lobj
+ apply_dyn_fun (fun d -> d.dyn_rebuild_function lobj) lobj
let dump = Dyn.dump
diff --git a/library/library.ml b/library/library.ml
index 400f3dcf1..0ff82d7cc 100644
--- a/library/library.ml
+++ b/library/library.ml
@@ -167,7 +167,7 @@ let opened_libraries () = !libraries_imports_list
let register_loaded_library m =
let libname = m.libsum_name in
- let link m =
+ let link () =
let dirname = Filename.dirname (library_full_filename libname) in
let prefix = Nativecode.mod_uid_of_dirpath libname ^ "." in
let f = prefix ^ "cmo" in
@@ -176,7 +176,7 @@ let register_loaded_library m =
Nativelib.link_library ~prefix ~dirname ~basename:f
in
let rec aux = function
- | [] -> link m; [libname]
+ | [] -> link (); [libname]
| m'::_ as l when DirPath.equal m' libname -> l
| m'::l' -> m' :: aux l' in
libraries_loaded_list := aux !libraries_loaded_list;
@@ -288,16 +288,15 @@ let locate_absolute_library dir =
try
let name = Id.to_string base ^ ext in
let _, file = System.where_in_path ~warn:false loadpath name in
- [file]
- with Not_found -> [] in
- match find ".vo" @ find ".vio" with
- | [] -> raise LibNotFound
- | [file] -> file
- | [vo;vi] when Unix.((stat vo).st_mtime < (stat vi).st_mtime) ->
+ Some file
+ with Not_found -> None in
+ match find ".vo", find ".vio" with
+ | None, None -> raise LibNotFound
+ | Some file, None | None, Some file -> file
+ | Some vo, Some vi when Unix.((stat vo).st_mtime < (stat vi).st_mtime) ->
warn_several_object_files (vi, vo);
vi
- | [vo;vi] -> vo
- | _ -> assert false
+ | Some vo, Some _ -> vo
let locate_qualified_library ?root ?(warn = true) qid =
(* Search library in loadpath *)
@@ -309,18 +308,17 @@ let locate_qualified_library ?root ?(warn = true) qid =
let name = Id.to_string base ^ ext in
let lpath, file =
System.where_in_path ~warn (List.map fst loadpath) name in
- [lpath, file]
- with Not_found -> [] in
+ Some (lpath, file)
+ with Not_found -> None in
let lpath, file =
- match find ".vo" @ find ".vio" with
- | [] -> raise LibNotFound
- | [lpath, file] -> lpath, file
- | [lpath_vo, vo; lpath_vi, vi]
+ match find ".vo", find ".vio" with
+ | None, None -> raise LibNotFound
+ | Some res, None | None, Some res -> res
+ | Some (_, vo), Some (_, vi as resvi)
when Unix.((stat vo).st_mtime < (stat vi).st_mtime) ->
warn_several_object_files (vi, vo);
- lpath_vi, vi
- | [lpath_vo, vo; _ ] -> lpath_vo, vo
- | _ -> assert false
+ resvi
+ | Some resvo, Some _ -> resvo
in
let dir = add_dirpath_suffix (String.List.assoc lpath loadpath) base in
(* Look if loaded *)
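The `.vo`/`.vio` lookup now returns one option per extension and combines them directly, so the list-based match and its `assert false` fallback disappear. A standalone sketch of the selection logic, with a placeholder warning function (requires the unix library):

(* Stand-in for the warning emitted by library.ml. *)
let warn_several_object_files (vi, vo) =
  Printf.eprintf "Warning: %s and %s both exist; using the newer %s\n%!" vi vo vi

(* Prefer the .vio only when it is strictly newer than the .vo. *)
let pick_object vo vio =
  match vo, vio with
  | None, None -> None
  | Some f, None | None, Some f -> Some f
  | Some vo, Some vi when Unix.((stat vo).st_mtime < (stat vi).st_mtime) ->
    warn_several_object_files (vi, vo); Some vi
  | Some vo, Some _ -> Some vo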
diff --git a/parsing/extend.ml b/parsing/extend.ml
index 6805a96ed..f57e32c88 100644
--- a/parsing/extend.ml
+++ b/parsing/extend.ml
@@ -101,7 +101,7 @@ type ('self, 'a) symbol =
| Aself : ('self, 'self) symbol
| Anext : ('self, 'self) symbol
| Aentry : 'a entry -> ('self, 'a) symbol
-| Aentryl : 'a entry * int -> ('self, 'a) symbol
+| Aentryl : 'a entry * string -> ('self, 'a) symbol
| Arules : 'a rules list -> ('self, 'a) symbol
and ('self, _, 'r) rule =
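`Aentryl` now identifies the target level by its string name rather than an int, matching the printers in grammar/coqpp_main.ml and the quotation change in grammar/q_util.mlp earlier in this diff. A hedged sketch of an updated call site; the wrapper is hypothetical and `Pltac.tactic_expr` is the entry already used in q_util.mlp:

(* Before: Extend.Aentryl (Pltac.tactic_expr, lvl)   with lvl : int
   Now the level is passed as its textual name: *)
let tactic_at_level lvl = Extend.Aentryl (Pltac.tactic_expr, string_of_int lvl)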
diff --git a/parsing/g_constr.ml4 b/parsing/g_constr.mlg
index 1fa26b455..b2913d5d4 100644
--- a/parsing/g_constr.ml4
+++ b/parsing/g_constr.mlg
@@ -8,6 +8,8 @@
(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
+{
+
open Names
open Constr
open Libnames
@@ -126,396 +128,399 @@ let name_colon =
let aliasvar = function { CAst.v = CPatAlias (_, na) } -> Some na | _ -> None
-GEXTEND Gram
+}
+
+GRAMMAR EXTEND Gram
GLOBAL: binder_constr lconstr constr operconstr universe_level sort sort_family
global constr_pattern lconstr_pattern Constr.ident
closed_binder open_binders binder binders binders_fixannot
record_declaration typeclass_constraint pattern appl_arg;
Constr.ident:
- [ [ id = Prim.ident -> id ] ]
+ [ [ id = Prim.ident -> { id } ] ]
;
Prim.name:
- [ [ "_" -> CAst.make ~loc:!@loc Anonymous ] ]
+ [ [ "_" -> { CAst.make ~loc Anonymous } ] ]
;
global:
- [ [ r = Prim.reference -> r ] ]
+ [ [ r = Prim.reference -> { r } ] ]
;
constr_pattern:
- [ [ c = constr -> c ] ]
+ [ [ c = constr -> { c } ] ]
;
lconstr_pattern:
- [ [ c = lconstr -> c ] ]
+ [ [ c = lconstr -> { c } ] ]
;
sort:
- [ [ "Set" -> GSet
- | "Prop" -> GProp
- | "Type" -> GType []
- | "Type"; "@{"; u = universe; "}" -> GType u
+ [ [ "Set" -> { GSet }
+ | "Prop" -> { GProp }
+ | "Type" -> { GType [] }
+ | "Type"; "@{"; u = universe; "}" -> { GType u }
] ]
;
sort_family:
- [ [ "Set" -> Sorts.InSet
- | "Prop" -> Sorts.InProp
- | "Type" -> Sorts.InType
+ [ [ "Set" -> { Sorts.InSet }
+ | "Prop" -> { Sorts.InProp }
+ | "Type" -> { Sorts.InType }
] ]
;
universe_expr:
- [ [ id = global; "+"; n = natural -> Some (id,n)
- | id = global -> Some (id,0)
- | "_" -> None
+ [ [ id = global; "+"; n = natural -> { Some (id,n) }
+ | id = global -> { Some (id,0) }
+ | "_" -> { None }
] ]
;
universe:
- [ [ IDENT "max"; "("; ids = LIST1 universe_expr SEP ","; ")" -> ids
- | u = universe_expr -> [u]
+ [ [ IDENT "max"; "("; ids = LIST1 universe_expr SEP ","; ")" -> { ids }
+ | u = universe_expr -> { [u] }
] ]
;
lconstr:
- [ [ c = operconstr LEVEL "200" -> c ] ]
+ [ [ c = operconstr LEVEL "200" -> { c } ] ]
;
constr:
- [ [ c = operconstr LEVEL "8" -> c
- | "@"; f=global; i = instance -> CAst.make ~loc:!@loc @@ CAppExpl((None,f,i),[]) ] ]
+ [ [ c = operconstr LEVEL "8" -> { c }
+ | "@"; f=global; i = instance -> { CAst.make ~loc @@ CAppExpl((None,f,i),[]) } ] ]
;
operconstr:
[ "200" RIGHTA
- [ c = binder_constr -> c ]
+ [ c = binder_constr -> { c } ]
| "100" RIGHTA
[ c1 = operconstr; "<:"; c2 = binder_constr ->
- CAst.make ~loc:(!@loc) @@ CCast(c1, CastVM c2)
+ { CAst.make ~loc @@ CCast(c1, CastVM c2) }
| c1 = operconstr; "<:"; c2 = SELF ->
- CAst.make ~loc:(!@loc) @@ CCast(c1, CastVM c2)
+ { CAst.make ~loc @@ CCast(c1, CastVM c2) }
| c1 = operconstr; "<<:"; c2 = binder_constr ->
- CAst.make ~loc:(!@loc) @@ CCast(c1, CastNative c2)
+ { CAst.make ~loc @@ CCast(c1, CastNative c2) }
| c1 = operconstr; "<<:"; c2 = SELF ->
- CAst.make ~loc:(!@loc) @@ CCast(c1, CastNative c2)
+ { CAst.make ~loc @@ CCast(c1, CastNative c2) }
| c1 = operconstr; ":";c2 = binder_constr ->
- CAst.make ~loc:(!@loc) @@ CCast(c1, CastConv c2)
+ { CAst.make ~loc @@ CCast(c1, CastConv c2) }
| c1 = operconstr; ":"; c2 = SELF ->
- CAst.make ~loc:(!@loc) @@ CCast(c1, CastConv c2)
+ { CAst.make ~loc @@ CCast(c1, CastConv c2) }
| c1 = operconstr; ":>" ->
- CAst.make ~loc:(!@loc) @@ CCast(c1, CastCoerce) ]
+ { CAst.make ~loc @@ CCast(c1, CastCoerce) } ]
| "99" RIGHTA [ ]
| "90" RIGHTA [ ]
| "10" LEFTA
- [ f=operconstr; args=LIST1 appl_arg -> CAst.make ~loc:(!@loc) @@ CApp((None,f),args)
- | "@"; f=global; i = instance; args=LIST0 NEXT -> CAst.make ~loc:!@loc @@ CAppExpl((None,f,i),args)
+ [ f=operconstr; args=LIST1 appl_arg -> { CAst.make ~loc @@ CApp((None,f),args) }
+ | "@"; f=global; i = instance; args=LIST0 NEXT -> { CAst.make ~loc @@ CAppExpl((None,f,i),args) }
| "@"; lid = pattern_identref; args=LIST1 identref ->
- let { CAst.loc = locid; v = id } = lid in
+ { let { CAst.loc = locid; v = id } = lid in
let args = List.map (fun x -> CAst.make @@ CRef (qualid_of_ident ?loc:x.CAst.loc x.CAst.v, None), None) args in
- CAst.make ~loc:(!@loc) @@ CApp((None, CAst.make ?loc:locid @@ CPatVar id),args) ]
+ CAst.make ~loc @@ CApp((None, CAst.make ?loc:locid @@ CPatVar id),args) } ]
| "9"
[ ".."; c = operconstr LEVEL "0"; ".." ->
- CAst.make ~loc:!@loc @@ CAppExpl ((None, (qualid_of_ident ~loc:!@loc ldots_var), None),[c]) ]
+ { CAst.make ~loc @@ CAppExpl ((None, (qualid_of_ident ~loc ldots_var), None),[c]) } ]
| "8" [ ]
| "1" LEFTA
[ c=operconstr; ".("; f=global; args=LIST0 appl_arg; ")" ->
- CAst.make ~loc:(!@loc) @@ CApp((Some (List.length args+1), CAst.make @@ CRef (f,None)),args@[c,None])
+ { CAst.make ~loc @@ CApp((Some (List.length args+1), CAst.make @@ CRef (f,None)),args@[c,None]) }
| c=operconstr; ".("; "@"; f=global;
args=LIST0 (operconstr LEVEL "9"); ")" ->
- CAst.make ~loc:(!@loc) @@ CAppExpl((Some (List.length args+1),f,None),args@[c])
- | c=operconstr; "%"; key=IDENT -> CAst.make ~loc:(!@loc) @@ CDelimiters (key,c) ]
+ { CAst.make ~loc @@ CAppExpl((Some (List.length args+1),f,None),args@[c]) }
+ | c=operconstr; "%"; key=IDENT -> { CAst.make ~loc @@ CDelimiters (key,c) } ]
| "0"
- [ c=atomic_constr -> c
- | c=match_constr -> c
+ [ c=atomic_constr -> { c }
+ | c=match_constr -> { c }
| "("; c = operconstr LEVEL "200"; ")" ->
- (match c.CAst.v with
+ { (match c.CAst.v with
| CPrim (Numeral (n,true)) ->
- CAst.make ~loc:(!@loc) @@ CNotation("( _ )",([c],[],[],[]))
- | _ -> c)
- | "{|"; c = record_declaration; "|}" -> c
+ CAst.make ~loc @@ CNotation("( _ )",([c],[],[],[]))
+ | _ -> c) }
+ | "{|"; c = record_declaration; "|}" -> { c }
| "{"; c = binder_constr ; "}" ->
- CAst.make ~loc:(!@loc) @@ CNotation(("{ _ }"),([c],[],[],[]))
+ { CAst.make ~loc @@ CNotation(("{ _ }"),([c],[],[],[])) }
| "`{"; c = operconstr LEVEL "200"; "}" ->
- CAst.make ~loc:(!@loc) @@ CGeneralization (Implicit, None, c)
+ { CAst.make ~loc @@ CGeneralization (Implicit, None, c) }
| "`("; c = operconstr LEVEL "200"; ")" ->
- CAst.make ~loc:(!@loc) @@ CGeneralization (Explicit, None, c)
+ { CAst.make ~loc @@ CGeneralization (Explicit, None, c) }
] ]
;
record_declaration:
- [ [ fs = record_fields -> CAst.make ~loc:(!@loc) @@ CRecord fs ] ]
+ [ [ fs = record_fields -> { CAst.make ~loc @@ CRecord fs } ] ]
;
record_fields:
- [ [ f = record_field_declaration; ";"; fs = record_fields -> f :: fs
- | f = record_field_declaration -> [f]
- | -> []
+ [ [ f = record_field_declaration; ";"; fs = record_fields -> { f :: fs }
+ | f = record_field_declaration -> { [f] }
+ | -> { [] }
] ]
;
record_field_declaration:
[ [ id = global; bl = binders; ":="; c = lconstr ->
- (id, mkCLambdaN ~loc:!@loc bl c) ] ]
+ { (id, mkCLambdaN ~loc bl c) } ] ]
;
binder_constr:
[ [ "forall"; bl = open_binders; ","; c = operconstr LEVEL "200" ->
- mkCProdN ~loc:!@loc bl c
+ { mkCProdN ~loc bl c }
| "fun"; bl = open_binders; "=>"; c = operconstr LEVEL "200" ->
- mkCLambdaN ~loc:!@loc bl c
+ { mkCLambdaN ~loc bl c }
| "let"; id=name; bl = binders; ty = type_cstr; ":=";
c1 = operconstr LEVEL "200"; "in"; c2 = operconstr LEVEL "200" ->
- let ty,c1 = match ty, c1 with
+ { let ty,c1 = match ty, c1 with
| (_,None), { CAst.v = CCast(c, CastConv t) } -> (Loc.tag ?loc:(constr_loc t) @@ Some t), c (* Tolerance, see G_vernac.def_body *)
| _, _ -> ty, c1 in
- CAst.make ~loc:!@loc @@ CLetIn(id,mkCLambdaN ?loc:(constr_loc c1) bl c1,
- Option.map (mkCProdN ?loc:(fst ty) bl) (snd ty), c2)
+ CAst.make ~loc @@ CLetIn(id,mkCLambdaN ?loc:(constr_loc c1) bl c1,
+ Option.map (mkCProdN ?loc:(fst ty) bl) (snd ty), c2) }
| "let"; fx = single_fix; "in"; c = operconstr LEVEL "200" ->
- let fixp = mk_single_fix fx in
+ { let fixp = mk_single_fix fx in
let { CAst.loc = li; v = id } = match fixp.CAst.v with
CFix(id,_) -> id
| CCoFix(id,_) -> id
| _ -> assert false in
- CAst.make ~loc:!@loc @@ CLetIn( CAst.make ?loc:li @@ Name id,fixp,None,c)
- | "let"; lb = ["("; l=LIST0 name SEP ","; ")" -> l | "()" -> []];
+ CAst.make ~loc @@ CLetIn( CAst.make ?loc:li @@ Name id,fixp,None,c) }
+ | "let"; lb = ["("; l=LIST0 name SEP ","; ")" -> { l } | "()" -> { [] } ];
po = return_type;
":="; c1 = operconstr LEVEL "200"; "in";
c2 = operconstr LEVEL "200" ->
- CAst.make ~loc:!@loc @@ CLetTuple (lb,po,c1,c2)
+ { CAst.make ~loc @@ CLetTuple (lb,po,c1,c2) }
| "let"; "'"; p=pattern; ":="; c1 = operconstr LEVEL "200";
"in"; c2 = operconstr LEVEL "200" ->
- CAst.make ~loc:!@loc @@
- CCases (LetPatternStyle, None, [c1, None, None], [CAst.make ~loc:!@loc ([[p]], c2)])
+ { CAst.make ~loc @@
+ CCases (LetPatternStyle, None, [c1, None, None], [CAst.make ~loc ([[p]], c2)]) }
| "let"; "'"; p=pattern; ":="; c1 = operconstr LEVEL "200";
rt = case_type; "in"; c2 = operconstr LEVEL "200" ->
- CAst.make ~loc:!@loc @@
- CCases (LetPatternStyle, Some rt, [c1, aliasvar p, None], [CAst.make ~loc:!@loc ([[p]], c2)])
+ { CAst.make ~loc @@
+ CCases (LetPatternStyle, Some rt, [c1, aliasvar p, None], [CAst.make ~loc ([[p]], c2)]) }
| "let"; "'"; p=pattern; "in"; t = pattern LEVEL "200";
":="; c1 = operconstr LEVEL "200"; rt = case_type;
"in"; c2 = operconstr LEVEL "200" ->
- CAst.make ~loc:!@loc @@
- CCases (LetPatternStyle, Some rt, [c1, aliasvar p, Some t], [CAst.make ~loc:!@loc ([[p]], c2)])
+ { CAst.make ~loc @@
+ CCases (LetPatternStyle, Some rt, [c1, aliasvar p, Some t], [CAst.make ~loc ([[p]], c2)]) }
| "if"; c=operconstr LEVEL "200"; po = return_type;
"then"; b1=operconstr LEVEL "200";
"else"; b2=operconstr LEVEL "200" ->
- CAst.make ~loc:(!@loc) @@ CIf (c, po, b1, b2)
- | c=fix_constr -> c ] ]
+ { CAst.make ~loc @@ CIf (c, po, b1, b2) }
+ | c=fix_constr -> { c } ] ]
;
appl_arg:
[ [ id = lpar_id_coloneq; c=lconstr; ")" ->
- (c,Some (CAst.make ~loc:!@loc @@ ExplByName id))
- | c=operconstr LEVEL "9" -> (c,None) ] ]
+ { (c,Some (CAst.make ~loc @@ ExplByName id)) }
+ | c=operconstr LEVEL "9" -> { (c,None) } ] ]
;
atomic_constr:
- [ [ g=global; i=instance -> CAst.make ~loc:!@loc @@ CRef (g,i)
- | s=sort -> CAst.make ~loc:!@loc @@ CSort s
- | n=INT -> CAst.make ~loc:!@loc @@ CPrim (Numeral (n,true))
- | s=string -> CAst.make ~loc:!@loc @@ CPrim (String s)
- | "_" -> CAst.make ~loc:!@loc @@ CHole (None, IntroAnonymous, None)
- | "?"; "["; id=ident; "]" -> CAst.make ~loc:!@loc @@ CHole (None, IntroIdentifier id, None)
- | "?"; "["; id=pattern_ident; "]" -> CAst.make ~loc:!@loc @@ CHole (None, IntroFresh id, None)
- | id=pattern_ident; inst = evar_instance -> CAst.make ~loc:!@loc @@ CEvar(id,inst) ] ]
+ [ [ g=global; i=instance -> { CAst.make ~loc @@ CRef (g,i) }
+ | s=sort -> { CAst.make ~loc @@ CSort s }
+ | n=INT -> { CAst.make ~loc @@ CPrim (Numeral (n,true)) }
+ | s=string -> { CAst.make ~loc @@ CPrim (String s) }
+ | "_" -> { CAst.make ~loc @@ CHole (None, IntroAnonymous, None) }
+ | "?"; "["; id=ident; "]" -> { CAst.make ~loc @@ CHole (None, IntroIdentifier id, None) }
+ | "?"; "["; id=pattern_ident; "]" -> { CAst.make ~loc @@ CHole (None, IntroFresh id, None) }
+ | id=pattern_ident; inst = evar_instance -> { CAst.make ~loc @@ CEvar(id,inst) } ] ]
;
inst:
- [ [ id = ident; ":="; c = lconstr -> (id,c) ] ]
+ [ [ id = ident; ":="; c = lconstr -> { (id,c) } ] ]
;
evar_instance:
- [ [ "@{"; l = LIST1 inst SEP ";"; "}" -> l
- | -> [] ] ]
+ [ [ "@{"; l = LIST1 inst SEP ";"; "}" -> { l }
+ | -> { [] } ] ]
;
instance:
- [ [ "@{"; l = LIST0 universe_level; "}" -> Some l
- | -> None ] ]
+ [ [ "@{"; l = LIST0 universe_level; "}" -> { Some l }
+ | -> { None } ] ]
;
universe_level:
- [ [ "Set" -> GSet
- | "Prop" -> GProp
- | "Type" -> GType UUnknown
- | "_" -> GType UAnonymous
- | id = global -> GType (UNamed id)
+ [ [ "Set" -> { GSet }
+ | "Prop" -> { GProp }
+ | "Type" -> { GType UUnknown }
+ | "_" -> { GType UAnonymous }
+ | id = global -> { GType (UNamed id) }
] ]
;
fix_constr:
- [ [ fx1=single_fix -> mk_single_fix fx1
- | (_,kw,dcl1)=single_fix; "with"; dcls=LIST1 fix_decl SEP "with";
+ [ [ fx1=single_fix -> { mk_single_fix fx1 }
+ | f=single_fix; "with"; dcls=LIST1 fix_decl SEP "with";
"for"; id=identref ->
- mk_fix(!@loc,kw,id,dcl1::dcls)
+ { let (_,kw,dcl1) = f in
+ mk_fix(loc,kw,id,dcl1::dcls) }
] ]
;
single_fix:
- [ [ kw=fix_kw; dcl=fix_decl -> (!@loc,kw,dcl) ] ]
+ [ [ kw=fix_kw; dcl=fix_decl -> { (loc,kw,dcl) } ] ]
;
fix_kw:
- [ [ "fix" -> true
- | "cofix" -> false ] ]
+ [ [ "fix" -> { true }
+ | "cofix" -> { false } ] ]
;
fix_decl:
[ [ id=identref; bl=binders_fixannot; ty=type_cstr; ":=";
c=operconstr LEVEL "200" ->
- (id,fst bl,snd bl,c,ty) ] ]
+ { (id,fst bl,snd bl,c,ty) } ] ]
;
match_constr:
[ [ "match"; ci=LIST1 case_item SEP ","; ty=OPT case_type; "with";
- br=branches; "end" -> CAst.make ~loc:!@loc @@ CCases(RegularStyle,ty,ci,br) ] ]
+ br=branches; "end" -> { CAst.make ~loc @@ CCases(RegularStyle,ty,ci,br) } ] ]
;
case_item:
[ [ c=operconstr LEVEL "100";
- ona = OPT ["as"; id=name -> id];
- ty = OPT ["in"; t=pattern -> t] ->
- (c,ona,ty) ] ]
+ ona = OPT ["as"; id=name -> { id } ];
+ ty = OPT ["in"; t=pattern -> { t } ] ->
+ { (c,ona,ty) } ] ]
;
case_type:
- [ [ "return"; ty = operconstr LEVEL "100" -> ty ] ]
+ [ [ "return"; ty = operconstr LEVEL "100" -> { ty } ] ]
;
return_type:
- [ [ a = OPT [ na = OPT["as"; na=name -> na];
- ty = case_type -> (na,ty) ] ->
- match a with
+ [ [ a = OPT [ na = OPT["as"; na=name -> { na } ];
+ ty = case_type -> { (na,ty) } ] ->
+ { match a with
| None -> None, None
- | Some (na,t) -> (na, Some t)
+ | Some (na,t) -> (na, Some t) }
] ]
;
branches:
- [ [ OPT"|"; br=LIST0 eqn SEP "|" -> br ] ]
+ [ [ OPT"|"; br=LIST0 eqn SEP "|" -> { br } ] ]
;
mult_pattern:
- [ [ pl = LIST1 pattern LEVEL "99" SEP "," -> pl ] ]
+ [ [ pl = LIST1 pattern LEVEL "99" SEP "," -> { pl } ] ]
;
eqn:
[ [ pll = LIST1 mult_pattern SEP "|";
- "=>"; rhs = lconstr -> (CAst.make ~loc:!@loc (pll,rhs)) ] ]
+ "=>"; rhs = lconstr -> { (CAst.make ~loc (pll,rhs)) } ] ]
;
record_pattern:
- [ [ id = global; ":="; pat = pattern -> (id, pat) ] ]
+ [ [ id = global; ":="; pat = pattern -> { (id, pat) } ] ]
;
record_patterns:
- [ [ p = record_pattern; ";"; ps = record_patterns -> p :: ps
- | p = record_pattern; ";" -> [p]
- | p = record_pattern-> [p]
- | -> []
+ [ [ p = record_pattern; ";"; ps = record_patterns -> { p :: ps }
+ | p = record_pattern; ";" -> { [p] }
+ | p = record_pattern-> { [p] }
+ | -> { [] }
] ]
;
pattern:
[ "200" RIGHTA [ ]
| "100" RIGHTA
- [ p = pattern; "|"; pl = LIST1 pattern SEP "|" -> CAst.make ~loc:!@loc @@ CPatOr (p::pl) ]
+ [ p = pattern; "|"; pl = LIST1 pattern SEP "|" -> { CAst.make ~loc @@ CPatOr (p::pl) } ]
| "99" RIGHTA [ ]
| "90" RIGHTA [ ]
| "10" LEFTA
[ p = pattern; "as"; na = name ->
- CAst.make ~loc:!@loc @@ CPatAlias (p, na)
- | p = pattern; lp = LIST1 NEXT -> mkAppPattern ~loc:!@loc p lp
+ { CAst.make ~loc @@ CPatAlias (p, na) }
+ | p = pattern; lp = LIST1 NEXT -> { mkAppPattern ~loc p lp }
| "@"; r = Prim.reference; lp = LIST0 NEXT ->
- CAst.make ~loc:!@loc @@ CPatCstr (r, Some lp, []) ]
+ { CAst.make ~loc @@ CPatCstr (r, Some lp, []) } ]
| "1" LEFTA
- [ c = pattern; "%"; key=IDENT -> CAst.make ~loc:!@loc @@ CPatDelimiters (key,c) ]
+ [ c = pattern; "%"; key=IDENT -> { CAst.make ~loc @@ CPatDelimiters (key,c) } ]
| "0"
- [ r = Prim.reference -> CAst.make ~loc:!@loc @@ CPatAtom (Some r)
- | "{|"; pat = record_patterns; "|}" -> CAst.make ~loc:!@loc @@ CPatRecord pat
- | "_" -> CAst.make ~loc:!@loc @@ CPatAtom None
+ [ r = Prim.reference -> { CAst.make ~loc @@ CPatAtom (Some r) }
+ | "{|"; pat = record_patterns; "|}" -> { CAst.make ~loc @@ CPatRecord pat }
+ | "_" -> { CAst.make ~loc @@ CPatAtom None }
| "("; p = pattern LEVEL "200"; ")" ->
- (match p.CAst.v with
+ { (match p.CAst.v with
| CPatPrim (Numeral (n,true)) ->
- CAst.make ~loc:!@loc @@ CPatNotation("( _ )",([p],[]),[])
- | _ -> p)
+ CAst.make ~loc @@ CPatNotation("( _ )",([p],[]),[])
+ | _ -> p) }
| "("; p = pattern LEVEL "200"; ":"; ty = lconstr; ")" ->
- let p =
+ { let p =
match p with
| { CAst.v = CPatPrim (Numeral (n,true)) } ->
- CAst.make ~loc:!@loc @@ CPatNotation("( _ )",([p],[]),[])
+ CAst.make ~loc @@ CPatNotation("( _ )",([p],[]),[])
| _ -> p
in
- CAst.make ~loc:!@loc @@ CPatCast (p, ty)
- | n = INT -> CAst.make ~loc:!@loc @@ CPatPrim (Numeral (n,true))
- | s = string -> CAst.make ~loc:!@loc @@ CPatPrim (String s) ] ]
+ CAst.make ~loc @@ CPatCast (p, ty) }
+ | n = INT -> { CAst.make ~loc @@ CPatPrim (Numeral (n,true)) }
+ | s = string -> { CAst.make ~loc @@ CPatPrim (String s) } ] ]
;
impl_ident_tail:
- [ [ "}" -> binder_of_name Implicit
+ [ [ "}" -> { binder_of_name Implicit }
| nal=LIST1 name; ":"; c=lconstr; "}" ->
- (fun na -> CLocalAssum (na::nal,Default Implicit,c))
+ { (fun na -> CLocalAssum (na::nal,Default Implicit,c)) }
| nal=LIST1 name; "}" ->
- (fun na -> CLocalAssum (na::nal,Default Implicit,
- CAst.make ?loc:(Loc.merge_opt na.CAst.loc (Some !@loc)) @@
- CHole (Some (Evar_kinds.BinderType na.CAst.v), IntroAnonymous, None)))
+ { (fun na -> CLocalAssum (na::nal,Default Implicit,
+ CAst.make ?loc:(Loc.merge_opt na.CAst.loc (Some loc)) @@
+ CHole (Some (Evar_kinds.BinderType na.CAst.v), IntroAnonymous, None))) }
| ":"; c=lconstr; "}" ->
- (fun na -> CLocalAssum ([na],Default Implicit,c))
+ { (fun na -> CLocalAssum ([na],Default Implicit,c)) }
] ]
;
fixannot:
- [ [ "{"; IDENT "struct"; id=identref; "}" -> (Some id, CStructRec)
- | "{"; IDENT "wf"; rel=constr; id=OPT identref; "}" -> (id, CWfRec rel)
+ [ [ "{"; IDENT "struct"; id=identref; "}" -> { (Some id, CStructRec) }
+ | "{"; IDENT "wf"; rel=constr; id=OPT identref; "}" -> { (id, CWfRec rel) }
| "{"; IDENT "measure"; m=constr; id=OPT identref;
- rel=OPT constr; "}" -> (id, CMeasureRec (m,rel))
+ rel=OPT constr; "}" -> { (id, CMeasureRec (m,rel)) }
] ]
;
impl_name_head:
- [ [ id = impl_ident_head -> (CAst.make ~loc:!@loc @@ Name id) ] ]
+ [ [ id = impl_ident_head -> { CAst.make ~loc @@ Name id } ] ]
;
binders_fixannot:
[ [ na = impl_name_head; assum = impl_ident_tail; bl = binders_fixannot ->
- (assum na :: fst bl), snd bl
- | f = fixannot -> [], f
- | b = binder; bl = binders_fixannot -> b @ fst bl, snd bl
- | -> [], (None, CStructRec)
+ { (assum na :: fst bl), snd bl }
+ | f = fixannot -> { [], f }
+ | b = binder; bl = binders_fixannot -> { b @ fst bl, snd bl }
+ | -> { [], (None, CStructRec) }
] ]
;
open_binders:
(* Same as binders but parentheses around a closed binder are optional if
the latter is unique *)
- [ [ (* open binder *)
+ [ [
id = name; idl = LIST0 name; ":"; c = lconstr ->
- [CLocalAssum (id::idl,Default Explicit,c)]
- (* binders factorized with open binder *)
+ { [CLocalAssum (id::idl,Default Explicit,c)]
+ (* binders factorized with open binder *) }
| id = name; idl = LIST0 name; bl = binders ->
- binders_of_names (id::idl) @ bl
+ { binders_of_names (id::idl) @ bl }
| id1 = name; ".."; id2 = name ->
- [CLocalAssum ([id1;(CAst.make ~loc:!@loc (Name ldots_var));id2],
- Default Explicit, CAst.make ~loc:!@loc @@ CHole (None, IntroAnonymous, None))]
+ { [CLocalAssum ([id1;(CAst.make ~loc (Name ldots_var));id2],
+ Default Explicit, CAst.make ~loc @@ CHole (None, IntroAnonymous, None))] }
| bl = closed_binder; bl' = binders ->
- bl@bl'
+ { bl@bl' }
] ]
;
binders:
- [ [ l = LIST0 binder -> List.flatten l ] ]
+ [ [ l = LIST0 binder -> { List.flatten l } ] ]
;
binder:
- [ [ id = name -> [CLocalAssum ([id],Default Explicit, CAst.make ~loc:!@loc @@ CHole (None, IntroAnonymous, None))]
- | bl = closed_binder -> bl ] ]
+ [ [ id = name -> { [CLocalAssum ([id],Default Explicit, CAst.make ~loc @@ CHole (None, IntroAnonymous, None))] }
+ | bl = closed_binder -> { bl } ] ]
;
closed_binder:
[ [ "("; id=name; idl=LIST1 name; ":"; c=lconstr; ")" ->
- [CLocalAssum (id::idl,Default Explicit,c)]
+ { [CLocalAssum (id::idl,Default Explicit,c)] }
| "("; id=name; ":"; c=lconstr; ")" ->
- [CLocalAssum ([id],Default Explicit,c)]
+ { [CLocalAssum ([id],Default Explicit,c)] }
| "("; id=name; ":="; c=lconstr; ")" ->
- (match c.CAst.v with
+ { (match c.CAst.v with
| CCast(c, CastConv t) -> [CLocalDef (id,c,Some t)]
- | _ -> [CLocalDef (id,c,None)])
+ | _ -> [CLocalDef (id,c,None)]) }
| "("; id=name; ":"; t=lconstr; ":="; c=lconstr; ")" ->
- [CLocalDef (id,c,Some t)]
+ { [CLocalDef (id,c,Some t)] }
| "{"; id=name; "}" ->
- [CLocalAssum ([id],Default Implicit, CAst.make ~loc:!@loc @@ CHole (None, IntroAnonymous, None))]
+ { [CLocalAssum ([id],Default Implicit, CAst.make ~loc @@ CHole (None, IntroAnonymous, None))] }
| "{"; id=name; idl=LIST1 name; ":"; c=lconstr; "}" ->
- [CLocalAssum (id::idl,Default Implicit,c)]
+ { [CLocalAssum (id::idl,Default Implicit,c)] }
| "{"; id=name; ":"; c=lconstr; "}" ->
- [CLocalAssum ([id],Default Implicit,c)]
+ { [CLocalAssum ([id],Default Implicit,c)] }
| "{"; id=name; idl=LIST1 name; "}" ->
- List.map (fun id -> CLocalAssum ([id],Default Implicit, CAst.make ~loc:!@loc @@ CHole (None, IntroAnonymous, None))) (id::idl)
+ { List.map (fun id -> CLocalAssum ([id],Default Implicit, CAst.make ~loc @@ CHole (None, IntroAnonymous, None))) (id::idl) }
| "`("; tc = LIST1 typeclass_constraint SEP "," ; ")" ->
- List.map (fun (n, b, t) -> CLocalAssum ([n], Generalized (Implicit, Explicit, b), t)) tc
+ { List.map (fun (n, b, t) -> CLocalAssum ([n], Generalized (Implicit, Explicit, b), t)) tc }
| "`{"; tc = LIST1 typeclass_constraint SEP "," ; "}" ->
- List.map (fun (n, b, t) -> CLocalAssum ([n], Generalized (Implicit, Implicit, b), t)) tc
+ { List.map (fun (n, b, t) -> CLocalAssum ([n], Generalized (Implicit, Implicit, b), t)) tc }
| "'"; p = pattern LEVEL "0" ->
- let (p, ty) =
+ { let (p, ty) =
match p.CAst.v with
| CPatCast (p, ty) -> (p, Some ty)
| _ -> (p, None)
in
- [CLocalPattern (CAst.make ~loc:!@loc (p, ty))]
+ [CLocalPattern (CAst.make ~loc (p, ty))] }
] ]
;
typeclass_constraint:
- [ [ "!" ; c = operconstr LEVEL "200" -> (CAst.make ~loc:!@loc Anonymous), true, c
- | "{"; id = name; "}"; ":" ; expl = [ "!" -> true | -> false ] ; c = operconstr LEVEL "200" ->
- id, expl, c
- | iid=name_colon ; expl = [ "!" -> true | -> false ] ; c = operconstr LEVEL "200" ->
- (CAst.make ~loc:!@loc iid), expl, c
+ [ [ "!" ; c = operconstr LEVEL "200" -> { (CAst.make ~loc Anonymous), true, c }
+ | "{"; id = name; "}"; ":" ; expl = [ "!" -> { true } | -> { false } ] ; c = operconstr LEVEL "200" ->
+ { id, expl, c }
+ | iid=name_colon ; expl = [ "!" -> { true } | -> { false } ] ; c = operconstr LEVEL "200" ->
+ { (CAst.make ~loc iid), expl, c }
| c = operconstr LEVEL "200" ->
- (CAst.make ~loc:!@loc Anonymous), false, c
+ { (CAst.make ~loc Anonymous), false, c }
] ]
;
type_cstr:
- [ [ c=OPT [":"; c=lconstr -> c] -> Loc.tag ~loc:!@loc c ] ]
+ [ [ c=OPT [":"; c=lconstr -> { c } ] -> { Loc.tag ~loc c } ] ]
;
- END;;
+ END
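
The g_constr hunks above all follow one mechanical pattern for the move from the camlp5 GEXTEND format to the new GRAMMAR EXTEND format: each semantic action becomes an OCaml chunk enclosed in braces, and the location coercion !@loc is dropped because the braced action appears to receive loc already as a Coq Loc.t. A minimal sketch of the same rewrite on a hypothetical entry (my_pair is illustrative only and not part of this patch):

(* old .ml4 rule, camlp5 action syntax *)
my_pair:
  [ [ a = ident; ","; b = ident -> CAst.make ~loc:!@loc (a, b) ] ]
;
(* new .mlg rule: the action is a braced OCaml chunk and loc is a Loc.t *)
my_pair:
  [ [ a = ident; ","; b = ident -> { CAst.make ~loc (a, b) } ] ]
;
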
diff --git a/parsing/g_prim.ml4 b/parsing/g_prim.mlg
index 91dce27fe..774db97f2 100644
--- a/parsing/g_prim.ml4
+++ b/parsing/g_prim.mlg
@@ -8,6 +8,8 @@
(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
+{
+
open Names
open Libnames
@@ -30,93 +32,96 @@ let my_int_of_string loc s =
with Failure _ | Exit ->
CErrors.user_err ~loc (Pp.str "Cannot support a so large number.")
-GEXTEND Gram
+}
+
+GRAMMAR EXTEND Gram
GLOBAL:
bigint natural integer identref name ident var preident
fullyqualid qualid reference dirpath ne_lstring
ne_string string lstring pattern_ident pattern_identref by_notation smart_global;
preident:
- [ [ s = IDENT -> s ] ]
+ [ [ s = IDENT -> { s } ] ]
;
ident:
- [ [ s = IDENT -> Id.of_string s ] ]
+ [ [ s = IDENT -> { Id.of_string s } ] ]
;
pattern_ident:
- [ [ LEFTQMARK; id = ident -> id ] ]
+ [ [ LEFTQMARK; id = ident -> { id } ] ]
;
pattern_identref:
- [ [ id = pattern_ident -> CAst.make ~loc:!@loc id ] ]
+ [ [ id = pattern_ident -> { CAst.make ~loc id } ] ]
;
var: (* as identref, but interpret as a term identifier in ltac *)
- [ [ id = ident -> CAst.make ~loc:!@loc id ] ]
+ [ [ id = ident -> { CAst.make ~loc id } ] ]
;
identref:
- [ [ id = ident -> CAst.make ~loc:!@loc id ] ]
+ [ [ id = ident -> { CAst.make ~loc id } ] ]
;
field:
- [ [ s = FIELD -> Id.of_string s ] ]
+ [ [ s = FIELD -> { Id.of_string s } ] ]
;
fields:
- [ [ id = field; (l,id') = fields -> (l@[id],id')
- | id = field -> ([],id)
+ [ [ id = field; f = fields -> { let (l,id') = f in (l@[id],id') }
+ | id = field -> { ([],id) }
] ]
;
fullyqualid:
- [ [ id = ident; (l,id')=fields -> CAst.make ~loc:!@loc @@ id::List.rev (id'::l)
- | id = ident -> CAst.make ~loc:!@loc [id]
+ [ [ id = ident; f=fields -> { let (l,id') = f in CAst.make ~loc @@ id::List.rev (id'::l) }
+ | id = ident -> { CAst.make ~loc [id] }
] ]
;
basequalid:
- [ [ id = ident; (l,id')=fields -> local_make_qualid !@loc (l@[id]) id'
- | id = ident -> qualid_of_ident ~loc:!@loc id
+ [ [ id = ident; f=fields -> { let (l,id') = f in local_make_qualid loc (l@[id]) id' }
+ | id = ident -> { qualid_of_ident ~loc id }
] ]
;
name:
- [ [ IDENT "_" -> CAst.make ~loc:!@loc Anonymous
- | id = ident -> CAst.make ~loc:!@loc @@ Name id ] ]
+ [ [ IDENT "_" -> { CAst.make ~loc Anonymous }
+ | id = ident -> { CAst.make ~loc @@ Name id } ] ]
;
reference:
- [ [ id = ident; (l,id') = fields ->
- local_make_qualid !@loc (l@[id]) id'
- | id = ident -> local_make_qualid !@loc [] id
+ [ [ id = ident; f = fields -> {
+ let (l,id') = f in
+ local_make_qualid loc (l@[id]) id' }
+ | id = ident -> { local_make_qualid loc [] id }
] ]
;
by_notation:
- [ [ s = ne_string; sc = OPT ["%"; key = IDENT -> key ] -> (s, sc) ] ]
+ [ [ s = ne_string; sc = OPT ["%"; key = IDENT -> { key } ] -> { (s, sc) } ] ]
;
smart_global:
- [ [ c = reference -> CAst.make ~loc:!@loc @@ Constrexpr.AN c
- | ntn = by_notation -> CAst.make ~loc:!@loc @@ Constrexpr.ByNotation ntn ] ]
+ [ [ c = reference -> { CAst.make ~loc @@ Constrexpr.AN c }
+ | ntn = by_notation -> { CAst.make ~loc @@ Constrexpr.ByNotation ntn } ] ]
;
qualid:
- [ [ qid = basequalid -> qid ] ]
+ [ [ qid = basequalid -> { qid } ] ]
;
ne_string:
[ [ s = STRING ->
- if s="" then CErrors.user_err ~loc:!@loc (Pp.str"Empty string."); s
+ { if s="" then CErrors.user_err ~loc (Pp.str"Empty string."); s }
] ]
;
ne_lstring:
- [ [ s = ne_string -> CAst.make ~loc:!@loc s ] ]
+ [ [ s = ne_string -> { CAst.make ~loc s } ] ]
;
dirpath:
[ [ id = ident; l = LIST0 field ->
- DirPath.make (List.rev (id::l)) ] ]
+ { DirPath.make (List.rev (id::l)) } ] ]
;
string:
- [ [ s = STRING -> s ] ]
+ [ [ s = STRING -> { s } ] ]
;
lstring:
- [ [ s = string -> (CAst.make ~loc:!@loc s) ] ]
+ [ [ s = string -> { CAst.make ~loc s } ] ]
;
integer:
- [ [ i = INT -> my_int_of_string (!@loc) i
- | "-"; i = INT -> - my_int_of_string (!@loc) i ] ]
+ [ [ i = INT -> { my_int_of_string loc i }
+ | "-"; i = INT -> { - my_int_of_string loc i } ] ]
;
natural:
- [ [ i = INT -> my_int_of_string (!@loc) i ] ]
+ [ [ i = INT -> { my_int_of_string loc i } ] ]
;
bigint: (* Negative numbers are dealt with elsewhere *)
- [ [ i = INT -> i ] ]
+ [ [ i = INT -> { i } ] ]
;
END
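
Besides bracing the actions, the g_prim hunks show a second recurring rewrite: tuple patterns on the left-hand side of a production (as in the old fields, fullyqualid and reference rules) are no longer used; the new rules bind a single variable and destructure it with a let inside the braced action. A hypothetical one-rule sketch of that change (locref is illustrative only, reusing local_make_qualid as above):

(* old: tuple pattern directly in the production *)
locref:
  [ [ (l,id) = fields -> local_make_qualid !@loc l id ] ]
;
(* new: plain binder, destructured inside the action *)
locref:
  [ [ f = fields -> { let (l,id) = f in local_make_qualid loc l id } ] ]
;
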
diff --git a/parsing/pcoq.ml b/parsing/pcoq.ml
index 6fdd9ea9b..997ea78e6 100644
--- a/parsing/pcoq.ml
+++ b/parsing/pcoq.ml
@@ -17,9 +17,17 @@ let curry f x y = f (x, y)
let uncurry f (x,y) = f x y
(** Location Utils *)
+let ploc_file_of_coq_file = function
+| Loc.ToplevelInput -> ""
+| Loc.InFile f -> f
+
let coq_file_of_ploc_file s =
if s = "" then Loc.ToplevelInput else Loc.InFile s
+let of_coqloc loc =
+ let open Loc in
+ Ploc.make_loc (ploc_file_of_coq_file loc.fname) loc.line_nb loc.bol_pos (loc.bp, loc.ep) ""
+
let to_coqloc loc =
{ Loc.fname = coq_file_of_ploc_file (Ploc.file_name loc);
Loc.line_nb = Ploc.line_nb loc;
@@ -236,7 +244,7 @@ let rec symbol_of_prod_entry_key : type s a. (s, a) symbol -> _ = function
| Aentry e ->
Symbols.snterm (G.Entry.obj e)
| Aentryl (e, n) ->
- Symbols.snterml (G.Entry.obj e, string_of_int n)
+ Symbols.snterml (G.Entry.obj e, n)
| Arules rs ->
G.srules' (List.map symbol_of_rules rs)
@@ -328,6 +336,7 @@ module Gram =
I'm not entirely sure it makes sense, but at least it would be more correct.
*)
G.delete_rule e pil
+ let gram_extend e ext = grammar_extend e None ext
end
(** Remove extensions
diff --git a/parsing/pcoq.mli b/parsing/pcoq.mli
index f959bd80c..154de1221 100644
--- a/parsing/pcoq.mli
+++ b/parsing/pcoq.mli
@@ -80,6 +80,8 @@ module type S =
(* Get comment parsing information from the Lexer *)
val comment_state : coq_parsable -> ((int * int) * string) list
+ val gram_extend : 'a entry -> 'a Extend.extend_statement -> unit
+
(* Apparently not used *)
val srules' : production_rule list -> symbol
val parse_tokens_after_filter : 'a entry -> Tok.t Stream.t -> 'a
@@ -299,6 +301,7 @@ val recover_grammar_command : 'a grammar_command -> 'a list
val with_grammar_rule_protection : ('a -> 'b) -> 'a -> 'b
(** Location Utils *)
+val of_coqloc : Loc.t -> Ploc.t
val to_coqloc : Ploc.t -> Loc.t
val (!@) : Ploc.t -> Loc.t
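
With of_coqloc added next to the existing to_coqloc, pcoq now exposes both directions of the Loc.t/Ploc.t conversion, and gram_extend is only a thin wrapper that calls grammar_extend with no position argument (see the pcoq.ml hunk above). A minimal sketch of how the two conversions compose; with_ploc is a hypothetical helper, not part of the patch:

(* thread a Coq location through code that expects a camlp5 Ploc.t *)
let with_ploc (f : Ploc.t -> Ploc.t) (l : Loc.t) : Loc.t =
  Pcoq.to_coqloc (f (Pcoq.of_coqloc l))
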
diff --git a/plugins/btauto/g_btauto.ml4 b/plugins/btauto/g_btauto.mlg
index 3ae0f45cb..312ef1e55 100644
--- a/plugins/btauto/g_btauto.ml4
+++ b/plugins/btauto/g_btauto.mlg
@@ -8,11 +8,15 @@
(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
+{
+
open Ltac_plugin
+}
+
DECLARE PLUGIN "btauto_plugin"
TACTIC EXTEND btauto
-| [ "btauto" ] -> [ Refl_btauto.Btauto.tac ]
+| [ "btauto" ] -> { Refl_btauto.Btauto.tac }
END
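
The plugin .mlg files use the same conventions inside TACTIC EXTEND blocks: every rule, including the first one, now starts with a vertical bar, and the tactic expression is a braced OCaml chunk rather than a bracketed one. A sketch on a hypothetical tactic (my_refl is illustrative only; its body reuses Tactics.intros_reflexivity, as in the reflexivity rule further down):

(* old .ml4 block *)
TACTIC EXTEND my_refl
  [ "my_refl" ] -> [ Tactics.intros_reflexivity ]
END
(* new .mlg block *)
TACTIC EXTEND my_refl
| [ "my_refl" ] -> { Tactics.intros_reflexivity }
END
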
diff --git a/plugins/cc/g_congruence.ml4 b/plugins/cc/g_congruence.mlg
index fb013ac13..685059294 100644
--- a/plugins/cc/g_congruence.ml4
+++ b/plugins/cc/g_congruence.mlg
@@ -8,22 +8,26 @@
(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
+{
+
open Ltac_plugin
open Cctac
open Stdarg
+}
+
DECLARE PLUGIN "cc_plugin"
(* Tactic registration *)
TACTIC EXTEND cc
- [ "congruence" ] -> [ congruence_tac 1000 [] ]
- |[ "congruence" integer(n) ] -> [ congruence_tac n [] ]
- |[ "congruence" "with" ne_constr_list(l) ] -> [ congruence_tac 1000 l ]
+| [ "congruence" ] -> { congruence_tac 1000 [] }
+| [ "congruence" integer(n) ] -> { congruence_tac n [] }
+| [ "congruence" "with" ne_constr_list(l) ] -> { congruence_tac 1000 l }
|[ "congruence" integer(n) "with" ne_constr_list(l) ] ->
- [ congruence_tac n l ]
+ { congruence_tac n l }
END
TACTIC EXTEND f_equal
- [ "f_equal" ] -> [ f_equal ]
+| [ "f_equal" ] -> { f_equal }
END
diff --git a/plugins/fourier/g_fourier.ml4 b/plugins/fourier/g_fourier.mlg
index 44560ac18..703e29f96 100644
--- a/plugins/fourier/g_fourier.ml4
+++ b/plugins/fourier/g_fourier.mlg
@@ -8,11 +8,15 @@
(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
+{
+
open Ltac_plugin
open FourierR
+}
+
DECLARE PLUGIN "fourier_plugin"
TACTIC EXTEND fourier
- [ "fourierz" ] -> [ fourier () ]
+| [ "fourierz" ] -> { fourier () }
END
diff --git a/plugins/funind/functional_principles_types.ml b/plugins/funind/functional_principles_types.ml
index 31496513a..b2a528a1f 100644
--- a/plugins/funind/functional_principles_types.ml
+++ b/plugins/funind/functional_principles_types.ml
@@ -322,8 +322,7 @@ let generate_functional_principle (evd: Evd.evar_map ref)
try
let f = funs.(i) in
- let env = Global.env () in
- let type_sort = Evarutil.evd_comb1 (Evd.fresh_sort_in_family env) evd InType in
+ let type_sort = Evarutil.evd_comb1 Evd.fresh_sort_in_family evd InType in
let new_sorts =
match sorts with
| None -> Array.make (Array.length funs) (type_sort)
@@ -344,7 +343,7 @@ let generate_functional_principle (evd: Evd.evar_map ref)
(* let id_of_f = Label.to_id (con_label f) in *)
let register_with_sort fam_sort =
let evd' = Evd.from_env (Global.env ()) in
- let evd',s = Evd.fresh_sort_in_family env evd' fam_sort in
+ let evd',s = Evd.fresh_sort_in_family evd' fam_sort in
let name = Indrec.make_elimination_ident base_new_princ_name fam_sort in
let evd',value = change_property_sort evd' s new_principle_type new_princ_name in
let evd' = fst (Typing.type_of ~refresh:true (Global.env ()) evd' (EConstr.of_constr value)) in
@@ -354,7 +353,7 @@ let generate_functional_principle (evd: Evd.evar_map ref)
Evd.const_univ_entry ~poly evd'
in
let ce = Declare.definition_entry ~univs value in
- ignore(
+ ignore(
Declare.declare_constant
name
(DefinitionEntry ce,
@@ -508,8 +507,8 @@ let make_scheme evd (fas : (pconstant*Sorts.family) list) : Safe_typing.private_
let i = ref (-1) in
let sorts =
List.rev_map (fun (_,x) ->
- Evarutil.evd_comb1 (Evd.fresh_sort_in_family env) evd x
- )
+ Evarutil.evd_comb1 Evd.fresh_sort_in_family evd x
+ )
fas
in
(* We create the first priciple by tactic *)
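
The two funind hunks above track an API change rather than the grammar migration: Evd.fresh_sort_in_family no longer takes an environment, so the env argument is dropped at call sites and the function can be passed to Evarutil.evd_comb1 unapplied. A minimal sketch of the new calling convention, assuming evd is an Evd.evar_map ref as in generate_functional_principle (fresh_type_sort is a hypothetical helper):

let fresh_type_sort evd =
  Evarutil.evd_comb1 Evd.fresh_sort_in_family evd Sorts.InType
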
diff --git a/plugins/funind/glob_term_to_relation.ml b/plugins/funind/glob_term_to_relation.ml
index 6b9b10312..5fc4293cb 100644
--- a/plugins/funind/glob_term_to_relation.ml
+++ b/plugins/funind/glob_term_to_relation.ml
@@ -1499,7 +1499,7 @@ let do_build_inductive
let _time2 = System.get_time () in
try
with_full_print
- (Flags.silently (ComInductive.do_mutual_inductive rel_inds (Flags.is_universe_polymorphism ()) false false))
+ (Flags.silently (ComInductive.do_mutual_inductive rel_inds (Flags.is_universe_polymorphism ()) false false ~uniform:ComInductive.NonUniformParameters))
Declarations.Finite
with
| UserError(s,msg) as e ->
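
This hunk adapts funind to another signature change: ComInductive.do_mutual_inductive now expects a labelled ~uniform argument, and passing ComInductive.NonUniformParameters keeps the plugin's previous behaviour. A sketch of the resulting call shape, with inds standing in for the prepared inductive declarations (arguments other than ~uniform are as in the hunk above):

ComInductive.do_mutual_inductive inds
  (Flags.is_universe_polymorphism ()) false false
  ~uniform:ComInductive.NonUniformParameters
  Declarations.Finite
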
diff --git a/plugins/ltac/coretactics.ml4 b/plugins/ltac/coretactics.mlg
index 61525cb49..6388906f5 100644
--- a/plugins/ltac/coretactics.ml4
+++ b/plugins/ltac/coretactics.mlg
@@ -8,6 +8,8 @@
(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
+{
+
open Util
open Locus
open Tactypes
@@ -18,147 +20,153 @@ open Tacarg
open Names
open Logic
+let wit_hyp = wit_var
+
+}
+
DECLARE PLUGIN "ltac_plugin"
(** Basic tactics *)
TACTIC EXTEND reflexivity
- [ "reflexivity" ] -> [ Tactics.intros_reflexivity ]
+| [ "reflexivity" ] -> { Tactics.intros_reflexivity }
END
TACTIC EXTEND exact
- [ "exact" casted_constr(c) ] -> [ Tactics.exact_no_check c ]
+| [ "exact" casted_constr(c) ] -> { Tactics.exact_no_check c }
END
TACTIC EXTEND assumption
- [ "assumption" ] -> [ Tactics.assumption ]
+| [ "assumption" ] -> { Tactics.assumption }
END
TACTIC EXTEND etransitivity
- [ "etransitivity" ] -> [ Tactics.intros_transitivity None ]
+| [ "etransitivity" ] -> { Tactics.intros_transitivity None }
END
TACTIC EXTEND cut
- [ "cut" constr(c) ] -> [ Tactics.cut c ]
+| [ "cut" constr(c) ] -> { Tactics.cut c }
END
TACTIC EXTEND exact_no_check
- [ "exact_no_check" constr(c) ] -> [ Tactics.exact_no_check c ]
+| [ "exact_no_check" constr(c) ] -> { Tactics.exact_no_check c }
END
TACTIC EXTEND vm_cast_no_check
- [ "vm_cast_no_check" constr(c) ] -> [ Tactics.vm_cast_no_check c ]
+| [ "vm_cast_no_check" constr(c) ] -> { Tactics.vm_cast_no_check c }
END
TACTIC EXTEND native_cast_no_check
- [ "native_cast_no_check" constr(c) ] -> [ Tactics.native_cast_no_check c ]
+| [ "native_cast_no_check" constr(c) ] -> { Tactics.native_cast_no_check c }
END
TACTIC EXTEND casetype
- [ "casetype" constr(c) ] -> [ Tactics.case_type c ]
+| [ "casetype" constr(c) ] -> { Tactics.case_type c }
END
TACTIC EXTEND elimtype
- [ "elimtype" constr(c) ] -> [ Tactics.elim_type c ]
+| [ "elimtype" constr(c) ] -> { Tactics.elim_type c }
END
TACTIC EXTEND lapply
- [ "lapply" constr(c) ] -> [ Tactics.cut_and_apply c ]
+| [ "lapply" constr(c) ] -> { Tactics.cut_and_apply c }
END
TACTIC EXTEND transitivity
- [ "transitivity" constr(c) ] -> [ Tactics.intros_transitivity (Some c) ]
+| [ "transitivity" constr(c) ] -> { Tactics.intros_transitivity (Some c) }
END
(** Left *)
TACTIC EXTEND left
- [ "left" ] -> [ Tactics.left_with_bindings false NoBindings ]
+| [ "left" ] -> { Tactics.left_with_bindings false NoBindings }
END
TACTIC EXTEND eleft
- [ "eleft" ] -> [ Tactics.left_with_bindings true NoBindings ]
+| [ "eleft" ] -> { Tactics.left_with_bindings true NoBindings }
END
TACTIC EXTEND left_with
- [ "left" "with" bindings(bl) ] -> [
+| [ "left" "with" bindings(bl) ] -> {
Tacticals.New.tclDELAYEDWITHHOLES false bl (fun bl -> Tactics.left_with_bindings false bl)
- ]
+ }
END
TACTIC EXTEND eleft_with
- [ "eleft" "with" bindings(bl) ] -> [
+| [ "eleft" "with" bindings(bl) ] -> {
Tacticals.New.tclDELAYEDWITHHOLES true bl (fun bl -> Tactics.left_with_bindings true bl)
- ]
+ }
END
(** Right *)
TACTIC EXTEND right
- [ "right" ] -> [ Tactics.right_with_bindings false NoBindings ]
+| [ "right" ] -> { Tactics.right_with_bindings false NoBindings }
END
TACTIC EXTEND eright
- [ "eright" ] -> [ Tactics.right_with_bindings true NoBindings ]
+| [ "eright" ] -> { Tactics.right_with_bindings true NoBindings }
END
TACTIC EXTEND right_with
- [ "right" "with" bindings(bl) ] -> [
+| [ "right" "with" bindings(bl) ] -> {
Tacticals.New.tclDELAYEDWITHHOLES false bl (fun bl -> Tactics.right_with_bindings false bl)
- ]
+ }
END
TACTIC EXTEND eright_with
- [ "eright" "with" bindings(bl) ] -> [
+| [ "eright" "with" bindings(bl) ] -> {
Tacticals.New.tclDELAYEDWITHHOLES true bl (fun bl -> Tactics.right_with_bindings true bl)
- ]
+ }
END
(** Constructor *)
TACTIC EXTEND constructor
- [ "constructor" ] -> [ Tactics.any_constructor false None ]
-| [ "constructor" int_or_var(i) ] -> [
+| [ "constructor" ] -> { Tactics.any_constructor false None }
+| [ "constructor" int_or_var(i) ] -> {
Tactics.constructor_tac false None i NoBindings
- ]
-| [ "constructor" int_or_var(i) "with" bindings(bl) ] -> [
+ }
+| [ "constructor" int_or_var(i) "with" bindings(bl) ] -> {
let tac bl = Tactics.constructor_tac false None i bl in
Tacticals.New.tclDELAYEDWITHHOLES false bl tac
- ]
+ }
END
TACTIC EXTEND econstructor
- [ "econstructor" ] -> [ Tactics.any_constructor true None ]
-| [ "econstructor" int_or_var(i) ] -> [
+| [ "econstructor" ] -> { Tactics.any_constructor true None }
+| [ "econstructor" int_or_var(i) ] -> {
Tactics.constructor_tac true None i NoBindings
- ]
-| [ "econstructor" int_or_var(i) "with" bindings(bl) ] -> [
+ }
+| [ "econstructor" int_or_var(i) "with" bindings(bl) ] -> {
let tac bl = Tactics.constructor_tac true None i bl in
Tacticals.New.tclDELAYEDWITHHOLES true bl tac
- ]
+ }
END
(** Specialize *)
TACTIC EXTEND specialize
- [ "specialize" constr_with_bindings(c) ] -> [
+| [ "specialize" constr_with_bindings(c) ] -> {
Tacticals.New.tclDELAYEDWITHHOLES false c (fun c -> Tactics.specialize c None)
- ]
-| [ "specialize" constr_with_bindings(c) "as" intropattern(ipat) ] -> [
+ }
+| [ "specialize" constr_with_bindings(c) "as" intropattern(ipat) ] -> {
Tacticals.New.tclDELAYEDWITHHOLES false c (fun c -> Tactics.specialize c (Some ipat))
- ]
+ }
END
TACTIC EXTEND symmetry
- [ "symmetry" ] -> [ Tactics.intros_symmetry {onhyps=Some[];concl_occs=AllOccurrences} ]
+| [ "symmetry" ] -> { Tactics.intros_symmetry {onhyps=Some[];concl_occs=AllOccurrences} }
END
TACTIC EXTEND symmetry_in
-| [ "symmetry" "in" in_clause(cl) ] -> [ Tactics.intros_symmetry cl ]
+| [ "symmetry" "in" in_clause(cl) ] -> { Tactics.intros_symmetry cl }
END
(** Split *)
+{
+
let rec delayed_list = function
| [] -> fun _ sigma -> (sigma, [])
| x :: l ->
@@ -167,147 +175,159 @@ let rec delayed_list = function
let (sigma, l) = delayed_list l env sigma in
(sigma, x :: l)
+}
+
TACTIC EXTEND split
- [ "split" ] -> [ Tactics.split_with_bindings false [NoBindings] ]
+| [ "split" ] -> { Tactics.split_with_bindings false [NoBindings] }
END
TACTIC EXTEND esplit
- [ "esplit" ] -> [ Tactics.split_with_bindings true [NoBindings] ]
+| [ "esplit" ] -> { Tactics.split_with_bindings true [NoBindings] }
END
TACTIC EXTEND split_with
- [ "split" "with" bindings(bl) ] -> [
+| [ "split" "with" bindings(bl) ] -> {
Tacticals.New.tclDELAYEDWITHHOLES false bl (fun bl -> Tactics.split_with_bindings false [bl])
- ]
+ }
END
TACTIC EXTEND esplit_with
- [ "esplit" "with" bindings(bl) ] -> [
+| [ "esplit" "with" bindings(bl) ] -> {
Tacticals.New.tclDELAYEDWITHHOLES true bl (fun bl -> Tactics.split_with_bindings true [bl])
- ]
+ }
END
TACTIC EXTEND exists
- [ "exists" ] -> [ Tactics.split_with_bindings false [NoBindings] ]
-| [ "exists" ne_bindings_list_sep(bll, ",") ] -> [
+| [ "exists" ] -> { Tactics.split_with_bindings false [NoBindings] }
+| [ "exists" ne_bindings_list_sep(bll, ",") ] -> {
Tacticals.New.tclDELAYEDWITHHOLES false (delayed_list bll) (fun bll -> Tactics.split_with_bindings false bll)
- ]
+ }
END
TACTIC EXTEND eexists
- [ "eexists" ] -> [ Tactics.split_with_bindings true [NoBindings] ]
-| [ "eexists" ne_bindings_list_sep(bll, ",") ] -> [
+| [ "eexists" ] -> { Tactics.split_with_bindings true [NoBindings] }
+| [ "eexists" ne_bindings_list_sep(bll, ",") ] -> {
Tacticals.New.tclDELAYEDWITHHOLES true (delayed_list bll) (fun bll -> Tactics.split_with_bindings true bll)
- ]
+ }
END
(** Intro *)
TACTIC EXTEND intros_until
- [ "intros" "until" quantified_hypothesis(h) ] -> [ Tactics.intros_until h ]
+| [ "intros" "until" quantified_hypothesis(h) ] -> { Tactics.intros_until h }
END
TACTIC EXTEND intro
-| [ "intro" ] -> [ Tactics.intro_move None MoveLast ]
-| [ "intro" ident(id) ] -> [ Tactics.intro_move (Some id) MoveLast ]
-| [ "intro" ident(id) "at" "top" ] -> [ Tactics.intro_move (Some id) MoveFirst ]
-| [ "intro" ident(id) "at" "bottom" ] -> [ Tactics.intro_move (Some id) MoveLast ]
-| [ "intro" ident(id) "after" hyp(h) ] -> [ Tactics.intro_move (Some id) (MoveAfter h) ]
-| [ "intro" ident(id) "before" hyp(h) ] -> [ Tactics.intro_move (Some id) (MoveBefore h) ]
-| [ "intro" "at" "top" ] -> [ Tactics.intro_move None MoveFirst ]
-| [ "intro" "at" "bottom" ] -> [ Tactics.intro_move None MoveLast ]
-| [ "intro" "after" hyp(h) ] -> [ Tactics.intro_move None (MoveAfter h) ]
-| [ "intro" "before" hyp(h) ] -> [ Tactics.intro_move None (MoveBefore h) ]
+| [ "intro" ] -> { Tactics.intro_move None MoveLast }
+| [ "intro" ident(id) ] -> { Tactics.intro_move (Some id) MoveLast }
+| [ "intro" ident(id) "at" "top" ] -> { Tactics.intro_move (Some id) MoveFirst }
+| [ "intro" ident(id) "at" "bottom" ] -> { Tactics.intro_move (Some id) MoveLast }
+| [ "intro" ident(id) "after" hyp(h) ] -> { Tactics.intro_move (Some id) (MoveAfter h) }
+| [ "intro" ident(id) "before" hyp(h) ] -> { Tactics.intro_move (Some id) (MoveBefore h) }
+| [ "intro" "at" "top" ] -> { Tactics.intro_move None MoveFirst }
+| [ "intro" "at" "bottom" ] -> { Tactics.intro_move None MoveLast }
+| [ "intro" "after" hyp(h) ] -> { Tactics.intro_move None (MoveAfter h) }
+| [ "intro" "before" hyp(h) ] -> { Tactics.intro_move None (MoveBefore h) }
END
(** Move *)
TACTIC EXTEND move
- [ "move" hyp(id) "at" "top" ] -> [ Tactics.move_hyp id MoveFirst ]
-| [ "move" hyp(id) "at" "bottom" ] -> [ Tactics.move_hyp id MoveLast ]
-| [ "move" hyp(id) "after" hyp(h) ] -> [ Tactics.move_hyp id (MoveAfter h) ]
-| [ "move" hyp(id) "before" hyp(h) ] -> [ Tactics.move_hyp id (MoveBefore h) ]
+| [ "move" hyp(id) "at" "top" ] -> { Tactics.move_hyp id MoveFirst }
+| [ "move" hyp(id) "at" "bottom" ] -> { Tactics.move_hyp id MoveLast }
+| [ "move" hyp(id) "after" hyp(h) ] -> { Tactics.move_hyp id (MoveAfter h) }
+| [ "move" hyp(id) "before" hyp(h) ] -> { Tactics.move_hyp id (MoveBefore h) }
END
(** Rename *)
TACTIC EXTEND rename
-| [ "rename" ne_rename_list_sep(ids, ",") ] -> [ Tactics.rename_hyp ids ]
+| [ "rename" ne_rename_list_sep(ids, ",") ] -> { Tactics.rename_hyp ids }
END
(** Revert *)
TACTIC EXTEND revert
- [ "revert" ne_hyp_list(hl) ] -> [ Tactics.revert hl ]
+| [ "revert" ne_hyp_list(hl) ] -> { Tactics.revert hl }
END
(** Simple induction / destruct *)
+{
+
let simple_induct h =
Tacticals.New.tclTHEN (Tactics.intros_until h)
(Tacticals.New.onLastHyp Tactics.simplest_elim)
+}
+
TACTIC EXTEND simple_induction
- [ "simple" "induction" quantified_hypothesis(h) ] -> [ simple_induct h ]
+| [ "simple" "induction" quantified_hypothesis(h) ] -> { simple_induct h }
END
+{
+
let simple_destruct h =
Tacticals.New.tclTHEN (Tactics.intros_until h)
(Tacticals.New.onLastHyp Tactics.simplest_case)
+}
+
TACTIC EXTEND simple_destruct
- [ "simple" "destruct" quantified_hypothesis(h) ] -> [ simple_destruct h ]
+| [ "simple" "destruct" quantified_hypothesis(h) ] -> { simple_destruct h }
END
(** Double induction *)
TACTIC EXTEND double_induction
- [ "double" "induction" quantified_hypothesis(h1) quantified_hypothesis(h2) ] ->
- [ Elim.h_double_induction h1 h2 ]
+| [ "double" "induction" quantified_hypothesis(h1) quantified_hypothesis(h2) ] ->
+ { Elim.h_double_induction h1 h2 }
END
(* Admit *)
TACTIC EXTEND admit
- [ "admit" ] -> [ Proofview.give_up ]
+|[ "admit" ] -> { Proofview.give_up }
END
(* Fix *)
TACTIC EXTEND fix
- [ "fix" ident(id) natural(n) ] -> [ Tactics.fix id n ]
+| [ "fix" ident(id) natural(n) ] -> { Tactics.fix id n }
END
(* Cofix *)
TACTIC EXTEND cofix
- [ "cofix" ident(id) ] -> [ Tactics.cofix id ]
+| [ "cofix" ident(id) ] -> { Tactics.cofix id }
END
(* Clear *)
TACTIC EXTEND clear
- [ "clear" hyp_list(ids) ] -> [
+| [ "clear" hyp_list(ids) ] -> {
if List.is_empty ids then Tactics.keep []
else Tactics.clear ids
- ]
-| [ "clear" "-" ne_hyp_list(ids) ] -> [ Tactics.keep ids ]
+ }
+| [ "clear" "-" ne_hyp_list(ids) ] -> { Tactics.keep ids }
END
(* Clearbody *)
TACTIC EXTEND clearbody
- [ "clearbody" ne_hyp_list(ids) ] -> [ Tactics.clear_body ids ]
+| [ "clearbody" ne_hyp_list(ids) ] -> { Tactics.clear_body ids }
END
(* Generalize dependent *)
TACTIC EXTEND generalize_dependent
- [ "generalize" "dependent" constr(c) ] -> [ Tactics.generalize_dep c ]
+| [ "generalize" "dependent" constr(c) ] -> { Tactics.generalize_dep c }
END
(* Table of "pervasives" macros tactics (e.g. auto, simpl, etc.) *)
+{
+
open Tacexpr
let initial_atomic () =
@@ -364,3 +384,5 @@ let initial_tacticals () =
]
let () = Mltop.declare_cache_obj initial_tacticals "ltac_plugin"
+
+}
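
coretactics.mlg also shows how plain OCaml that sits between EXTEND blocks (the wit_hyp alias, delayed_list, simple_induct, simple_destruct, and the tables of pervasive tactics at the end) is carried over: each such stretch is enclosed in its own braced chunk. A hypothetical fragment in the same style (my_intro_twice is illustrative only; it assumes the same opens as the file above, in particular the module providing MoveLast):

{

let my_intro_twice =
  Tacticals.New.tclTHEN (Tactics.intro_move None MoveLast)
                        (Tactics.intro_move None MoveLast)

}

TACTIC EXTEND my_intro_twice
| [ "my_intro_twice" ] -> { my_intro_twice }
END
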
diff --git a/plugins/ltac/extratactics.ml4 b/plugins/ltac/extratactics.ml4
index 660e29ca8..f24ab2bdd 100644
--- a/plugins/ltac/extratactics.ml4
+++ b/plugins/ltac/extratactics.ml4
@@ -293,7 +293,7 @@ open Vars
let constr_flags () = {
Pretyping.use_typeclasses = true;
- Pretyping.solve_unification_constraints = true;
+ Pretyping.solve_unification_constraints = Pfedit.use_unification_heuristics ();
Pretyping.use_hook = Pfedit.solve_by_implicit_tactic ();
Pretyping.fail_evar = false;
Pretyping.expand_evars = true }
diff --git a/plugins/ltac/g_eqdecide.ml4 b/plugins/ltac/g_eqdecide.mlg
index 2251a6620..e57afe3e3 100644
--- a/plugins/ltac/g_eqdecide.ml4
+++ b/plugins/ltac/g_eqdecide.mlg
@@ -14,15 +14,19 @@
(* by Eduardo Gimenez *)
(************************************************************************)
+{
+
open Eqdecide
open Stdarg
+}
+
DECLARE PLUGIN "ltac_plugin"
TACTIC EXTEND decide_equality
-| [ "decide" "equality" ] -> [ decideEqualityGoal ]
+| [ "decide" "equality" ] -> { decideEqualityGoal }
END
TACTIC EXTEND compare
-| [ "compare" constr(c1) constr(c2) ] -> [ compare c1 c2 ]
+| [ "compare" constr(c1) constr(c2) ] -> { compare c1 c2 }
END
diff --git a/plugins/ltac/g_tactic.ml4 b/plugins/ltac/g_tactic.mlg
index 31bc34a32..2e1ce814a 100644
--- a/plugins/ltac/g_tactic.ml4
+++ b/plugins/ltac/g_tactic.mlg
@@ -8,6 +8,8 @@
(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
+{
+
open Pp
open CErrors
open Util
@@ -215,486 +217,488 @@ let warn_deprecated_eqn_syntax =
open Pvernac.Vernac_
-GEXTEND Gram
+}
+
+GRAMMAR EXTEND Gram
GLOBAL: simple_tactic constr_with_bindings quantified_hypothesis
bindings red_expr int_or_var open_constr uconstr
simple_intropattern in_clause clause_dft_concl hypident destruction_arg;
int_or_var:
- [ [ n = integer -> ArgArg n
- | id = identref -> ArgVar id ] ]
+ [ [ n = integer -> { ArgArg n }
+ | id = identref -> { ArgVar id } ] ]
;
nat_or_var:
- [ [ n = natural -> ArgArg n
- | id = identref -> ArgVar id ] ]
+ [ [ n = natural -> { ArgArg n }
+ | id = identref -> { ArgVar id } ] ]
;
(* An identifier or a quotation meta-variable *)
id_or_meta:
- [ [ id = identref -> id ] ]
+ [ [ id = identref -> { id } ] ]
;
open_constr:
- [ [ c = constr -> c ] ]
+ [ [ c = constr -> { c } ] ]
;
uconstr:
- [ [ c = constr -> c ] ]
+ [ [ c = constr -> { c } ] ]
;
destruction_arg:
- [ [ n = natural -> (None,ElimOnAnonHyp n)
+ [ [ n = natural -> { (None,ElimOnAnonHyp n) }
| test_lpar_id_rpar; c = constr_with_bindings ->
- (Some false,destruction_arg_of_constr c)
- | c = constr_with_bindings_arg -> on_snd destruction_arg_of_constr c
+ { (Some false,destruction_arg_of_constr c) }
+ | c = constr_with_bindings_arg -> { on_snd destruction_arg_of_constr c }
] ]
;
constr_with_bindings_arg:
- [ [ ">"; c = constr_with_bindings -> (Some true,c)
- | c = constr_with_bindings -> (None,c) ] ]
+ [ [ ">"; c = constr_with_bindings -> { (Some true,c) }
+ | c = constr_with_bindings -> { (None,c) } ] ]
;
quantified_hypothesis:
- [ [ id = ident -> NamedHyp id
- | n = natural -> AnonHyp n ] ]
+ [ [ id = ident -> { NamedHyp id }
+ | n = natural -> { AnonHyp n } ] ]
;
conversion:
- [ [ c = constr -> (None, c)
- | c1 = constr; "with"; c2 = constr -> (Some (AllOccurrences,c1),c2)
+ [ [ c = constr -> { (None, c) }
+ | c1 = constr; "with"; c2 = constr -> { (Some (AllOccurrences,c1),c2) }
| c1 = constr; "at"; occs = occs_nums; "with"; c2 = constr ->
- (Some (occs,c1), c2) ] ]
+ { (Some (occs,c1), c2) } ] ]
;
occs_nums:
- [ [ nl = LIST1 nat_or_var -> OnlyOccurrences nl
+ [ [ nl = LIST1 nat_or_var -> { OnlyOccurrences nl }
| "-"; n = nat_or_var; nl = LIST0 int_or_var ->
(* have used int_or_var instead of nat_or_var for compatibility *)
- AllOccurrencesBut (List.map (map_int_or_var abs) (n::nl)) ] ]
+ { AllOccurrencesBut (List.map (map_int_or_var abs) (n::nl)) } ] ]
;
occs:
- [ [ "at"; occs = occs_nums -> occs | -> AllOccurrences ] ]
+ [ [ "at"; occs = occs_nums -> { occs } | -> { AllOccurrences } ] ]
;
pattern_occ:
- [ [ c = constr; nl = occs -> (nl,c) ] ]
+ [ [ c = constr; nl = occs -> { (nl,c) } ] ]
;
ref_or_pattern_occ:
(* If a string, it is interpreted as a ref
(anyway a Coq string does not reduce) *)
- [ [ c = smart_global; nl = occs -> nl,Inl c
- | c = constr; nl = occs -> nl,Inr c ] ]
+ [ [ c = smart_global; nl = occs -> { nl,Inl c }
+ | c = constr; nl = occs -> { nl,Inr c } ] ]
;
unfold_occ:
- [ [ c = smart_global; nl = occs -> (nl,c) ] ]
+ [ [ c = smart_global; nl = occs -> { (nl,c) } ] ]
;
intropatterns:
- [ [ l = LIST0 nonsimple_intropattern -> l ]]
+ [ [ l = LIST0 nonsimple_intropattern -> { l } ] ]
;
ne_intropatterns:
- [ [ l = LIST1 nonsimple_intropattern -> l ]]
+ [ [ l = LIST1 nonsimple_intropattern -> { l } ] ]
;
or_and_intropattern:
- [ [ "["; tc = LIST1 intropatterns SEP "|"; "]" -> IntroOrPattern tc
- | "()" -> IntroAndPattern []
- | "("; si = simple_intropattern; ")" -> IntroAndPattern [si]
+ [ [ "["; tc = LIST1 intropatterns SEP "|"; "]" -> { IntroOrPattern tc }
+ | "()" -> { IntroAndPattern [] }
+ | "("; si = simple_intropattern; ")" -> { IntroAndPattern [si] }
| "("; si = simple_intropattern; ",";
tc = LIST1 simple_intropattern SEP "," ; ")" ->
- IntroAndPattern (si::tc)
+ { IntroAndPattern (si::tc) }
| "("; si = simple_intropattern; "&";
tc = LIST1 simple_intropattern SEP "&" ; ")" ->
(* (A & B & C) is translated into (A,(B,C)) *)
- let rec pairify = function
+ { let rec pairify = function
| ([]|[_]|[_;_]) as l -> l
| t::q -> [t; CAst.make ?loc:(loc_of_ne_list q) (IntroAction (IntroOrAndPattern (IntroAndPattern (pairify q))))]
- in IntroAndPattern (pairify (si::tc)) ] ]
+ in IntroAndPattern (pairify (si::tc)) } ] ]
;
equality_intropattern:
- [ [ "->" -> IntroRewrite true
- | "<-" -> IntroRewrite false
- | "[="; tc = intropatterns; "]" -> IntroInjection tc ] ]
+ [ [ "->" -> { IntroRewrite true }
+ | "<-" -> { IntroRewrite false }
+ | "[="; tc = intropatterns; "]" -> { IntroInjection tc } ] ]
;
naming_intropattern:
- [ [ prefix = pattern_ident -> IntroFresh prefix
- | "?" -> IntroAnonymous
- | id = ident -> IntroIdentifier id ] ]
+ [ [ prefix = pattern_ident -> { IntroFresh prefix }
+ | "?" -> { IntroAnonymous }
+ | id = ident -> { IntroIdentifier id } ] ]
;
nonsimple_intropattern:
- [ [ l = simple_intropattern -> l
- | "*" -> CAst.make ~loc:!@loc @@ IntroForthcoming true
- | "**" -> CAst.make ~loc:!@loc @@ IntroForthcoming false ]]
+ [ [ l = simple_intropattern -> { l }
+ | "*" -> { CAst.make ~loc @@ IntroForthcoming true }
+ | "**" -> { CAst.make ~loc @@ IntroForthcoming false } ] ]
;
simple_intropattern:
[ [ pat = simple_intropattern_closed;
- l = LIST0 ["%"; c = operconstr LEVEL "0" -> c] ->
- let {CAst.loc=loc0;v=pat} = pat in
+ l = LIST0 ["%"; c = operconstr LEVEL "0" -> { c } ] ->
+ { let {CAst.loc=loc0;v=pat} = pat in
let f c pat =
let loc1 = Constrexpr_ops.constr_loc c in
let loc = Loc.merge_opt loc0 loc1 in
IntroAction (IntroApplyOn (CAst.(make ?loc:loc1 c),CAst.(make ?loc pat))) in
- CAst.make ~loc:!@loc @@ List.fold_right f l pat ] ]
+ CAst.make ~loc @@ List.fold_right f l pat } ] ]
;
simple_intropattern_closed:
- [ [ pat = or_and_intropattern -> CAst.make ~loc:!@loc @@ IntroAction (IntroOrAndPattern pat)
- | pat = equality_intropattern -> CAst.make ~loc:!@loc @@ IntroAction pat
- | "_" -> CAst.make ~loc:!@loc @@ IntroAction IntroWildcard
- | pat = naming_intropattern -> CAst.make ~loc:!@loc @@ IntroNaming pat ] ]
+ [ [ pat = or_and_intropattern -> { CAst.make ~loc @@ IntroAction (IntroOrAndPattern pat) }
+ | pat = equality_intropattern -> { CAst.make ~loc @@ IntroAction pat }
+ | "_" -> { CAst.make ~loc @@ IntroAction IntroWildcard }
+ | pat = naming_intropattern -> { CAst.make ~loc @@ IntroNaming pat } ] ]
;
simple_binding:
- [ [ "("; id = ident; ":="; c = lconstr; ")" -> CAst.make ~loc:!@loc (NamedHyp id, c)
- | "("; n = natural; ":="; c = lconstr; ")" -> CAst.make ~loc:!@loc (AnonHyp n, c) ] ]
+ [ [ "("; id = ident; ":="; c = lconstr; ")" -> { CAst.make ~loc (NamedHyp id, c) }
+ | "("; n = natural; ":="; c = lconstr; ")" -> { CAst.make ~loc (AnonHyp n, c) } ] ]
;
bindings:
[ [ test_lpar_idnum_coloneq; bl = LIST1 simple_binding ->
- ExplicitBindings bl
- | bl = LIST1 constr -> ImplicitBindings bl ] ]
+ { ExplicitBindings bl }
+ | bl = LIST1 constr -> { ImplicitBindings bl } ] ]
;
constr_with_bindings:
- [ [ c = constr; l = with_bindings -> (c, l) ] ]
+ [ [ c = constr; l = with_bindings -> { (c, l) } ] ]
;
with_bindings:
- [ [ "with"; bl = bindings -> bl | -> NoBindings ] ]
+ [ [ "with"; bl = bindings -> { bl } | -> { NoBindings } ] ]
;
red_flags:
- [ [ IDENT "beta" -> [FBeta]
- | IDENT "iota" -> [FMatch;FFix;FCofix]
- | IDENT "match" -> [FMatch]
- | IDENT "fix" -> [FFix]
- | IDENT "cofix" -> [FCofix]
- | IDENT "zeta" -> [FZeta]
- | IDENT "delta"; d = delta_flag -> [d]
+ [ [ IDENT "beta" -> { [FBeta] }
+ | IDENT "iota" -> { [FMatch;FFix;FCofix] }
+ | IDENT "match" -> { [FMatch] }
+ | IDENT "fix" -> { [FFix] }
+ | IDENT "cofix" -> { [FCofix] }
+ | IDENT "zeta" -> { [FZeta] }
+ | IDENT "delta"; d = delta_flag -> { [d] }
] ]
;
delta_flag:
- [ [ "-"; "["; idl = LIST1 smart_global; "]" -> FDeltaBut idl
- | "["; idl = LIST1 smart_global; "]" -> FConst idl
- | -> FDeltaBut []
+ [ [ "-"; "["; idl = LIST1 smart_global; "]" -> { FDeltaBut idl }
+ | "["; idl = LIST1 smart_global; "]" -> { FConst idl }
+ | -> { FDeltaBut [] }
] ]
;
strategy_flag:
- [ [ s = LIST1 red_flags -> Redops.make_red_flag (List.flatten s)
- | d = delta_flag -> all_with d
+ [ [ s = LIST1 red_flags -> { Redops.make_red_flag (List.flatten s) }
+ | d = delta_flag -> { all_with d }
] ]
;
red_expr:
- [ [ IDENT "red" -> Red false
- | IDENT "hnf" -> Hnf
- | IDENT "simpl"; d = delta_flag; po = OPT ref_or_pattern_occ -> Simpl (all_with d,po)
- | IDENT "cbv"; s = strategy_flag -> Cbv s
- | IDENT "cbn"; s = strategy_flag -> Cbn s
- | IDENT "lazy"; s = strategy_flag -> Lazy s
- | IDENT "compute"; delta = delta_flag -> Cbv (all_with delta)
- | IDENT "vm_compute"; po = OPT ref_or_pattern_occ -> CbvVm po
- | IDENT "native_compute"; po = OPT ref_or_pattern_occ -> CbvNative po
- | IDENT "unfold"; ul = LIST1 unfold_occ SEP "," -> Unfold ul
- | IDENT "fold"; cl = LIST1 constr -> Fold cl
- | IDENT "pattern"; pl = LIST1 pattern_occ SEP"," -> Pattern pl
- | s = IDENT -> ExtraRedExpr s ] ]
+ [ [ IDENT "red" -> { Red false }
+ | IDENT "hnf" -> { Hnf }
+ | IDENT "simpl"; d = delta_flag; po = OPT ref_or_pattern_occ -> { Simpl (all_with d,po) }
+ | IDENT "cbv"; s = strategy_flag -> { Cbv s }
+ | IDENT "cbn"; s = strategy_flag -> { Cbn s }
+ | IDENT "lazy"; s = strategy_flag -> { Lazy s }
+ | IDENT "compute"; delta = delta_flag -> { Cbv (all_with delta) }
+ | IDENT "vm_compute"; po = OPT ref_or_pattern_occ -> { CbvVm po }
+ | IDENT "native_compute"; po = OPT ref_or_pattern_occ -> { CbvNative po }
+ | IDENT "unfold"; ul = LIST1 unfold_occ SEP "," -> { Unfold ul }
+ | IDENT "fold"; cl = LIST1 constr -> { Fold cl }
+ | IDENT "pattern"; pl = LIST1 pattern_occ SEP"," -> { Pattern pl }
+ | s = IDENT -> { ExtraRedExpr s } ] ]
;
hypident:
[ [ id = id_or_meta ->
- let id : lident = id in
- id,InHyp
+ { let id : lident = id in
+ id,InHyp }
| "("; IDENT "type"; IDENT "of"; id = id_or_meta; ")" ->
- let id : lident = id in
- id,InHypTypeOnly
+ { let id : lident = id in
+ id,InHypTypeOnly }
| "("; IDENT "value"; IDENT "of"; id = id_or_meta; ")" ->
- let id : lident = id in
- id,InHypValueOnly
+ { let id : lident = id in
+ id,InHypValueOnly }
] ]
;
hypident_occ:
- [ [ (id,l)=hypident; occs=occs ->
+ [ [ h=hypident; occs=occs ->
+ { let (id,l) = h in
let id : lident = id in
- ((occs,id),l) ] ]
+ ((occs,id),l) } ] ]
;
in_clause:
[ [ "*"; occs=occs ->
- {onhyps=None; concl_occs=occs}
+ { {onhyps=None; concl_occs=occs} }
| "*"; "|-"; occs=concl_occ ->
- {onhyps=None; concl_occs=occs}
+ { {onhyps=None; concl_occs=occs} }
| hl=LIST0 hypident_occ SEP","; "|-"; occs=concl_occ ->
- {onhyps=Some hl; concl_occs=occs}
+ { {onhyps=Some hl; concl_occs=occs} }
| hl=LIST0 hypident_occ SEP"," ->
- {onhyps=Some hl; concl_occs=NoOccurrences} ] ]
+ { {onhyps=Some hl; concl_occs=NoOccurrences} } ] ]
;
clause_dft_concl:
- [ [ "in"; cl = in_clause -> cl
- | occs=occs -> {onhyps=Some[]; concl_occs=occs}
- | -> all_concl_occs_clause ] ]
+ [ [ "in"; cl = in_clause -> { cl }
+ | occs=occs -> { {onhyps=Some[]; concl_occs=occs} }
+ | -> { all_concl_occs_clause } ] ]
;
clause_dft_all:
- [ [ "in"; cl = in_clause -> cl
- | -> {onhyps=None; concl_occs=AllOccurrences} ] ]
+ [ [ "in"; cl = in_clause -> { cl }
+ | -> { {onhyps=None; concl_occs=AllOccurrences} } ] ]
;
opt_clause:
- [ [ "in"; cl = in_clause -> Some cl
- | "at"; occs = occs_nums -> Some {onhyps=Some[]; concl_occs=occs}
- | -> None ] ]
+ [ [ "in"; cl = in_clause -> { Some cl }
+ | "at"; occs = occs_nums -> { Some {onhyps=Some[]; concl_occs=occs} }
+ | -> { None } ] ]
;
concl_occ:
- [ [ "*"; occs = occs -> occs
- | -> NoOccurrences ] ]
+ [ [ "*"; occs = occs -> { occs }
+ | -> { NoOccurrences } ] ]
;
in_hyp_list:
- [ [ "in"; idl = LIST1 id_or_meta -> idl
- | -> [] ] ]
+ [ [ "in"; idl = LIST1 id_or_meta -> { idl }
+ | -> { [] } ] ]
;
in_hyp_as:
- [ [ "in"; id = id_or_meta; ipat = as_ipat -> Some (id,ipat)
- | -> None ] ]
+ [ [ "in"; id = id_or_meta; ipat = as_ipat -> { Some (id,ipat) }
+ | -> { None } ] ]
;
orient:
- [ [ "->" -> true
- | "<-" -> false
- | -> true ]]
+ [ [ "->" -> { true }
+ | "<-" -> { false }
+ | -> { true } ] ]
;
simple_binder:
- [ [ na=name -> ([na],Default Explicit, CAst.make ~loc:!@loc @@
- CHole (Some (Evar_kinds.BinderType na.CAst.v), IntroAnonymous, None))
- | "("; nal=LIST1 name; ":"; c=lconstr; ")" -> (nal,Default Explicit,c)
+ [ [ na=name -> { ([na],Default Explicit, CAst.make ~loc @@
+ CHole (Some (Evar_kinds.BinderType na.CAst.v), IntroAnonymous, None)) }
+ | "("; nal=LIST1 name; ":"; c=lconstr; ")" -> { (nal,Default Explicit,c) }
] ]
;
fixdecl:
[ [ "("; id = ident; bl=LIST0 simple_binder; ann=fixannot;
- ":"; ty=lconstr; ")" -> (!@loc, id, bl, ann, ty) ] ]
+ ":"; ty=lconstr; ")" -> { (loc, id, bl, ann, ty) } ] ]
;
fixannot:
- [ [ "{"; IDENT "struct"; id=name; "}" -> Some id
- | -> None ] ]
+ [ [ "{"; IDENT "struct"; id=name; "}" -> { Some id }
+ | -> { None } ] ]
;
cofixdecl:
[ [ "("; id = ident; bl=LIST0 simple_binder; ":"; ty=lconstr; ")" ->
- (!@loc, id, bl, None, ty) ] ]
+ { (loc, id, bl, None, ty) } ] ]
;
bindings_with_parameters:
[ [ check_for_coloneq; "("; id = ident; bl = LIST0 simple_binder;
- ":="; c = lconstr; ")" -> (id, mkCLambdaN_simple bl c) ] ]
+ ":="; c = lconstr; ")" -> { (id, mkCLambdaN_simple bl c) } ] ]
;
eliminator:
- [ [ "using"; el = constr_with_bindings -> el ] ]
+ [ [ "using"; el = constr_with_bindings -> { el } ] ]
;
as_ipat:
- [ [ "as"; ipat = simple_intropattern -> Some ipat
- | -> None ] ]
+ [ [ "as"; ipat = simple_intropattern -> { Some ipat }
+ | -> { None } ] ]
;
or_and_intropattern_loc:
- [ [ ipat = or_and_intropattern -> ArgArg (CAst.make ~loc:!@loc ipat)
- | locid = identref -> ArgVar locid ] ]
+ [ [ ipat = or_and_intropattern -> { ArgArg (CAst.make ~loc ipat) }
+ | locid = identref -> { ArgVar locid } ] ]
;
as_or_and_ipat:
- [ [ "as"; ipat = or_and_intropattern_loc -> Some ipat
- | -> None ] ]
+ [ [ "as"; ipat = or_and_intropattern_loc -> { Some ipat }
+ | -> { None } ] ]
;
eqn_ipat:
- [ [ IDENT "eqn"; ":"; pat = naming_intropattern -> Some (CAst.make ~loc:!@loc pat)
+ [ [ IDENT "eqn"; ":"; pat = naming_intropattern -> { Some (CAst.make ~loc pat) }
| IDENT "_eqn"; ":"; pat = naming_intropattern ->
- let loc = !@loc in
- warn_deprecated_eqn_syntax ~loc "H"; Some (CAst.make ~loc pat)
+ { warn_deprecated_eqn_syntax ~loc "H"; Some (CAst.make ~loc pat) }
| IDENT "_eqn" ->
- let loc = !@loc in
- warn_deprecated_eqn_syntax ~loc "?"; Some (CAst.make ~loc IntroAnonymous)
- | -> None ] ]
+ { warn_deprecated_eqn_syntax ~loc "?"; Some (CAst.make ~loc IntroAnonymous) }
+ | -> { None } ] ]
;
as_name:
- [ [ "as"; id = ident ->Names.Name.Name id | -> Names.Name.Anonymous ] ]
+ [ [ "as"; id = ident -> { Names.Name.Name id } | -> { Names.Name.Anonymous } ] ]
;
by_tactic:
- [ [ "by"; tac = tactic_expr LEVEL "3" -> Some tac
- | -> None ] ]
+ [ [ "by"; tac = tactic_expr LEVEL "3" -> { Some tac }
+ | -> { None } ] ]
;
rewriter :
- [ [ "!"; c = constr_with_bindings_arg -> (Equality.RepeatPlus,c)
- | ["?"| LEFTQMARK]; c = constr_with_bindings_arg -> (Equality.RepeatStar,c)
- | n = natural; "!"; c = constr_with_bindings_arg -> (Equality.Precisely n,c)
- | n = natural; ["?" | LEFTQMARK]; c = constr_with_bindings_arg -> (Equality.UpTo n,c)
- | n = natural; c = constr_with_bindings_arg -> (Equality.Precisely n,c)
- | c = constr_with_bindings_arg -> (Equality.Precisely 1, c)
+ [ [ "!"; c = constr_with_bindings_arg -> { (Equality.RepeatPlus,c) }
+ | ["?" -> { () } | LEFTQMARK -> { () } ]; c = constr_with_bindings_arg -> { (Equality.RepeatStar,c) }
+ | n = natural; "!"; c = constr_with_bindings_arg -> { (Equality.Precisely n,c) }
+ | n = natural; ["?" -> { () } | LEFTQMARK -> { () } ]; c = constr_with_bindings_arg -> { (Equality.UpTo n,c) }
+ | n = natural; c = constr_with_bindings_arg -> { (Equality.Precisely n,c) }
+ | c = constr_with_bindings_arg -> { (Equality.Precisely 1, c) }
] ]
;
oriented_rewriter :
- [ [ b = orient; p = rewriter -> let (m,c) = p in (b,m,c) ] ]
+ [ [ b = orient; p = rewriter -> { let (m,c) = p in (b,m,c) } ] ]
;
induction_clause:
[ [ c = destruction_arg; pat = as_or_and_ipat; eq = eqn_ipat;
- cl = opt_clause -> (c,(eq,pat),cl) ] ]
+ cl = opt_clause -> { (c,(eq,pat),cl) } ] ]
;
induction_clause_list:
[ [ ic = LIST1 induction_clause SEP ","; el = OPT eliminator;
cl_tolerance = opt_clause ->
(* Condition for accepting "in" at the end by compatibility *)
- match ic,el,cl_tolerance with
+ { match ic,el,cl_tolerance with
| [c,pat,None],Some _,Some _ -> ([c,pat,cl_tolerance],el)
| _,_,Some _ -> err ()
- | _,_,None -> (ic,el) ]]
+ | _,_,None -> (ic,el) } ] ]
;
simple_tactic:
[ [
(* Basic tactics *)
IDENT "intros"; pl = ne_intropatterns ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacIntroPattern (false,pl))
+ { TacAtom (Loc.tag ~loc @@ TacIntroPattern (false,pl)) }
| IDENT "intros" ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacIntroPattern (false,[CAst.make ~loc:!@loc @@IntroForthcoming false]))
+ { TacAtom (Loc.tag ~loc @@ TacIntroPattern (false,[CAst.make ~loc @@IntroForthcoming false])) }
| IDENT "eintros"; pl = ne_intropatterns ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacIntroPattern (true,pl))
+ { TacAtom (Loc.tag ~loc @@ TacIntroPattern (true,pl)) }
| IDENT "apply"; cl = LIST1 constr_with_bindings_arg SEP ",";
- inhyp = in_hyp_as -> TacAtom (Loc.tag ~loc:!@loc @@ TacApply (true,false,cl,inhyp))
+ inhyp = in_hyp_as -> { TacAtom (Loc.tag ~loc @@ TacApply (true,false,cl,inhyp)) }
| IDENT "eapply"; cl = LIST1 constr_with_bindings_arg SEP ",";
- inhyp = in_hyp_as -> TacAtom (Loc.tag ~loc:!@loc @@ TacApply (true,true,cl,inhyp))
+ inhyp = in_hyp_as -> { TacAtom (Loc.tag ~loc @@ TacApply (true,true,cl,inhyp)) }
| IDENT "simple"; IDENT "apply";
cl = LIST1 constr_with_bindings_arg SEP ",";
- inhyp = in_hyp_as -> TacAtom (Loc.tag ~loc:!@loc @@ TacApply (false,false,cl,inhyp))
+ inhyp = in_hyp_as -> { TacAtom (Loc.tag ~loc @@ TacApply (false,false,cl,inhyp)) }
| IDENT "simple"; IDENT "eapply";
cl = LIST1 constr_with_bindings_arg SEP",";
- inhyp = in_hyp_as -> TacAtom (Loc.tag ~loc:!@loc @@ TacApply (false,true,cl,inhyp))
+ inhyp = in_hyp_as -> { TacAtom (Loc.tag ~loc @@ TacApply (false,true,cl,inhyp)) }
| IDENT "elim"; cl = constr_with_bindings_arg; el = OPT eliminator ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacElim (false,cl,el))
+ { TacAtom (Loc.tag ~loc @@ TacElim (false,cl,el)) }
| IDENT "eelim"; cl = constr_with_bindings_arg; el = OPT eliminator ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacElim (true,cl,el))
- | IDENT "case"; icl = induction_clause_list -> TacAtom (Loc.tag ~loc:!@loc @@ mkTacCase false icl)
- | IDENT "ecase"; icl = induction_clause_list -> TacAtom (Loc.tag ~loc:!@loc @@ mkTacCase true icl)
+ { TacAtom (Loc.tag ~loc @@ TacElim (true,cl,el)) }
+ | IDENT "case"; icl = induction_clause_list -> { TacAtom (Loc.tag ~loc @@ mkTacCase false icl) }
+ | IDENT "ecase"; icl = induction_clause_list -> { TacAtom (Loc.tag ~loc @@ mkTacCase true icl) }
| "fix"; id = ident; n = natural; "with"; fd = LIST1 fixdecl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacMutualFix (id,n,List.map mk_fix_tac fd))
+ { TacAtom (Loc.tag ~loc @@ TacMutualFix (id,n,List.map mk_fix_tac fd)) }
| "cofix"; id = ident; "with"; fd = LIST1 cofixdecl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacMutualCofix (id,List.map mk_cofix_tac fd))
+ { TacAtom (Loc.tag ~loc @@ TacMutualCofix (id,List.map mk_cofix_tac fd)) }
- | IDENT "pose"; (id,b) = bindings_with_parameters ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacLetTac (false,Names.Name.Name id,b,Locusops.nowhere,true,None))
+ | IDENT "pose"; bl = bindings_with_parameters ->
+ { let (id,b) = bl in TacAtom (Loc.tag ~loc @@ TacLetTac (false,Names.Name.Name id,b,Locusops.nowhere,true,None)) }
| IDENT "pose"; b = constr; na = as_name ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacLetTac (false,na,b,Locusops.nowhere,true,None))
- | IDENT "epose"; (id,b) = bindings_with_parameters ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacLetTac (true,Names.Name id,b,Locusops.nowhere,true,None))
+ { TacAtom (Loc.tag ~loc @@ TacLetTac (false,na,b,Locusops.nowhere,true,None)) }
+ | IDENT "epose"; bl = bindings_with_parameters ->
+ { let (id,b) = bl in TacAtom (Loc.tag ~loc @@ TacLetTac (true,Names.Name id,b,Locusops.nowhere,true,None)) }
| IDENT "epose"; b = constr; na = as_name ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacLetTac (true,na,b,Locusops.nowhere,true,None))
- | IDENT "set"; (id,c) = bindings_with_parameters; p = clause_dft_concl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacLetTac (false,Names.Name.Name id,c,p,true,None))
+ { TacAtom (Loc.tag ~loc @@ TacLetTac (true,na,b,Locusops.nowhere,true,None)) }
+ | IDENT "set"; bl = bindings_with_parameters; p = clause_dft_concl ->
+ { let (id,c) = bl in TacAtom (Loc.tag ~loc @@ TacLetTac (false,Names.Name.Name id,c,p,true,None)) }
| IDENT "set"; c = constr; na = as_name; p = clause_dft_concl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacLetTac (false,na,c,p,true,None))
- | IDENT "eset"; (id,c) = bindings_with_parameters; p = clause_dft_concl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacLetTac (true,Names.Name id,c,p,true,None))
+ { TacAtom (Loc.tag ~loc @@ TacLetTac (false,na,c,p,true,None)) }
+ | IDENT "eset"; bl = bindings_with_parameters; p = clause_dft_concl ->
+ { let (id,c) = bl in TacAtom (Loc.tag ~loc @@ TacLetTac (true,Names.Name id,c,p,true,None)) }
| IDENT "eset"; c = constr; na = as_name; p = clause_dft_concl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacLetTac (true,na,c,p,true,None))
+ { TacAtom (Loc.tag ~loc @@ TacLetTac (true,na,c,p,true,None)) }
| IDENT "remember"; c = constr; na = as_name; e = eqn_ipat;
p = clause_dft_all ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacLetTac (false,na,c,p,false,e))
+ { TacAtom (Loc.tag ~loc @@ TacLetTac (false,na,c,p,false,e)) }
| IDENT "eremember"; c = constr; na = as_name; e = eqn_ipat;
p = clause_dft_all ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacLetTac (true,na,c,p,false,e))
+ { TacAtom (Loc.tag ~loc @@ TacLetTac (true,na,c,p,false,e)) }
(* Alternative syntax for "pose proof c as id" *)
| IDENT "assert"; test_lpar_id_coloneq; "("; lid = identref; ":=";
c = lconstr; ")" ->
- let { CAst.loc = loc; v = id } = lid in
- TacAtom (Loc.tag ?loc @@ TacAssert (false,true,None,Some (CAst.make ?loc @@ IntroNaming (IntroIdentifier id)),c))
+ { let { CAst.loc = loc; v = id } = lid in
+ TacAtom (Loc.tag ?loc @@ TacAssert (false,true,None,Some (CAst.make ?loc @@ IntroNaming (IntroIdentifier id)),c)) }
| IDENT "eassert"; test_lpar_id_coloneq; "("; lid = identref; ":=";
c = lconstr; ")" ->
- let { CAst.loc = loc; v = id } = lid in
- TacAtom (Loc.tag ?loc @@ TacAssert (true,true,None,Some (CAst.make ?loc @@ IntroNaming (IntroIdentifier id)),c))
+ { let { CAst.loc = loc; v = id } = lid in
+ TacAtom (Loc.tag ?loc @@ TacAssert (true,true,None,Some (CAst.make ?loc @@ IntroNaming (IntroIdentifier id)),c)) }
(* Alternative syntax for "assert c as id by tac" *)
| IDENT "assert"; test_lpar_id_colon; "("; lid = identref; ":";
c = lconstr; ")"; tac=by_tactic ->
- let { CAst.loc = loc; v = id } = lid in
- TacAtom (Loc.tag ?loc @@ TacAssert (false,true,Some tac,Some (CAst.make ?loc @@ IntroNaming (IntroIdentifier id)),c))
+ { let { CAst.loc = loc; v = id } = lid in
+ TacAtom (Loc.tag ?loc @@ TacAssert (false,true,Some tac,Some (CAst.make ?loc @@ IntroNaming (IntroIdentifier id)),c)) }
| IDENT "eassert"; test_lpar_id_colon; "("; lid = identref; ":";
c = lconstr; ")"; tac=by_tactic ->
- let { CAst.loc = loc; v = id } = lid in
- TacAtom (Loc.tag ?loc @@ TacAssert (true,true,Some tac,Some (CAst.make ?loc @@ IntroNaming (IntroIdentifier id)),c))
+ { let { CAst.loc = loc; v = id } = lid in
+ TacAtom (Loc.tag ?loc @@ TacAssert (true,true,Some tac,Some (CAst.make ?loc @@ IntroNaming (IntroIdentifier id)),c)) }
(* Alternative syntax for "enough c as id by tac" *)
| IDENT "enough"; test_lpar_id_colon; "("; lid = identref; ":";
c = lconstr; ")"; tac=by_tactic ->
- let { CAst.loc = loc; v = id } = lid in
- TacAtom (Loc.tag ?loc @@ TacAssert (false,false,Some tac,Some (CAst.make ?loc @@ IntroNaming (IntroIdentifier id)),c))
+ { let { CAst.loc = loc; v = id } = lid in
+ TacAtom (Loc.tag ?loc @@ TacAssert (false,false,Some tac,Some (CAst.make ?loc @@ IntroNaming (IntroIdentifier id)),c)) }
| IDENT "eenough"; test_lpar_id_colon; "("; lid = identref; ":";
c = lconstr; ")"; tac=by_tactic ->
- let { CAst.loc = loc; v = id } = lid in
- TacAtom (Loc.tag ?loc @@ TacAssert (true,false,Some tac,Some (CAst.make ?loc @@ IntroNaming (IntroIdentifier id)),c))
+ { let { CAst.loc = loc; v = id } = lid in
+ TacAtom (Loc.tag ?loc @@ TacAssert (true,false,Some tac,Some (CAst.make ?loc @@ IntroNaming (IntroIdentifier id)),c)) }
| IDENT "assert"; c = constr; ipat = as_ipat; tac = by_tactic ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacAssert (false,true,Some tac,ipat,c))
+ { TacAtom (Loc.tag ~loc @@ TacAssert (false,true,Some tac,ipat,c)) }
| IDENT "eassert"; c = constr; ipat = as_ipat; tac = by_tactic ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacAssert (true,true,Some tac,ipat,c))
+ { TacAtom (Loc.tag ~loc @@ TacAssert (true,true,Some tac,ipat,c)) }
| IDENT "pose"; IDENT "proof"; c = lconstr; ipat = as_ipat ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacAssert (false,true,None,ipat,c))
+ { TacAtom (Loc.tag ~loc @@ TacAssert (false,true,None,ipat,c)) }
| IDENT "epose"; IDENT "proof"; c = lconstr; ipat = as_ipat ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacAssert (true,true,None,ipat,c))
+ { TacAtom (Loc.tag ~loc @@ TacAssert (true,true,None,ipat,c)) }
| IDENT "enough"; c = constr; ipat = as_ipat; tac = by_tactic ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacAssert (false,false,Some tac,ipat,c))
+ { TacAtom (Loc.tag ~loc @@ TacAssert (false,false,Some tac,ipat,c)) }
| IDENT "eenough"; c = constr; ipat = as_ipat; tac = by_tactic ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacAssert (true,false,Some tac,ipat,c))
+ { TacAtom (Loc.tag ~loc @@ TacAssert (true,false,Some tac,ipat,c)) }
| IDENT "generalize"; c = constr ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacGeneralize [((AllOccurrences,c),Names.Name.Anonymous)])
+ { TacAtom (Loc.tag ~loc @@ TacGeneralize [((AllOccurrences,c),Names.Name.Anonymous)]) }
| IDENT "generalize"; c = constr; l = LIST1 constr ->
- let gen_everywhere c = ((AllOccurrences,c),Names.Name.Anonymous) in
- TacAtom (Loc.tag ~loc:!@loc @@ TacGeneralize (List.map gen_everywhere (c::l)))
+ { let gen_everywhere c = ((AllOccurrences,c),Names.Name.Anonymous) in
+ TacAtom (Loc.tag ~loc @@ TacGeneralize (List.map gen_everywhere (c::l))) }
| IDENT "generalize"; c = constr; lookup_at_as_comma; nl = occs;
na = as_name;
- l = LIST0 [","; c = pattern_occ; na = as_name -> (c,na)] ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacGeneralize (((nl,c),na)::l))
+ l = LIST0 [","; c = pattern_occ; na = as_name -> { (c,na) } ] ->
+ { TacAtom (Loc.tag ~loc @@ TacGeneralize (((nl,c),na)::l)) }
(* Derived basic tactics *)
| IDENT "induction"; ic = induction_clause_list ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacInductionDestruct (true,false,ic))
+ { TacAtom (Loc.tag ~loc @@ TacInductionDestruct (true,false,ic)) }
| IDENT "einduction"; ic = induction_clause_list ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacInductionDestruct(true,true,ic))
+ { TacAtom (Loc.tag ~loc @@ TacInductionDestruct(true,true,ic)) }
| IDENT "destruct"; icl = induction_clause_list ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacInductionDestruct(false,false,icl))
+ { TacAtom (Loc.tag ~loc @@ TacInductionDestruct(false,false,icl)) }
| IDENT "edestruct"; icl = induction_clause_list ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacInductionDestruct(false,true,icl))
+ { TacAtom (Loc.tag ~loc @@ TacInductionDestruct(false,true,icl)) }
(* Equality and inversion *)
| IDENT "rewrite"; l = LIST1 oriented_rewriter SEP ",";
- cl = clause_dft_concl; t=by_tactic -> TacAtom (Loc.tag ~loc:!@loc @@ TacRewrite (false,l,cl,t))
+ cl = clause_dft_concl; t=by_tactic -> { TacAtom (Loc.tag ~loc @@ TacRewrite (false,l,cl,t)) }
| IDENT "erewrite"; l = LIST1 oriented_rewriter SEP ",";
- cl = clause_dft_concl; t=by_tactic -> TacAtom (Loc.tag ~loc:!@loc @@ TacRewrite (true,l,cl,t))
+ cl = clause_dft_concl; t=by_tactic -> { TacAtom (Loc.tag ~loc @@ TacRewrite (true,l,cl,t)) }
| IDENT "dependent"; k =
- [ IDENT "simple"; IDENT "inversion" -> SimpleInversion
- | IDENT "inversion" -> FullInversion
- | IDENT "inversion_clear" -> FullInversionClear ];
+ [ IDENT "simple"; IDENT "inversion" -> { SimpleInversion }
+ | IDENT "inversion" -> { FullInversion }
+ | IDENT "inversion_clear" -> { FullInversionClear } ];
hyp = quantified_hypothesis;
- ids = as_or_and_ipat; co = OPT ["with"; c = constr -> c] ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacInversion (DepInversion (k,co,ids),hyp))
+ ids = as_or_and_ipat; co = OPT ["with"; c = constr -> { c } ] ->
+ { TacAtom (Loc.tag ~loc @@ TacInversion (DepInversion (k,co,ids),hyp)) }
| IDENT "simple"; IDENT "inversion";
hyp = quantified_hypothesis; ids = as_or_and_ipat;
cl = in_hyp_list ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacInversion (NonDepInversion (SimpleInversion, cl, ids), hyp))
+ { TacAtom (Loc.tag ~loc @@ TacInversion (NonDepInversion (SimpleInversion, cl, ids), hyp)) }
| IDENT "inversion";
hyp = quantified_hypothesis; ids = as_or_and_ipat;
cl = in_hyp_list ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacInversion (NonDepInversion (FullInversion, cl, ids), hyp))
+ { TacAtom (Loc.tag ~loc @@ TacInversion (NonDepInversion (FullInversion, cl, ids), hyp)) }
| IDENT "inversion_clear";
hyp = quantified_hypothesis; ids = as_or_and_ipat;
cl = in_hyp_list ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacInversion (NonDepInversion (FullInversionClear, cl, ids), hyp))
+ { TacAtom (Loc.tag ~loc @@ TacInversion (NonDepInversion (FullInversionClear, cl, ids), hyp)) }
| IDENT "inversion"; hyp = quantified_hypothesis;
"using"; c = constr; cl = in_hyp_list ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacInversion (InversionUsing (c,cl), hyp))
+ { TacAtom (Loc.tag ~loc @@ TacInversion (InversionUsing (c,cl), hyp)) }
(* Conversion *)
| IDENT "red"; cl = clause_dft_concl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacReduce (Red false, cl))
+ { TacAtom (Loc.tag ~loc @@ TacReduce (Red false, cl)) }
| IDENT "hnf"; cl = clause_dft_concl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacReduce (Hnf, cl))
+ { TacAtom (Loc.tag ~loc @@ TacReduce (Hnf, cl)) }
| IDENT "simpl"; d = delta_flag; po = OPT ref_or_pattern_occ; cl = clause_dft_concl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacReduce (Simpl (all_with d, po), cl))
+ { TacAtom (Loc.tag ~loc @@ TacReduce (Simpl (all_with d, po), cl)) }
| IDENT "cbv"; s = strategy_flag; cl = clause_dft_concl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacReduce (Cbv s, cl))
+ { TacAtom (Loc.tag ~loc @@ TacReduce (Cbv s, cl)) }
| IDENT "cbn"; s = strategy_flag; cl = clause_dft_concl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacReduce (Cbn s, cl))
+ { TacAtom (Loc.tag ~loc @@ TacReduce (Cbn s, cl)) }
| IDENT "lazy"; s = strategy_flag; cl = clause_dft_concl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacReduce (Lazy s, cl))
+ { TacAtom (Loc.tag ~loc @@ TacReduce (Lazy s, cl)) }
| IDENT "compute"; delta = delta_flag; cl = clause_dft_concl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacReduce (Cbv (all_with delta), cl))
+ { TacAtom (Loc.tag ~loc @@ TacReduce (Cbv (all_with delta), cl)) }
| IDENT "vm_compute"; po = OPT ref_or_pattern_occ; cl = clause_dft_concl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacReduce (CbvVm po, cl))
+ { TacAtom (Loc.tag ~loc @@ TacReduce (CbvVm po, cl)) }
| IDENT "native_compute"; po = OPT ref_or_pattern_occ; cl = clause_dft_concl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacReduce (CbvNative po, cl))
+ { TacAtom (Loc.tag ~loc @@ TacReduce (CbvNative po, cl)) }
| IDENT "unfold"; ul = LIST1 unfold_occ SEP ","; cl = clause_dft_concl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacReduce (Unfold ul, cl))
+ { TacAtom (Loc.tag ~loc @@ TacReduce (Unfold ul, cl)) }
| IDENT "fold"; l = LIST1 constr; cl = clause_dft_concl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacReduce (Fold l, cl))
+ { TacAtom (Loc.tag ~loc @@ TacReduce (Fold l, cl)) }
| IDENT "pattern"; pl = LIST1 pattern_occ SEP","; cl = clause_dft_concl ->
- TacAtom (Loc.tag ~loc:!@loc @@ TacReduce (Pattern pl, cl))
+ { TacAtom (Loc.tag ~loc @@ TacReduce (Pattern pl, cl)) }
(* Change must not apply inside a Definition t := Eval ... *)
- | IDENT "change"; (oc,c) = conversion; cl = clause_dft_concl ->
- let p,cl = merge_occurrences (!@loc) cl oc in
- TacAtom (Loc.tag ~loc:!@loc @@ TacChange (p,c,cl))
+ | IDENT "change"; c = conversion; cl = clause_dft_concl ->
+ { let (oc, c) = c in
+ let p,cl = merge_occurrences loc cl oc in
+ TacAtom (Loc.tag ~loc @@ TacChange (p,c,cl)) }
] ]
;
-END;;
+END
diff --git a/plugins/ltac/pptactic.ml b/plugins/ltac/pptactic.ml
index 09179dad3..4357689ee 100644
--- a/plugins/ltac/pptactic.ml
+++ b/plugins/ltac/pptactic.ml
@@ -115,7 +115,7 @@ let string_of_genarg_arg (ArgumentType arg) =
let keyword x = tag_keyword (str x)
let primitive x = tag_primitive (str x)
- let has_type (Val.Dyn (tag, x)) t = match Val.eq tag t with
+ let has_type (Val.Dyn (tag, _)) t = match Val.eq tag t with
| None -> false
| Some _ -> true
@@ -188,7 +188,7 @@ let string_of_genarg_arg (ArgumentType arg) =
| AN v -> f v
| ByNotation (s,sc) -> qs s ++ pr_opt (fun sc -> str "%" ++ str sc) sc)
- let pr_located pr (loc,x) = pr x
+ let pr_located pr (_,x) = pr x
let pr_evaluable_reference = function
| EvalVarRef id -> pr_id id
@@ -240,7 +240,7 @@ let string_of_genarg_arg (ArgumentType arg) =
in
pr_sequence (fun x -> x) l
- let pr_extend_gen pr_gen lev { mltac_name = s; mltac_index = i } l =
+ let pr_extend_gen pr_gen _ { mltac_name = s; mltac_index = i } l =
let name =
str s.mltac_plugin ++ str "::" ++ str s.mltac_tactic ++
str "@" ++ int i
@@ -260,7 +260,7 @@ let string_of_genarg_arg (ArgumentType arg) =
| Extend.Uentry tag ->
let ArgT.Any tag = tag in
ArgT.repr tag
- | Extend.Uentryl (tkn, lvl) -> "tactic" ^ string_of_int lvl
+ | Extend.Uentryl (_, lvl) -> "tactic" ^ string_of_int lvl
let pr_alias_key key =
try
@@ -288,7 +288,7 @@ let string_of_genarg_arg (ArgumentType arg) =
let p = pr_tacarg_using_rule pr_gen prods in
if pp.pptac_level > lev then surround p else p
with Not_found ->
- let pr arg = str "_" in
+ let pr _ = str "_" in
KerName.print key ++ spc() ++ pr_sequence pr l ++ str" (* Generic printer *)"
let pr_farg prtac arg = prtac (1, Any) (TacArg (Loc.tag arg))
@@ -341,14 +341,14 @@ let string_of_genarg_arg (ArgumentType arg) =
pr_any_arg pr symb arg
| _ -> str "ltac:(" ++ prtac (1, Any) arg ++ str ")"
- let pr_raw_extend_rec prc prlc prtac prpat =
+ let pr_raw_extend_rec prtac =
pr_extend_gen (pr_farg prtac)
- let pr_glob_extend_rec prc prlc prtac prpat =
+ let pr_glob_extend_rec prtac =
pr_extend_gen (pr_farg prtac)
- let pr_raw_alias prc prlc prtac prpat lev key args =
+ let pr_raw_alias prtac lev key args =
pr_alias_gen (pr_targ (fun l a -> prtac l (TacArg (Loc.tag a)))) lev key args
- let pr_glob_alias prc prlc prtac prpat lev key args =
+ let pr_glob_alias prtac lev key args =
pr_alias_gen (pr_targ (fun l a -> prtac l (TacArg (Loc.tag a)))) lev key args
(**********************************************************************)
@@ -743,7 +743,7 @@ let pr_goal_selector ~toplevel s =
(* Main tactic printer *)
and pr_atom1 a = tag_atom a (match a with
(* Basic tactics *)
- | TacIntroPattern (ev,[]) as t ->
+ | TacIntroPattern (_,[]) as t ->
pr_atom0 t
| TacIntroPattern (ev,(_::_ as p)) ->
hov 1 (primitive (if ev then "eintros" else "intros") ++
@@ -1054,7 +1054,7 @@ let pr_goal_selector ~toplevel s =
primitive "fresh" ++ pr_fresh_ids l, latom
| TacArg(_,TacGeneric arg) ->
pr.pr_generic arg, latom
- | TacArg(_,TacCall(loc,(f,[]))) ->
+ | TacArg(_,TacCall(_,(f,[]))) ->
pr.pr_reference f, latom
| TacArg(_,TacCall(loc,(f,l))) ->
pr_with_comments ?loc (hov 1 (
@@ -1112,8 +1112,8 @@ let pr_goal_selector ~toplevel s =
pr_reference = pr_qualid;
pr_name = pr_lident;
pr_generic = (fun arg -> Pputils.pr_raw_generic (Global.env ()) arg);
- pr_extend = pr_raw_extend_rec pr_constr_expr pr_lconstr_expr pr_raw_tactic_level pr_constr_pattern_expr;
- pr_alias = pr_raw_alias pr_constr_expr pr_lconstr_expr pr_raw_tactic_level pr_constr_pattern_expr;
+ pr_extend = pr_raw_extend_rec pr_raw_tactic_level;
+ pr_alias = pr_raw_alias pr_raw_tactic_level;
} in
make_pr_tac
pr raw_printers
@@ -1142,12 +1142,8 @@ let pr_goal_selector ~toplevel s =
pr_reference = pr_ltac_or_var (pr_located pr_ltac_constant);
pr_name = pr_lident;
pr_generic = (fun arg -> Pputils.pr_glb_generic (Global.env ()) arg);
- pr_extend = pr_glob_extend_rec
- (pr_and_constr_expr (pr_glob_constr_env env)) (pr_and_constr_expr (pr_lglob_constr_env env))
- prtac (pr_pat_and_constr_expr (pr_glob_constr_env env));
- pr_alias = pr_glob_alias
- (pr_and_constr_expr (pr_glob_constr_env env)) (pr_and_constr_expr (pr_lglob_constr_env env))
- prtac (pr_pat_and_constr_expr (pr_glob_constr_env env));
+ pr_extend = pr_glob_extend_rec prtac;
+ pr_alias = pr_glob_alias prtac;
} in
make_pr_tac
pr glob_printers
@@ -1168,8 +1164,8 @@ let pr_goal_selector ~toplevel s =
| _ -> user_err Pp.(str "Cannot translate fix tactic: not enough products") in
strip_ty [] n ty
- let pr_atomic_tactic_level env sigma n t =
- let prtac n (t:atomic_tactic_expr) =
+ let pr_atomic_tactic_level env sigma t =
+ let prtac (t:atomic_tactic_expr) =
let pr = {
pr_tactic = (fun _ _ -> str "<tactic>");
pr_constr = (fun c -> pr_econstr_env env sigma c);
@@ -1188,18 +1184,15 @@ let pr_goal_selector ~toplevel s =
in
pr_atom pr strip_prod_binders_constr tag_atomic_tactic_expr t
in
- prtac n t
+ prtac t
let pr_raw_generic = Pputils.pr_raw_generic
let pr_glb_generic = Pputils.pr_glb_generic
- let pr_raw_extend env = pr_raw_extend_rec
- pr_constr_expr pr_lconstr_expr pr_raw_tactic_level pr_constr_pattern_expr
+ let pr_raw_extend _ = pr_raw_extend_rec pr_raw_tactic_level
- let pr_glob_extend env = pr_glob_extend_rec
- (pr_and_constr_expr (pr_glob_constr_env env)) (pr_and_constr_expr (pr_lglob_constr_env env))
- (pr_glob_tactic_level env) (pr_pat_and_constr_expr (pr_glob_constr_env env))
+ let pr_glob_extend env = pr_glob_extend_rec (pr_glob_tactic_level env)
let pr_alias pr lev key args =
pr_alias_gen (fun _ arg -> pr arg) lev key args
@@ -1207,14 +1200,14 @@ let pr_goal_selector ~toplevel s =
let pr_extend pr lev ml args =
pr_extend_gen pr lev ml args
- let pr_atomic_tactic env sigma c = pr_atomic_tactic_level env sigma ltop c
+ let pr_atomic_tactic env sigma c = pr_atomic_tactic_level env sigma c
let declare_extra_genarg_pprule wit
(f : 'a raw_extra_genarg_printer)
(g : 'b glob_extra_genarg_printer)
(h : 'c extra_genarg_printer) =
begin match wit with
- | ExtraArg s -> ()
+ | ExtraArg _ -> ()
| _ -> user_err Pp.(str "Can declare a pretty-printing rule only for extra argument types.")
end;
let f x =
diff --git a/plugins/ltac/rewrite.ml b/plugins/ltac/rewrite.ml
index 01c52c413..9f8cd2fc4 100644
--- a/plugins/ltac/rewrite.ml
+++ b/plugins/ltac/rewrite.ml
@@ -409,7 +409,7 @@ module TypeGlobal = struct
let inverse env (evd,cstrs) car rel =
- let (evd, sort) = Evarutil.new_Type ~rigid:Evd.univ_flexible env evd in
+ let (evd, sort) = Evarutil.new_Type ~rigid:Evd.univ_flexible evd in
app_poly_check env (evd,cstrs) coq_inverse [| car ; car; sort; rel |]
end
diff --git a/plugins/ltac/taccoerce.ml b/plugins/ltac/taccoerce.ml
index 84baea964..026c00b84 100644
--- a/plugins/ltac/taccoerce.ml
+++ b/plugins/ltac/taccoerce.ml
@@ -165,8 +165,7 @@ let coerce_var_to_ident fresh env sigma v =
(* Interprets, if possible, a constr to an identifier which may not
be fresh but suitable to be given to the fresh tactic. Works for
vars, constants, inductive, constructors and sorts. *)
-let coerce_to_ident_not_fresh env sigma v =
-let g = sigma in
+let coerce_to_ident_not_fresh sigma v =
let id_of_name = function
| Name.Anonymous -> Id.of_string "x"
| Name.Name x -> x in
@@ -183,9 +182,9 @@ let id_of_name = function
| Some c ->
match EConstr.kind sigma c with
| Var id -> id
- | Meta m -> id_of_name (Evd.meta_name g m)
+ | Meta m -> id_of_name (Evd.meta_name sigma m)
| Evar (kn,_) ->
- begin match Evd.evar_ident kn g with
+ begin match Evd.evar_ident kn sigma with
| None -> fail ()
| Some id -> id
end
@@ -208,7 +207,7 @@ let id_of_name = function
| _ -> fail()
-let coerce_to_intro_pattern env sigma v =
+let coerce_to_intro_pattern sigma v =
if has_type v (topwit wit_intro_pattern) then
(out_gen (topwit wit_intro_pattern) v).CAst.v
else if has_type v (topwit wit_var) then
@@ -221,8 +220,8 @@ let coerce_to_intro_pattern env sigma v =
IntroNaming (IntroIdentifier (destVar sigma c))
| _ -> raise (CannotCoerceTo "an introduction pattern")
-let coerce_to_intro_pattern_naming env sigma v =
- match coerce_to_intro_pattern env sigma v with
+let coerce_to_intro_pattern_naming sigma v =
+ match coerce_to_intro_pattern sigma v with
| IntroNaming pat -> pat
| _ -> raise (CannotCoerceTo "a naming introduction pattern")
@@ -255,7 +254,7 @@ let coerce_to_constr env v =
(try [], constr_of_id env id with Not_found -> fail ())
else fail ()
-let coerce_to_uconstr env v =
+let coerce_to_uconstr v =
if has_type v (topwit wit_uconstr) then
out_gen (topwit wit_uconstr) v
else
@@ -299,11 +298,11 @@ let coerce_to_constr_list env v =
List.map map l
| None -> raise (CannotCoerceTo "a term list")
-let coerce_to_intro_pattern_list ?loc env sigma v =
+let coerce_to_intro_pattern_list ?loc sigma v =
match Value.to_list v with
| None -> raise (CannotCoerceTo "an intro pattern list")
| Some l ->
- let map v = CAst.make ?loc @@ coerce_to_intro_pattern env sigma v in
+ let map v = CAst.make ?loc @@ coerce_to_intro_pattern sigma v in
List.map map l
let coerce_to_hyp env sigma v =
@@ -328,7 +327,7 @@ let coerce_to_hyp_list env sigma v =
| None -> raise (CannotCoerceTo "a variable list")
(* Interprets a qualified name *)
-let coerce_to_reference env sigma v =
+let coerce_to_reference sigma v =
match Value.to_constr v with
| Some c ->
begin
@@ -356,7 +355,7 @@ let coerce_to_quantified_hypothesis sigma v =
(* Quantified named or numbered hypothesis or hypothesis in context *)
(* (as in Inversion) *)
-let coerce_to_decl_or_quant_hyp env sigma v =
+let coerce_to_decl_or_quant_hyp sigma v =
if has_type v (topwit wit_int) then
AnonHyp (out_gen (topwit wit_int) v)
else
diff --git a/plugins/ltac/taccoerce.mli b/plugins/ltac/taccoerce.mli
index 56f881684..d2ae92f6c 100644
--- a/plugins/ltac/taccoerce.mli
+++ b/plugins/ltac/taccoerce.mli
@@ -51,12 +51,12 @@ val coerce_to_constr_context : Value.t -> constr
val coerce_var_to_ident : bool -> Environ.env -> Evd.evar_map -> Value.t -> Id.t
-val coerce_to_ident_not_fresh : Environ.env -> Evd.evar_map -> Value.t -> Id.t
+val coerce_to_ident_not_fresh : Evd.evar_map -> Value.t -> Id.t
-val coerce_to_intro_pattern : Environ.env -> Evd.evar_map -> Value.t -> Tacexpr.delayed_open_constr intro_pattern_expr
+val coerce_to_intro_pattern : Evd.evar_map -> Value.t -> Tacexpr.delayed_open_constr intro_pattern_expr
val coerce_to_intro_pattern_naming :
- Environ.env -> Evd.evar_map -> Value.t -> Namegen.intro_pattern_naming_expr
+ Evd.evar_map -> Value.t -> Namegen.intro_pattern_naming_expr
val coerce_to_hint_base : Value.t -> string
@@ -64,7 +64,7 @@ val coerce_to_int : Value.t -> int
val coerce_to_constr : Environ.env -> Value.t -> Ltac_pretype.constr_under_binders
-val coerce_to_uconstr : Environ.env -> Value.t -> Ltac_pretype.closed_glob_constr
+val coerce_to_uconstr : Value.t -> Ltac_pretype.closed_glob_constr
val coerce_to_closed_constr : Environ.env -> Value.t -> constr
@@ -74,17 +74,17 @@ val coerce_to_evaluable_ref :
val coerce_to_constr_list : Environ.env -> Value.t -> constr list
val coerce_to_intro_pattern_list :
- ?loc:Loc.t -> Environ.env -> Evd.evar_map -> Value.t -> Tacexpr.intro_patterns
+ ?loc:Loc.t -> Evd.evar_map -> Value.t -> Tacexpr.intro_patterns
val coerce_to_hyp : Environ.env -> Evd.evar_map -> Value.t -> Id.t
val coerce_to_hyp_list : Environ.env -> Evd.evar_map -> Value.t -> Id.t list
-val coerce_to_reference : Environ.env -> Evd.evar_map -> Value.t -> GlobRef.t
+val coerce_to_reference : Evd.evar_map -> Value.t -> GlobRef.t
val coerce_to_quantified_hypothesis : Evd.evar_map -> Value.t -> quantified_hypothesis
-val coerce_to_decl_or_quant_hyp : Environ.env -> Evd.evar_map -> Value.t -> quantified_hypothesis
+val coerce_to_decl_or_quant_hyp : Evd.evar_map -> Value.t -> quantified_hypothesis
val coerce_to_int_or_var_list : Value.t -> int Locus.or_var list
diff --git a/plugins/ltac/tacentries.ml b/plugins/ltac/tacentries.ml
index 98d451536..fac464a62 100644
--- a/plugins/ltac/tacentries.ml
+++ b/plugins/ltac/tacentries.ml
@@ -45,7 +45,7 @@ let coincide s pat off =
let atactic n =
if n = 5 then Aentry Pltac.binder_tactic
- else Aentryl (Pltac.tactic_expr, n)
+ else Aentryl (Pltac.tactic_expr, string_of_int n)
type entry_name = EntryName :
'a raw_abstract_argument_type * (Tacexpr.raw_tactic_expr, 'a) Extend.symbol -> entry_name
@@ -398,7 +398,7 @@ let create_ltac_quotation name cast (e, l) =
let () = ltac_quotations := String.Set.add name !ltac_quotations in
let entry = match l with
| None -> Aentry e
- | Some l -> Aentryl (e, l)
+ | Some l -> Aentryl (e, string_of_int l)
in
(* let level = Some "1" in *)
let level = None in
@@ -554,13 +554,18 @@ let () =
] in
register_grammars_by_name "tactic" entries
+let get_identifier id =
+ (** Workaround for badly-designed generic arguments lacking a closure *)
+ Names.Id.of_string_soft ("$" ^ id)
+
+
type _ ty_sig =
| TyNil : (Geninterp.interp_sign -> unit Proofview.tactic) ty_sig
| TyIdent : string * 'r ty_sig -> 'r ty_sig
| TyArg :
- (('a, 'b, 'c) Extend.ty_user_symbol * Id.t) Loc.located * 'r ty_sig -> ('c -> 'r) ty_sig
+ ('a, 'b, 'c) Extend.ty_user_symbol * string * 'r ty_sig -> ('c -> 'r) ty_sig
| TyAnonArg :
- ('a, 'b, 'c) Extend.ty_user_symbol Loc.located * 'r ty_sig -> 'r ty_sig
+ ('a, 'b, 'c) Extend.ty_user_symbol * 'r ty_sig -> 'r ty_sig
type ty_ml = TyML : 'r ty_sig * 'r -> ty_ml
@@ -578,10 +583,11 @@ let rec clause_of_sign : type a. a ty_sig -> Genarg.ArgT.any Extend.user_symbol
fun sign -> match sign with
| TyNil -> []
| TyIdent (s, sig') -> TacTerm s :: clause_of_sign sig'
- | TyArg ((loc,(a,id)),sig') ->
- TacNonTerm (loc,(untype_user_symbol a,Some id)) :: clause_of_sign sig'
- | TyAnonArg ((loc,a),sig') ->
- TacNonTerm (loc,(untype_user_symbol a,None)) :: clause_of_sign sig'
+ | TyArg (a, id, sig') ->
+ let id = get_identifier id in
+ TacNonTerm (None,(untype_user_symbol a,Some id)) :: clause_of_sign sig'
+ | TyAnonArg (a, sig') ->
+ TacNonTerm (None,(untype_user_symbol a,None)) :: clause_of_sign sig'
let clause_of_ty_ml = function
| TyML (t,_) -> clause_of_sign t
@@ -604,7 +610,7 @@ let rec eval_sign : type a. a ty_sig -> a -> Geninterp.Val.t list -> Geninterp.i
| _ :: _ -> assert false
end
| TyIdent (s, sig') -> eval_sign sig' tac
- | TyArg ((_loc,(a,id)), sig') ->
+ | TyArg (a, _, sig') ->
let f = eval_sign sig' in
begin fun tac vals ist -> match vals with
| [] -> assert false
@@ -612,7 +618,7 @@ let rec eval_sign : type a. a ty_sig -> a -> Geninterp.Val.t list -> Geninterp.i
let v' = Taccoerce.Value.cast (topwit (prj a)) v in
f (tac v') vals ist
end tac
- | TyAnonArg ((_loc,a), sig') -> eval_sign sig' tac
+ | TyAnonArg (a, sig') -> eval_sign sig' tac
let eval : ty_ml -> Geninterp.Val.t list -> Geninterp.interp_sign -> unit Proofview.tactic = function
| TyML (t,tac) -> eval_sign t tac
@@ -624,14 +630,14 @@ let is_constr_entry = function
let rec only_constr : type a. a ty_sig -> bool = function
| TyNil -> true
| TyIdent(_,_) -> false
-| TyArg((_,(u,_)),s) -> if is_constr_entry u then only_constr s else false
-| TyAnonArg((_,u),s) -> if is_constr_entry u then only_constr s else false
+| TyArg (u, _, s) -> if is_constr_entry u then only_constr s else false
+| TyAnonArg (u, s) -> if is_constr_entry u then only_constr s else false
let rec mk_sign_vars : type a. a ty_sig -> Name.t list = function
| TyNil -> []
| TyIdent (_,s) -> mk_sign_vars s
-| TyArg((_,(_,name)),s) -> Name name :: mk_sign_vars s
-| TyAnonArg((_,_),s) -> Anonymous :: mk_sign_vars s
+| TyArg (_, name, s) -> Name (get_identifier name) :: mk_sign_vars s
+| TyAnonArg (_, s) -> Anonymous :: mk_sign_vars s
let dummy_id = Id.of_string "_"
diff --git a/plugins/ltac/tacentries.mli b/plugins/ltac/tacentries.mli
index 2bfbbe2e1..9bba9ba71 100644
--- a/plugins/ltac/tacentries.mli
+++ b/plugins/ltac/tacentries.mli
@@ -72,9 +72,9 @@ type _ ty_sig =
| TyNil : (Geninterp.interp_sign -> unit Proofview.tactic) ty_sig
| TyIdent : string * 'r ty_sig -> 'r ty_sig
| TyArg :
- (('a, 'b, 'c) Extend.ty_user_symbol * Names.Id.t) Loc.located * 'r ty_sig -> ('c -> 'r) ty_sig
+ ('a, 'b, 'c) Extend.ty_user_symbol * string * 'r ty_sig -> ('c -> 'r) ty_sig
| TyAnonArg :
- ('a, 'b, 'c) Extend.ty_user_symbol Loc.located * 'r ty_sig -> 'r ty_sig
+ ('a, 'b, 'c) Extend.ty_user_symbol * 'r ty_sig -> 'r ty_sig
type ty_ml = TyML : 'r ty_sig * 'r -> ty_ml
diff --git a/plugins/ltac/tacinterp.ml b/plugins/ltac/tacinterp.ml
index 9d1cc1643..d9ac96d89 100644
--- a/plugins/ltac/tacinterp.ml
+++ b/plugins/ltac/tacinterp.ml
@@ -312,11 +312,11 @@ let interp_name ist env sigma = function
| Name id -> Name (interp_ident ist env sigma id)
let interp_intro_pattern_var loc ist env sigma id =
- try try_interp_ltac_var (coerce_to_intro_pattern env sigma) ist (Some (env,sigma)) (make ?loc id)
+ try try_interp_ltac_var (coerce_to_intro_pattern sigma) ist (Some (env,sigma)) (make ?loc id)
with Not_found -> IntroNaming (IntroIdentifier id)
let interp_intro_pattern_naming_var loc ist env sigma id =
- try try_interp_ltac_var (coerce_to_intro_pattern_naming env sigma) ist (Some (env,sigma)) (make ?loc id)
+ try try_interp_ltac_var (coerce_to_intro_pattern_naming sigma) ist (Some (env,sigma)) (make ?loc id)
with Not_found -> IntroIdentifier id
let interp_int ist ({loc;v=id} as locid) =
@@ -357,7 +357,7 @@ let interp_hyp_list ist env sigma l =
let interp_reference ist env sigma = function
| ArgArg (_,r) -> r
| ArgVar {loc;v=id} ->
- try try_interp_ltac_var (coerce_to_reference env sigma) ist (Some (env,sigma)) (make ?loc id)
+ try try_interp_ltac_var (coerce_to_reference sigma) ist (Some (env,sigma)) (make ?loc id)
with Not_found ->
try
VarRef (get_id (Environ.lookup_named id env))
@@ -451,7 +451,7 @@ let default_fresh_id = Id.of_string "H"
let interp_fresh_id ist env sigma l =
let extract_ident ist env sigma id =
- try try_interp_ltac_var (coerce_to_ident_not_fresh env sigma)
+ try try_interp_ltac_var (coerce_to_ident_not_fresh sigma)
ist (Some (env,sigma)) (make id)
with Not_found -> id in
let ids = List.map_filter (function ArgVar {v=id} -> Some id | _ -> None) l in
@@ -474,7 +474,7 @@ let interp_fresh_id ist env sigma l =
(* Extract the uconstr list from lfun *)
let extract_ltac_constr_context ist env sigma =
let add_uconstr id v map =
- try Id.Map.add id (coerce_to_uconstr env v) map
+ try Id.Map.add id (coerce_to_uconstr v) map
with CannotCoerceTo _ -> map
in
let add_constr id v map =
@@ -799,7 +799,7 @@ and interp_or_and_intro_pattern ist env sigma = function
and interp_intro_pattern_list_as_list ist env sigma = function
| [{loc;v=IntroNaming (IntroIdentifier id)}] as l ->
- (try sigma, coerce_to_intro_pattern_list ?loc env sigma (Id.Map.find id ist.lfun)
+ (try sigma, coerce_to_intro_pattern_list ?loc sigma (Id.Map.find id ist.lfun)
with Not_found | CannotCoerceTo _ ->
List.fold_left_map (interp_intro_pattern ist env) sigma l)
| l -> List.fold_left_map (interp_intro_pattern ist env) sigma l
@@ -842,7 +842,7 @@ let interp_declared_or_quantified_hypothesis ist env sigma = function
| AnonHyp n -> AnonHyp n
| NamedHyp id ->
try try_interp_ltac_var
- (coerce_to_decl_or_quant_hyp env sigma) ist (Some (env,sigma)) (make id)
+ (coerce_to_decl_or_quant_hyp sigma) ist (Some (env,sigma)) (make id)
with Not_found -> NamedHyp id
let interp_binding ist env sigma {loc;v=(b,c)} =
diff --git a/plugins/micromega/g_micromega.ml4 b/plugins/micromega/g_micromega.mlg
index 81140a46a..21f0414e9 100644
--- a/plugins/micromega/g_micromega.ml4
+++ b/plugins/micromega/g_micromega.mlg
@@ -16,70 +16,74 @@
(* *)
(************************************************************************)
+{
+
open Ltac_plugin
open Stdarg
open Tacarg
+}
+
DECLARE PLUGIN "micromega_plugin"
TACTIC EXTEND RED
-| [ "myred" ] -> [ Tactics.red_in_concl ]
+| [ "myred" ] -> { Tactics.red_in_concl }
END
TACTIC EXTEND PsatzZ
-| [ "psatz_Z" int_or_var(i) tactic(t) ] -> [ (Coq_micromega.psatz_Z i
+| [ "psatz_Z" int_or_var(i) tactic(t) ] -> { (Coq_micromega.psatz_Z i
(Tacinterp.tactic_of_value ist t))
- ]
-| [ "psatz_Z" tactic(t)] -> [ (Coq_micromega.psatz_Z (-1)) (Tacinterp.tactic_of_value ist t) ]
+ }
+| [ "psatz_Z" tactic(t)] -> { (Coq_micromega.psatz_Z (-1)) (Tacinterp.tactic_of_value ist t) }
END
TACTIC EXTEND Lia
-[ "xlia" tactic(t) ] -> [ (Coq_micromega.xlia (Tacinterp.tactic_of_value ist t)) ]
+| [ "xlia" tactic(t) ] -> { (Coq_micromega.xlia (Tacinterp.tactic_of_value ist t)) }
END
TACTIC EXTEND Nia
-[ "xnlia" tactic(t) ] -> [ (Coq_micromega.xnlia (Tacinterp.tactic_of_value ist t)) ]
+| [ "xnlia" tactic(t) ] -> { (Coq_micromega.xnlia (Tacinterp.tactic_of_value ist t)) }
END
TACTIC EXTEND NRA
-[ "xnra" tactic(t) ] -> [ (Coq_micromega.nra (Tacinterp.tactic_of_value ist t))]
+| [ "xnra" tactic(t) ] -> { (Coq_micromega.nra (Tacinterp.tactic_of_value ist t))}
END
TACTIC EXTEND NQA
-[ "xnqa" tactic(t) ] -> [ (Coq_micromega.nqa (Tacinterp.tactic_of_value ist t))]
+| [ "xnqa" tactic(t) ] -> { (Coq_micromega.nqa (Tacinterp.tactic_of_value ist t))}
END
TACTIC EXTEND Sos_Z
-| [ "sos_Z" tactic(t) ] -> [ (Coq_micromega.sos_Z (Tacinterp.tactic_of_value ist t)) ]
+| [ "sos_Z" tactic(t) ] -> { (Coq_micromega.sos_Z (Tacinterp.tactic_of_value ist t)) }
END
TACTIC EXTEND Sos_Q
-| [ "sos_Q" tactic(t) ] -> [ (Coq_micromega.sos_Q (Tacinterp.tactic_of_value ist t)) ]
+| [ "sos_Q" tactic(t) ] -> { (Coq_micromega.sos_Q (Tacinterp.tactic_of_value ist t)) }
END
TACTIC EXTEND Sos_R
-| [ "sos_R" tactic(t) ] -> [ (Coq_micromega.sos_R (Tacinterp.tactic_of_value ist t)) ]
+| [ "sos_R" tactic(t) ] -> { (Coq_micromega.sos_R (Tacinterp.tactic_of_value ist t)) }
END
TACTIC EXTEND LRA_Q
-[ "lra_Q" tactic(t) ] -> [ (Coq_micromega.lra_Q (Tacinterp.tactic_of_value ist t)) ]
+| [ "lra_Q" tactic(t) ] -> { (Coq_micromega.lra_Q (Tacinterp.tactic_of_value ist t)) }
END
TACTIC EXTEND LRA_R
-[ "lra_R" tactic(t) ] -> [ (Coq_micromega.lra_R (Tacinterp.tactic_of_value ist t)) ]
+| [ "lra_R" tactic(t) ] -> { (Coq_micromega.lra_R (Tacinterp.tactic_of_value ist t)) }
END
TACTIC EXTEND PsatzR
-| [ "psatz_R" int_or_var(i) tactic(t) ] -> [ (Coq_micromega.psatz_R i (Tacinterp.tactic_of_value ist t)) ]
-| [ "psatz_R" tactic(t) ] -> [ (Coq_micromega.psatz_R (-1) (Tacinterp.tactic_of_value ist t)) ]
+| [ "psatz_R" int_or_var(i) tactic(t) ] -> { (Coq_micromega.psatz_R i (Tacinterp.tactic_of_value ist t)) }
+| [ "psatz_R" tactic(t) ] -> { (Coq_micromega.psatz_R (-1) (Tacinterp.tactic_of_value ist t)) }
END
TACTIC EXTEND PsatzQ
-| [ "psatz_Q" int_or_var(i) tactic(t) ] -> [ (Coq_micromega.psatz_Q i (Tacinterp.tactic_of_value ist t)) ]
-| [ "psatz_Q" tactic(t) ] -> [ (Coq_micromega.psatz_Q (-1) (Tacinterp.tactic_of_value ist t)) ]
+| [ "psatz_Q" int_or_var(i) tactic(t) ] -> { (Coq_micromega.psatz_Q i (Tacinterp.tactic_of_value ist t)) }
+| [ "psatz_Q" tactic(t) ] -> { (Coq_micromega.psatz_Q (-1) (Tacinterp.tactic_of_value ist t)) }
END
diff --git a/plugins/nsatz/g_nsatz.ml4 b/plugins/nsatz/g_nsatz.mlg
index 4ac49adb9..16ff512e8 100644
--- a/plugins/nsatz/g_nsatz.ml4
+++ b/plugins/nsatz/g_nsatz.mlg
@@ -8,11 +8,15 @@
(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
+{
+
open Ltac_plugin
open Stdarg
+}
+
DECLARE PLUGIN "nsatz_plugin"
TACTIC EXTEND nsatz_compute
-| [ "nsatz_compute" constr(lt) ] -> [ Nsatz.nsatz_compute (EConstr.Unsafe.to_constr lt) ]
+| [ "nsatz_compute" constr(lt) ] -> { Nsatz.nsatz_compute (EConstr.Unsafe.to_constr lt) }
END
diff --git a/plugins/omega/coq_omega.ml b/plugins/omega/coq_omega.ml
index 6f4138828..e14c4e2ec 100644
--- a/plugins/omega/coq_omega.ml
+++ b/plugins/omega/coq_omega.ml
@@ -38,15 +38,9 @@ open OmegaSolver
(* Added by JCF, 09/03/98 *)
-let elim_id id =
- Proofview.Goal.enter begin fun gl ->
- simplest_elim (mkVar id)
- end
-let resolve_id id = Proofview.Goal.enter begin fun gl ->
- apply (mkVar id)
-end
+let elim_id id = simplest_elim (mkVar id)
-let timing timer_name f arg = f arg
+let resolve_id id = apply (mkVar id)
let display_time_flag = ref false
let display_system_flag = ref false
diff --git a/plugins/omega/g_omega.ml4 b/plugins/omega/g_omega.mlg
index 170b937c9..c3d063cff 100644
--- a/plugins/omega/g_omega.ml4
+++ b/plugins/omega/g_omega.mlg
@@ -18,6 +18,8 @@
DECLARE PLUGIN "omega_plugin"
+{
+
open Ltac_plugin
open Names
open Coq_omega
@@ -43,14 +45,15 @@ let omega_tactic l =
(Tacticals.New.tclREPEAT (Tacticals.New.tclTHENLIST tacs))
(omega_solver)
+}
TACTIC EXTEND omega
-| [ "omega" ] -> [ omega_tactic [] ]
+| [ "omega" ] -> { omega_tactic [] }
END
TACTIC EXTEND omega'
| [ "omega" "with" ne_ident_list(l) ] ->
- [ omega_tactic (List.map Names.Id.to_string l) ]
-| [ "omega" "with" "*" ] -> [ omega_tactic ["nat";"positive";"N";"Z"] ]
+ { omega_tactic (List.map Names.Id.to_string l) }
+| [ "omega" "with" "*" ] -> { omega_tactic ["nat";"positive";"N";"Z"] }
END
diff --git a/plugins/quote/g_quote.ml4 b/plugins/quote/g_quote.mlg
index 09209dc22..749903c3a 100644
--- a/plugins/quote/g_quote.ml4
+++ b/plugins/quote/g_quote.mlg
@@ -8,6 +8,8 @@
(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
+{
+
open Ltac_plugin
open Names
open Tacexpr
@@ -16,8 +18,12 @@ open Quote
open Stdarg
open Tacarg
+}
+
DECLARE PLUGIN "quote_plugin"
+{
+
let cont = Id.of_string "cont"
let x = Id.of_string "x"
@@ -27,12 +33,14 @@ let make_cont (k : Val.t) (c : EConstr.t) =
let ist = { lfun = Id.Map.add cont k (Id.Map.singleton x c); extra = TacStore.empty; } in
Tacinterp.eval_tactic_ist ist (TacArg (Loc.tag tac))
+}
+
TACTIC EXTEND quote
- [ "quote" ident(f) ] -> [ quote f [] ]
-| [ "quote" ident(f) "[" ne_ident_list(lc) "]"] -> [ quote f lc ]
+| [ "quote" ident(f) ] -> { quote f [] }
+| [ "quote" ident(f) "[" ne_ident_list(lc) "]"] -> { quote f lc }
| [ "quote" ident(f) "in" constr(c) "using" tactic(k) ] ->
- [ gen_quote (make_cont k) c f [] ]
+ { gen_quote (make_cont k) c f [] }
| [ "quote" ident(f) "[" ne_ident_list(lc) "]"
"in" constr(c) "using" tactic(k) ] ->
- [ gen_quote (make_cont k) c f lc ]
+ { gen_quote (make_cont k) c f lc }
END
diff --git a/plugins/romega/g_romega.ml4 b/plugins/romega/g_romega.mlg
index 5b77d08de..c1ce30027 100644
--- a/plugins/romega/g_romega.ml4
+++ b/plugins/romega/g_romega.mlg
@@ -9,6 +9,8 @@
DECLARE PLUGIN "romega_plugin"
+{
+
open Ltac_plugin
open Names
open Refl_omega
@@ -39,13 +41,15 @@ let romega_tactic unsafe l =
(Tactics.intros)
(total_reflexive_omega_tactic unsafe))
+}
+
TACTIC EXTEND romega
-| [ "romega" ] -> [ romega_tactic false [] ]
-| [ "unsafe_romega" ] -> [ romega_tactic true [] ]
+| [ "romega" ] -> { romega_tactic false [] }
+| [ "unsafe_romega" ] -> { romega_tactic true [] }
END
TACTIC EXTEND romega'
| [ "romega" "with" ne_ident_list(l) ] ->
- [ romega_tactic false (List.map Names.Id.to_string l) ]
-| [ "romega" "with" "*" ] -> [ romega_tactic false ["nat";"positive";"N";"Z"] ]
+ { romega_tactic false (List.map Names.Id.to_string l) }
+| [ "romega" "with" "*" ] -> { romega_tactic false ["nat";"positive";"N";"Z"] }
END
diff --git a/plugins/rtauto/g_rtauto.ml4 b/plugins/rtauto/g_rtauto.mlg
index aa6757634..9c9fdcfa2 100644
--- a/plugins/rtauto/g_rtauto.ml4
+++ b/plugins/rtauto/g_rtauto.mlg
@@ -8,12 +8,15 @@
(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
+{
open Ltac_plugin
+}
+
DECLARE PLUGIN "rtauto_plugin"
TACTIC EXTEND rtauto
- [ "rtauto" ] -> [ Proofview.V82.tactic (Refl_tauto.rtauto_tac) ]
+| [ "rtauto" ] -> { Proofview.V82.tactic (Refl_tauto.rtauto_tac) }
END
diff --git a/plugins/syntax/n_syntax.ml b/plugins/syntax/n_syntax.ml
new file mode 100644
index 000000000..0e202be47
--- /dev/null
+++ b/plugins/syntax/n_syntax.ml
@@ -0,0 +1,81 @@
+(************************************************************************)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
+(* \VV/ **************************************************************)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
+(************************************************************************)
+
+open Pp
+open CErrors
+open Names
+open Bigint
+open Positive_syntax_plugin.Positive_syntax
+
+(* Poor man's DECLARE PLUGIN *)
+let __coq_plugin_name = "n_syntax_plugin"
+let () = Mltop.add_known_module __coq_plugin_name
+
+(**********************************************************************)
+(* Parsing N via scopes *)
+(**********************************************************************)
+
+open Globnames
+open Glob_term
+
+let binnums = ["Coq";"Numbers";"BinNums"]
+
+let make_dir l = DirPath.make (List.rev_map Id.of_string l)
+let make_path dir id = Libnames.make_path (make_dir dir) (Id.of_string id)
+
+(* TODO: temporary hack *)
+let make_kn dir id = Globnames.encode_mind dir id
+
+let n_kn = make_kn (make_dir binnums) (Id.of_string "N")
+let glob_n = IndRef (n_kn,0)
+let path_of_N0 = ((n_kn,0),1)
+let path_of_Npos = ((n_kn,0),2)
+let glob_N0 = ConstructRef path_of_N0
+let glob_Npos = ConstructRef path_of_Npos
+
+let n_path = make_path binnums "N"
+
+let n_of_binnat ?loc pos_or_neg n = DAst.make ?loc @@
+ if not (Bigint.equal n zero) then
+ GApp(DAst.make @@ GRef (glob_Npos,None), [pos_of_bignat ?loc n])
+ else
+ GRef(glob_N0, None)
+
+let error_negative ?loc =
+ user_err ?loc ~hdr:"interp_N" (str "No negative numbers in type \"N\".")
+
+let n_of_int ?loc n =
+ if is_pos_or_zero n then n_of_binnat ?loc true n
+ else error_negative ?loc
+
+(**********************************************************************)
+(* Printing N via scopes *)
+(**********************************************************************)
+
+let bignat_of_n n = DAst.with_val (function
+ | GApp (r, [a]) when is_gr r glob_Npos -> bignat_of_pos a
+ | GRef (a,_) when GlobRef.equal a glob_N0 -> Bigint.zero
+ | _ -> raise Non_closed_number
+ ) n
+
+let uninterp_n (AnyGlobConstr p) =
+ try Some (bignat_of_n p)
+ with Non_closed_number -> None
+
+(************************************************************************)
+(* Declaring interpreters and uninterpreters for N *)
+
+let _ = Notation.declare_numeral_interpreter "N_scope"
+ (n_path,binnums)
+ n_of_int
+ ([DAst.make @@ GRef (glob_N0, None);
+ DAst.make @@ GRef (glob_Npos, None)],
+ uninterp_n,
+ true)
diff --git a/plugins/syntax/n_syntax_plugin.mlpack b/plugins/syntax/n_syntax_plugin.mlpack
new file mode 100644
index 000000000..4c56645f0
--- /dev/null
+++ b/plugins/syntax/n_syntax_plugin.mlpack
@@ -0,0 +1 @@
+N_syntax
diff --git a/plugins/syntax/positive_syntax.ml b/plugins/syntax/positive_syntax.ml
new file mode 100644
index 000000000..0c82e4744
--- /dev/null
+++ b/plugins/syntax/positive_syntax.ml
@@ -0,0 +1,101 @@
+(************************************************************************)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
+(* \VV/ **************************************************************)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
+(************************************************************************)
+
+open Pp
+open CErrors
+open Util
+open Names
+open Bigint
+
+(* Poor man's DECLARE PLUGIN *)
+let __coq_plugin_name = "positive_syntax_plugin"
+let () = Mltop.add_known_module __coq_plugin_name
+
+exception Non_closed_number
+
+(**********************************************************************)
+(* Parsing positive via scopes *)
+(**********************************************************************)
+
+open Globnames
+open Glob_term
+
+let binnums = ["Coq";"Numbers";"BinNums"]
+
+let make_dir l = DirPath.make (List.rev_map Id.of_string l)
+let make_path dir id = Libnames.make_path (make_dir dir) (Id.of_string id)
+
+let positive_path = make_path binnums "positive"
+
+(* TODO: temporary hack *)
+let make_kn dir id = Globnames.encode_mind dir id
+
+let positive_kn = make_kn (make_dir binnums) (Id.of_string "positive")
+let glob_positive = IndRef (positive_kn,0)
+let path_of_xI = ((positive_kn,0),1)
+let path_of_xO = ((positive_kn,0),2)
+let path_of_xH = ((positive_kn,0),3)
+let glob_xI = ConstructRef path_of_xI
+let glob_xO = ConstructRef path_of_xO
+let glob_xH = ConstructRef path_of_xH
+
+let pos_of_bignat ?loc x =
+ let ref_xI = DAst.make ?loc @@ GRef (glob_xI, None) in
+ let ref_xH = DAst.make ?loc @@ GRef (glob_xH, None) in
+ let ref_xO = DAst.make ?loc @@ GRef (glob_xO, None) in
+ let rec pos_of x =
+ match div2_with_rest x with
+ | (q,false) -> DAst.make ?loc @@ GApp (ref_xO,[pos_of q])
+ | (q,true) when not (Bigint.equal q zero) -> DAst.make ?loc @@ GApp (ref_xI,[pos_of q])
+ | (q,true) -> ref_xH
+ in
+ pos_of x
+
+let error_non_positive ?loc =
+ user_err ?loc ~hdr:"interp_positive"
+ (str "Only strictly positive numbers in type \"positive\".")
+
+let interp_positive ?loc n =
+ if is_strictly_pos n then pos_of_bignat ?loc n
+ else error_non_positive ?loc
+
+(**********************************************************************)
+(* Printing positive via scopes *)
+(**********************************************************************)
+
+let is_gr c gr = match DAst.get c with
+| GRef (r, _) -> GlobRef.equal r gr
+| _ -> false
+
+let rec bignat_of_pos x = DAst.with_val (function
+ | GApp (r ,[a]) when is_gr r glob_xO -> mult_2(bignat_of_pos a)
+ | GApp (r ,[a]) when is_gr r glob_xI -> add_1(mult_2(bignat_of_pos a))
+ | GRef (a, _) when GlobRef.equal a glob_xH -> Bigint.one
+ | _ -> raise Non_closed_number
+ ) x
+
+let uninterp_positive (AnyGlobConstr p) =
+ try
+ Some (bignat_of_pos p)
+ with Non_closed_number ->
+ None
+
+(************************************************************************)
+(* Declaring interpreters and uninterpreters for positive *)
+(************************************************************************)
+
+let _ = Notation.declare_numeral_interpreter "positive_scope"
+ (positive_path,binnums)
+ interp_positive
+ ([DAst.make @@ GRef (glob_xI, None);
+ DAst.make @@ GRef (glob_xO, None);
+ DAst.make @@ GRef (glob_xH, None)],
+ uninterp_positive,
+ true)
diff --git a/plugins/syntax/positive_syntax_plugin.mlpack b/plugins/syntax/positive_syntax_plugin.mlpack
new file mode 100644
index 000000000..ac8f3c425
--- /dev/null
+++ b/plugins/syntax/positive_syntax_plugin.mlpack
@@ -0,0 +1 @@
+Positive_syntax
diff --git a/plugins/syntax/z_syntax.ml b/plugins/syntax/z_syntax.ml
index 09fe8bf70..2534162e3 100644
--- a/plugins/syntax/z_syntax.ml
+++ b/plugins/syntax/z_syntax.ml
@@ -8,20 +8,16 @@
(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
-open Pp
-open CErrors
-open Util
open Names
open Bigint
+open Positive_syntax_plugin.Positive_syntax
(* Poor man's DECLARE PLUGIN *)
let __coq_plugin_name = "z_syntax_plugin"
let () = Mltop.add_known_module __coq_plugin_name
-exception Non_closed_number
-
(**********************************************************************)
-(* Parsing positive via scopes *)
+(* Parsing Z via scopes *)
(**********************************************************************)
open Globnames
@@ -32,129 +28,9 @@ let binnums = ["Coq";"Numbers";"BinNums"]
let make_dir l = DirPath.make (List.rev_map Id.of_string l)
let make_path dir id = Libnames.make_path (make_dir dir) (Id.of_string id)
-let positive_path = make_path binnums "positive"
-
(* TODO: temporary hack *)
let make_kn dir id = Globnames.encode_mind dir id
-let positive_kn = make_kn (make_dir binnums) (Id.of_string "positive")
-let glob_positive = IndRef (positive_kn,0)
-let path_of_xI = ((positive_kn,0),1)
-let path_of_xO = ((positive_kn,0),2)
-let path_of_xH = ((positive_kn,0),3)
-let glob_xI = ConstructRef path_of_xI
-let glob_xO = ConstructRef path_of_xO
-let glob_xH = ConstructRef path_of_xH
-
-let pos_of_bignat ?loc x =
- let ref_xI = DAst.make ?loc @@ GRef (glob_xI, None) in
- let ref_xH = DAst.make ?loc @@ GRef (glob_xH, None) in
- let ref_xO = DAst.make ?loc @@ GRef (glob_xO, None) in
- let rec pos_of x =
- match div2_with_rest x with
- | (q,false) -> DAst.make ?loc @@ GApp (ref_xO,[pos_of q])
- | (q,true) when not (Bigint.equal q zero) -> DAst.make ?loc @@ GApp (ref_xI,[pos_of q])
- | (q,true) -> ref_xH
- in
- pos_of x
-
-let error_non_positive ?loc =
- user_err ?loc ~hdr:"interp_positive"
- (str "Only strictly positive numbers in type \"positive\".")
-
-let interp_positive ?loc n =
- if is_strictly_pos n then pos_of_bignat ?loc n
- else error_non_positive ?loc
-
-(**********************************************************************)
-(* Printing positive via scopes *)
-(**********************************************************************)
-
-let is_gr c gr = match DAst.get c with
-| GRef (r, _) -> GlobRef.equal r gr
-| _ -> false
-
-let rec bignat_of_pos x = DAst.with_val (function
- | GApp (r ,[a]) when is_gr r glob_xO -> mult_2(bignat_of_pos a)
- | GApp (r ,[a]) when is_gr r glob_xI -> add_1(mult_2(bignat_of_pos a))
- | GRef (a, _) when GlobRef.equal a glob_xH -> Bigint.one
- | _ -> raise Non_closed_number
- ) x
-
-let uninterp_positive (AnyGlobConstr p) =
- try
- Some (bignat_of_pos p)
- with Non_closed_number ->
- None
-
-(************************************************************************)
-(* Declaring interpreters and uninterpreters for positive *)
-(************************************************************************)
-
-let _ = Notation.declare_numeral_interpreter "positive_scope"
- (positive_path,binnums)
- interp_positive
- ([DAst.make @@ GRef (glob_xI, None);
- DAst.make @@ GRef (glob_xO, None);
- DAst.make @@ GRef (glob_xH, None)],
- uninterp_positive,
- true)
-
-(**********************************************************************)
-(* Parsing N via scopes *)
-(**********************************************************************)
-
-let n_kn = make_kn (make_dir binnums) (Id.of_string "N")
-let glob_n = IndRef (n_kn,0)
-let path_of_N0 = ((n_kn,0),1)
-let path_of_Npos = ((n_kn,0),2)
-let glob_N0 = ConstructRef path_of_N0
-let glob_Npos = ConstructRef path_of_Npos
-
-let n_path = make_path binnums "N"
-
-let n_of_binnat ?loc pos_or_neg n = DAst.make ?loc @@
- if not (Bigint.equal n zero) then
- GApp(DAst.make @@ GRef (glob_Npos,None), [pos_of_bignat ?loc n])
- else
- GRef(glob_N0, None)
-
-let error_negative ?loc =
- user_err ?loc ~hdr:"interp_N" (str "No negative numbers in type \"N\".")
-
-let n_of_int ?loc n =
- if is_pos_or_zero n then n_of_binnat ?loc true n
- else error_negative ?loc
-
-(**********************************************************************)
-(* Printing N via scopes *)
-(**********************************************************************)
-
-let bignat_of_n n = DAst.with_val (function
- | GApp (r, [a]) when is_gr r glob_Npos -> bignat_of_pos a
- | GRef (a,_) when GlobRef.equal a glob_N0 -> Bigint.zero
- | _ -> raise Non_closed_number
- ) n
-
-let uninterp_n (AnyGlobConstr p) =
- try Some (bignat_of_n p)
- with Non_closed_number -> None
-
-(************************************************************************)
-(* Declaring interpreters and uninterpreters for N *)
-
-let _ = Notation.declare_numeral_interpreter "N_scope"
- (n_path,binnums)
- n_of_int
- ([DAst.make @@ GRef (glob_N0, None);
- DAst.make @@ GRef (glob_Npos, None)],
- uninterp_n,
- true)
-
-(**********************************************************************)
-(* Parsing Z via scopes *)
-(**********************************************************************)
-
let z_path = make_path binnums "Z"
let z_kn = make_kn (make_dir binnums) (Id.of_string "Z")
let glob_z = IndRef (z_kn,0)
diff --git a/pretyping/glob_ops.ml b/pretyping/glob_ops.ml
index ba193da60..4dfa789ba 100644
--- a/pretyping/glob_ops.ml
+++ b/pretyping/glob_ops.ml
@@ -112,7 +112,7 @@ let fix_kind_eq f k1 k2 = match k1, k2 with
let eq (i1, o1) (i2, o2) =
Option.equal Int.equal i1 i2 && fix_recursion_order_eq f o1 o2
in
- Int.equal i1 i2 && Array.equal eq a1 a1
+ Int.equal i1 i2 && Array.equal eq a1 a2
| GCoFix i1, GCoFix i2 -> Int.equal i1 i2
| (GFix _ | GCoFix _), _ -> false
@@ -452,7 +452,7 @@ let rec rename_glob_vars l c = force @@ DAst.map_with_loc (fun ?loc -> function
else r
| GProd (na,bk,t,c) ->
let na',l' = update_subst na l in
- GProd (na,bk,rename_glob_vars l t,rename_glob_vars l' c)
+ GProd (na',bk,rename_glob_vars l t,rename_glob_vars l' c)
| GLambda (na,bk,t,c) ->
let na',l' = update_subst na l in
GLambda (na',bk,rename_glob_vars l t,rename_glob_vars l' c)
diff --git a/pretyping/indrec.ml b/pretyping/indrec.ml
index 4ab932723..551cc67b6 100644
--- a/pretyping/indrec.ml
+++ b/pretyping/indrec.ml
@@ -86,7 +86,7 @@ let mis_make_case_com dep env sigma (ind, u as pind) (mib,mip as specif) kind =
if not (Sorts.List.mem kind (elim_sorts specif)) then
raise
(RecursionSchemeError
- (NotAllowedCaseAnalysis (false, fst (UnivGen.fresh_sort_in_family env kind), pind)))
+ (NotAllowedCaseAnalysis (false, fst (UnivGen.fresh_sort_in_family kind), pind)))
in
let ndepar = mip.mind_nrealdecls + 1 in
@@ -136,7 +136,7 @@ let mis_make_case_com dep env sigma (ind, u as pind) (mib,mip as specif) kind =
mkLambda_string "f" t
(add_branch (push_rel (LocalAssum (Anonymous, t)) env) (k+1))
in
- let (sigma, s) = Evd.fresh_sort_in_family ~rigid:Evd.univ_flexible_alg env sigma kind in
+ let (sigma, s) = Evd.fresh_sort_in_family ~rigid:Evd.univ_flexible_alg sigma kind in
let typP = make_arity env' sigma dep indf s in
let typP = EConstr.Unsafe.to_constr typP in
let c =
@@ -455,7 +455,7 @@ let mis_make_indrec env sigma ?(force_mutual=false) listdepkind mib u =
| ((indi,u),_,_,dep,kinds)::rest ->
let indf = make_ind_family ((indi,u), Context.Rel.to_extended_list mkRel i lnamesparrec) in
let s =
- Evarutil.evd_comb1 (Evd.fresh_sort_in_family ~rigid:Evd.univ_flexible_alg env)
+ Evarutil.evd_comb1 (Evd.fresh_sort_in_family ~rigid:Evd.univ_flexible_alg)
evdref kinds
in
let typP = make_arity env !evdref dep indf s in
@@ -550,8 +550,7 @@ let check_arities env listdepkind =
let kelim = elim_sorts (mibi,mipi) in
if not (Sorts.List.mem kind kelim) then raise
(RecursionSchemeError
- (NotAllowedCaseAnalysis (true, fst (UnivGen.fresh_sort_in_family env
- kind),(mind,u))))
+ (NotAllowedCaseAnalysis (true, fst (UnivGen.fresh_sort_in_family kind),(mind,u))))
else if Int.List.mem ni ln then raise
(RecursionSchemeError (NotMutualInScheme (mind,mind)))
else ni::ln)
diff --git a/pretyping/nativenorm.ml b/pretyping/nativenorm.ml
index 16d003f67..21c202205 100644
--- a/pretyping/nativenorm.ml
+++ b/pretyping/nativenorm.ml
@@ -144,7 +144,7 @@ let construct_of_constr_const env tag typ =
let construct_of_constr_block = construct_of_constr false
-let build_branches_type env sigma (mind,_ as _ind) mib mip u params dep p =
+let build_branches_type env sigma (mind,_ as _ind) mib mip u params p =
let rtbl = mip.mind_reloc_tbl in
 (* [build_one_branch i cty] builds the type of the i-th branch (starting
 at 0) and the lambdas corresponding to the realargs *)
@@ -161,20 +161,17 @@ let build_branches_type env sigma (mind,_ as _ind) mib mip u params dep p =
let codom =
let ndecl = List.length decl in
let papp = mkApp(lift ndecl p,crealargs) in
- if dep then
- let cstr = ith_constructor_of_inductive (fst ind) (i+1) in
- let relargs = Array.init carity (fun i -> mkRel (carity-i)) in
- let params = Array.map (lift ndecl) params in
- let dep_cstr = mkApp(mkApp(mkConstructU (cstr,snd ind),params),relargs) in
- mkApp(papp,[|dep_cstr|])
- else papp
+ let cstr = ith_constructor_of_inductive (fst ind) (i+1) in
+ let relargs = Array.init carity (fun i -> mkRel (carity-i)) in
+ let params = Array.map (lift ndecl) params in
+ let dep_cstr = mkApp(mkApp(mkConstructU (cstr,snd ind),params),relargs) in
+ mkApp(papp,[|dep_cstr|])
in
decl, decl_with_letin, codom
in Array.mapi build_one_branch mip.mind_nf_lc
-let build_case_type dep p realargs c =
- if dep then mkApp(mkApp(p, realargs), [|c|])
- else mkApp(p, realargs)
+let build_case_type p realargs c =
+ mkApp(mkApp(p, realargs), [|c|])
(* normalisation of values *)
@@ -317,9 +314,9 @@ and nf_atom_type env sigma atom =
let pT =
hnf_prod_applist_assum env nparamdecls
(Inductiveops.type_of_inductive env ind) (Array.to_list params) in
- let dep, p = nf_predicate env sigma ind mip params p pT in
+ let p = nf_predicate env sigma ind mip params p pT in
 (* Compute the type of the branches *)
- let btypes = build_branches_type env sigma (fst ind) mib mip u params dep p in
+ let btypes = build_branches_type env sigma (fst ind) mib mip u params p in
 (* compute the branches *)
let bsw = branch_of_switch (nb_rel env) ans bs in
let mkbranch i v =
@@ -328,7 +325,7 @@ and nf_atom_type env sigma atom =
Termops.it_mkLambda_or_LetIn_from_no_LetIn b decl_with_letin
in
let branchs = Array.mapi mkbranch bsw in
- let tcase = build_case_type dep p realargs a in
+ let tcase = build_case_type p realargs a in
let ci = ans.asw_ci in
mkCase(ci, p, a, branchs), tcase
| Afix(tt,ft,rp,s) ->
@@ -375,18 +372,18 @@ and nf_atom_type env sigma atom =
and nf_predicate env sigma ind mip params v pT =
match kind (whd_allnolet env pT) with
| LetIn (name,b,t,pT) ->
- let dep,body =
+ let body =
nf_predicate (push_rel (LocalDef (name,b,t)) env) sigma ind mip params v pT in
- dep, mkLetIn (name,b,t,body)
+ mkLetIn (name,b,t,body)
| Prod (name,dom,codom) -> begin
match kind_of_value v with
| Vfun f ->
let k = nb_rel env in
let vb = f (mk_rel_accu k) in
- let dep,body =
+ let body =
nf_predicate (push_rel (LocalAssum (name,dom)) env) sigma ind mip params vb codom in
- dep, mkLambda(name,dom,body)
- | _ -> false, nf_type env sigma v
+ mkLambda(name,dom,body)
+ | _ -> nf_type env sigma v
end
| _ ->
match kind_of_value v with
@@ -399,8 +396,8 @@ and nf_predicate env sigma ind mip params v pT =
let params = if Int.equal n 0 then params else Array.map (lift n) params in
let dom = mkApp(mkIndU ind,Array.append params rargs) in
let body = nf_type (push_rel (LocalAssum (name,dom)) env) sigma vb in
- true, mkLambda(name,dom,body)
- | _ -> false, nf_type env sigma v
+ mkLambda(name,dom,body)
+ | _ -> nf_type env sigma v
and nf_evar env sigma evk ty args =
let evi = try Evd.find sigma evk with Not_found -> assert false in
diff --git a/pretyping/typing.ml b/pretyping/typing.ml
index a66ecaaac..ca2702d74 100644
--- a/pretyping/typing.ml
+++ b/pretyping/typing.ml
@@ -138,7 +138,7 @@ let is_correct_arity env sigma c pj ind specif params =
then error ()
else sigma
| Evar (ev,_), [] ->
- let sigma, s = Evd.fresh_sort_in_family env sigma (max_sort allowed_sorts) in
+ let sigma, s = Evd.fresh_sort_in_family sigma (max_sort allowed_sorts) in
let sigma = Evd.define ev (mkSort s) sigma in
sigma
| _, (LocalDef _ as d)::ar' ->
diff --git a/pretyping/vnorm.ml b/pretyping/vnorm.ml
index bd6062824..440076c16 100644
--- a/pretyping/vnorm.ml
+++ b/pretyping/vnorm.ml
@@ -103,7 +103,7 @@ let construct_of_constr_block = construct_of_constr false
let type_of_ind env (ind, u) =
type_of_inductive env (Inductive.lookup_mind_specif env ind, u)
-let build_branches_type env sigma (mind,_ as _ind) mib mip u params dep p =
+let build_branches_type env sigma (mind,_ as _ind) mib mip u params p =
let rtbl = mip.mind_reloc_tbl in
 (* [build_one_branch i cty] builds the type of the i-th branch (starting
 at 0) and the lambdas corresponding to the realargs *)
@@ -120,20 +120,17 @@ let build_branches_type env sigma (mind,_ as _ind) mib mip u params dep p =
let codom =
let ndecl = List.length decl in
let papp = mkApp(lift ndecl p,crealargs) in
- if dep then
- let cstr = ith_constructor_of_inductive ind (i+1) in
- let relargs = Array.init carity (fun i -> mkRel (carity-i)) in
- let params = Array.map (lift ndecl) params in
- let dep_cstr = mkApp(mkApp(mkConstructU (cstr,u),params),relargs) in
- mkApp(papp,[|dep_cstr|])
- else papp
+ let cstr = ith_constructor_of_inductive ind (i+1) in
+ let relargs = Array.init carity (fun i -> mkRel (carity-i)) in
+ let params = Array.map (lift ndecl) params in
+ let dep_cstr = mkApp(mkApp(mkConstructU (cstr,u),params),relargs) in
+ mkApp(papp,[|dep_cstr|])
in
decl, decl_with_letin, codom
in Array.mapi build_one_branch mip.mind_nf_lc
-let build_case_type dep p realargs c =
- if dep then mkApp(mkApp(p, realargs), [|c|])
- else mkApp(p, realargs)
+let build_case_type p realargs c =
+ mkApp(mkApp(p, realargs), [|c|])
 (* The normalisation function *)
@@ -266,9 +263,9 @@ and nf_stk ?from:(from=0) env sigma c t stk =
let nparamdecls = Context.Rel.length (Inductive.inductive_paramdecls (mib,u)) in
let pT =
hnf_prod_applist_assum env nparamdecls (type_of_ind env (ind,u)) (Array.to_list params) in
- let dep, p = nf_predicate env sigma (ind,u) mip params (type_of_switch sw) pT in
+ let p = nf_predicate env sigma (ind,u) mip params (type_of_switch sw) pT in
 (* Compute the type of the branches *)
- let btypes = build_branches_type env sigma ind mib mip u params dep p in
+ let btypes = build_branches_type env sigma ind mib mip u params p in
 (* compute the branches *)
let bsw = branch_of_switch (nb_rel env) sw in
let mkbranch i (n,v) =
@@ -277,7 +274,7 @@ and nf_stk ?from:(from=0) env sigma c t stk =
Termops.it_mkLambda_or_LetIn_from_no_LetIn b decl_with_letin
in
let branchs = Array.mapi mkbranch bsw in
- let tcase = build_case_type dep p realargs c in
+ let tcase = build_case_type p realargs c in
let ci = sw.sw_annot.Cbytecodes.ci in
nf_stk env sigma (mkCase(ci, p, c, branchs)) tcase stk
| Zproj p :: stk ->
@@ -289,17 +286,17 @@ and nf_stk ?from:(from=0) env sigma c t stk =
and nf_predicate env sigma ind mip params v pT =
match kind (whd_allnolet env pT) with
| LetIn (name,b,t,pT) ->
- let dep,body =
+ let body =
nf_predicate (push_rel (LocalDef (name,b,t)) env) sigma ind mip params v pT in
- dep, mkLetIn (name,b,t,body)
+ mkLetIn (name,b,t,body)
| Prod (name,dom,codom) -> begin
match whd_val v with
| Vfun f ->
let k = nb_rel env in
let vb = reduce_fun k f in
- let dep,body =
+ let body =
nf_predicate (push_rel (LocalAssum (name,dom)) env) sigma ind mip params vb codom in
- dep, mkLambda(name,dom,body)
+ mkLambda(name,dom,body)
| _ -> assert false
end
| _ ->
@@ -313,8 +310,8 @@ and nf_predicate env sigma ind mip params v pT =
let params = if Int.equal n 0 then params else Array.map (lift n) params in
let dom = mkApp(mkIndU ind,Array.append params rargs) in
let body = nf_vtype (push_rel (LocalAssum (name,dom)) env) sigma vb in
- true, mkLambda(name,dom,body)
- | _ -> false, nf_val env sigma v crazy_type
+ mkLambda(name,dom,body)
+ | _ -> nf_val env sigma v crazy_type
and nf_args env sigma vargs ?from:(f=0) t =
let t = ref t in
diff --git a/printing/pputils.ml b/printing/pputils.ml
index c6b8d5022..59e5f68f2 100644
--- a/printing/pputils.ml
+++ b/printing/pputils.ml
@@ -68,7 +68,7 @@ let pr_short_red_flag pr r =
let pr_red_flag pr r =
try pr_short_red_flag pr r
- with complexRedFlags ->
+ with ComplexRedFlag ->
(if r.rBeta then pr_arg str "beta" else mt ()) ++
(if r.rMatch && r.rFix && r.rCofix then pr_arg str "iota" else
(if r.rMatch then pr_arg str "match" else mt ()) ++
diff --git a/proofs/pfedit.mli b/proofs/pfedit.mli
index 7b7973224..e02b5ab95 100644
--- a/proofs/pfedit.mli
+++ b/proofs/pfedit.mli
@@ -85,6 +85,9 @@ val solve : ?with_end_tac:unit Proofview.tactic ->
val by : unit Proofview.tactic -> bool
+(** Option telling if unification heuristics should be used. *)
+val use_unification_heuristics : unit -> bool
+
(** [instantiate_nth_evar_com n c] instantiate the [n]th undefined
existential variable of the current focused proof by [c] or raises a
UserError if no proof is focused or if there is no such [n]th
diff --git a/shell.nix b/shell.nix
new file mode 100644
index 000000000..3201c5050
--- /dev/null
+++ b/shell.nix
@@ -0,0 +1,4 @@
+# Some developers don't want a pinned nix-shell by default.
+# If you want to use the pinned nix-shell or a more sophisticated set of arguments:
+# $ nix-shell default.nix --arg shell true
+import ./default.nix { pkgs = import <nixpkgs> {}; shell = true; }
diff --git a/tactics/elimschemes.ml b/tactics/elimschemes.ml
index 70f73df5c..3b69d9922 100644
--- a/tactics/elimschemes.ml
+++ b/tactics/elimschemes.ml
@@ -44,7 +44,7 @@ let optimize_non_type_induction_scheme kind dep sort _ ind =
mib.mind_nparams_rec
else
mib.mind_nparams in
- let sigma, sort = Evd.fresh_sort_in_family env sigma sort in
+ let sigma, sort = Evd.fresh_sort_in_family sigma sort in
let sigma, t', c' = weaken_sort_scheme env sigma false sort npars c t in
let sigma = Evd.minimize_universes sigma in
(Evarutil.nf_evars_universes sigma c', Evd.evar_universe_context sigma), eff
diff --git a/tactics/eqschemes.ml b/tactics/eqschemes.ml
index ad5239116..ea5ff4a6c 100644
--- a/tactics/eqschemes.ml
+++ b/tactics/eqschemes.ml
@@ -397,7 +397,7 @@ let build_l2r_rew_scheme dep env ind kind =
rel_vect (nrealargs+4) nrealargs;
rel_vect 1 nrealargs;
[|mkRel 1|]]) in
- let s, ctx' = UnivGen.fresh_sort_in_family (Global.env ()) kind in
+ let s, ctx' = UnivGen.fresh_sort_in_family kind in
let ctx = Univ.ContextSet.union ctx ctx' in
let s = mkSort s in
let ci = make_case_info (Global.env()) ind RegularStyle in
@@ -500,7 +500,7 @@ let build_l2r_forward_rew_scheme dep env ind kind =
name_context env ((LocalAssum (Name varH,applied_ind))::realsign) in
let realsign_ind_P n aP =
name_context env ((LocalAssum (Name varH,aP))::realsign_P n) in
- let s, ctx' = UnivGen.fresh_sort_in_family (Global.env ()) kind in
+ let s, ctx' = UnivGen.fresh_sort_in_family kind in
let ctx = Univ.ContextSet.union ctx ctx' in
let s = mkSort s in
let ci = make_case_info (Global.env()) ind RegularStyle in
@@ -578,7 +578,7 @@ let build_r2l_forward_rew_scheme dep env ind kind =
let applied_ind = build_dependent_inductive indu specif in
let realsign_ind =
name_context env ((LocalAssum (Name varH,applied_ind))::realsign) in
- let s, ctx' = UnivGen.fresh_sort_in_family (Global.env ()) kind in
+ let s, ctx' = UnivGen.fresh_sort_in_family kind in
let ctx = Univ.ContextSet.union ctx ctx' in
let s = mkSort s in
let ci = make_case_info (Global.env()) ind RegularStyle in
diff --git a/tactics/hints.ml b/tactics/hints.ml
index d28d4848c..748e0362c 100644
--- a/tactics/hints.ml
+++ b/tactics/hints.ml
@@ -165,12 +165,17 @@ type hint_mode =
| ModeNoHeadEvar (* No evar at the head *)
| ModeOutput (* Anything *)
+type 'a hints_transparency_target =
+ | HintsVariables
+ | HintsConstants
+ | HintsReferences of 'a list
+
type hints_expr =
| HintsResolve of (hint_info_expr * bool * reference_or_constr) list
| HintsResolveIFF of bool * qualid list * int option
| HintsImmediate of reference_or_constr list
| HintsUnfold of qualid list
- | HintsTransparency of qualid list * bool
+ | HintsTransparency of qualid hints_transparency_target * bool
| HintsMode of qualid * hint_mode list
| HintsConstructors of qualid list
| HintsExtern of int * Constrexpr.constr_expr option * Genarg.raw_generic_argument
@@ -1024,15 +1029,19 @@ let add_hint dbname hintlist =
let db' = Hint_db.add_list env sigma hintlist db in
searchtable_add (dbname,db')
-let add_transparency dbname grs b =
+let add_transparency dbname target b =
let db = get_db dbname in
- let st = Hint_db.transparent_state db in
+ let (ids, csts as st) = Hint_db.transparent_state db in
let st' =
- List.fold_left (fun (ids, csts) gr ->
- match gr with
- | EvalConstRef c -> (ids, (if b then Cpred.add else Cpred.remove) c csts)
- | EvalVarRef v -> (if b then Id.Pred.add else Id.Pred.remove) v ids, csts)
- st grs
+ match target with
+ | HintsVariables -> (if b then Id.Pred.full else Id.Pred.empty), csts
+ | HintsConstants -> ids, if b then Cpred.full else Cpred.empty
+ | HintsReferences grs ->
+ List.fold_left (fun (ids, csts) gr ->
+ match gr with
+ | EvalConstRef c -> (ids, (if b then Cpred.add else Cpred.remove) c csts)
+ | EvalVarRef v -> (if b then Id.Pred.add else Id.Pred.remove) v ids, csts)
+ st grs
in searchtable_add (dbname, Hint_db.set_transparent_state db st')
let remove_hint dbname grs =
@@ -1042,7 +1051,7 @@ let remove_hint dbname grs =
type hint_action =
| CreateDB of bool * transparent_state
- | AddTransparency of evaluable_global_reference list * bool
+ | AddTransparency of evaluable_global_reference hints_transparency_target * bool
| AddHints of hint_entry list
| RemoveHints of GlobRef.t list
| AddCut of hints_path
@@ -1132,9 +1141,17 @@ let subst_autohint (subst, obj) =
in
let action = match obj.hint_action with
| CreateDB _ -> obj.hint_action
- | AddTransparency (grs, b) ->
- let grs' = List.Smart.map (subst_evaluable_reference subst) grs in
- if grs == grs' then obj.hint_action else AddTransparency (grs', b)
+ | AddTransparency (target, b) ->
+ let target' =
+ match target with
+ | HintsVariables -> target
+ | HintsConstants -> target
+ | HintsReferences grs ->
+ let grs' = List.Smart.map (subst_evaluable_reference subst) grs in
+ if grs == grs' then target
+ else HintsReferences grs'
+ in
+ if target' == target then obj.hint_action else AddTransparency (target', b)
| AddHints hintlist ->
let hintlist' = List.Smart.map subst_hint hintlist in
if hintlist' == hintlist then obj.hint_action else AddHints hintlist'
@@ -1254,7 +1271,7 @@ type hints_entry =
| HintsImmediateEntry of (hints_path_atom * polymorphic * hint_term) list
| HintsCutEntry of hints_path
| HintsUnfoldEntry of evaluable_global_reference list
- | HintsTransparencyEntry of evaluable_global_reference list * bool
+ | HintsTransparencyEntry of evaluable_global_reference hints_transparency_target * bool
| HintsModeEntry of GlobRef.t * hint_mode list
| HintsExternEntry of hint_info * Genarg.glob_generic_argument
@@ -1357,14 +1374,19 @@ let interp_hints poly =
let info = { info with hint_pattern = Option.map fp info.hint_pattern } in
(info, poly, b, path, gr)
in
+ let ft = function
+ | HintsVariables -> HintsVariables
+ | HintsConstants -> HintsConstants
+ | HintsReferences lhints -> HintsReferences (List.map fr lhints)
+ in
+ let fp = Constrintern.intern_constr_pattern (Global.env()) in
match h with
| HintsResolve lhints -> HintsResolveEntry (List.map fres lhints)
| HintsResolveIFF (l2r, lc, n) ->
HintsResolveEntry (List.map (project_hint ~poly n l2r) lc)
| HintsImmediate lhints -> HintsImmediateEntry (List.map fi lhints)
| HintsUnfold lhints -> HintsUnfoldEntry (List.map fr lhints)
- | HintsTransparency (lhints, b) ->
- HintsTransparencyEntry (List.map fr lhints, b)
+ | HintsTransparency (t, b) -> HintsTransparencyEntry (ft t, b)
| HintsMode (r, l) -> HintsModeEntry (fref r, l)
| HintsConstructors lqid ->
let constr_hints_of_ind qid =
@@ -1379,7 +1401,7 @@ let interp_hints poly =
PathHints [gr], IsGlobRef gr)
in HintsResolveEntry (List.flatten (List.map constr_hints_of_ind lqid))
| HintsExtern (pri, patcom, tacexp) ->
- let pat = Option.map fp patcom in
+ let pat = Option.map (fp sigma) patcom in
let l = match pat with None -> [] | Some (l, _) -> l in
let ltacvars = List.fold_left (fun accu x -> Id.Set.add x accu) Id.Set.empty l in
let env = Genintern.({ (empty_glob_sign env) with ltacvars }) in
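
The new hints_transparency_target type lets a transparency hint act on all section variables, all constants, or an explicit list of references, and add_transparency above folds that choice into the hint database's transparent state. A simplified, self-contained OCaml sketch of the same dispatch; string names and a toy pred type stand in for Coq's Id.Pred and Cpred, so none of these definitions are the real API:

  (* Model Id.Pred / Cpred as characteristic functions: unlike a finite set,
     a predicate can also represent "all names". *)
  type pred = string -> bool

  let full : pred = fun _ -> true
  let empty : pred = fun _ -> false
  let add x (p : pred) : pred = fun y -> String.equal x y || p y
  let remove x (p : pred) : pred = fun y -> (not (String.equal x y)) && p y

  type 'a hints_transparency_target =
    | HintsVariables
    | HintsConstants
    | HintsReferences of 'a list

  type evaluable = Var of string | Const of string

  (* Mirrors the dispatch in add_transparency: [b = true] makes the selected
     references transparent, [b = false] makes them opaque. *)
  let set_transparency (target : evaluable hints_transparency_target) b
      ((vars : pred), (csts : pred)) =
    match target with
    | HintsVariables -> ((if b then full else empty), csts)
    | HintsConstants -> (vars, (if b then full else empty))
    | HintsReferences refs ->
        List.fold_left
          (fun (vars, csts) -> function
             | Var v -> ((if b then add else remove) v vars, csts)
             | Const c -> (vars, (if b then add else remove) c csts))
          (vars, csts) refs

This is the dispatch that the new Hint Variables Transparent/Opaque and Hint Constants Transparent/Opaque commands, added to the grammar in g_proofs.mlg further down in this diff, ultimately go through.
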
diff --git a/tactics/hints.mli b/tactics/hints.mli
index ca18f835a..9bf6c175a 100644
--- a/tactics/hints.mli
+++ b/tactics/hints.mli
@@ -81,12 +81,17 @@ type hint_mode =
| ModeNoHeadEvar (* No evar at the head *)
| ModeOutput (* Anything *)
+type 'a hints_transparency_target =
+ | HintsVariables
+ | HintsConstants
+ | HintsReferences of 'a list
+
type hints_expr =
| HintsResolve of (hint_info_expr * bool * reference_or_constr) list
| HintsResolveIFF of bool * Libnames.qualid list * int option
| HintsImmediate of reference_or_constr list
| HintsUnfold of Libnames.qualid list
- | HintsTransparency of Libnames.qualid list * bool
+ | HintsTransparency of Libnames.qualid hints_transparency_target * bool
| HintsMode of Libnames.qualid * hint_mode list
| HintsConstructors of Libnames.qualid list
| HintsExtern of int * Constrexpr.constr_expr option * Genarg.raw_generic_argument
@@ -173,7 +178,7 @@ type hints_entry =
| HintsImmediateEntry of (hints_path_atom * polymorphic * hint_term) list
| HintsCutEntry of hints_path
| HintsUnfoldEntry of evaluable_global_reference list
- | HintsTransparencyEntry of evaluable_global_reference list * bool
+ | HintsTransparencyEntry of evaluable_global_reference hints_transparency_target * bool
| HintsModeEntry of GlobRef.t * hint_mode list
| HintsExternEntry of hint_info * Genarg.glob_generic_argument
diff --git a/tactics/inv.ml b/tactics/inv.ml
index e467eacd9..43786c8e1 100644
--- a/tactics/inv.ml
+++ b/tactics/inv.ml
@@ -94,7 +94,7 @@ let make_inv_predicate env evd indf realargs id status concl =
| Some concl -> concl (*assumed it's some [x1..xn,H:I(x1..xn)]C*)
| None ->
let sort = get_sort_family_of env !evd concl in
- let sort = Evarutil.evd_comb1 (Evd.fresh_sort_in_family env) evd sort in
+ let sort = Evarutil.evd_comb1 Evd.fresh_sort_in_family evd sort in
let p = make_arity env !evd true indf sort in
let evd',(p,ptyp) = Unification.abstract_list_all env
!evd p concl (realargs@[mkVar id])
diff --git a/tactics/leminv.ml b/tactics/leminv.ml
index 10937322e..caf4c1eca 100644
--- a/tactics/leminv.ml
+++ b/tactics/leminv.ml
@@ -251,7 +251,7 @@ let add_inversion_lemma_exn ~poly na com comsort bool tac =
let env = Global.env () in
let sigma = Evd.from_env env in
let sigma, c = Constrintern.interp_type_evars env sigma com in
- let sigma, sort = Evd.fresh_sort_in_family ~rigid:univ_rigid env sigma comsort in
+ let sigma, sort = Evd.fresh_sort_in_family ~rigid:univ_rigid sigma comsort in
try
add_inversion_lemma ~poly na env sigma c sort bool tac
with
diff --git a/test-suite/bugs/closed/7712.v b/test-suite/bugs/closed/7712.v
new file mode 100644
index 000000000..a4e9697fa
--- /dev/null
+++ b/test-suite/bugs/closed/7712.v
@@ -0,0 +1,4 @@
+(* This used to raise an anomaly *)
+
+Fail Reserved Notation "'[tele_arg' x ';' .. ';' z ]"
+ (format "[tele_arg '[hv' x .. z ']' ]").
diff --git a/test-suite/bugs/closed/8004.v b/test-suite/bugs/closed/8004.v
new file mode 100644
index 000000000..818639997
--- /dev/null
+++ b/test-suite/bugs/closed/8004.v
@@ -0,0 +1,47 @@
+Require Export Coq.Program.Tactics Coq.Classes.SetoidTactics Coq.Classes.CMorphisms .
+
+Set Universe Polymorphism.
+
+Delimit Scope category_theory_scope with category_theory.
+Open Scope category_theory_scope.
+
+Infix "∧" := prod (at level 80, right associativity) : category_theory_scope.
+
+Class Setoid A := {
+ equiv : crelation A;
+ setoid_equiv :> Equivalence equiv
+}.
+
+Notation "f ≈ g" := (equiv f g) (at level 79) : category_theory_scope.
+
+Open Scope list_scope.
+
+Generalizable All Variables.
+
+Fixpoint list_equiv `{Setoid A} (xs ys : list A) : Type :=
+ match xs, ys with
+ | nil, nil => True
+ | x :: xs, y :: ys => x ≈ y ∧ list_equiv xs ys
+ | _, _ => False
+ end.
+
+Axiom proof_admitted : False.
+Tactic Notation "admit" := abstract case proof_admitted.
+
+Program Instance list_equivalence `{Setoid A} : Equivalence list_equiv.
+Next Obligation.
+ repeat intro.
+ induction x; simpl; split; auto.
+ reflexivity.
+Qed.
+Next Obligation.
+ repeat intro.
+ generalize dependent y.
+ induction x, y; simpl; intros; auto.
+ destruct X; split.
+ now symmetry.
+ intuition.
+Qed.
+Next Obligation.
+admit.
+Defined.
diff --git a/test-suite/coq-makefile/timing/precomputed-time-tests/001-correct-diff-sorting-order/time-of-build-both.log.expected b/test-suite/coq-makefile/timing/precomputed-time-tests/001-correct-diff-sorting-order/time-of-build-both.log.expected
index 975e359b7..159e64551 100644
--- a/test-suite/coq-makefile/timing/precomputed-time-tests/001-correct-diff-sorting-order/time-of-build-both.log.expected
+++ b/test-suite/coq-makefile/timing/precomputed-time-tests/001-correct-diff-sorting-order/time-of-build-both.log.expected
@@ -1,6 +1,6 @@
After | File Name | Before || Change | % Change
----------------------------------------------------------------------------------------------
-19m16.05s | Total | 21m25.28s || -2m09.23s | -10.05%
+19m16.04s | Total | 21m25.27s || -2m09.23s | -10.05%
----------------------------------------------------------------------------------------------
4m01.34s | Specific/X25519/C64/ladderstep | 4m59.49s || -0m58.15s | -19.41%
2m48.52s | Specific/solinas32_2e255m765_13limbs/femul | 3m12.95s || -0m24.42s | -12.66%
diff --git a/test-suite/coq-makefile/timing/precomputed-time-tests/002-single-file-sorting/time-of-build-pretty.log.expected b/test-suite/coq-makefile/timing/precomputed-time-tests/002-single-file-sorting/time-of-build-pretty.log.expected
index fdd5ec21d..b9739ddb1 100644
--- a/test-suite/coq-makefile/timing/precomputed-time-tests/002-single-file-sorting/time-of-build-pretty.log.expected
+++ b/test-suite/coq-makefile/timing/precomputed-time-tests/002-single-file-sorting/time-of-build-pretty.log.expected
@@ -1,6 +1,6 @@
Time | File Name
----------------------------------------------------------
-19m16.05s | Total
+19m16.04s | Total
----------------------------------------------------------
4m01.34s | Specific/X25519/C64/ladderstep
3m09.62s | Specific/NISTP256/AMD64/femul
diff --git a/test-suite/output/Notations3.out b/test-suite/output/Notations3.out
index 996af5927..5ab616160 100644
--- a/test-suite/output/Notations3.out
+++ b/test-suite/output/Notations3.out
@@ -241,3 +241,8 @@ Notation
Notation
"( x , y , .. , z )" := pair .. (pair x y) .. z : core_scope
(default interpretation)
+1 subgoal
+
+ ============================
+ ##@%
+ ^^^
diff --git a/test-suite/output/Notations3.v b/test-suite/output/Notations3.v
index 3cf0c913f..876aaa394 100644
--- a/test-suite/output/Notations3.v
+++ b/test-suite/output/Notations3.v
@@ -385,3 +385,17 @@ Module LocateNotations.
Locate "exists".
Locate "( _ , _ , .. , _ )".
End LocateNotations.
+
+Module Issue7731.
+
+Axiom (P : nat -> Prop).
+Parameter (X : nat).
+Notation "## @ E ^^^" := (P E) (at level 20, E at level 1, format "'[ ' ## '/' @ E '/' ^^^ ']'").
+Notation "%" := X.
+
+Set Printing Width 7.
+Goal ## @ % ^^^.
+Show.
+Abort.
+
+End Issue7731.
diff --git a/test-suite/success/Hints.v b/test-suite/success/Hints.v
index 717dc0deb..ebf5b094e 100644
--- a/test-suite/success/Hints.v
+++ b/test-suite/success/Hints.v
@@ -183,3 +183,33 @@ End HintCut.
Goal forall (m : nat), exists n, m = n /\ m = n.
intros m; eexists; split; [trivial | reflexivity].
Qed.
+
+Section HintTransparent.
+
+ Definition fn (x : nat) := S x.
+
+ Create HintDb trans.
+
+ Hint Resolve eq_refl | (_ = _) : trans.
+
+ (* No reduction *)
+ Hint Variables Opaque : trans. Hint Constants Opaque : trans.
+
+ Goal forall x : nat, fn x = S x.
+ Proof.
+ intros.
+ Fail typeclasses eauto with trans.
+ unfold fn.
+ typeclasses eauto with trans.
+ Qed.
+
+ (** Now allow unfolding fn *)
+ Hint Constants Transparent : trans.
+
+ Goal forall x : nat, fn x = S x.
+ Proof.
+ intros.
+ typeclasses eauto with trans.
+ Qed.
+
+End HintTransparent.
diff --git a/test-suite/success/uniform_inductive_parameters.v b/test-suite/success/uniform_inductive_parameters.v
new file mode 100644
index 000000000..42236a531
--- /dev/null
+++ b/test-suite/success/uniform_inductive_parameters.v
@@ -0,0 +1,13 @@
+Set Uniform Inductive Parameters.
+
+Inductive list (A : Type) :=
+ | nil : list
+ | cons : A -> list -> list.
+Check (list : Type -> Type).
+Check (cons : forall A, A -> list A -> list A).
+
+Inductive list2 (A : Type) (A' := prod A A) :=
+ | nil2 : list2
+ | cons2 : A' -> list2 -> list2.
+Check (list2 : Type -> Type).
+Check (cons2 : forall A (A' := prod A A), A' -> list2 A -> list2 A).
diff --git a/theories/Init/Logic.v b/theories/Init/Logic.v
index 10ca9ecc9..817581cb2 100644
--- a/theories/Init/Logic.v
+++ b/theories/Init/Logic.v
@@ -29,6 +29,13 @@ Definition not (A:Prop) := A -> False.
Notation "~ x" := (not x) : type_scope.
+(** Create the "core" hint database, and set its transparent state for
+ variables and constants explicitly. *)
+
+Create HintDb core.
+Hint Variables Opaque : core.
+Hint Constants Opaque : core.
+
Hint Unfold not: core.
(** [and A B], written [A /\ B], is the conjunction of [A] and [B]
diff --git a/theories/Numbers/BinNums.v b/theories/Numbers/BinNums.v
index f8b3d9e1d..d5eb4f268 100644
--- a/theories/Numbers/BinNums.v
+++ b/theories/Numbers/BinNums.v
@@ -12,6 +12,8 @@
Set Implicit Arguments.
+Declare ML Module "positive_syntax_plugin".
+Declare ML Module "n_syntax_plugin".
Declare ML Module "z_syntax_plugin".
(** [positive] is a datatype representing the strictly positive integers
diff --git a/tools/CoqMakefile.in b/tools/CoqMakefile.in
index 8e60d3932..2ebe4aba7 100644
--- a/tools/CoqMakefile.in
+++ b/tools/CoqMakefile.in
@@ -65,20 +65,20 @@ VERBOSE ?=
# Time the Coq process (set to non empty), and how (see default value)
TIMED?=
TIMECMD?=
-# Use /usr/bin/env time on linux, gtime on Mac OS
+# Use command time on Linux, gtime on Mac OS
TIMEFMT?="$* (real: %e, user: %U, sys: %S, mem: %M ko)"
ifneq (,$(TIMED))
-ifeq (0,$(shell /usr/bin/env time -f $(TIMEFMT) true >/dev/null 2>/dev/null; echo $$?))
-STDTIME?=/usr/bin/env time -f $(TIMEFMT)
+ifeq (0,$(shell command time -f $(TIMEFMT) true >/dev/null 2>/dev/null; echo $$?))
+STDTIME?=command time -f $(TIMEFMT)
else
ifeq (0,$(shell gtime -f $(TIMEFMT) true >/dev/null 2>/dev/null; echo $$?))
STDTIME?=gtime -f $(TIMEFMT)
else
-STDTIME?=time
+STDTIME?=command time
endif
endif
else
-STDTIME?=/usr/bin/env time -f $(TIMEFMT)
+STDTIME?=command time -f $(TIMEFMT)
endif
# Coq binaries
@@ -91,9 +91,9 @@ COQDOC ?= "$(COQBIN)coqdoc"
COQMKFILE ?= "$(COQBIN)coq_makefile"
# Timing scripts
-COQMAKE_ONE_TIME_FILE ?= "$(COQLIB)/tools/make-one-time-file.py"
-COQMAKE_BOTH_TIME_FILES ?= "$(COQLIB)/tools/make-both-time-files.py"
-COQMAKE_BOTH_SINGLE_TIMING_FILES ?= "$(COQLIB)/tools/make-both-single-timing-files.py"
+COQMAKE_ONE_TIME_FILE ?= PYTHONIOENCODING=UTF-8 "$(COQLIB)/tools/make-one-time-file.py"
+COQMAKE_BOTH_TIME_FILES ?= PYTHONIOENCODING=UTF-8 "$(COQLIB)/tools/make-both-time-files.py"
+COQMAKE_BOTH_SINGLE_TIMING_FILES ?= PYTHONIOENCODING=UTF-8 "$(COQLIB)/tools/make-both-single-timing-files.py"
BEFORE ?=
AFTER ?=
diff --git a/tools/TimeFileMaker.py b/tools/TimeFileMaker.py
index 0d24332f1..ad001012a 100644
--- a/tools/TimeFileMaker.py
+++ b/tools/TimeFileMaker.py
@@ -1,6 +1,8 @@
-#!/usr/bin/env python
from __future__ import with_statement
+from __future__ import division
+from __future__ import unicode_literals
import os, sys, re
+from io import open
# This script parses the output of `make TIMED=1` into a dictionary
# mapping names of compiled files to the number of minutes and seconds
@@ -8,7 +10,7 @@ import os, sys, re
STRIP_REG = re.compile('^(coq/|contrib/|)(?:theories/|src/)?')
STRIP_REP = r'\1'
-INFINITY = '\xe2\x88\x9e'
+INFINITY = '\u221e'
def parse_args(argv, USAGE, HELP_STRING):
sort_by = 'auto'
@@ -27,7 +29,7 @@ def parse_args(argv, USAGE, HELP_STRING):
def reformat_time_string(time):
seconds, milliseconds = time.split('.')
seconds = int(seconds)
- minutes, seconds = int(seconds / 60), seconds % 60
+ minutes, seconds = divmod(seconds, 60)
return '%dm%02d.%ss' % (minutes, seconds, milliseconds)
def get_times(file_name):
@@ -40,7 +42,7 @@ def get_times(file_name):
if file_name == '-':
lines = sys.stdin.read()
else:
- with open(file_name, 'r') as f:
+ with open(file_name, 'r', encoding="utf-8") as f:
lines = f.read()
reg = re.compile(r'^([^\s]+) \([^\)]*?user: ([0-9\.]+)[^\)]*?\)\s*$', re.MULTILINE)
times = reg.findall(lines)
@@ -60,7 +62,7 @@ def get_single_file_times(file_name):
if file_name == '-':
lines = sys.stdin.read()
else:
- with open(file_name, 'r') as f:
+ with open(file_name, 'r', encoding="utf-8") as f:
lines = f.read()
reg = re.compile(r'^Chars ([0-9]+) - ([0-9]+) ([^ ]+) ([0-9\.]+) secs (.*)$', re.MULTILINE)
times = reg.findall(lines)
@@ -101,7 +103,7 @@ def from_seconds(seconds, signed=False):
'''
sign = ('-' if seconds < 0 else '+') if signed else ''
seconds = abs(seconds)
- minutes = int(seconds) / 60
+ minutes = int(seconds) // 60
seconds -= minutes * 60
full_seconds = int(seconds)
partial_seconds = int(100 * (seconds - full_seconds))
@@ -112,7 +114,8 @@ def sum_times(times, signed=False):
Takes the values of an output from get_times, parses the time
strings, and returns their sum, in the same string format.
'''
- return from_seconds(sum(map(to_seconds, times)), signed=signed)
+ # sort the times before summing because floating point addition is not associative
+ return from_seconds(sum(sorted(map(to_seconds, times))), signed=signed)
def format_percentage(num, signed=True):
sign = ('-' if num < 0 else '+') if signed else ''
@@ -153,8 +156,8 @@ def make_diff_table_string(left_times_dict, right_times_dict,
# set the widths of each of the columns by the longest thing to go in that column
left_sum = sum_times(left_times_dict.values())
right_sum = sum_times(right_times_dict.values())
- left_sum_float = sum(map(to_seconds, left_times_dict.values()))
- right_sum_float = sum(map(to_seconds, right_times_dict.values()))
+ left_sum_float = sum(sorted(map(to_seconds, left_times_dict.values())))
+ right_sum_float = sum(sorted(map(to_seconds, right_times_dict.values())))
diff_sum = from_seconds(left_sum_float - right_sum_float, signed=True)
percent_diff_sum = (format_percentage((left_sum_float - right_sum_float) / right_sum_float)
if right_sum_float > 0 else 'N/A')
@@ -206,5 +209,5 @@ def print_or_write_table(table, files):
print(table)
for file_name in files:
if file_name != '-':
- with open(file_name, 'w') as f:
+ with open(file_name, 'w', encoding="utf-8") as f:
f.write(table)
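
The TimeFileMaker.py hunks are mostly Python 2/3 portability fixes (true division handled via divmod and //, explicit UTF-8 file encodings, unicode literals), plus one behavioural change: per-file times are now summed in sorted order so the reported total no longer depends on the order in which files appear, which matches the updated expected totals earlier in this diff (19m16.05s to 19m16.04s). A quick illustration of the underlying point, written in OCaml (the repository's main implementation language) rather than Python:

  let () =
    (* Floating-point addition is not associative: the same numbers summed in a
       different order can give a different total, hence summing in a canonical
       (sorted) order to make the reported total reproducible. *)
    let left_to_right = (0.1 +. 0.2) +. 0.3 in
    let right_to_left = 0.1 +. (0.2 +. 0.3) in
    assert (left_to_right <> right_to_left);
    (* divmod(seconds, 60) is the usual way to split minutes and seconds;
       the OCaml equivalent: *)
    let minutes, seconds = 125 / 60, 125 mod 60 in
    assert (minutes = 2 && seconds = 5)
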
diff --git a/tools/coq_makefile.ml b/tools/coq_makefile.ml
index 6f11ee397..ad489da82 100644
--- a/tools/coq_makefile.ml
+++ b/tools/coq_makefile.ml
@@ -218,7 +218,7 @@ let windrive s =
else ""
;;
-let generate_conf_coq_config oc args =
+let generate_conf_coq_config oc =
section oc "Coq configuration.";
let src_dirs = Coq_config.all_src_dirs in
Envars.print_config ~prefix_var_name:"COQMF_" oc src_dirs;
@@ -282,7 +282,7 @@ let generate_conf oc project args =
fprintf oc "# %s\n\n" (String.concat " " (List.map quote args));
generate_conf_files oc project;
generate_conf_includes oc project;
- generate_conf_coq_config oc args;
+ generate_conf_coq_config oc;
generate_conf_defs oc project;
generate_conf_doc oc project;
generate_conf_extra_target oc project.extra_targets;
diff --git a/tools/coqdoc/index.ml b/tools/coqdoc/index.ml
index df493fdf7..885324aa0 100644
--- a/tools/coqdoc/index.ml
+++ b/tools/coqdoc/index.ml
@@ -77,7 +77,7 @@ let add_ref m loc m' sp id ty =
let find m l = Hashtbl.find reftable (m, l)
-let find_string m s = let (m,s,t) = Hashtbl.find byidtable s in Ref (m,s,t)
+let find_string s = let (m,s,t) = Hashtbl.find byidtable s in Ref (m,s,t)
(* Coq modules *)
diff --git a/tools/coqdoc/index.mli b/tools/coqdoc/index.mli
index 5cd301389..7c9aad67f 100644
--- a/tools/coqdoc/index.mli
+++ b/tools/coqdoc/index.mli
@@ -41,7 +41,7 @@ type index_entry =
val find : coq_module -> loc -> index_entry
(* Find what data is referred to by some string in some coq module *)
-val find_string : coq_module -> string -> index_entry
+val find_string : string -> index_entry
val add_module : coq_module -> unit
diff --git a/tools/coqdoc/output.ml b/tools/coqdoc/output.ml
index d25227002..c640167ac 100644
--- a/tools/coqdoc/output.ml
+++ b/tools/coqdoc/output.ml
@@ -431,7 +431,7 @@ module Latex = struct
else if !Cdglobals.interpolate && !in_doc (* always a var otherwise *)
then
try
- let tag = Index.find_string (get_module false) s in
+ let tag = Index.find_string s in
reference (translate s) tag
with _ -> Tokens.output_tagged_ident_string s
else Tokens.output_tagged_ident_string s
@@ -706,7 +706,7 @@ module Html = struct
else if is_keyword s then
printf "<span class=\"id\" title=\"keyword\">%s</span>" (translate s)
else if !Cdglobals.interpolate && !in_doc (* always a var otherwise *) then
- try reference (translate s) (Index.find_string (get_module false) s)
+ try reference (translate s) (Index.find_string s)
with Not_found -> Tokens.output_tagged_ident_string s
else
Tokens.output_tagged_ident_string s
diff --git a/tools/make-both-single-timing-files.py b/tools/make-both-single-timing-files.py
index c6af2ff1f..32c52c7a1 100755
--- a/tools/make-both-single-timing-files.py
+++ b/tools/make-both-single-timing-files.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python
import sys
from TimeFileMaker import *
diff --git a/tools/make-both-time-files.py b/tools/make-both-time-files.py
index 643429679..f730a8d6b 100755
--- a/tools/make-both-time-files.py
+++ b/tools/make-both-time-files.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python
import sys
from TimeFileMaker import *
diff --git a/tools/make-one-time-file.py b/tools/make-one-time-file.py
index c9905249e..e66136df9 100755
--- a/tools/make-one-time-file.py
+++ b/tools/make-one-time-file.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python
import sys
from TimeFileMaker import *
diff --git a/toplevel/g_toplevel.ml4 b/toplevel/g_toplevel.mlg
index e3cefe236..53d3eef23 100644
--- a/toplevel/g_toplevel.ml4
+++ b/toplevel/g_toplevel.mlg
@@ -8,6 +8,7 @@
(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
+{
open Pcoq
open Pcoq.Prim
open Vernacexpr
@@ -28,20 +29,26 @@ end
open Toplevel_
-GEXTEND Gram
+}
+
+GRAMMAR EXTEND Gram
GLOBAL: vernac_toplevel;
vernac_toplevel: FIRST
- [ [ IDENT "Drop"; "." -> CAst.make VernacDrop
- | IDENT "Quit"; "." -> CAst.make VernacQuit
+ [ [ IDENT "Drop"; "." -> { CAst.make VernacDrop }
+ | IDENT "Quit"; "." -> { CAst.make VernacQuit }
| IDENT "Backtrack"; n = natural ; m = natural ; p = natural; "." ->
- CAst.make (VernacBacktrack (n,m,p))
+ { CAst.make (VernacBacktrack (n,m,p)) }
| cmd = Pvernac.main_entry ->
- match cmd with
+ { match cmd with
| None -> raise Stm.End_of_input
- | Some (loc,c) -> CAst.make ~loc (VernacControl c)
+ | Some (loc,c) -> CAst.make ~loc (VernacControl c) }
]
]
;
END
+{
+
let parse_toplevel pa = Pcoq.Gram.entry_parse vernac_toplevel pa
+
+}
diff --git a/vernac/classes.ml b/vernac/classes.ml
index 26d105ecf..bf734ab36 100644
--- a/vernac/classes.ml
+++ b/vernac/classes.ml
@@ -42,7 +42,7 @@ let typeclasses_db = "typeclass_instances"
let set_typeclass_transparency c local b =
Hints.add_hints ~local [typeclasses_db]
- (Hints.HintsTransparencyEntry ([c], b))
+ (Hints.HintsTransparencyEntry (Vernacexpr.HintsReferences [c], b))
let _ =
Hook.set Typeclasses.add_instance_hint_hook
diff --git a/vernac/comInductive.ml b/vernac/comInductive.ml
index 0387b32ba..ad1ffa35a 100644
--- a/vernac/comInductive.ml
+++ b/vernac/comInductive.ml
@@ -146,7 +146,6 @@ let interp_cstrs env sigma impls mldata arity ind =
let sigma, (ctyps'', cimpls) =
on_snd List.split @@
List.fold_left_map (fun sigma l ->
- on_snd (on_fst EConstr.Unsafe.to_constr) @@
interp_type_evars_impls env sigma ~impls l) sigma ctyps' in
sigma, (cnames, ctyps'', cimpls)
@@ -327,14 +326,17 @@ let check_param = function
| CLocalPattern {CAst.loc} ->
Loc.raise ?loc (Stream.Error "pattern with quote not allowed here")
-let interp_mutual_inductive (paramsl,indl) notations cum poly prv finite =
+let interp_mutual_inductive_gen env0 (uparamsl,paramsl,indl) notations cum poly prv finite =
check_all_names_different indl;
List.iter check_param paramsl;
- let env0 = Global.env() in
+ if not (List.is_empty uparamsl) && not (List.is_empty notations)
+ then user_err (str "Inductives with uniform parameters may not have attached notations.");
let pl = (List.hd indl).ind_univs in
let sigma, decl = interp_univ_decl_opt env0 pl in
+ let sigma, (uimpls, ((env_uparams, ctx_uparams), useruimpls)) =
+ interp_context_evars env0 sigma uparamsl in
let sigma, (impls, ((env_params, ctx_params), userimpls)) =
- interp_context_evars env0 sigma paramsl
+ interp_context_evars ~impl_env:uimpls env_uparams sigma paramsl
in
let indnames = List.map (fun ind -> ind.ind_name) indl in
@@ -346,15 +348,15 @@ let interp_mutual_inductive (paramsl,indl) notations cum poly prv finite =
let sigma, arities = List.fold_left_map (fun sigma -> interp_ind_arity env_params sigma) sigma indl in
let fullarities = List.map (fun (c, _, _) -> EConstr.it_mkProd_or_LetIn c ctx_params) arities in
- let env_ar = push_types env0 indnames fullarities in
+ let env_ar = push_types env_uparams indnames fullarities in
let env_ar_params = EConstr.push_rel_context ctx_params env_ar in
(* Compute interpretation metadatas *)
let indimpls = List.map (fun (_, _, impls) -> userimpls @
lift_implicits (Context.Rel.nhyps ctx_params) impls) arities in
let arities = List.map pi1 arities and aritypoly = List.map pi2 arities in
- let impls = compute_internalization_env env0 sigma ~impls (Inductive (params,true)) indnames fullarities indimpls in
- let ntn_impls = compute_internalization_env env0 sigma (Inductive (params,true)) indnames fullarities indimpls in
+ let impls = compute_internalization_env env_uparams sigma ~impls (Inductive (params,true)) indnames fullarities indimpls in
+ let ntn_impls = compute_internalization_env env_uparams sigma (Inductive (params,true)) indnames fullarities indimpls in
let mldatas = List.map2 (mk_mltype_data sigma env_params params) arities indnames in
let sigma, constructors =
@@ -365,6 +367,25 @@ let interp_mutual_inductive (paramsl,indl) notations cum poly prv finite =
List.fold_left3_map (fun sigma -> interp_cstrs env_ar_params sigma impls) sigma mldatas arities indl)
() in
+ (* generalize over the uniform parameters *)
+ let nparams = Context.Rel.length ctx_params in
+ let nuparams = Context.Rel.length ctx_uparams in
+ let uargs = Context.Rel.to_extended_vect EConstr.mkRel 0 ctx_uparams in
+ let uparam_subst =
+ List.init (List.length indl) EConstr.(fun i -> mkApp (mkRel (i + 1 + nuparams), uargs))
+ @ List.init nuparams EConstr.(fun i -> mkRel (i + 1)) in
+ let generalize_constructor c = EConstr.Unsafe.to_constr (EConstr.Vars.substnl uparam_subst nparams c) in
+ let constructors = List.map (fun (cnames,ctypes,cimpls) ->
+ (cnames,List.map generalize_constructor ctypes,cimpls))
+ constructors
+ in
+ let ctx_params = ctx_params @ ctx_uparams in
+ let userimpls = useruimpls @ (lift_implicits (Context.Rel.nhyps ctx_uparams) userimpls) in
+ let indimpls = List.map (fun iimpl -> useruimpls @ (lift_implicits (Context.Rel.nhyps ctx_uparams) iimpl)) indimpls in
+ let fullarities = List.map (fun c -> EConstr.it_mkProd_or_LetIn c ctx_uparams) fullarities in
+ let env_ar = push_types env0 indnames fullarities in
+ let env_ar_params = EConstr.push_rel_context ctx_params env_ar in
+
(* Try further to solve evars, and instantiate them *)
let sigma = solve_remaining_evars all_and_fail_flags env_params sigma (Evd.from_env env_params) in
(* Compute renewed arities *)
@@ -423,6 +444,9 @@ let interp_mutual_inductive (paramsl,indl) notations cum poly prv finite =
InferCumulativity.infer_inductive env_ar mind_ent
else mind_ent), Evd.universe_binders sigma, impls
+let interp_mutual_inductive (paramsl,indl) notations cum poly prv finite =
+ interp_mutual_inductive_gen (Global.env()) ([],paramsl,indl) notations cum poly prv finite
+
(* Very syntactical equality *)
let eq_local_binders bl1 bl2 =
List.equal local_binder_eq bl1 bl2
@@ -505,10 +529,15 @@ type one_inductive_impls =
Impargs.manual_explicitation list (* for inds *)*
Impargs.manual_explicitation list list (* for constrs *)
-let do_mutual_inductive indl cum poly prv finite =
- let indl,coes,ntns = extract_mutual_inductive_declaration_components indl in
+type uniform_inductive_flag =
+ | UniformParameters
+ | NonUniformParameters
+
+let do_mutual_inductive indl cum poly prv ~uniform finite =
+ let (params,indl),coes,ntns = extract_mutual_inductive_declaration_components indl in
(* Interpret the types *)
- let mie,pl,impls = interp_mutual_inductive indl ntns cum poly prv finite in
+ let indl = match uniform with UniformParameters -> (params, [], indl) | NonUniformParameters -> ([], params, indl) in
+ let mie,pl,impls = interp_mutual_inductive_gen (Global.env()) indl ntns cum poly prv finite in
(* Declare the mutual inductive block with its associated schemes *)
ignore (declare_mutual_inductive_with_eliminations mie pl impls);
(* Declare the possible notations of inductive types *)
diff --git a/vernac/comInductive.mli b/vernac/comInductive.mli
index 7ae8e8f71..4e30ed7de 100644
--- a/vernac/comInductive.mli
+++ b/vernac/comInductive.mli
@@ -19,9 +19,14 @@ open Decl_kinds
(** Entry points for the vernacular commands Inductive and CoInductive *)
+type uniform_inductive_flag =
+ | UniformParameters
+ | NonUniformParameters
+
val do_mutual_inductive :
(one_inductive_expr * decl_notation list) list -> cumulative_inductive_flag ->
- polymorphic -> private_flag -> Declarations.recursivity_kind -> unit
+ polymorphic -> private_flag -> uniform:uniform_inductive_flag ->
+ Declarations.recursivity_kind -> unit
(************************************************************************)
(** Internal API *)
diff --git a/vernac/egramcoq.ml b/vernac/egramcoq.ml
index cc9be7b0e..48f225f97 100644
--- a/vernac/egramcoq.ml
+++ b/vernac/egramcoq.ml
@@ -274,7 +274,7 @@ let make_sep_rules = function
Arules [r]
let symbol_of_target : type s. _ -> _ -> _ -> s target -> (s, s) symbol = fun p assoc from forpat ->
- if is_binder_level from p then Aentryl (target_entry forpat, 200)
+ if is_binder_level from p then Aentryl (target_entry forpat, "200")
else if is_self from p then Aself
else
let g = target_entry forpat in
@@ -282,7 +282,7 @@ let symbol_of_target : type s. _ -> _ -> _ -> s target -> (s, s) symbol = fun p
begin match lev with
| None -> Aentry g
| Some None -> Anext
- | Some (Some (lev, cur)) -> Aentryl (g, lev)
+ | Some (Some (lev, cur)) -> Aentryl (g, string_of_int lev)
end
let symbol_of_entry : type s r. _ -> _ -> (s, r) entry -> (s, r) symbol = fun assoc from typ -> match typ with
@@ -291,7 +291,7 @@ let symbol_of_entry : type s r. _ -> _ -> (s, r) entry -> (s, r) symbol = fun as
Alist1 (symbol_of_target typ' assoc from forpat)
| TTConstrList (typ', tkl, forpat) ->
Alist1sep (symbol_of_target typ' assoc from forpat, make_sep_rules tkl)
-| TTPattern p -> Aentryl (Constr.pattern, p)
+| TTPattern p -> Aentryl (Constr.pattern, string_of_int p)
| TTClosedBinderList [] -> Alist1 (Aentry Constr.binder)
| TTClosedBinderList tkl -> Alist1sep (Aentry Constr.binder, make_sep_rules tkl)
| TTName -> Aentry Prim.name
@@ -428,6 +428,7 @@ let prepare_empty_levels forpat (pos,p4assoc,name,reinit) =
let rec pure_sublevels : type a b c. int option -> (a, b, c) rule -> int list = fun level r -> match r with
| Stop -> []
| Next (rem, Aentryl (_, i)) ->
+ let i = int_of_string i in
let rem = pure_sublevels level rem in
begin match level with
| Some j when Int.equal i j -> rem
diff --git a/vernac/g_proofs.ml4 b/vernac/g_proofs.ml4
deleted file mode 100644
index 4b11276af..000000000
--- a/vernac/g_proofs.ml4
+++ /dev/null
@@ -1,131 +0,0 @@
-(************************************************************************)
-(* * The Coq Proof Assistant / The Coq Development Team *)
-(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
-(* <O___,, * (see CREDITS file for the list of authors) *)
-(* \VV/ **************************************************************)
-(* // * This file is distributed under the terms of the *)
-(* * GNU Lesser General Public License Version 2.1 *)
-(* * (see LICENSE file for the text of the license) *)
-(************************************************************************)
-
-open Glob_term
-open Constrexpr
-open Vernacexpr
-open Proof_global
-
-open Pcoq
-open Pcoq.Prim
-open Pcoq.Constr
-open Pvernac.Vernac_
-
-let thm_token = G_vernac.thm_token
-
-let hint = Gram.entry_create "hint"
-
-let warn_deprecated_focus =
- CWarnings.create ~name:"deprecated-focus" ~category:"deprecated"
- (fun () ->
- Pp.strbrk
- "The Focus command is deprecated; use bullets or focusing brackets instead"
- )
-
-let warn_deprecated_focus_n n =
- CWarnings.create ~name:"deprecated-focus" ~category:"deprecated"
- (fun () ->
- Pp.(str "The Focus command is deprecated;" ++ spc ()
- ++ str "use '" ++ int n ++ str ": {' instead")
- )
-
-let warn_deprecated_unfocus =
- CWarnings.create ~name:"deprecated-unfocus" ~category:"deprecated"
- (fun () -> Pp.strbrk "The Unfocus command is deprecated")
-
-(* Proof commands *)
-GEXTEND Gram
- GLOBAL: hint command;
-
- opt_hintbases:
- [ [ -> []
- | ":"; l = LIST1 [id = IDENT -> id ] -> l ] ]
- ;
- command:
- [ [ IDENT "Goal"; c = lconstr ->
- VernacDefinition (Decl_kinds.(NoDischarge, Definition), ((CAst.make ~loc:!@loc Names.Anonymous), None), ProveBody ([], c))
- | IDENT "Proof" -> VernacProof (None,None)
- | IDENT "Proof" ; IDENT "Mode" ; mn = string -> VernacProofMode mn
- | IDENT "Proof"; c = lconstr -> VernacExactProof c
- | IDENT "Abort" -> VernacAbort None
- | IDENT "Abort"; IDENT "All" -> VernacAbortAll
- | IDENT "Abort"; id = identref -> VernacAbort (Some id)
- | IDENT "Existential"; n = natural; c = constr_body ->
- VernacSolveExistential (n,c)
- | IDENT "Admitted" -> VernacEndProof Admitted
- | IDENT "Qed" -> VernacEndProof (Proved (Opaque,None))
- | IDENT "Save"; id = identref ->
- VernacEndProof (Proved (Opaque, Some id))
- | IDENT "Defined" -> VernacEndProof (Proved (Transparent,None))
- | IDENT "Defined"; id=identref ->
- VernacEndProof (Proved (Transparent,Some id))
- | IDENT "Restart" -> VernacRestart
- | IDENT "Undo" -> VernacUndo 1
- | IDENT "Undo"; n = natural -> VernacUndo n
- | IDENT "Undo"; IDENT "To"; n = natural -> VernacUndoTo n
- | IDENT "Focus" ->
- warn_deprecated_focus ~loc:!@loc ();
- VernacFocus None
- | IDENT "Focus"; n = natural ->
- warn_deprecated_focus_n n ~loc:!@loc ();
- VernacFocus (Some n)
- | IDENT "Unfocus" ->
- warn_deprecated_unfocus ~loc:!@loc ();
- VernacUnfocus
- | IDENT "Unfocused" -> VernacUnfocused
- | IDENT "Show" -> VernacShow (ShowGoal OpenSubgoals)
- | IDENT "Show"; n = natural -> VernacShow (ShowGoal (NthGoal n))
- | IDENT "Show"; id = ident -> VernacShow (ShowGoal (GoalId id))
- | IDENT "Show"; IDENT "Script" -> VernacShow ShowScript
- | IDENT "Show"; IDENT "Existentials" -> VernacShow ShowExistentials
- | IDENT "Show"; IDENT "Universes" -> VernacShow ShowUniverses
- | IDENT "Show"; IDENT "Conjectures" -> VernacShow ShowProofNames
- | IDENT "Show"; IDENT "Proof" -> VernacShow ShowProof
- | IDENT "Show"; IDENT "Intro" -> VernacShow (ShowIntros false)
- | IDENT "Show"; IDENT "Intros" -> VernacShow (ShowIntros true)
- | IDENT "Show"; IDENT "Match"; id = reference -> VernacShow (ShowMatch id)
- | IDENT "Guarded" -> VernacCheckGuard
- (* Hints for Auto and EAuto *)
- | IDENT "Create"; IDENT "HintDb" ;
- id = IDENT ; b = [ "discriminated" -> true | -> false ] ->
- VernacCreateHintDb (id, b)
- | IDENT "Remove"; IDENT "Hints"; ids = LIST1 global; dbnames = opt_hintbases ->
- VernacRemoveHints (dbnames, ids)
- | IDENT "Hint"; h = hint; dbnames = opt_hintbases ->
- VernacHints (dbnames, h)
- ] ];
- reference_or_constr:
- [ [ r = global -> HintsReference r
- | c = constr -> HintsConstr c ] ]
- ;
- hint:
- [ [ IDENT "Resolve"; lc = LIST1 reference_or_constr; info = hint_info ->
- HintsResolve (List.map (fun x -> (info, true, x)) lc)
- | IDENT "Resolve"; "->"; lc = LIST1 global; n = OPT natural ->
- HintsResolveIFF (true, lc, n)
- | IDENT "Resolve"; "<-"; lc = LIST1 global; n = OPT natural ->
- HintsResolveIFF (false, lc, n)
- | IDENT "Immediate"; lc = LIST1 reference_or_constr -> HintsImmediate lc
- | IDENT "Transparent"; lc = LIST1 global -> HintsTransparency (lc, true)
- | IDENT "Opaque"; lc = LIST1 global -> HintsTransparency (lc, false)
- | IDENT "Mode"; l = global; m = mode -> HintsMode (l, m)
- | IDENT "Unfold"; lqid = LIST1 global -> HintsUnfold lqid
- | IDENT "Constructors"; lc = LIST1 global -> HintsConstructors lc ] ]
- ;
- constr_body:
- [ [ ":="; c = lconstr -> c
- | ":"; t = lconstr; ":="; c = lconstr -> CAst.make ~loc:!@loc @@ CCast(c,CastConv t) ] ]
- ;
- mode:
- [ [ l = LIST1 [ "+" -> ModeInput
- | "!" -> ModeNoHeadEvar
- | "-" -> ModeOutput ] -> l ] ]
- ;
-END
diff --git a/vernac/g_proofs.mlg b/vernac/g_proofs.mlg
new file mode 100644
index 000000000..cccdbfc91
--- /dev/null
+++ b/vernac/g_proofs.mlg
@@ -0,0 +1,139 @@
+(************************************************************************)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
+(* \VV/ **************************************************************)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
+(************************************************************************)
+
+{
+
+open Glob_term
+open Constrexpr
+open Vernacexpr
+open Proof_global
+
+open Pcoq
+open Pcoq.Prim
+open Pcoq.Constr
+open Pvernac.Vernac_
+
+let thm_token = G_vernac.thm_token
+
+let hint = Gram.entry_create "hint"
+
+let warn_deprecated_focus =
+ CWarnings.create ~name:"deprecated-focus" ~category:"deprecated"
+ (fun () ->
+ Pp.strbrk
+ "The Focus command is deprecated; use bullets or focusing brackets instead"
+ )
+
+let warn_deprecated_focus_n n =
+ CWarnings.create ~name:"deprecated-focus" ~category:"deprecated"
+ (fun () ->
+ Pp.(str "The Focus command is deprecated;" ++ spc ()
+ ++ str "use '" ++ int n ++ str ": {' instead")
+ )
+
+let warn_deprecated_unfocus =
+ CWarnings.create ~name:"deprecated-unfocus" ~category:"deprecated"
+ (fun () -> Pp.strbrk "The Unfocus command is deprecated")
+
+}
+
+(* Proof commands *)
+GRAMMAR EXTEND Gram
+ GLOBAL: hint command;
+
+ opt_hintbases:
+ [ [ -> { [] }
+ | ":"; l = LIST1 [id = IDENT -> { id } ] -> { l } ] ]
+ ;
+ command:
+ [ [ IDENT "Goal"; c = lconstr ->
+ { VernacDefinition (Decl_kinds.(NoDischarge, Definition), ((CAst.make ~loc Names.Anonymous), None), ProveBody ([], c)) }
+ | IDENT "Proof" -> { VernacProof (None,None) }
+ | IDENT "Proof" ; IDENT "Mode" ; mn = string -> { VernacProofMode mn }
+ | IDENT "Proof"; c = lconstr -> { VernacExactProof c }
+ | IDENT "Abort" -> { VernacAbort None }
+ | IDENT "Abort"; IDENT "All" -> { VernacAbortAll }
+ | IDENT "Abort"; id = identref -> { VernacAbort (Some id) }
+ | IDENT "Existential"; n = natural; c = constr_body ->
+ { VernacSolveExistential (n,c) }
+ | IDENT "Admitted" -> { VernacEndProof Admitted }
+ | IDENT "Qed" -> { VernacEndProof (Proved (Opaque,None)) }
+ | IDENT "Save"; id = identref ->
+ { VernacEndProof (Proved (Opaque, Some id)) }
+ | IDENT "Defined" -> { VernacEndProof (Proved (Transparent,None)) }
+ | IDENT "Defined"; id=identref ->
+ { VernacEndProof (Proved (Transparent,Some id)) }
+ | IDENT "Restart" -> { VernacRestart }
+ | IDENT "Undo" -> { VernacUndo 1 }
+ | IDENT "Undo"; n = natural -> { VernacUndo n }
+ | IDENT "Undo"; IDENT "To"; n = natural -> { VernacUndoTo n }
+ | IDENT "Focus" ->
+ { warn_deprecated_focus ~loc ();
+ VernacFocus None }
+ | IDENT "Focus"; n = natural ->
+ { warn_deprecated_focus_n n ~loc ();
+ VernacFocus (Some n) }
+ | IDENT "Unfocus" ->
+ { warn_deprecated_unfocus ~loc ();
+ VernacUnfocus }
+ | IDENT "Unfocused" -> { VernacUnfocused }
+ | IDENT "Show" -> { VernacShow (ShowGoal OpenSubgoals) }
+ | IDENT "Show"; n = natural -> { VernacShow (ShowGoal (NthGoal n)) }
+ | IDENT "Show"; id = ident -> { VernacShow (ShowGoal (GoalId id)) }
+ | IDENT "Show"; IDENT "Script" -> { VernacShow ShowScript }
+ | IDENT "Show"; IDENT "Existentials" -> { VernacShow ShowExistentials }
+ | IDENT "Show"; IDENT "Universes" -> { VernacShow ShowUniverses }
+ | IDENT "Show"; IDENT "Conjectures" -> { VernacShow ShowProofNames }
+ | IDENT "Show"; IDENT "Proof" -> { VernacShow ShowProof }
+ | IDENT "Show"; IDENT "Intro" -> { VernacShow (ShowIntros false) }
+ | IDENT "Show"; IDENT "Intros" -> { VernacShow (ShowIntros true) }
+ | IDENT "Show"; IDENT "Match"; id = reference -> { VernacShow (ShowMatch id) }
+ | IDENT "Guarded" -> { VernacCheckGuard }
+ (* Hints for Auto and EAuto *)
+ | IDENT "Create"; IDENT "HintDb" ;
+ id = IDENT ; b = [ "discriminated" -> { true } | -> { false } ] ->
+ { VernacCreateHintDb (id, b) }
+ | IDENT "Remove"; IDENT "Hints"; ids = LIST1 global; dbnames = opt_hintbases ->
+ { VernacRemoveHints (dbnames, ids) }
+ | IDENT "Hint"; h = hint; dbnames = opt_hintbases ->
+ { VernacHints (dbnames, h) }
+ ] ];
+ reference_or_constr:
+ [ [ r = global -> { HintsReference r }
+ | c = constr -> { HintsConstr c } ] ]
+ ;
+ hint:
+ [ [ IDENT "Resolve"; lc = LIST1 reference_or_constr; info = hint_info ->
+ { HintsResolve (List.map (fun x -> (info, true, x)) lc) }
+ | IDENT "Resolve"; "->"; lc = LIST1 global; n = OPT natural ->
+ { HintsResolveIFF (true, lc, n) }
+ | IDENT "Resolve"; "<-"; lc = LIST1 global; n = OPT natural ->
+ { HintsResolveIFF (false, lc, n) }
+ | IDENT "Immediate"; lc = LIST1 reference_or_constr -> { HintsImmediate lc }
+ | IDENT "Variables"; IDENT "Transparent" -> { HintsTransparency (HintsVariables, true) }
+ | IDENT "Variables"; IDENT "Opaque" -> { HintsTransparency (HintsVariables, false) }
+ | IDENT "Constants"; IDENT "Transparent" -> { HintsTransparency (HintsConstants, true) }
+ | IDENT "Constants"; IDENT "Opaque" -> { HintsTransparency (HintsConstants, false) }
+ | IDENT "Transparent"; lc = LIST1 global -> { HintsTransparency (HintsReferences lc, true) }
+ | IDENT "Opaque"; lc = LIST1 global -> { HintsTransparency (HintsReferences lc, false) }
+ | IDENT "Mode"; l = global; m = mode -> { HintsMode (l, m) }
+ | IDENT "Unfold"; lqid = LIST1 global -> { HintsUnfold lqid }
+ | IDENT "Constructors"; lc = LIST1 global -> { HintsConstructors lc } ] ]
+ ;
+ constr_body:
+ [ [ ":="; c = lconstr -> { c }
+ | ":"; t = lconstr; ":="; c = lconstr -> { CAst.make ~loc @@ CCast(c,CastConv t) } ] ]
+ ;
+ mode:
+ [ [ l = LIST1 [ "+" -> { ModeInput }
+ | "!" -> { ModeNoHeadEvar }
+ | "-" -> { ModeOutput } ] -> { l } ] ]
+ ;
+END
diff --git a/vernac/g_vernac.ml4 b/vernac/g_vernac.ml4
deleted file mode 100644
index 16934fc86..000000000
--- a/vernac/g_vernac.ml4
+++ /dev/null
@@ -1,1156 +0,0 @@
-(************************************************************************)
-(* * The Coq Proof Assistant / The Coq Development Team *)
-(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
-(* <O___,, * (see CREDITS file for the list of authors) *)
-(* \VV/ **************************************************************)
-(* // * This file is distributed under the terms of the *)
-(* * GNU Lesser General Public License Version 2.1 *)
-(* * (see LICENSE file for the text of the license) *)
-(************************************************************************)
-
-open Pp
-open CErrors
-open Util
-open Names
-open Glob_term
-open Vernacexpr
-open Constrexpr
-open Constrexpr_ops
-open Extend
-open Decl_kinds
-open Declaremods
-open Declarations
-open Namegen
-open Tok (* necessary for camlp5 *)
-
-open Pcoq
-open Pcoq.Prim
-open Pcoq.Constr
-open Pcoq.Module
-open Pvernac.Vernac_
-
-let vernac_kw = [ ";"; ","; ">->"; ":<"; "<:"; "where"; "at" ]
-let _ = List.iter CLexer.add_keyword vernac_kw
-
-(* Rem: do not join the different GEXTEND into one, it breaks native *)
-(* compilation on PowerPC and Sun architectures *)
-
-let query_command = Gram.entry_create "vernac:query_command"
-
-let subprf = Gram.entry_create "vernac:subprf"
-
-let class_rawexpr = Gram.entry_create "vernac:class_rawexpr"
-let thm_token = Gram.entry_create "vernac:thm_token"
-let def_body = Gram.entry_create "vernac:def_body"
-let decl_notation = Gram.entry_create "vernac:decl_notation"
-let record_field = Gram.entry_create "vernac:record_field"
-let of_type_with_opt_coercion = Gram.entry_create "vernac:of_type_with_opt_coercion"
-let instance_name = Gram.entry_create "vernac:instance_name"
-let section_subset_expr = Gram.entry_create "vernac:section_subset_expr"
-
-let make_bullet s =
- let open Proof_bullet in
- let n = String.length s in
- match s.[0] with
- | '-' -> Dash n
- | '+' -> Plus n
- | '*' -> Star n
- | _ -> assert false
-
-let parse_compat_version ?(allow_old = true) = let open Flags in function
- | "8.8" -> Current
- | "8.7" -> V8_7
- | "8.6" -> V8_6
- | ("8.5" | "8.4" | "8.3" | "8.2" | "8.1" | "8.0") as s ->
- CErrors.user_err ~hdr:"get_compat_version"
- Pp.(str "Compatibility with version " ++ str s ++ str " not supported.")
- | s ->
- CErrors.user_err ~hdr:"get_compat_version"
- Pp.(str "Unknown compatibility version \"" ++ str s ++ str "\".")
-
-GEXTEND Gram
- GLOBAL: vernac_control gallina_ext noedit_mode subprf;
- vernac_control: FIRST
- [ [ IDENT "Time"; c = located_vernac -> VernacTime (false,c)
- | IDENT "Redirect"; s = ne_string; c = located_vernac -> VernacRedirect (s, c)
- | IDENT "Timeout"; n = natural; v = vernac_control -> VernacTimeout(n,v)
- | IDENT "Fail"; v = vernac_control -> VernacFail v
- | (f, v) = vernac -> VernacExpr(f, v) ]
- ]
- ;
- vernac:
- [ [ IDENT "Local"; (f, v) = vernac_poly -> (VernacLocal true :: f, v)
- | IDENT "Global"; (f, v) = vernac_poly -> (VernacLocal false :: f, v)
-
- | v = vernac_poly -> v ]
- ]
- ;
- vernac_poly:
- [ [ IDENT "Polymorphic"; (f, v) = vernac_aux -> (VernacPolymorphic true :: f, v)
- | IDENT "Monomorphic"; (f, v) = vernac_aux -> (VernacPolymorphic false :: f, v)
- | v = vernac_aux -> v ]
- ]
- ;
- vernac_aux:
- (* Better to parse "." here: in case of failure (e.g. in coerce_to_var), *)
- (* "." is still in the stream and discard_to_dot works correctly *)
- [ [ IDENT "Program"; g = gallina; "." -> ([VernacProgram], g)
- | IDENT "Program"; g = gallina_ext; "." -> ([VernacProgram], g)
- | g = gallina; "." -> ([], g)
- | g = gallina_ext; "." -> ([], g)
- | c = command; "." -> ([], c)
- | c = syntax; "." -> ([], c)
- | c = subprf -> ([], c)
- ] ]
- ;
- vernac_aux: LAST
- [ [ prfcom = command_entry -> ([], prfcom) ] ]
- ;
- noedit_mode:
- [ [ c = query_command -> c None] ]
- ;
-
- subprf:
- [ [ s = BULLET -> VernacBullet (make_bullet s)
- | "{" -> VernacSubproof None
- | "}" -> VernacEndSubproof
- ] ]
- ;
-
- located_vernac:
- [ [ v = vernac_control -> CAst.make ~loc:!@loc v ] ]
- ;
-END
-
-let warn_plural_command =
- CWarnings.create ~name:"plural-command" ~category:"pedantic" ~default:CWarnings.Disabled
- (fun kwd -> strbrk (Printf.sprintf "Command \"%s\" expects more than one assumption." kwd))
-
-let test_plural_form loc kwd = function
- | [(_,([_],_))] ->
- warn_plural_command ~loc:!@loc kwd
- | _ -> ()
-
-let test_plural_form_types loc kwd = function
- | [([_],_)] ->
- warn_plural_command ~loc:!@loc kwd
- | _ -> ()
-
-let lname_of_lident : lident -> lname =
- CAst.map (fun s -> Name s)
-
-let name_of_ident_decl : ident_decl -> name_decl =
- on_fst lname_of_lident
-
-(* Gallina declarations *)
-GEXTEND Gram
- GLOBAL: gallina gallina_ext thm_token def_body of_type_with_opt_coercion
- record_field decl_notation rec_definition ident_decl univ_decl;
-
- gallina:
- (* Definition, Theorem, Variable, Axiom, ... *)
- [ [ thm = thm_token; id = ident_decl; bl = binders; ":"; c = lconstr;
- l = LIST0
- [ "with"; id = ident_decl; bl = binders; ":"; c = lconstr ->
- (id,(bl,c)) ] ->
- VernacStartTheoremProof (thm, (id,(bl,c))::l)
- | stre = assumption_token; nl = inline; bl = assum_list ->
- VernacAssumption (stre, nl, bl)
- | (kwd,stre) = assumptions_token; nl = inline; bl = assum_list ->
- test_plural_form loc kwd bl;
- VernacAssumption (stre, nl, bl)
- | d = def_token; id = ident_decl; b = def_body ->
- VernacDefinition (d, name_of_ident_decl id, b)
- | IDENT "Let"; id = identref; b = def_body ->
- VernacDefinition ((DoDischarge, Let), (lname_of_lident id, None), b)
- (* Gallina inductive declarations *)
- | cum = OPT cumulativity_token; priv = private_token; f = finite_token;
- indl = LIST1 inductive_definition SEP "with" ->
- let (k,f) = f in
- let indl=List.map (fun ((a,b,c,d),e) -> ((a,b,c,k,d),e)) indl in
- VernacInductive (cum, priv,f,indl)
- | "Fixpoint"; recs = LIST1 rec_definition SEP "with" ->
- VernacFixpoint (NoDischarge, recs)
- | IDENT "Let"; "Fixpoint"; recs = LIST1 rec_definition SEP "with" ->
- VernacFixpoint (DoDischarge, recs)
- | "CoFixpoint"; corecs = LIST1 corec_definition SEP "with" ->
- VernacCoFixpoint (NoDischarge, corecs)
- | IDENT "Let"; "CoFixpoint"; corecs = LIST1 corec_definition SEP "with" ->
- VernacCoFixpoint (DoDischarge, corecs)
- | IDENT "Scheme"; l = LIST1 scheme SEP "with" -> VernacScheme l
- | IDENT "Combined"; IDENT "Scheme"; id = identref; IDENT "from";
- l = LIST1 identref SEP "," -> VernacCombinedScheme (id, l)
- | IDENT "Register"; IDENT "Inline"; id = identref ->
- VernacRegister(id, RegisterInline)
- | IDENT "Universe"; l = LIST1 identref -> VernacUniverse l
- | IDENT "Universes"; l = LIST1 identref -> VernacUniverse l
- | IDENT "Constraint"; l = LIST1 univ_constraint SEP "," -> VernacConstraint l
- ] ]
- ;
-
- thm_token:
- [ [ "Theorem" -> Theorem
- | IDENT "Lemma" -> Lemma
- | IDENT "Fact" -> Fact
- | IDENT "Remark" -> Remark
- | IDENT "Corollary" -> Corollary
- | IDENT "Proposition" -> Proposition
- | IDENT "Property" -> Property ] ]
- ;
- def_token:
- [ [ "Definition" -> (NoDischarge,Definition)
- | IDENT "Example" -> (NoDischarge,Example)
- | IDENT "SubClass" -> (NoDischarge,SubClass) ] ]
- ;
- assumption_token:
- [ [ "Hypothesis" -> (DoDischarge, Logical)
- | "Variable" -> (DoDischarge, Definitional)
- | "Axiom" -> (NoDischarge, Logical)
- | "Parameter" -> (NoDischarge, Definitional)
- | IDENT "Conjecture" -> (NoDischarge, Conjectural) ] ]
- ;
- assumptions_token:
- [ [ IDENT "Hypotheses" -> ("Hypotheses", (DoDischarge, Logical))
- | IDENT "Variables" -> ("Variables", (DoDischarge, Definitional))
- | IDENT "Axioms" -> ("Axioms", (NoDischarge, Logical))
- | IDENT "Parameters" -> ("Parameters", (NoDischarge, Definitional))
- | IDENT "Conjectures" -> ("Conjectures", (NoDischarge, Conjectural)) ] ]
- ;
- inline:
- [ [ IDENT "Inline"; "("; i = INT; ")" -> InlineAt (int_of_string i)
- | IDENT "Inline" -> DefaultInline
- | -> NoInline] ]
- ;
- univ_constraint:
- [ [ l = universe_level; ord = [ "<" -> Univ.Lt | "=" -> Univ.Eq | "<=" -> Univ.Le ];
- r = universe_level -> (l, ord, r) ] ]
- ;
- univ_decl :
- [ [ "@{" ; l = LIST0 identref; ext = [ "+" -> true | -> false ];
- cs = [ "|"; l' = LIST0 univ_constraint SEP ",";
- ext = [ "+" -> true | -> false ]; "}" -> (l',ext)
- | ext = [ "}" -> true | "|}" -> false ] -> ([], ext) ]
- ->
- let open UState in
- { univdecl_instance = l;
- univdecl_extensible_instance = ext;
- univdecl_constraints = fst cs;
- univdecl_extensible_constraints = snd cs }
- ] ]
- ;
- ident_decl:
- [ [ i = identref; l = OPT univ_decl -> (i, l)
- ] ]
- ;
- finite_token:
- [ [ IDENT "Inductive" -> (Inductive_kw,Finite)
- | IDENT "CoInductive" -> (CoInductive,CoFinite)
- | IDENT "Variant" -> (Variant,BiFinite)
- | IDENT "Record" -> (Record,BiFinite)
- | IDENT "Structure" -> (Structure,BiFinite)
- | IDENT "Class" -> (Class true,BiFinite) ] ]
- ;
- cumulativity_token:
- [ [ IDENT "Cumulative" -> VernacCumulative
- | IDENT "NonCumulative" -> VernacNonCumulative ] ]
- ;
- private_token:
- [ [ IDENT "Private" -> true | -> false ] ]
- ;
- (* Simple definitions *)
- def_body:
- [ [ bl = binders; ":="; red = reduce; c = lconstr ->
- if List.exists (function CLocalPattern _ -> true | _ -> false) bl
- then
- (* FIXME: "red" will be applied to types in bl and Cast with remain *)
- let c = mkCLambdaN ~loc:!@loc bl c in
- DefineBody ([], red, c, None)
- else
- (match c with
- | { CAst.v = CCast(c, CastConv t) } -> DefineBody (bl, red, c, Some t)
- | _ -> DefineBody (bl, red, c, None))
- | bl = binders; ":"; t = lconstr; ":="; red = reduce; c = lconstr ->
- let ((bl, c), tyo) =
- if List.exists (function CLocalPattern _ -> true | _ -> false) bl
- then
- (* FIXME: "red" will be applied to types in bl and Cast with remain *)
- let c = CAst.make ~loc:!@loc @@ CCast (c, CastConv t) in
- (([],mkCLambdaN ~loc:!@loc bl c), None)
- else ((bl, c), Some t)
- in
- DefineBody (bl, red, c, tyo)
- | bl = binders; ":"; t = lconstr ->
- ProveBody (bl, t) ] ]
- ;
- reduce:
- [ [ IDENT "Eval"; r = red_expr; "in" -> Some r
- | -> None ] ]
- ;
- one_decl_notation:
- [ [ ntn = ne_lstring; ":="; c = constr;
- scopt = OPT [ ":"; sc = IDENT -> sc] -> (ntn,c,scopt) ] ]
- ;
- decl_notation:
- [ [ "where"; l = LIST1 one_decl_notation SEP IDENT "and" -> l
- | -> [] ] ]
- ;
- (* Inductives and records *)
- opt_constructors_or_fields:
- [ [ ":="; lc = constructor_list_or_record_decl -> lc
- | -> RecordDecl (None, []) ] ]
- ;
- inductive_definition:
- [ [ oc = opt_coercion; id = ident_decl; indpar = binders;
- c = OPT [ ":"; c = lconstr -> c ];
- lc=opt_constructors_or_fields; ntn = decl_notation ->
- (((oc,id),indpar,c,lc),ntn) ] ]
- ;
- constructor_list_or_record_decl:
- [ [ "|"; l = LIST1 constructor SEP "|" -> Constructors l
- | id = identref ; c = constructor_type; "|"; l = LIST0 constructor SEP "|" ->
- Constructors ((c id)::l)
- | id = identref ; c = constructor_type -> Constructors [ c id ]
- | cstr = identref; "{"; fs = record_fields; "}" ->
- RecordDecl (Some cstr,fs)
- | "{";fs = record_fields; "}" -> RecordDecl (None,fs)
- | -> Constructors [] ] ]
- ;
-(*
- csort:
- [ [ s = sort -> CSort (loc,s) ] ]
- ;
-*)
- opt_coercion:
- [ [ ">" -> true
- | -> false ] ]
- ;
- (* (co)-fixpoints *)
- rec_definition:
- [ [ id = ident_decl;
- bl = binders_fixannot;
- ty = type_cstr;
- def = OPT [":="; def = lconstr -> def]; ntn = decl_notation ->
- let bl, annot = bl in ((id,annot,bl,ty,def),ntn) ] ]
- ;
- corec_definition:
- [ [ id = ident_decl; bl = binders; ty = type_cstr;
- def = OPT [":="; def = lconstr -> def]; ntn = decl_notation ->
- ((id,bl,ty,def),ntn) ] ]
- ;
- type_cstr:
- [ [ ":"; c=lconstr -> c
- | -> CAst.make ~loc:!@loc @@ CHole (None, IntroAnonymous, None) ] ]
- ;
- (* Inductive schemes *)
- scheme:
- [ [ kind = scheme_kind -> (None,kind)
- | id = identref; ":="; kind = scheme_kind -> (Some id,kind) ] ]
- ;
- scheme_kind:
- [ [ IDENT "Induction"; "for"; ind = smart_global;
- IDENT "Sort"; s = sort_family-> InductionScheme(true,ind,s)
- | IDENT "Minimality"; "for"; ind = smart_global;
- IDENT "Sort"; s = sort_family-> InductionScheme(false,ind,s)
- | IDENT "Elimination"; "for"; ind = smart_global;
- IDENT "Sort"; s = sort_family-> CaseScheme(true,ind,s)
- | IDENT "Case"; "for"; ind = smart_global;
- IDENT "Sort"; s = sort_family-> CaseScheme(false,ind,s)
- | IDENT "Equality"; "for" ; ind = smart_global -> EqualityScheme(ind) ] ]
- ;
- (* Various Binders *)
-(*
- (* ... without coercions *)
- binder_nodef:
- [ [ b = binder_let ->
- (match b with
- CLocalAssum(l,ty) -> (l,ty)
- | CLocalDef _ ->
- Util.user_err_loc
- (loc,"fix_param",Pp.str"defined binder not allowed here.")) ] ]
- ;
-*)
- (* ... with coercions *)
- record_field:
- [ [ bd = record_binder; pri = OPT [ "|"; n = natural -> n ];
- ntn = decl_notation -> (bd,pri),ntn ] ]
- ;
- record_fields:
- [ [ f = record_field; ";"; fs = record_fields -> f :: fs
- | f = record_field; ";" -> [f]
- | f = record_field -> [f]
- | -> []
- ] ]
- ;
- record_binder_body:
- [ [ l = binders; oc = of_type_with_opt_coercion;
- t = lconstr -> fun id -> (oc,AssumExpr (id,mkCProdN ~loc:!@loc l t))
- | l = binders; oc = of_type_with_opt_coercion;
- t = lconstr; ":="; b = lconstr -> fun id ->
- (oc,DefExpr (id,mkCLambdaN ~loc:!@loc l b,Some (mkCProdN ~loc:!@loc l t)))
- | l = binders; ":="; b = lconstr -> fun id ->
- match b.CAst.v with
- | CCast(b', (CastConv t|CastVM t|CastNative t)) ->
- (None,DefExpr(id,mkCLambdaN ~loc:!@loc l b',Some (mkCProdN ~loc:!@loc l t)))
- | _ ->
- (None,DefExpr(id,mkCLambdaN ~loc:!@loc l b,None)) ] ]
- ;
- record_binder:
- [ [ id = name -> (None,AssumExpr(id, CAst.make ~loc:!@loc @@ CHole (None, IntroAnonymous, None)))
- | id = name; f = record_binder_body -> f id ] ]
- ;
- assum_list:
- [ [ bl = LIST1 assum_coe -> bl | b = simple_assum_coe -> [b] ] ]
- ;
- assum_coe:
- [ [ "("; a = simple_assum_coe; ")" -> a ] ]
- ;
- simple_assum_coe:
- [ [ idl = LIST1 ident_decl; oc = of_type_with_opt_coercion; c = lconstr ->
- (not (Option.is_empty oc),(idl,c)) ] ]
- ;
-
- constructor_type:
- [[ l = binders;
- t= [ coe = of_type_with_opt_coercion; c = lconstr ->
- fun l id -> (not (Option.is_empty coe),(id,mkCProdN ~loc:!@loc l c))
- | ->
- fun l id -> (false,(id,mkCProdN ~loc:!@loc l (CAst.make ~loc:!@loc @@ CHole (None, IntroAnonymous, None)))) ]
- -> t l
- ]]
-;
-
- constructor:
- [ [ id = identref; c=constructor_type -> c id ] ]
- ;
- of_type_with_opt_coercion:
- [ [ ":>>" -> Some false
- | ":>"; ">" -> Some false
- | ":>" -> Some true
- | ":"; ">"; ">" -> Some false
- | ":"; ">" -> Some true
- | ":" -> None ] ]
- ;
-END
-
-let only_starredidentrefs =
- Gram.Entry.of_parser "test_only_starredidentrefs"
- (fun strm ->
- let rec aux n =
- match Util.stream_nth n strm with
- | KEYWORD "." -> ()
- | KEYWORD ")" -> ()
- | (IDENT _ | KEYWORD "Type" | KEYWORD "*") -> aux (n+1)
- | _ -> raise Stream.Failure in
- aux 0)
-let starredidentreflist_to_expr l =
- match l with
- | [] -> SsEmpty
- | x :: xs -> List.fold_right (fun i acc -> SsUnion(i,acc)) xs x
-
-let warn_deprecated_include_type =
- CWarnings.create ~name:"deprecated-include-type" ~category:"deprecated"
- (fun () -> strbrk "Include Type is deprecated; use Include instead")
-
-(* Modules and Sections *)
-GEXTEND Gram
- GLOBAL: gallina_ext module_expr module_type section_subset_expr;
-
- gallina_ext:
- [ [ (* Interactive module declaration *)
- IDENT "Module"; export = export_token; id = identref;
- bl = LIST0 module_binder; sign = of_module_type;
- body = is_module_expr ->
- VernacDefineModule (export, id, bl, sign, body)
- | IDENT "Module"; "Type"; id = identref;
- bl = LIST0 module_binder; sign = check_module_types;
- body = is_module_type ->
- VernacDeclareModuleType (id, bl, sign, body)
- | IDENT "Declare"; IDENT "Module"; export = export_token; id = identref;
- bl = LIST0 module_binder; ":"; mty = module_type_inl ->
- VernacDeclareModule (export, id, bl, mty)
- (* Section beginning *)
- | IDENT "Section"; id = identref -> VernacBeginSection id
- | IDENT "Chapter"; id = identref -> VernacBeginSection id
-
- (* This end a Section a Module or a Module Type *)
- | IDENT "End"; id = identref -> VernacEndSegment id
-
- (* Naming a set of section hyps *)
- | IDENT "Collection"; id = identref; ":="; expr = section_subset_expr ->
- VernacNameSectionHypSet (id, expr)
-
- (* Requiring an already compiled module *)
- | IDENT "Require"; export = export_token; qidl = LIST1 global ->
- VernacRequire (None, export, qidl)
- | IDENT "From" ; ns = global ; IDENT "Require"; export = export_token
- ; qidl = LIST1 global ->
- VernacRequire (Some ns, export, qidl)
- | IDENT "Import"; qidl = LIST1 global -> VernacImport (false,qidl)
- | IDENT "Export"; qidl = LIST1 global -> VernacImport (true,qidl)
- | IDENT "Include"; e = module_type_inl; l = LIST0 ext_module_expr ->
- VernacInclude(e::l)
- | IDENT "Include"; "Type"; e = module_type_inl; l = LIST0 ext_module_type ->
- warn_deprecated_include_type ~loc:!@loc ();
- VernacInclude(e::l) ] ]
- ;
- export_token:
- [ [ IDENT "Import" -> Some false
- | IDENT "Export" -> Some true
- | -> None ] ]
- ;
- ext_module_type:
- [ [ "<+"; mty = module_type_inl -> mty ] ]
- ;
- ext_module_expr:
- [ [ "<+"; mexpr = module_expr_inl -> mexpr ] ]
- ;
- check_module_type:
- [ [ "<:"; mty = module_type_inl -> mty ] ]
- ;
- check_module_types:
- [ [ mtys = LIST0 check_module_type -> mtys ] ]
- ;
- of_module_type:
- [ [ ":"; mty = module_type_inl -> Enforce mty
- | mtys = check_module_types -> Check mtys ] ]
- ;
- is_module_type:
- [ [ ":="; mty = module_type_inl ; l = LIST0 ext_module_type -> (mty::l)
- | -> [] ] ]
- ;
- is_module_expr:
- [ [ ":="; mexpr = module_expr_inl; l = LIST0 ext_module_expr -> (mexpr::l)
- | -> [] ] ]
- ;
- functor_app_annot:
- [ [ "["; IDENT "inline"; "at"; IDENT "level"; i = INT; "]" ->
- InlineAt (int_of_string i)
- | "["; IDENT "no"; IDENT "inline"; "]" -> NoInline
- | -> DefaultInline
- ] ]
- ;
- module_expr_inl:
- [ [ "!"; me = module_expr -> (me,NoInline)
- | me = module_expr; a = functor_app_annot -> (me,a) ] ]
- ;
- module_type_inl:
- [ [ "!"; me = module_type -> (me,NoInline)
- | me = module_type; a = functor_app_annot -> (me,a) ] ]
- ;
- (* Module binder *)
- module_binder:
- [ [ "("; export = export_token; idl = LIST1 identref; ":";
- mty = module_type_inl; ")" -> (export,idl,mty) ] ]
- ;
- (* Module expressions *)
- module_expr:
- [ [ me = module_expr_atom -> me
- | me1 = module_expr; me2 = module_expr_atom -> CAst.make ~loc:!@loc @@ CMapply (me1,me2)
- ] ]
- ;
- module_expr_atom:
- [ [ qid = qualid -> CAst.make ~loc:!@loc @@ CMident qid | "("; me = module_expr; ")" -> me ] ]
- ;
- with_declaration:
- [ [ "Definition"; fqid = fullyqualid; udecl = OPT univ_decl; ":="; c = Constr.lconstr ->
- CWith_Definition (fqid,udecl,c)
- | IDENT "Module"; fqid = fullyqualid; ":="; qid = qualid ->
- CWith_Module (fqid,qid)
- ] ]
- ;
- module_type:
- [ [ qid = qualid -> CAst.make ~loc:!@loc @@ CMident qid
- | "("; mt = module_type; ")" -> mt
- | mty = module_type; me = module_expr_atom ->
- CAst.make ~loc:!@loc @@ CMapply (mty,me)
- | mty = module_type; "with"; decl = with_declaration ->
- CAst.make ~loc:!@loc @@ CMwith (mty,decl)
- ] ]
- ;
- (* Proof using *)
- section_subset_expr:
- [ [ only_starredidentrefs; l = LIST0 starredidentref ->
- starredidentreflist_to_expr l
- | e = ssexpr -> e ]]
- ;
- starredidentref:
- [ [ i = identref -> SsSingl i
- | i = identref; "*" -> SsFwdClose(SsSingl i)
- | "Type" -> SsType
- | "Type"; "*" -> SsFwdClose SsType ]]
- ;
- ssexpr:
- [ "35"
- [ "-"; e = ssexpr -> SsCompl e ]
- | "50"
- [ e1 = ssexpr; "-"; e2 = ssexpr->SsSubstr(e1,e2)
- | e1 = ssexpr; "+"; e2 = ssexpr->SsUnion(e1,e2)]
- | "0"
- [ i = starredidentref -> i
- | "("; only_starredidentrefs; l = LIST0 starredidentref; ")"->
- starredidentreflist_to_expr l
- | "("; only_starredidentrefs; l = LIST0 starredidentref; ")"; "*" ->
- SsFwdClose(starredidentreflist_to_expr l)
- | "("; e = ssexpr; ")"-> e
- | "("; e = ssexpr; ")"; "*" -> SsFwdClose e ] ]
- ;
-END
-
-(* Extensions: implicits, coercions, etc. *)
-GEXTEND Gram
- GLOBAL: gallina_ext instance_name hint_info;
-
- gallina_ext:
- [ [ (* Transparent and Opaque *)
- IDENT "Transparent"; l = LIST1 smart_global ->
- VernacSetOpacity (Conv_oracle.transparent, l)
- | IDENT "Opaque"; l = LIST1 smart_global ->
- VernacSetOpacity (Conv_oracle.Opaque, l)
- | IDENT "Strategy"; l =
- LIST1 [ v=strategy_level; "["; q=LIST1 smart_global; "]" -> (v,q)] ->
- VernacSetStrategy l
- (* Canonical structure *)
- | IDENT "Canonical"; IDENT "Structure"; qid = global ->
- VernacCanonical CAst.(make ~loc:!@loc @@ AN qid)
- | IDENT "Canonical"; IDENT "Structure"; ntn = by_notation ->
- VernacCanonical CAst.(make ~loc:!@loc @@ ByNotation ntn)
- | IDENT "Canonical"; IDENT "Structure"; qid = global; d = def_body ->
- let s = coerce_reference_to_id qid in
- VernacDefinition ((NoDischarge,CanonicalStructure),((CAst.make (Name s)),None),d)
-
- (* Coercions *)
- | IDENT "Coercion"; qid = global; d = def_body ->
- let s = coerce_reference_to_id qid in
- VernacDefinition ((NoDischarge,Coercion),((CAst.make (Name s)),None),d)
- | IDENT "Identity"; IDENT "Coercion"; f = identref; ":";
- s = class_rawexpr; ">->"; t = class_rawexpr ->
- VernacIdentityCoercion (f, s, t)
- | IDENT "Coercion"; qid = global; ":"; s = class_rawexpr; ">->";
- t = class_rawexpr ->
- VernacCoercion (CAst.make ~loc:!@loc @@ AN qid, s, t)
- | IDENT "Coercion"; ntn = by_notation; ":"; s = class_rawexpr; ">->";
- t = class_rawexpr ->
- VernacCoercion (CAst.make ~loc:!@loc @@ ByNotation ntn, s, t)
-
- | IDENT "Context"; c = LIST1 binder ->
- VernacContext (List.flatten c)
-
- | IDENT "Instance"; namesup = instance_name; ":";
- expl = [ "!" -> Decl_kinds.Implicit | -> Decl_kinds.Explicit ] ; t = operconstr LEVEL "200";
- info = hint_info ;
- props = [ ":="; "{"; r = record_declaration; "}" -> Some (true,r) |
- ":="; c = lconstr -> Some (false,c) | -> None ] ->
- VernacInstance (false,snd namesup,(fst namesup,expl,t),props,info)
-
- | IDENT "Existing"; IDENT "Instance"; id = global;
- info = hint_info ->
- VernacDeclareInstances [id, info]
-
- | IDENT "Existing"; IDENT "Instances"; ids = LIST1 global;
- pri = OPT [ "|"; i = natural -> i ] ->
- let info = { Typeclasses.hint_priority = pri; hint_pattern = None } in
- let insts = List.map (fun i -> (i, info)) ids in
- VernacDeclareInstances insts
-
- | IDENT "Existing"; IDENT "Class"; is = global -> VernacDeclareClass is
-
- (* Arguments *)
- | IDENT "Arguments"; qid = smart_global;
- args = LIST0 argument_spec_block;
- more_implicits = OPT
- [ ","; impl = LIST1
- [ impl = LIST0 more_implicits_block -> List.flatten impl]
- SEP "," -> impl
- ];
- mods = OPT [ ":"; l = LIST1 arguments_modifier SEP "," -> l ] ->
- let mods = match mods with None -> [] | Some l -> List.flatten l in
- let slash_position = ref None in
- let rec parse_args i = function
- | [] -> []
- | `Id x :: args -> x :: parse_args (i+1) args
- | `Slash :: args ->
- if Option.is_empty !slash_position then
- (slash_position := Some i; parse_args i args)
- else
- user_err Pp.(str "The \"/\" modifier can occur only once")
- in
- let args = parse_args 0 (List.flatten args) in
- let more_implicits = Option.default [] more_implicits in
- VernacArguments (qid, args, more_implicits, !slash_position, mods)
-
- | IDENT "Implicit"; "Type"; bl = reserv_list ->
- VernacReserve bl
-
- | IDENT "Implicit"; IDENT "Types"; bl = reserv_list ->
- test_plural_form_types loc "Implicit Types" bl;
- VernacReserve bl
-
- | IDENT "Generalizable";
- gen = [IDENT "All"; IDENT "Variables" -> Some []
- | IDENT "No"; IDENT "Variables" -> None
- | ["Variable" | IDENT "Variables"];
- idl = LIST1 identref -> Some idl ] ->
- VernacGeneralizable gen ] ]
- ;
- arguments_modifier:
- [ [ IDENT "simpl"; IDENT "nomatch" -> [`ReductionDontExposeCase]
- | IDENT "simpl"; IDENT "never" -> [`ReductionNeverUnfold]
- | IDENT "default"; IDENT "implicits" -> [`DefaultImplicits]
- | IDENT "clear"; IDENT "implicits" -> [`ClearImplicits]
- | IDENT "clear"; IDENT "scopes" -> [`ClearScopes]
- | IDENT "rename" -> [`Rename]
- | IDENT "assert" -> [`Assert]
- | IDENT "extra"; IDENT "scopes" -> [`ExtraScopes]
- | IDENT "clear"; IDENT "scopes"; IDENT "and"; IDENT "implicits" ->
- [`ClearImplicits; `ClearScopes]
- | IDENT "clear"; IDENT "implicits"; IDENT "and"; IDENT "scopes" ->
- [`ClearImplicits; `ClearScopes]
- ] ]
- ;
- scope:
- [ [ "%"; key = IDENT -> key ] ]
- ;
- argument_spec: [
- [ b = OPT "!"; id = name ; s = OPT scope ->
- id.CAst.v, not (Option.is_empty b), Option.map (fun x -> CAst.make ~loc:!@loc x) s
- ]
- ];
- (* List of arguments implicit status, scope, modifiers *)
- argument_spec_block: [
- [ item = argument_spec ->
- let name, recarg_like, notation_scope = item in
- [`Id { name=name; recarg_like=recarg_like;
- notation_scope=notation_scope;
- implicit_status = NotImplicit}]
- | "/" -> [`Slash]
- | "("; items = LIST1 argument_spec; ")"; sc = OPT scope ->
- let f x = match sc, x with
- | None, x -> x | x, None -> Option.map (fun y -> CAst.make ~loc:!@loc y) x
- | Some _, Some _ -> user_err Pp.(str "scope declared twice") in
- List.map (fun (name,recarg_like,notation_scope) ->
- `Id { name=name; recarg_like=recarg_like;
- notation_scope=f notation_scope;
- implicit_status = NotImplicit}) items
- | "["; items = LIST1 argument_spec; "]"; sc = OPT scope ->
- let f x = match sc, x with
- | None, x -> x | x, None -> Option.map (fun y -> CAst.make ~loc:!@loc y) x
- | Some _, Some _ -> user_err Pp.(str "scope declared twice") in
- List.map (fun (name,recarg_like,notation_scope) ->
- `Id { name=name; recarg_like=recarg_like;
- notation_scope=f notation_scope;
- implicit_status = Implicit}) items
- | "{"; items = LIST1 argument_spec; "}"; sc = OPT scope ->
- let f x = match sc, x with
- | None, x -> x | x, None -> Option.map (fun y -> CAst.make ~loc:!@loc y) x
- | Some _, Some _ -> user_err Pp.(str "scope declared twice") in
- List.map (fun (name,recarg_like,notation_scope) ->
- `Id { name=name; recarg_like=recarg_like;
- notation_scope=f notation_scope;
- implicit_status = MaximallyImplicit}) items
- ]
- ];
- (* Same as [argument_spec_block], but with only implicit status and names *)
- more_implicits_block: [
- [ name = name -> [(name.CAst.v, Vernacexpr.NotImplicit)]
- | "["; items = LIST1 name; "]" ->
- List.map (fun name -> (name.CAst.v, Vernacexpr.Implicit)) items
- | "{"; items = LIST1 name; "}" ->
- List.map (fun name -> (name.CAst.v, Vernacexpr.MaximallyImplicit)) items
- ]
- ];
- strategy_level:
- [ [ IDENT "expand" -> Conv_oracle.Expand
- | IDENT "opaque" -> Conv_oracle.Opaque
- | n=INT -> Conv_oracle.Level (int_of_string n)
- | "-"; n=INT -> Conv_oracle.Level (- int_of_string n)
- | IDENT "transparent" -> Conv_oracle.transparent ] ]
- ;
- instance_name:
- [ [ name = ident_decl; sup = OPT binders ->
- (CAst.map (fun id -> Name id) (fst name), snd name),
- (Option.default [] sup)
- | -> ((CAst.make ~loc:!@loc Anonymous), None), [] ] ]
- ;
- hint_info:
- [ [ "|"; i = OPT natural; pat = OPT constr_pattern ->
- { Typeclasses.hint_priority = i; hint_pattern = pat }
- | -> { Typeclasses.hint_priority = None; hint_pattern = None } ] ]
- ;
- reserv_list:
- [ [ bl = LIST1 reserv_tuple -> bl | b = simple_reserv -> [b] ] ]
- ;
- reserv_tuple:
- [ [ "("; a = simple_reserv; ")" -> a ] ]
- ;
- simple_reserv:
- [ [ idl = LIST1 identref; ":"; c = lconstr -> (idl,c) ] ]
- ;
-
-END
-
-GEXTEND Gram
- GLOBAL: command query_command class_rawexpr gallina_ext;
-
- gallina_ext:
- [ [ IDENT "Export"; "Set"; table = option_table; v = option_value ->
- VernacSetOption (true, table, v)
- | IDENT "Export"; IDENT "Unset"; table = option_table ->
- VernacUnsetOption (true, table)
- ] ];
-
- command:
- [ [ IDENT "Comments"; l = LIST0 comment -> VernacComments l
-
- (* Hack! Should be in grammar_ext, but camlp5 factorizes badly *)
- | IDENT "Declare"; IDENT "Instance"; namesup = instance_name; ":";
- expl = [ "!" -> Decl_kinds.Implicit | -> Decl_kinds.Explicit ] ; t = operconstr LEVEL "200";
- info = hint_info ->
- VernacInstance (true, snd namesup, (fst namesup, expl, t), None, info)
-
- (* System directory *)
- | IDENT "Pwd" -> VernacChdir None
- | IDENT "Cd" -> VernacChdir None
- | IDENT "Cd"; dir = ne_string -> VernacChdir (Some dir)
-
- | IDENT "Load"; verbosely = [ IDENT "Verbose" -> true | -> false ];
- s = [ s = ne_string -> s | s = IDENT -> s ] ->
- VernacLoad (verbosely, s)
- | IDENT "Declare"; IDENT "ML"; IDENT "Module"; l = LIST1 ne_string ->
- VernacDeclareMLModule l
-
- | IDENT "Locate"; l = locatable -> VernacLocate l
-
- (* Managing load paths *)
- | IDENT "Add"; IDENT "LoadPath"; dir = ne_string; alias = as_dirpath ->
- VernacAddLoadPath (false, dir, alias)
- | IDENT "Add"; IDENT "Rec"; IDENT "LoadPath"; dir = ne_string;
- alias = as_dirpath -> VernacAddLoadPath (true, dir, alias)
- | IDENT "Remove"; IDENT "LoadPath"; dir = ne_string ->
- VernacRemoveLoadPath dir
-
- (* For compatibility *)
- | IDENT "AddPath"; dir = ne_string; "as"; alias = as_dirpath ->
- VernacAddLoadPath (false, dir, alias)
- | IDENT "AddRecPath"; dir = ne_string; "as"; alias = as_dirpath ->
- VernacAddLoadPath (true, dir, alias)
- | IDENT "DelPath"; dir = ne_string ->
- VernacRemoveLoadPath dir
-
- (* Type-Checking (pas dans le refman) *)
- | "Type"; c = lconstr -> VernacGlobalCheck c
-
- (* Printing (careful factorization of entries) *)
- | IDENT "Print"; p = printable -> VernacPrint p
- | IDENT "Print"; qid = smart_global; l = OPT univ_name_list -> VernacPrint (PrintName (qid,l))
- | IDENT "Print"; IDENT "Module"; "Type"; qid = global ->
- VernacPrint (PrintModuleType qid)
- | IDENT "Print"; IDENT "Module"; qid = global ->
- VernacPrint (PrintModule qid)
- | IDENT "Print"; IDENT "Namespace" ; ns = dirpath ->
- VernacPrint (PrintNamespace ns)
- | IDENT "Inspect"; n = natural -> VernacPrint (PrintInspect n)
-
- | IDENT "Add"; IDENT "ML"; IDENT "Path"; dir = ne_string ->
- VernacAddMLPath (false, dir)
- | IDENT "Add"; IDENT "Rec"; IDENT "ML"; IDENT "Path"; dir = ne_string ->
- VernacAddMLPath (true, dir)
-
- (* For acting on parameter tables *)
- | "Set"; table = option_table; v = option_value ->
- VernacSetOption (false, table, v)
- | IDENT "Unset"; table = option_table ->
- VernacUnsetOption (false, table)
-
- | IDENT "Print"; IDENT "Table"; table = option_table ->
- VernacPrintOption table
-
- | IDENT "Add"; table = IDENT; field = IDENT; v = LIST1 option_ref_value
- -> VernacAddOption ([table;field], v)
- (* A global value below will be hidden by a field above! *)
- (* In fact, we give priority to secondary tables *)
- (* No syntax for tertiary tables due to conflict *)
- (* (but they are unused anyway) *)
- | IDENT "Add"; table = IDENT; v = LIST1 option_ref_value ->
- VernacAddOption ([table], v)
-
- | IDENT "Test"; table = option_table; "for"; v = LIST1 option_ref_value
- -> VernacMemOption (table, v)
- | IDENT "Test"; table = option_table ->
- VernacPrintOption table
-
- | IDENT "Remove"; table = IDENT; field = IDENT; v= LIST1 option_ref_value
- -> VernacRemoveOption ([table;field], v)
- | IDENT "Remove"; table = IDENT; v = LIST1 option_ref_value ->
- VernacRemoveOption ([table], v) ]]
- ;
- query_command: (* TODO: rapprocher Eval et Check *)
- [ [ IDENT "Eval"; r = red_expr; "in"; c = lconstr; "." ->
- fun g -> VernacCheckMayEval (Some r, g, c)
- | IDENT "Compute"; c = lconstr; "." ->
- fun g -> VernacCheckMayEval (Some (Genredexpr.CbvVm None), g, c)
- | IDENT "Check"; c = lconstr; "." ->
- fun g -> VernacCheckMayEval (None, g, c)
- (* Searching the environment *)
- | IDENT "About"; qid = smart_global; l = OPT univ_name_list; "." ->
- fun g -> VernacPrint (PrintAbout (qid,l,g))
- | IDENT "SearchHead"; c = constr_pattern; l = in_or_out_modules; "." ->
- fun g -> VernacSearch (SearchHead c,g, l)
- | IDENT "SearchPattern"; c = constr_pattern; l = in_or_out_modules; "." ->
- fun g -> VernacSearch (SearchPattern c,g, l)
- | IDENT "SearchRewrite"; c = constr_pattern; l = in_or_out_modules; "." ->
- fun g -> VernacSearch (SearchRewrite c,g, l)
- | IDENT "Search"; s = searchabout_query; l = searchabout_queries; "." ->
- let (sl,m) = l in fun g -> VernacSearch (SearchAbout (s::sl),g, m)
- (* compatibility: SearchAbout *)
- | IDENT "SearchAbout"; s = searchabout_query; l = searchabout_queries; "." ->
- fun g -> let (sl,m) = l in VernacSearch (SearchAbout (s::sl),g, m)
- (* compatibility: SearchAbout with "[ ... ]" *)
- | IDENT "SearchAbout"; "["; sl = LIST1 searchabout_query; "]";
- l = in_or_out_modules; "." ->
- fun g -> VernacSearch (SearchAbout sl,g, l)
- ] ]
- ;
- printable:
- [ [ IDENT "Term"; qid = smart_global; l = OPT univ_name_list -> PrintName (qid,l)
- | IDENT "All" -> PrintFullContext
- | IDENT "Section"; s = global -> PrintSectionContext s
- | IDENT "Grammar"; ent = IDENT ->
- (* This should be in "syntax" section but is here for factorization*)
- PrintGrammar ent
- | IDENT "LoadPath"; dir = OPT dirpath -> PrintLoadPath dir
- | IDENT "Modules" ->
- user_err Pp.(str "Print Modules is obsolete; use Print Libraries instead")
- | IDENT "Libraries" -> PrintModules
-
- | IDENT "ML"; IDENT "Path" -> PrintMLLoadPath
- | IDENT "ML"; IDENT "Modules" -> PrintMLModules
- | IDENT "Debug"; IDENT "GC" -> PrintDebugGC
- | IDENT "Graph" -> PrintGraph
- | IDENT "Classes" -> PrintClasses
- | IDENT "TypeClasses" -> PrintTypeClasses
- | IDENT "Instances"; qid = smart_global -> PrintInstances qid
- | IDENT "Coercions" -> PrintCoercions
- | IDENT "Coercion"; IDENT "Paths"; s = class_rawexpr; t = class_rawexpr
- -> PrintCoercionPaths (s,t)
- | IDENT "Canonical"; IDENT "Projections" -> PrintCanonicalConversions
- | IDENT "Tables" -> PrintTables
- | IDENT "Options" -> PrintTables (* A Synonymous to Tables *)
- | IDENT "Hint" -> PrintHintGoal
- | IDENT "Hint"; qid = smart_global -> PrintHint qid
- | IDENT "Hint"; "*" -> PrintHintDb
- | IDENT "HintDb"; s = IDENT -> PrintHintDbName s
- | IDENT "Scopes" -> PrintScopes
- | IDENT "Scope"; s = IDENT -> PrintScope s
- | IDENT "Visibility"; s = OPT [x = IDENT -> x ] -> PrintVisibility s
- | IDENT "Implicit"; qid = smart_global -> PrintImplicit qid
- | IDENT "Universes"; fopt = OPT ne_string -> PrintUniverses (false, fopt)
- | IDENT "Sorted"; IDENT "Universes"; fopt = OPT ne_string -> PrintUniverses (true, fopt)
- | IDENT "Assumptions"; qid = smart_global -> PrintAssumptions (false, false, qid)
- | IDENT "Opaque"; IDENT "Dependencies"; qid = smart_global -> PrintAssumptions (true, false, qid)
- | IDENT "Transparent"; IDENT "Dependencies"; qid = smart_global -> PrintAssumptions (false, true, qid)
- | IDENT "All"; IDENT "Dependencies"; qid = smart_global -> PrintAssumptions (true, true, qid)
- | IDENT "Strategy"; qid = smart_global -> PrintStrategy (Some qid)
- | IDENT "Strategies" -> PrintStrategy None ] ]
- ;
- class_rawexpr:
- [ [ IDENT "Funclass" -> FunClass
- | IDENT "Sortclass" -> SortClass
- | qid = smart_global -> RefClass qid ] ]
- ;
- locatable:
- [ [ qid = smart_global -> LocateAny qid
- | IDENT "Term"; qid = smart_global -> LocateTerm qid
- | IDENT "File"; f = ne_string -> LocateFile f
- | IDENT "Library"; qid = global -> LocateLibrary qid
- | IDENT "Module"; qid = global -> LocateModule qid ] ]
- ;
- option_value:
- [ [ -> BoolValue true
- | n = integer -> IntValue (Some n)
- | s = STRING -> StringValue s ] ]
- ;
- option_ref_value:
- [ [ id = global -> QualidRefValue id
- | s = STRING -> StringRefValue s ] ]
- ;
- option_table:
- [ [ fl = LIST1 [ x = IDENT -> x ] -> fl ]]
- ;
- as_dirpath:
- [ [ d = OPT [ "as"; d = dirpath -> d ] -> d ] ]
- ;
- ne_in_or_out_modules:
- [ [ IDENT "inside"; l = LIST1 global -> SearchInside l
- | IDENT "outside"; l = LIST1 global -> SearchOutside l ] ]
- ;
- in_or_out_modules:
- [ [ m = ne_in_or_out_modules -> m
- | -> SearchOutside [] ] ]
- ;
- comment:
- [ [ c = constr -> CommentConstr c
- | s = STRING -> CommentString s
- | n = natural -> CommentInt n ] ]
- ;
- positive_search_mark:
- [ [ "-" -> false | -> true ] ]
- ;
- scope:
- [ [ "%"; key = IDENT -> key ] ]
- ;
- searchabout_query:
- [ [ b = positive_search_mark; s = ne_string; sc = OPT scope ->
- (b, SearchString (s,sc))
- | b = positive_search_mark; p = constr_pattern ->
- (b, SearchSubPattern p)
- ] ]
- ;
- searchabout_queries:
- [ [ m = ne_in_or_out_modules -> ([],m)
- | s = searchabout_query; l = searchabout_queries ->
- let (sl,m) = l in (s::sl,m)
- | -> ([],SearchOutside [])
- ] ]
- ;
- univ_name_list:
- [ [ "@{" ; l = LIST0 name; "}" -> l ] ]
- ;
-END;
-
-GEXTEND Gram
- command:
- [ [
-(* State management *)
- IDENT "Write"; IDENT "State"; s = IDENT -> VernacWriteState s
- | IDENT "Write"; IDENT "State"; s = ne_string -> VernacWriteState s
- | IDENT "Restore"; IDENT "State"; s = IDENT -> VernacRestoreState s
- | IDENT "Restore"; IDENT "State"; s = ne_string -> VernacRestoreState s
-
-(* Resetting *)
- | IDENT "Reset"; IDENT "Initial" -> VernacResetInitial
- | IDENT "Reset"; id = identref -> VernacResetName id
- | IDENT "Back" -> VernacBack 1
- | IDENT "Back"; n = natural -> VernacBack n
- | IDENT "BackTo"; n = natural -> VernacBackTo n
-
-(* Tactic Debugger *)
- | IDENT "Debug"; IDENT "On" ->
- VernacSetOption (false, ["Ltac";"Debug"], BoolValue true)
-
- | IDENT "Debug"; IDENT "Off" ->
- VernacSetOption (false, ["Ltac";"Debug"], BoolValue false)
-
-(* registration of a custom reduction *)
-
- | IDENT "Declare"; IDENT "Reduction"; s = IDENT; ":=";
- r = red_expr ->
- VernacDeclareReduction (s,r)
-
- ] ];
- END
-;;
-
-(* Grammar extensions *)
-
-GEXTEND Gram
- GLOBAL: syntax;
-
- syntax:
- [ [ IDENT "Open"; IDENT "Scope"; sc = IDENT ->
- VernacOpenCloseScope (true,sc)
-
- | IDENT "Close"; IDENT "Scope"; sc = IDENT ->
- VernacOpenCloseScope (false,sc)
-
- | IDENT "Delimit"; IDENT "Scope"; sc = IDENT; "with"; key = IDENT ->
- VernacDelimiters (sc, Some key)
- | IDENT "Undelimit"; IDENT "Scope"; sc = IDENT ->
- VernacDelimiters (sc, None)
-
- | IDENT "Bind"; IDENT "Scope"; sc = IDENT; "with";
- refl = LIST1 class_rawexpr -> VernacBindScope (sc,refl)
-
- | IDENT "Infix"; op = ne_lstring; ":="; p = constr;
- modl = [ "("; l = LIST1 syntax_modifier SEP ","; ")" -> l | -> [] ];
- sc = OPT [ ":"; sc = IDENT -> sc ] ->
- VernacInfix ((op,modl),p,sc)
- | IDENT "Notation"; id = identref;
- idl = LIST0 ident; ":="; c = constr; b = only_parsing ->
- VernacSyntacticDefinition
- (id,(idl,c),b)
- | IDENT "Notation"; s = lstring; ":=";
- c = constr;
- modl = [ "("; l = LIST1 syntax_modifier SEP ","; ")" -> l | -> [] ];
- sc = OPT [ ":"; sc = IDENT -> sc ] ->
- VernacNotation (c,(s,modl),sc)
- | IDENT "Format"; IDENT "Notation"; n = STRING; s = STRING; fmt = STRING ->
- VernacNotationAddFormat (n,s,fmt)
-
- | IDENT "Reserved"; IDENT "Infix"; s = ne_lstring;
- l = [ "("; l = LIST1 syntax_modifier SEP ","; ")" -> l | -> [] ] ->
- let s = CAst.map (fun s -> "x '"^s^"' y") s in
- VernacSyntaxExtension (true,(s,l))
-
- | IDENT "Reserved"; IDENT "Notation";
- s = ne_lstring;
- l = [ "("; l = LIST1 syntax_modifier SEP ","; ")" -> l | -> [] ]
- -> VernacSyntaxExtension (false, (s,l))
-
- (* "Print" "Grammar" should be here but is in "command" entry in order
- to factorize with other "Print"-based vernac entries *)
- ] ]
- ;
- only_parsing:
- [ [ "("; IDENT "only"; IDENT "parsing"; ")" ->
- Some Flags.Current
- | "("; IDENT "compat"; s = STRING; ")" ->
- Some (parse_compat_version s)
- | -> None ] ]
- ;
- level:
- [ [ IDENT "level"; n = natural -> NumLevel n
- | IDENT "next"; IDENT "level" -> NextLevel ] ]
- ;
- syntax_modifier:
- [ [ "at"; IDENT "level"; n = natural -> SetLevel n
- | IDENT "left"; IDENT "associativity" -> SetAssoc LeftA
- | IDENT "right"; IDENT "associativity" -> SetAssoc RightA
- | IDENT "no"; IDENT "associativity" -> SetAssoc NonA
- | IDENT "only"; IDENT "printing" -> SetOnlyPrinting
- | IDENT "only"; IDENT "parsing" -> SetOnlyParsing
- | IDENT "compat"; s = STRING ->
- SetCompatVersion (parse_compat_version s)
- | IDENT "format"; s1 = [s = STRING -> CAst.make ~loc:!@loc s];
- s2 = OPT [s = STRING -> CAst.make ~loc:!@loc s] ->
- begin match s1, s2 with
- | { CAst.v = k }, Some s -> SetFormat(k,s)
- | s, None -> SetFormat ("text",s) end
- | x = IDENT; ","; l = LIST1 [id = IDENT -> id ] SEP ","; "at";
- lev = level -> SetItemLevel (x::l,lev)
- | x = IDENT; "at"; lev = level -> SetItemLevel ([x],lev)
- | x = IDENT; "at"; lev = level; b = constr_as_binder_kind -> SetItemLevelAsBinder ([x],b,Some lev)
- | x = IDENT; b = constr_as_binder_kind -> SetItemLevelAsBinder ([x],b,None)
- | x = IDENT; typ = syntax_extension_type -> SetEntryType (x,typ)
- ] ]
- ;
- syntax_extension_type:
- [ [ IDENT "ident" -> ETName | IDENT "global" -> ETReference
- | IDENT "bigint" -> ETBigint
- | IDENT "binder" -> ETBinder true
- | IDENT "constr"; n = OPT at_level; b = constr_as_binder_kind -> ETConstrAsBinder (b,n)
- | IDENT "pattern" -> ETPattern (false,None)
- | IDENT "pattern"; "at"; IDENT "level"; n = natural -> ETPattern (false,Some n)
- | IDENT "strict"; IDENT "pattern" -> ETPattern (true,None)
- | IDENT "strict"; IDENT "pattern"; "at"; IDENT "level"; n = natural -> ETPattern (true,Some n)
- | IDENT "closed"; IDENT "binder" -> ETBinder false
- ] ]
- ;
- at_level:
- [ [ "at"; n = level -> n ] ]
- ;
- constr_as_binder_kind:
- [ [ "as"; IDENT "ident" -> Notation_term.AsIdent
- | "as"; IDENT "pattern" -> Notation_term.AsIdentOrPattern
- | "as"; IDENT "strict"; IDENT "pattern" -> Notation_term.AsStrictPattern ] ]
- ;
-END
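The port from g_vernac.ml4 (deleted above) to g_vernac.mlg (added below) is largely mechanical: top-level OCaml code moves into { ... } sections, every semantic action is wrapped in { ... }, camlp5's !@loc antiquotation becomes the plain loc variable, and tuple patterns on the left of = are replaced by a single binder destructured inside the action (coqpp apparently does not accept pattern bindings). The juxtaposition below simply collects three lines from this very diff to show the pattern.

    (* camlp5 GEXTEND, old g_vernac.ml4 *)
    | IDENT "Time"; c = located_vernac -> VernacTime (false,c)
    | (f, v) = vernac -> VernacExpr(f, v)
    | v = vernac_control -> CAst.make ~loc:!@loc v

    (* coqpp GRAMMAR EXTEND, new g_vernac.mlg *)
    | IDENT "Time"; c = located_vernac -> { VernacTime (false,c) }
    | v = vernac -> { let (f, v) = v in VernacExpr(f, v) }
    | v = vernac_control -> { CAst.make ~loc v }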
diff --git a/vernac/g_vernac.mlg b/vernac/g_vernac.mlg
new file mode 100644
index 000000000..3a01ce6df
--- /dev/null
+++ b/vernac/g_vernac.mlg
@@ -0,0 +1,1173 @@
+(************************************************************************)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
+(* \VV/ **************************************************************)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
+(************************************************************************)
+
+{
+
+open Pp
+open CErrors
+open Util
+open Names
+open Glob_term
+open Vernacexpr
+open Constrexpr
+open Constrexpr_ops
+open Extend
+open Decl_kinds
+open Declaremods
+open Declarations
+open Namegen
+open Tok (* necessary for camlp5 *)
+
+open Pcoq
+open Pcoq.Prim
+open Pcoq.Constr
+open Pcoq.Module
+open Pvernac.Vernac_
+
+let vernac_kw = [ ";"; ","; ">->"; ":<"; "<:"; "where"; "at" ]
+let _ = List.iter CLexer.add_keyword vernac_kw
+
+(* Rem: do not join the different GEXTEND into one, it breaks native *)
+(* compilation on PowerPC and Sun architectures *)
+
+let query_command = Gram.entry_create "vernac:query_command"
+
+let subprf = Gram.entry_create "vernac:subprf"
+
+let class_rawexpr = Gram.entry_create "vernac:class_rawexpr"
+let thm_token = Gram.entry_create "vernac:thm_token"
+let def_body = Gram.entry_create "vernac:def_body"
+let decl_notation = Gram.entry_create "vernac:decl_notation"
+let record_field = Gram.entry_create "vernac:record_field"
+let of_type_with_opt_coercion = Gram.entry_create "vernac:of_type_with_opt_coercion"
+let instance_name = Gram.entry_create "vernac:instance_name"
+let section_subset_expr = Gram.entry_create "vernac:section_subset_expr"
+
+let make_bullet s =
+ let open Proof_bullet in
+ let n = String.length s in
+ match s.[0] with
+ | '-' -> Dash n
+ | '+' -> Plus n
+ | '*' -> Star n
+ | _ -> assert false
+
+let parse_compat_version ?(allow_old = true) = let open Flags in function
+ | "8.8" -> Current
+ | "8.7" -> V8_7
+ | "8.6" -> V8_6
+ | ("8.5" | "8.4" | "8.3" | "8.2" | "8.1" | "8.0") as s ->
+ CErrors.user_err ~hdr:"get_compat_version"
+ Pp.(str "Compatibility with version " ++ str s ++ str " not supported.")
+ | s ->
+ CErrors.user_err ~hdr:"get_compat_version"
+ Pp.(str "Unknown compatibility version \"" ++ str s ++ str "\".")
+
+}
+
+GRAMMAR EXTEND Gram
+ GLOBAL: vernac_control gallina_ext noedit_mode subprf;
+ vernac_control: FIRST
+ [ [ IDENT "Time"; c = located_vernac -> { VernacTime (false,c) }
+ | IDENT "Redirect"; s = ne_string; c = located_vernac -> { VernacRedirect (s, c) }
+ | IDENT "Timeout"; n = natural; v = vernac_control -> { VernacTimeout(n,v) }
+ | IDENT "Fail"; v = vernac_control -> { VernacFail v }
+ | v = vernac -> { let (f, v) = v in VernacExpr(f, v) } ]
+ ]
+ ;
+ vernac:
+ [ [ IDENT "Local"; v = vernac_poly -> { let (f, v) = v in (VernacLocal true :: f, v) }
+ | IDENT "Global"; v = vernac_poly -> { let (f, v) = v in (VernacLocal false :: f, v) }
+
+ | v = vernac_poly -> { v } ]
+ ]
+ ;
+ vernac_poly:
+ [ [ IDENT "Polymorphic"; v = vernac_aux -> { let (f, v) = v in (VernacPolymorphic true :: f, v) }
+ | IDENT "Monomorphic"; v = vernac_aux -> { let (f, v) = v in (VernacPolymorphic false :: f, v) }
+ | v = vernac_aux -> { v } ]
+ ]
+ ;
+ vernac_aux:
+ (* Better to parse "." here: in case of failure (e.g. in coerce_to_var), *)
+ (* "." is still in the stream and discard_to_dot works correctly *)
+ [ [ IDENT "Program"; g = gallina; "." -> { ([VernacProgram], g) }
+ | IDENT "Program"; g = gallina_ext; "." -> { ([VernacProgram], g) }
+ | g = gallina; "." -> { ([], g) }
+ | g = gallina_ext; "." -> { ([], g) }
+ | c = command; "." -> { ([], c) }
+ | c = syntax; "." -> { ([], c) }
+ | c = subprf -> { ([], c) }
+ ] ]
+ ;
+ vernac_aux: LAST
+ [ [ prfcom = command_entry -> { ([], prfcom) } ] ]
+ ;
+ noedit_mode:
+ [ [ c = query_command -> { c None } ] ]
+ ;
+
+ subprf:
+ [ [ s = BULLET -> { VernacBullet (make_bullet s) }
+ | "{" -> { VernacSubproof None }
+ | "}" -> { VernacEndSubproof }
+ ] ]
+ ;
+
+ located_vernac:
+ [ [ v = vernac_control -> { CAst.make ~loc v } ] ]
+ ;
+END
+
+{
+
+let warn_plural_command =
+ CWarnings.create ~name:"plural-command" ~category:"pedantic" ~default:CWarnings.Disabled
+ (fun kwd -> strbrk (Printf.sprintf "Command \"%s\" expects more than one assumption." kwd))
+
+let test_plural_form loc kwd = function
+ | [(_,([_],_))] ->
+ warn_plural_command ~loc kwd
+ | _ -> ()
+
+let test_plural_form_types loc kwd = function
+ | [([_],_)] ->
+ warn_plural_command ~loc kwd
+ | _ -> ()
+
+let lname_of_lident : lident -> lname =
+ CAst.map (fun s -> Name s)
+
+let name_of_ident_decl : ident_decl -> name_decl =
+ on_fst lname_of_lident
+
+}
+
+(* Gallina declarations *)
+GRAMMAR EXTEND Gram
+ GLOBAL: gallina gallina_ext thm_token def_body of_type_with_opt_coercion
+ record_field decl_notation rec_definition ident_decl univ_decl;
+
+ gallina:
+ (* Definition, Theorem, Variable, Axiom, ... *)
+ [ [ thm = thm_token; id = ident_decl; bl = binders; ":"; c = lconstr;
+ l = LIST0
+ [ "with"; id = ident_decl; bl = binders; ":"; c = lconstr ->
+ { (id,(bl,c)) } ] ->
+ { VernacStartTheoremProof (thm, (id,(bl,c))::l) }
+ | stre = assumption_token; nl = inline; bl = assum_list ->
+ { VernacAssumption (stre, nl, bl) }
+ | tk = assumptions_token; nl = inline; bl = assum_list ->
+ { let (kwd,stre) = tk in
+ test_plural_form loc kwd bl;
+ VernacAssumption (stre, nl, bl) }
+ | d = def_token; id = ident_decl; b = def_body ->
+ { VernacDefinition (d, name_of_ident_decl id, b) }
+ | IDENT "Let"; id = identref; b = def_body ->
+ { VernacDefinition ((DoDischarge, Let), (lname_of_lident id, None), b) }
+ (* Gallina inductive declarations *)
+ | cum = OPT cumulativity_token; priv = private_token; f = finite_token;
+ indl = LIST1 inductive_definition SEP "with" ->
+ { let (k,f) = f in
+ let indl=List.map (fun ((a,b,c,d),e) -> ((a,b,c,k,d),e)) indl in
+ VernacInductive (cum, priv,f,indl) }
+ | "Fixpoint"; recs = LIST1 rec_definition SEP "with" ->
+ { VernacFixpoint (NoDischarge, recs) }
+ | IDENT "Let"; "Fixpoint"; recs = LIST1 rec_definition SEP "with" ->
+ { VernacFixpoint (DoDischarge, recs) }
+ | "CoFixpoint"; corecs = LIST1 corec_definition SEP "with" ->
+ { VernacCoFixpoint (NoDischarge, corecs) }
+ | IDENT "Let"; "CoFixpoint"; corecs = LIST1 corec_definition SEP "with" ->
+ { VernacCoFixpoint (DoDischarge, corecs) }
+ | IDENT "Scheme"; l = LIST1 scheme SEP "with" -> { VernacScheme l }
+ | IDENT "Combined"; IDENT "Scheme"; id = identref; IDENT "from";
+ l = LIST1 identref SEP "," -> { VernacCombinedScheme (id, l) }
+ | IDENT "Register"; IDENT "Inline"; id = identref ->
+ { VernacRegister(id, RegisterInline) }
+ | IDENT "Universe"; l = LIST1 identref -> { VernacUniverse l }
+ | IDENT "Universes"; l = LIST1 identref -> { VernacUniverse l }
+ | IDENT "Constraint"; l = LIST1 univ_constraint SEP "," -> { VernacConstraint l }
+ ] ]
+ ;
+
+ thm_token:
+ [ [ "Theorem" -> { Theorem }
+ | IDENT "Lemma" -> { Lemma }
+ | IDENT "Fact" -> { Fact }
+ | IDENT "Remark" -> { Remark }
+ | IDENT "Corollary" -> { Corollary }
+ | IDENT "Proposition" -> { Proposition }
+ | IDENT "Property" -> { Property } ] ]
+ ;
+ def_token:
+ [ [ "Definition" -> { (NoDischarge,Definition) }
+ | IDENT "Example" -> { (NoDischarge,Example) }
+ | IDENT "SubClass" -> { (NoDischarge,SubClass) } ] ]
+ ;
+ assumption_token:
+ [ [ "Hypothesis" -> { (DoDischarge, Logical) }
+ | "Variable" -> { (DoDischarge, Definitional) }
+ | "Axiom" -> { (NoDischarge, Logical) }
+ | "Parameter" -> { (NoDischarge, Definitional) }
+ | IDENT "Conjecture" -> { (NoDischarge, Conjectural) } ] ]
+ ;
+ assumptions_token:
+ [ [ IDENT "Hypotheses" -> { ("Hypotheses", (DoDischarge, Logical)) }
+ | IDENT "Variables" -> { ("Variables", (DoDischarge, Definitional)) }
+ | IDENT "Axioms" -> { ("Axioms", (NoDischarge, Logical)) }
+ | IDENT "Parameters" -> { ("Parameters", (NoDischarge, Definitional)) }
+ | IDENT "Conjectures" -> { ("Conjectures", (NoDischarge, Conjectural)) } ] ]
+ ;
+ inline:
+ [ [ IDENT "Inline"; "("; i = INT; ")" -> { InlineAt (int_of_string i) }
+ | IDENT "Inline" -> { DefaultInline }
+ | -> { NoInline } ] ]
+ ;
+ univ_constraint:
+ [ [ l = universe_level; ord = [ "<" -> { Univ.Lt } | "=" -> { Univ.Eq } | "<=" -> { Univ.Le } ];
+ r = universe_level -> { (l, ord, r) } ] ]
+ ;
+ univ_decl :
+ [ [ "@{" ; l = LIST0 identref; ext = [ "+" -> { true } | -> { false } ];
+ cs = [ "|"; l' = LIST0 univ_constraint SEP ",";
+ ext = [ "+" -> { true } | -> { false } ]; "}" -> { (l',ext) }
+ | ext = [ "}" -> { true } | "|}" -> { false } ] -> { ([], ext) } ]
+ ->
+ { let open UState in
+ { univdecl_instance = l;
+ univdecl_extensible_instance = ext;
+ univdecl_constraints = fst cs;
+ univdecl_extensible_constraints = snd cs } }
+ ] ]
+ ;
+ ident_decl:
+ [ [ i = identref; l = OPT univ_decl -> { (i, l) }
+ ] ]
+ ;
+ finite_token:
+ [ [ IDENT "Inductive" -> { (Inductive_kw,Finite) }
+ | IDENT "CoInductive" -> { (CoInductive,CoFinite) }
+ | IDENT "Variant" -> { (Variant,BiFinite) }
+ | IDENT "Record" -> { (Record,BiFinite) }
+ | IDENT "Structure" -> { (Structure,BiFinite) }
+ | IDENT "Class" -> { (Class true,BiFinite) } ] ]
+ ;
+ cumulativity_token:
+ [ [ IDENT "Cumulative" -> { VernacCumulative }
+ | IDENT "NonCumulative" -> { VernacNonCumulative } ] ]
+ ;
+ private_token:
+ [ [ IDENT "Private" -> { true } | -> { false } ] ]
+ ;
+ (* Simple definitions *)
+ def_body:
+ [ [ bl = binders; ":="; red = reduce; c = lconstr ->
+ { if List.exists (function CLocalPattern _ -> true | _ -> false) bl
+ then
+ (* FIXME: "red" will be applied to types in bl and Cast with remain *)
+ let c = mkCLambdaN ~loc bl c in
+ DefineBody ([], red, c, None)
+ else
+ (match c with
+ | { CAst.v = CCast(c, CastConv t) } -> DefineBody (bl, red, c, Some t)
+ | _ -> DefineBody (bl, red, c, None)) }
+ | bl = binders; ":"; t = lconstr; ":="; red = reduce; c = lconstr ->
+ { let ((bl, c), tyo) =
+ if List.exists (function CLocalPattern _ -> true | _ -> false) bl
+ then
+ (* FIXME: "red" will be applied to types in bl and Cast with remain *)
+ let c = CAst.make ~loc @@ CCast (c, CastConv t) in
+ (([],mkCLambdaN ~loc bl c), None)
+ else ((bl, c), Some t)
+ in
+ DefineBody (bl, red, c, tyo) }
+ | bl = binders; ":"; t = lconstr ->
+ { ProveBody (bl, t) } ] ]
+ ;
+ reduce:
+ [ [ IDENT "Eval"; r = red_expr; "in" -> { Some r }
+ | -> { None } ] ]
+ ;
+ one_decl_notation:
+ [ [ ntn = ne_lstring; ":="; c = constr;
+ scopt = OPT [ ":"; sc = IDENT -> { sc } ] -> { (ntn,c,scopt) } ] ]
+ ;
+ decl_sep:
+ [ [ IDENT "and" -> { () } ] ]
+ ;
+ decl_notation:
+ [ [ "where"; l = LIST1 one_decl_notation SEP decl_sep -> { l }
+ | -> { [] } ] ]
+ ;
+ (* Inductives and records *)
+ opt_constructors_or_fields:
+ [ [ ":="; lc = constructor_list_or_record_decl -> { lc }
+ | -> { RecordDecl (None, []) } ] ]
+ ;
+ inductive_definition:
+ [ [ oc = opt_coercion; id = ident_decl; indpar = binders;
+ c = OPT [ ":"; c = lconstr -> { c } ];
+ lc=opt_constructors_or_fields; ntn = decl_notation ->
+ { (((oc,id),indpar,c,lc),ntn) } ] ]
+ ;
+ constructor_list_or_record_decl:
+ [ [ "|"; l = LIST1 constructor SEP "|" -> { Constructors l }
+ | id = identref ; c = constructor_type; "|"; l = LIST0 constructor SEP "|" ->
+ { Constructors ((c id)::l) }
+ | id = identref ; c = constructor_type -> { Constructors [ c id ] }
+ | cstr = identref; "{"; fs = record_fields; "}" ->
+ { RecordDecl (Some cstr,fs) }
+ | "{";fs = record_fields; "}" -> { RecordDecl (None,fs) }
+ | -> { Constructors [] } ] ]
+ ;
+(*
+ csort:
+ [ [ s = sort -> CSort (loc,s) ] ]
+ ;
+*)
+ opt_coercion:
+ [ [ ">" -> { true }
+ | -> { false } ] ]
+ ;
+ (* (co)-fixpoints *)
+ rec_definition:
+ [ [ id = ident_decl;
+ bl = binders_fixannot;
+ ty = type_cstr;
+ def = OPT [":="; def = lconstr -> { def } ]; ntn = decl_notation ->
+ { let bl, annot = bl in ((id,annot,bl,ty,def),ntn) } ] ]
+ ;
+ corec_definition:
+ [ [ id = ident_decl; bl = binders; ty = type_cstr;
+ def = OPT [":="; def = lconstr -> { def }]; ntn = decl_notation ->
+ { ((id,bl,ty,def),ntn) } ] ]
+ ;
+ type_cstr:
+ [ [ ":"; c=lconstr -> { c }
+ | -> { CAst.make ~loc @@ CHole (None, IntroAnonymous, None) } ] ]
+ ;
+ (* Inductive schemes *)
+ scheme:
+ [ [ kind = scheme_kind -> { (None,kind) }
+ | id = identref; ":="; kind = scheme_kind -> { (Some id,kind) } ] ]
+ ;
+ scheme_kind:
+ [ [ IDENT "Induction"; "for"; ind = smart_global;
+ IDENT "Sort"; s = sort_family-> { InductionScheme(true,ind,s) }
+ | IDENT "Minimality"; "for"; ind = smart_global;
+ IDENT "Sort"; s = sort_family-> { InductionScheme(false,ind,s) }
+ | IDENT "Elimination"; "for"; ind = smart_global;
+ IDENT "Sort"; s = sort_family-> { CaseScheme(true,ind,s) }
+ | IDENT "Case"; "for"; ind = smart_global;
+ IDENT "Sort"; s = sort_family-> { CaseScheme(false,ind,s) }
+ | IDENT "Equality"; "for" ; ind = smart_global -> { EqualityScheme(ind) } ] ]
+ ;
+ (* Various Binders *)
+(*
+ (* ... without coercions *)
+ binder_nodef:
+ [ [ b = binder_let ->
+ (match b with
+ CLocalAssum(l,ty) -> (l,ty)
+ | CLocalDef _ ->
+ Util.user_err_loc
+ (loc,"fix_param",Pp.str"defined binder not allowed here.")) ] ]
+ ;
+*)
+ (* ... with coercions *)
+ record_field:
+ [ [ bd = record_binder; pri = OPT [ "|"; n = natural -> { n } ];
+ ntn = decl_notation -> { (bd,pri),ntn } ] ]
+ ;
+ record_fields:
+ [ [ f = record_field; ";"; fs = record_fields -> { f :: fs }
+ | f = record_field; ";" -> { [f] }
+ | f = record_field -> { [f] }
+ | -> { [] }
+ ] ]
+ ;
+ record_binder_body:
+ [ [ l = binders; oc = of_type_with_opt_coercion;
+ t = lconstr -> { fun id -> (oc,AssumExpr (id,mkCProdN ~loc l t)) }
+ | l = binders; oc = of_type_with_opt_coercion;
+ t = lconstr; ":="; b = lconstr -> { fun id ->
+ (oc,DefExpr (id,mkCLambdaN ~loc l b,Some (mkCProdN ~loc l t))) }
+ | l = binders; ":="; b = lconstr -> { fun id ->
+ match b.CAst.v with
+ | CCast(b', (CastConv t|CastVM t|CastNative t)) ->
+ (None,DefExpr(id,mkCLambdaN ~loc l b',Some (mkCProdN ~loc l t)))
+ | _ ->
+ (None,DefExpr(id,mkCLambdaN ~loc l b,None)) } ] ]
+ ;
+ record_binder:
+ [ [ id = name -> { (None,AssumExpr(id, CAst.make ~loc @@ CHole (None, IntroAnonymous, None))) }
+ | id = name; f = record_binder_body -> { f id } ] ]
+ ;
+ assum_list:
+ [ [ bl = LIST1 assum_coe -> { bl } | b = simple_assum_coe -> { [b] } ] ]
+ ;
+ assum_coe:
+ [ [ "("; a = simple_assum_coe; ")" -> { a } ] ]
+ ;
+ simple_assum_coe:
+ [ [ idl = LIST1 ident_decl; oc = of_type_with_opt_coercion; c = lconstr ->
+ { (not (Option.is_empty oc),(idl,c)) } ] ]
+ ;
+
+ constructor_type:
+ [[ l = binders;
+ t= [ coe = of_type_with_opt_coercion; c = lconstr ->
+ { fun l id -> (not (Option.is_empty coe),(id,mkCProdN ~loc l c)) }
+ | ->
+ { fun l id -> (false,(id,mkCProdN ~loc l (CAst.make ~loc @@ CHole (None, IntroAnonymous, None)))) } ]
+ -> { t l }
+ ]]
+;
+
+ constructor:
+ [ [ id = identref; c=constructor_type -> { c id } ] ]
+ ;
+ of_type_with_opt_coercion:
+ [ [ ":>>" -> { Some false }
+ | ":>"; ">" -> { Some false }
+ | ":>" -> { Some true }
+ | ":"; ">"; ">" -> { Some false }
+ | ":"; ">" -> { Some true }
+ | ":" -> { None } ] ]
+ ;
+END
+
+{
+
+let only_starredidentrefs =
+ Gram.Entry.of_parser "test_only_starredidentrefs"
+ (fun strm ->
+ let rec aux n =
+ match Util.stream_nth n strm with
+ | KEYWORD "." -> ()
+ | KEYWORD ")" -> ()
+ | (IDENT _ | KEYWORD "Type" | KEYWORD "*") -> aux (n+1)
+ | _ -> raise Stream.Failure in
+ aux 0)
+let starredidentreflist_to_expr l =
+ match l with
+ | [] -> SsEmpty
+ | x :: xs -> List.fold_right (fun i acc -> SsUnion(i,acc)) xs x
+
+let warn_deprecated_include_type =
+ CWarnings.create ~name:"deprecated-include-type" ~category:"deprecated"
+ (fun () -> strbrk "Include Type is deprecated; use Include instead")
+
+}
+
+(* Modules and Sections *)
+GRAMMAR EXTEND Gram
+ GLOBAL: gallina_ext module_expr module_type section_subset_expr;
+
+ gallina_ext:
+ [ [ (* Interactive module declaration *)
+ IDENT "Module"; export = export_token; id = identref;
+ bl = LIST0 module_binder; sign = of_module_type;
+ body = is_module_expr ->
+ { VernacDefineModule (export, id, bl, sign, body) }
+ | IDENT "Module"; "Type"; id = identref;
+ bl = LIST0 module_binder; sign = check_module_types;
+ body = is_module_type ->
+ { VernacDeclareModuleType (id, bl, sign, body) }
+ | IDENT "Declare"; IDENT "Module"; export = export_token; id = identref;
+ bl = LIST0 module_binder; ":"; mty = module_type_inl ->
+ { VernacDeclareModule (export, id, bl, mty) }
+ (* Section beginning *)
+ | IDENT "Section"; id = identref -> { VernacBeginSection id }
+ | IDENT "Chapter"; id = identref -> { VernacBeginSection id }
+
+ (* This ends a Section, a Module, or a Module Type *)
+ | IDENT "End"; id = identref -> { VernacEndSegment id }
+
+ (* Naming a set of section hyps *)
+ | IDENT "Collection"; id = identref; ":="; expr = section_subset_expr ->
+ { VernacNameSectionHypSet (id, expr) }
+
+ (* Requiring an already compiled module *)
+ | IDENT "Require"; export = export_token; qidl = LIST1 global ->
+ { VernacRequire (None, export, qidl) }
+ | IDENT "From" ; ns = global ; IDENT "Require"; export = export_token
+ ; qidl = LIST1 global ->
+ { VernacRequire (Some ns, export, qidl) }
+ | IDENT "Import"; qidl = LIST1 global -> { VernacImport (false,qidl) }
+ | IDENT "Export"; qidl = LIST1 global -> { VernacImport (true,qidl) }
+ | IDENT "Include"; e = module_type_inl; l = LIST0 ext_module_expr ->
+ { VernacInclude(e::l) }
+ | IDENT "Include"; "Type"; e = module_type_inl; l = LIST0 ext_module_type ->
+ { warn_deprecated_include_type ~loc ();
+ VernacInclude(e::l) } ] ]
+ ;
+ export_token:
+ [ [ IDENT "Import" -> { Some false }
+ | IDENT "Export" -> { Some true }
+ | -> { None } ] ]
+ ;
+ ext_module_type:
+ [ [ "<+"; mty = module_type_inl -> { mty } ] ]
+ ;
+ ext_module_expr:
+ [ [ "<+"; mexpr = module_expr_inl -> { mexpr } ] ]
+ ;
+ check_module_type:
+ [ [ "<:"; mty = module_type_inl -> { mty } ] ]
+ ;
+ check_module_types:
+ [ [ mtys = LIST0 check_module_type -> { mtys } ] ]
+ ;
+ of_module_type:
+ [ [ ":"; mty = module_type_inl -> { Enforce mty }
+ | mtys = check_module_types -> { Check mtys } ] ]
+ ;
+ is_module_type:
+ [ [ ":="; mty = module_type_inl ; l = LIST0 ext_module_type -> { (mty::l) }
+ | -> { [] } ] ]
+ ;
+ is_module_expr:
+ [ [ ":="; mexpr = module_expr_inl; l = LIST0 ext_module_expr -> { (mexpr::l) }
+ | -> { [] } ] ]
+ ;
+ functor_app_annot:
+ [ [ "["; IDENT "inline"; "at"; IDENT "level"; i = INT; "]" ->
+ { InlineAt (int_of_string i) }
+ | "["; IDENT "no"; IDENT "inline"; "]" -> { NoInline }
+ | -> { DefaultInline }
+ ] ]
+ ;
+ module_expr_inl:
+ [ [ "!"; me = module_expr -> { (me,NoInline) }
+ | me = module_expr; a = functor_app_annot -> { (me,a) } ] ]
+ ;
+ module_type_inl:
+ [ [ "!"; me = module_type -> { (me,NoInline) }
+ | me = module_type; a = functor_app_annot -> { (me,a) } ] ]
+ ;
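+ (* Examples of the inline annotation as consumed by Include/Module above
+    (F is a placeholder module type name): "Include F [inline at level 2]"
+    gives (F, InlineAt 2); "Include !F" and "Include F [no inline]" give
+    (F, NoInline); a bare "Include F" gives (F, DefaultInline). *)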
+ (* Module binder *)
+ module_binder:
+ [ [ "("; export = export_token; idl = LIST1 identref; ":";
+ mty = module_type_inl; ")" -> { (export,idl,mty) } ] ]
+ ;
+ (* Module expressions *)
+ module_expr:
+ [ [ me = module_expr_atom -> { me }
+ | me1 = module_expr; me2 = module_expr_atom -> { CAst.make ~loc @@ CMapply (me1,me2) }
+ ] ]
+ ;
+ module_expr_atom:
+ [ [ qid = qualid -> { CAst.make ~loc @@ CMident qid } | "("; me = module_expr; ")" -> { me } ] ]
+ ;
+ with_declaration:
+ [ [ "Definition"; fqid = fullyqualid; udecl = OPT univ_decl; ":="; c = Constr.lconstr ->
+ { CWith_Definition (fqid,udecl,c) }
+ | IDENT "Module"; fqid = fullyqualid; ":="; qid = qualid ->
+ { CWith_Module (fqid,qid) }
+ ] ]
+ ;
+ module_type:
+ [ [ qid = qualid -> { CAst.make ~loc @@ CMident qid }
+ | "("; mt = module_type; ")" -> { mt }
+ | mty = module_type; me = module_expr_atom ->
+ { CAst.make ~loc @@ CMapply (mty,me) }
+ | mty = module_type; "with"; decl = with_declaration ->
+ { CAst.make ~loc @@ CMwith (mty,decl) }
+ ] ]
+ ;
+ (* Proof using *)
+ section_subset_expr:
+ [ [ only_starredidentrefs; l = LIST0 starredidentref ->
+ { starredidentreflist_to_expr l }
+ | e = ssexpr -> { e } ]]
+ ;
+ starredidentref:
+ [ [ i = identref -> { SsSingl i }
+ | i = identref; "*" -> { SsFwdClose(SsSingl i) }
+ | "Type" -> { SsType }
+ | "Type"; "*" -> { SsFwdClose SsType } ]]
+ ;
+ ssexpr:
+ [ "35"
+ [ "-"; e = ssexpr -> { SsCompl e } ]
+ | "50"
+ [ e1 = ssexpr; "-"; e2 = ssexpr-> { SsSubstr(e1,e2) }
+ | e1 = ssexpr; "+"; e2 = ssexpr-> { SsUnion(e1,e2) } ]
+ | "0"
+ [ i = starredidentref -> { i }
+ | "("; only_starredidentrefs; l = LIST0 starredidentref; ")"->
+ { starredidentreflist_to_expr l }
+ | "("; only_starredidentrefs; l = LIST0 starredidentref; ")"; "*" ->
+ { SsFwdClose(starredidentreflist_to_expr l) }
+ | "("; e = ssexpr; ")"-> { e }
+ | "("; e = ssexpr; ")"; "*" -> { SsFwdClose e } ] ]
+ ;
+END
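+(* Examples for section_subset_expr, as used by the Collection command above
+   and by "Proof using" elsewhere (A and B are placeholder section variables):
+   "Type*" goes through starredidentref and gives SsFwdClose SsType, while
+   "A - B" fails the only_starredidentrefs lookahead (because of "-") and is
+   parsed as the ssexpr SsSubstr (SsSingl A, SsSingl B). *)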
+
+(* Extensions: implicits, coercions, etc. *)
+GRAMMAR EXTEND Gram
+ GLOBAL: gallina_ext instance_name hint_info;
+
+ gallina_ext:
+ [ [ (* Transparent and Opaque *)
+ IDENT "Transparent"; l = LIST1 smart_global ->
+ { VernacSetOpacity (Conv_oracle.transparent, l) }
+ | IDENT "Opaque"; l = LIST1 smart_global ->
+ { VernacSetOpacity (Conv_oracle.Opaque, l) }
+ | IDENT "Strategy"; l =
+ LIST1 [ v=strategy_level; "["; q=LIST1 smart_global; "]" -> { (v,q) } ] ->
+ { VernacSetStrategy l }
+ (* Canonical structure *)
+ | IDENT "Canonical"; IDENT "Structure"; qid = global ->
+ { VernacCanonical CAst.(make ~loc @@ AN qid) }
+ | IDENT "Canonical"; IDENT "Structure"; ntn = by_notation ->
+ { VernacCanonical CAst.(make ~loc @@ ByNotation ntn) }
+ | IDENT "Canonical"; IDENT "Structure"; qid = global; d = def_body ->
+ { let s = coerce_reference_to_id qid in
+ VernacDefinition ((NoDischarge,CanonicalStructure),((CAst.make (Name s)),None),d) }
+
+ (* Coercions *)
+ | IDENT "Coercion"; qid = global; d = def_body ->
+ { let s = coerce_reference_to_id qid in
+ VernacDefinition ((NoDischarge,Coercion),((CAst.make (Name s)),None),d) }
+ | IDENT "Identity"; IDENT "Coercion"; f = identref; ":";
+ s = class_rawexpr; ">->"; t = class_rawexpr ->
+ { VernacIdentityCoercion (f, s, t) }
+ | IDENT "Coercion"; qid = global; ":"; s = class_rawexpr; ">->";
+ t = class_rawexpr ->
+ { VernacCoercion (CAst.make ~loc @@ AN qid, s, t) }
+ | IDENT "Coercion"; ntn = by_notation; ":"; s = class_rawexpr; ">->";
+ t = class_rawexpr ->
+ { VernacCoercion (CAst.make ~loc @@ ByNotation ntn, s, t) }
+
+ | IDENT "Context"; c = LIST1 binder ->
+ { VernacContext (List.flatten c) }
+
+ | IDENT "Instance"; namesup = instance_name; ":";
+ expl = [ "!" -> { Decl_kinds.Implicit } | -> { Decl_kinds.Explicit } ] ; t = operconstr LEVEL "200";
+ info = hint_info ;
+ props = [ ":="; "{"; r = record_declaration; "}" -> { Some (true,r) } |
+ ":="; c = lconstr -> { Some (false,c) } | -> { None } ] ->
+ { VernacInstance (false,snd namesup,(fst namesup,expl,t),props,info) }
+
+ | IDENT "Existing"; IDENT "Instance"; id = global;
+ info = hint_info ->
+ { VernacDeclareInstances [id, info] }
+
+ | IDENT "Existing"; IDENT "Instances"; ids = LIST1 global;
+ pri = OPT [ "|"; i = natural -> { i } ] ->
+ { let info = { Typeclasses.hint_priority = pri; hint_pattern = None } in
+ let insts = List.map (fun i -> (i, info)) ids in
+ VernacDeclareInstances insts }
+
+ | IDENT "Existing"; IDENT "Class"; is = global -> { VernacDeclareClass is }
+
+ (* Arguments *)
+ | IDENT "Arguments"; qid = smart_global;
+ args = LIST0 argument_spec_block;
+ more_implicits = OPT
+ [ ","; impl = LIST1
+ [ impl = LIST0 more_implicits_block -> { List.flatten impl } ]
+ SEP "," -> { impl }
+ ];
+ mods = OPT [ ":"; l = LIST1 arguments_modifier SEP "," -> { l } ] ->
+ { let mods = match mods with None -> [] | Some l -> List.flatten l in
+ let slash_position = ref None in
+ let rec parse_args i = function
+ | [] -> []
+ | `Id x :: args -> x :: parse_args (i+1) args
+ | `Slash :: args ->
+ if Option.is_empty !slash_position then
+ (slash_position := Some i; parse_args i args)
+ else
+ user_err Pp.(str "The \"/\" modifier can occur only once")
+ in
+ let args = parse_args 0 (List.flatten args) in
+ let more_implicits = Option.default [] more_implicits in
+ VernacArguments (qid, args, more_implicits, !slash_position, mods) }
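+ (* Worked example (hypothetical names): "Arguments app {A} f / l" flattens to
+    [`Id A; `Id f; `Slash; `Id l] (with A maximally implicit), so parse_args
+    records slash_position = Some 2, i.e. "/" sits after the first two
+    arguments; a second "/" triggers the user_err above. *)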
+
+ | IDENT "Implicit"; "Type"; bl = reserv_list ->
+ { VernacReserve bl }
+
+ | IDENT "Implicit"; IDENT "Types"; bl = reserv_list ->
+ { test_plural_form_types loc "Implicit Types" bl;
+ VernacReserve bl }
+
+ | IDENT "Generalizable";
+ gen = [IDENT "All"; IDENT "Variables" -> { Some [] }
+ | IDENT "No"; IDENT "Variables" -> { None }
+ | ["Variable" -> { () } | IDENT "Variables" -> { () } ];
+ idl = LIST1 identref -> { Some idl } ] ->
+ { VernacGeneralizable gen } ] ]
+ ;
+ arguments_modifier:
+ [ [ IDENT "simpl"; IDENT "nomatch" -> { [`ReductionDontExposeCase] }
+ | IDENT "simpl"; IDENT "never" -> { [`ReductionNeverUnfold] }
+ | IDENT "default"; IDENT "implicits" -> { [`DefaultImplicits] }
+ | IDENT "clear"; IDENT "implicits" -> { [`ClearImplicits] }
+ | IDENT "clear"; IDENT "scopes" -> { [`ClearScopes] }
+ | IDENT "rename" -> { [`Rename] }
+ | IDENT "assert" -> { [`Assert] }
+ | IDENT "extra"; IDENT "scopes" -> { [`ExtraScopes] }
+ | IDENT "clear"; IDENT "scopes"; IDENT "and"; IDENT "implicits" ->
+ { [`ClearImplicits; `ClearScopes] }
+ | IDENT "clear"; IDENT "implicits"; IDENT "and"; IDENT "scopes" ->
+ { [`ClearImplicits; `ClearScopes] }
+ ] ]
+ ;
+ scope:
+ [ [ "%"; key = IDENT -> { key } ] ]
+ ;
+ argument_spec: [
+ [ b = OPT "!"; id = name ; s = OPT scope ->
+ { id.CAst.v, not (Option.is_empty b), Option.map (fun x -> CAst.make ~loc x) s }
+ ]
+ ];
+ (* List of arguments implicit status, scope, modifiers *)
+ argument_spec_block: [
+ [ item = argument_spec ->
+ { let name, recarg_like, notation_scope = item in
+ [`Id { name=name; recarg_like=recarg_like;
+ notation_scope=notation_scope;
+ implicit_status = NotImplicit}] }
+ | "/" -> { [`Slash] }
+ | "("; items = LIST1 argument_spec; ")"; sc = OPT scope ->
+ { let f x = match sc, x with
+ | None, x -> x | x, None -> Option.map (fun y -> CAst.make ~loc y) x
+ | Some _, Some _ -> user_err Pp.(str "scope declared twice") in
+ List.map (fun (name,recarg_like,notation_scope) ->
+ `Id { name=name; recarg_like=recarg_like;
+ notation_scope=f notation_scope;
+ implicit_status = NotImplicit}) items }
+ | "["; items = LIST1 argument_spec; "]"; sc = OPT scope ->
+ { let f x = match sc, x with
+ | None, x -> x | x, None -> Option.map (fun y -> CAst.make ~loc y) x
+ | Some _, Some _ -> user_err Pp.(str "scope declared twice") in
+ List.map (fun (name,recarg_like,notation_scope) ->
+ `Id { name=name; recarg_like=recarg_like;
+ notation_scope=f notation_scope;
+ implicit_status = Implicit}) items }
+ | "{"; items = LIST1 argument_spec; "}"; sc = OPT scope ->
+ { let f x = match sc, x with
+ | None, x -> x | x, None -> Option.map (fun y -> CAst.make ~loc y) x
+ | Some _, Some _ -> user_err Pp.(str "scope declared twice") in
+ List.map (fun (name,recarg_like,notation_scope) ->
+ `Id { name=name; recarg_like=recarg_like;
+ notation_scope=f notation_scope;
+ implicit_status = MaximallyImplicit}) items }
+ ]
+ ];
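+ (* For reference: "(x y)" leaves both arguments NotImplicit, "[x y]" makes
+    them Implicit, and "{x y}" maximally implicit; a trailing group scope such
+    as "(x y)%nat" is distributed to the items that do not already carry their
+    own scope, and giving both raises "scope declared twice". *)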
+ (* Same as [argument_spec_block], but with only implicit status and names *)
+ more_implicits_block: [
+ [ name = name -> { [(name.CAst.v, Vernacexpr.NotImplicit)] }
+ | "["; items = LIST1 name; "]" ->
+ { List.map (fun name -> (name.CAst.v, Vernacexpr.Implicit)) items }
+ | "{"; items = LIST1 name; "}" ->
+ { List.map (fun name -> (name.CAst.v, Vernacexpr.MaximallyImplicit)) items }
+ ]
+ ];
+ strategy_level:
+ [ [ IDENT "expand" -> { Conv_oracle.Expand }
+ | IDENT "opaque" -> { Conv_oracle.Opaque }
+ | n=INT -> { Conv_oracle.Level (int_of_string n) }
+ | "-"; n=INT -> { Conv_oracle.Level (- int_of_string n) }
+ | IDENT "transparent" -> { Conv_oracle.transparent } ] ]
+ ;
+ instance_name:
+ [ [ name = ident_decl; sup = OPT binders ->
+ { (CAst.map (fun id -> Name id) (fst name), snd name),
+ (Option.default [] sup) }
+ | -> { ((CAst.make ~loc Anonymous), None), [] } ] ]
+ ;
+ hint_info:
+ [ [ "|"; i = OPT natural; pat = OPT constr_pattern ->
+ { { Typeclasses.hint_priority = i; hint_pattern = pat } }
+ | -> { { Typeclasses.hint_priority = None; hint_pattern = None } } ] ]
+ ;
+ reserv_list:
+ [ [ bl = LIST1 reserv_tuple -> { bl } | b = simple_reserv -> { [b] } ] ]
+ ;
+ reserv_tuple:
+ [ [ "("; a = simple_reserv; ")" -> { a } ] ]
+ ;
+ simple_reserv:
+ [ [ idl = LIST1 identref; ":"; c = lconstr -> { (idl,c) } ] ]
+ ;
+
+END
+
+GRAMMAR EXTEND Gram
+ GLOBAL: command query_command class_rawexpr gallina_ext;
+
+ gallina_ext:
+ [ [ IDENT "Export"; "Set"; table = option_table; v = option_value ->
+ { VernacSetOption (true, table, v) }
+ | IDENT "Export"; IDENT "Unset"; table = option_table ->
+ { VernacUnsetOption (true, table) }
+ ] ];
+
+ command:
+ [ [ IDENT "Comments"; l = LIST0 comment -> { VernacComments l }
+
+ (* Hack! Should be in gallina_ext, but camlp5 factorizes badly *)
+ | IDENT "Declare"; IDENT "Instance"; namesup = instance_name; ":";
+ expl = [ "!" -> { Decl_kinds.Implicit } | -> { Decl_kinds.Explicit } ] ; t = operconstr LEVEL "200";
+ info = hint_info ->
+ { VernacInstance (true, snd namesup, (fst namesup, expl, t), None, info) }
+
+ (* System directory *)
+ | IDENT "Pwd" -> { VernacChdir None }
+ | IDENT "Cd" -> { VernacChdir None }
+ | IDENT "Cd"; dir = ne_string -> { VernacChdir (Some dir) }
+
+ | IDENT "Load"; verbosely = [ IDENT "Verbose" -> { true } | -> { false } ];
+ s = [ s = ne_string -> { s } | s = IDENT -> { s } ] ->
+ { VernacLoad (verbosely, s) }
+ | IDENT "Declare"; IDENT "ML"; IDENT "Module"; l = LIST1 ne_string ->
+ { VernacDeclareMLModule l }
+
+ | IDENT "Locate"; l = locatable -> { VernacLocate l }
+
+ (* Managing load paths *)
+ | IDENT "Add"; IDENT "LoadPath"; dir = ne_string; alias = as_dirpath ->
+ { VernacAddLoadPath (false, dir, alias) }
+ | IDENT "Add"; IDENT "Rec"; IDENT "LoadPath"; dir = ne_string;
+ alias = as_dirpath -> { VernacAddLoadPath (true, dir, alias) }
+ | IDENT "Remove"; IDENT "LoadPath"; dir = ne_string ->
+ { VernacRemoveLoadPath dir }
+
+ (* For compatibility *)
+ | IDENT "AddPath"; dir = ne_string; "as"; alias = as_dirpath ->
+ { VernacAddLoadPath (false, dir, alias) }
+ | IDENT "AddRecPath"; dir = ne_string; "as"; alias = as_dirpath ->
+ { VernacAddLoadPath (true, dir, alias) }
+ | IDENT "DelPath"; dir = ne_string ->
+ { VernacRemoveLoadPath dir }
+
+ (* Type-Checking (not in the reference manual) *)
+ | "Type"; c = lconstr -> { VernacGlobalCheck c }
+
+ (* Printing (careful factorization of entries) *)
+ | IDENT "Print"; p = printable -> { VernacPrint p }
+ | IDENT "Print"; qid = smart_global; l = OPT univ_name_list -> { VernacPrint (PrintName (qid,l)) }
+ | IDENT "Print"; IDENT "Module"; "Type"; qid = global ->
+ { VernacPrint (PrintModuleType qid) }
+ | IDENT "Print"; IDENT "Module"; qid = global ->
+ { VernacPrint (PrintModule qid) }
+ | IDENT "Print"; IDENT "Namespace" ; ns = dirpath ->
+ { VernacPrint (PrintNamespace ns) }
+ | IDENT "Inspect"; n = natural -> { VernacPrint (PrintInspect n) }
+
+ | IDENT "Add"; IDENT "ML"; IDENT "Path"; dir = ne_string ->
+ { VernacAddMLPath (false, dir) }
+ | IDENT "Add"; IDENT "Rec"; IDENT "ML"; IDENT "Path"; dir = ne_string ->
+ { VernacAddMLPath (true, dir) }
+
+ (* For acting on parameter tables *)
+ | "Set"; table = option_table; v = option_value ->
+ { VernacSetOption (false, table, v) }
+ | IDENT "Unset"; table = option_table ->
+ { VernacUnsetOption (false, table) }
+
+ | IDENT "Print"; IDENT "Table"; table = option_table ->
+ { VernacPrintOption table }
+
+ | IDENT "Add"; table = IDENT; field = IDENT; v = LIST1 option_ref_value
+ -> { VernacAddOption ([table;field], v) }
+ (* A global value below will be hidden by a field above! *)
+ (* In fact, we give priority to secondary tables *)
+ (* No syntax for tertiary tables due to conflict *)
+ (* (but they are unused anyway) *)
+ | IDENT "Add"; table = IDENT; v = LIST1 option_ref_value ->
+ { VernacAddOption ([table], v) }
+
+ | IDENT "Test"; table = option_table; "for"; v = LIST1 option_ref_value
+ -> { VernacMemOption (table, v) }
+ | IDENT "Test"; table = option_table ->
+ { VernacPrintOption table }
+
+ | IDENT "Remove"; table = IDENT; field = IDENT; v= LIST1 option_ref_value
+ -> { VernacRemoveOption ([table;field], v) }
+ | IDENT "Remove"; table = IDENT; v = LIST1 option_ref_value ->
+ { VernacRemoveOption ([table], v) } ]]
+ ;
+ query_command: (* TODO: bring Eval and Check closer together *)
+ [ [ IDENT "Eval"; r = red_expr; "in"; c = lconstr; "." ->
+ { fun g -> VernacCheckMayEval (Some r, g, c) }
+ | IDENT "Compute"; c = lconstr; "." ->
+ { fun g -> VernacCheckMayEval (Some (Genredexpr.CbvVm None), g, c) }
+ | IDENT "Check"; c = lconstr; "." ->
+ { fun g -> VernacCheckMayEval (None, g, c) }
+ (* Searching the environment *)
+ | IDENT "About"; qid = smart_global; l = OPT univ_name_list; "." ->
+ { fun g -> VernacPrint (PrintAbout (qid,l,g)) }
+ | IDENT "SearchHead"; c = constr_pattern; l = in_or_out_modules; "." ->
+ { fun g -> VernacSearch (SearchHead c,g, l) }
+ | IDENT "SearchPattern"; c = constr_pattern; l = in_or_out_modules; "." ->
+ { fun g -> VernacSearch (SearchPattern c,g, l) }
+ | IDENT "SearchRewrite"; c = constr_pattern; l = in_or_out_modules; "." ->
+ { fun g -> VernacSearch (SearchRewrite c,g, l) }
+ | IDENT "Search"; s = searchabout_query; l = searchabout_queries; "." ->
+ { let (sl,m) = l in fun g -> VernacSearch (SearchAbout (s::sl),g, m) }
+ (* compatibility: SearchAbout *)
+ | IDENT "SearchAbout"; s = searchabout_query; l = searchabout_queries; "." ->
+ { fun g -> let (sl,m) = l in VernacSearch (SearchAbout (s::sl),g, m) }
+ (* compatibility: SearchAbout with "[ ... ]" *)
+ | IDENT "SearchAbout"; "["; sl = LIST1 searchabout_query; "]";
+ l = in_or_out_modules; "." ->
+ { fun g -> VernacSearch (SearchAbout sl,g, l) }
+ ] ]
+ ;
+ printable:
+ [ [ IDENT "Term"; qid = smart_global; l = OPT univ_name_list -> { PrintName (qid,l) }
+ | IDENT "All" -> { PrintFullContext }
+ | IDENT "Section"; s = global -> { PrintSectionContext s }
+ | IDENT "Grammar"; ent = IDENT ->
+ (* This should be in "syntax" section but is here for factorization *)
+ { PrintGrammar ent }
+ | IDENT "LoadPath"; dir = OPT dirpath -> { PrintLoadPath dir }
+ | IDENT "Modules" ->
+ { user_err Pp.(str "Print Modules is obsolete; use Print Libraries instead") }
+ | IDENT "Libraries" -> { PrintModules }
+
+ | IDENT "ML"; IDENT "Path" -> { PrintMLLoadPath }
+ | IDENT "ML"; IDENT "Modules" -> { PrintMLModules }
+ | IDENT "Debug"; IDENT "GC" -> { PrintDebugGC }
+ | IDENT "Graph" -> { PrintGraph }
+ | IDENT "Classes" -> { PrintClasses }
+ | IDENT "TypeClasses" -> { PrintTypeClasses }
+ | IDENT "Instances"; qid = smart_global -> { PrintInstances qid }
+ | IDENT "Coercions" -> { PrintCoercions }
+ | IDENT "Coercion"; IDENT "Paths"; s = class_rawexpr; t = class_rawexpr
+ -> { PrintCoercionPaths (s,t) }
+ | IDENT "Canonical"; IDENT "Projections" -> { PrintCanonicalConversions }
+ | IDENT "Tables" -> { PrintTables }
+ | IDENT "Options" -> { PrintTables (* A Synonymous to Tables *) }
+ | IDENT "Hint" -> { PrintHintGoal }
+ | IDENT "Hint"; qid = smart_global -> { PrintHint qid }
+ | IDENT "Hint"; "*" -> { PrintHintDb }
+ | IDENT "HintDb"; s = IDENT -> { PrintHintDbName s }
+ | IDENT "Scopes" -> { PrintScopes }
+ | IDENT "Scope"; s = IDENT -> { PrintScope s }
+ | IDENT "Visibility"; s = OPT IDENT -> { PrintVisibility s }
+ | IDENT "Implicit"; qid = smart_global -> { PrintImplicit qid }
+ | IDENT "Universes"; fopt = OPT ne_string -> { PrintUniverses (false, fopt) }
+ | IDENT "Sorted"; IDENT "Universes"; fopt = OPT ne_string -> { PrintUniverses (true, fopt) }
+ | IDENT "Assumptions"; qid = smart_global -> { PrintAssumptions (false, false, qid) }
+ | IDENT "Opaque"; IDENT "Dependencies"; qid = smart_global -> { PrintAssumptions (true, false, qid) }
+ | IDENT "Transparent"; IDENT "Dependencies"; qid = smart_global -> { PrintAssumptions (false, true, qid) }
+ | IDENT "All"; IDENT "Dependencies"; qid = smart_global -> { PrintAssumptions (true, true, qid) }
+ | IDENT "Strategy"; qid = smart_global -> { PrintStrategy (Some qid) }
+ | IDENT "Strategies" -> { PrintStrategy None } ] ]
+ ;
+ class_rawexpr:
+ [ [ IDENT "Funclass" -> { FunClass }
+ | IDENT "Sortclass" -> { SortClass }
+ | qid = smart_global -> { RefClass qid } ] ]
+ ;
+ locatable:
+ [ [ qid = smart_global -> { LocateAny qid }
+ | IDENT "Term"; qid = smart_global -> { LocateTerm qid }
+ | IDENT "File"; f = ne_string -> { LocateFile f }
+ | IDENT "Library"; qid = global -> { LocateLibrary qid }
+ | IDENT "Module"; qid = global -> { LocateModule qid } ] ]
+ ;
+ option_value:
+ [ [ -> { BoolValue true }
+ | n = integer -> { IntValue (Some n) }
+ | s = STRING -> { StringValue s } ] ]
+ ;
+ option_ref_value:
+ [ [ id = global -> { QualidRefValue id }
+ | s = STRING -> { StringRefValue s } ] ]
+ ;
+ option_table:
+ [ [ fl = LIST1 [ x = IDENT -> { x } ] -> { fl } ]]
+ ;
+ as_dirpath:
+ [ [ d = OPT [ "as"; d = dirpath -> { d } ] -> { d } ] ]
+ ;
+ ne_in_or_out_modules:
+ [ [ IDENT "inside"; l = LIST1 global -> { SearchInside l }
+ | IDENT "outside"; l = LIST1 global -> { SearchOutside l } ] ]
+ ;
+ in_or_out_modules:
+ [ [ m = ne_in_or_out_modules -> { m }
+ | -> { SearchOutside [] } ] ]
+ ;
+ comment:
+ [ [ c = constr -> { CommentConstr c }
+ | s = STRING -> { CommentString s }
+ | n = natural -> { CommentInt n } ] ]
+ ;
+ positive_search_mark:
+ [ [ "-" -> { false } | -> { true } ] ]
+ ;
+ scope:
+ [ [ "%"; key = IDENT -> { key } ] ]
+ ;
+ searchabout_query:
+ [ [ b = positive_search_mark; s = ne_string; sc = OPT scope ->
+ { (b, SearchString (s,sc)) }
+ | b = positive_search_mark; p = constr_pattern ->
+ { (b, SearchSubPattern p) }
+ ] ]
+ ;
+ searchabout_queries:
+ [ [ m = ne_in_or_out_modules -> { ([],m) }
+ | s = searchabout_query; l = searchabout_queries ->
+ { let (sl,m) = l in (s::sl,m) }
+ | -> { ([],SearchOutside []) }
+ ] ]
+ ;
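+ (* Example (Arith is only a sample library name): the command
+    Search "dec" -"sumbool" inside Arith.
+    parses as the query list
+    [(true, SearchString ("dec", None)); (false, SearchString ("sumbool", None))]
+    with the module restriction SearchInside [Arith]; with no trailing
+    restriction the default is SearchOutside []. *)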
+ univ_name_list:
+ [ [ "@{" ; l = LIST0 name; "}" -> { l } ] ]
+ ;
+END
+
+GRAMMAR EXTEND Gram
+ GLOBAL: command;
+
+ command:
+ [ [
+(* State management *)
+ IDENT "Write"; IDENT "State"; s = IDENT -> { VernacWriteState s }
+ | IDENT "Write"; IDENT "State"; s = ne_string -> { VernacWriteState s }
+ | IDENT "Restore"; IDENT "State"; s = IDENT -> { VernacRestoreState s }
+ | IDENT "Restore"; IDENT "State"; s = ne_string -> { VernacRestoreState s }
+
+(* Resetting *)
+ | IDENT "Reset"; IDENT "Initial" -> { VernacResetInitial }
+ | IDENT "Reset"; id = identref -> { VernacResetName id }
+ | IDENT "Back" -> { VernacBack 1 }
+ | IDENT "Back"; n = natural -> { VernacBack n }
+ | IDENT "BackTo"; n = natural -> { VernacBackTo n }
+
+(* Tactic Debugger *)
+ | IDENT "Debug"; IDENT "On" ->
+ { VernacSetOption (false, ["Ltac";"Debug"], BoolValue true) }
+
+ | IDENT "Debug"; IDENT "Off" ->
+ { VernacSetOption (false, ["Ltac";"Debug"], BoolValue false) }
+
+(* registration of a custom reduction *)
+
+ | IDENT "Declare"; IDENT "Reduction"; s = IDENT; ":=";
+ r = red_expr ->
+ { VernacDeclareReduction (s,r) }
+
+ ] ];
+ END
+
+(* Grammar extensions *)
+
+GRAMMAR EXTEND Gram
+ GLOBAL: syntax;
+
+ syntax:
+ [ [ IDENT "Open"; IDENT "Scope"; sc = IDENT ->
+ { VernacOpenCloseScope (true,sc) }
+
+ | IDENT "Close"; IDENT "Scope"; sc = IDENT ->
+ { VernacOpenCloseScope (false,sc) }
+
+ | IDENT "Delimit"; IDENT "Scope"; sc = IDENT; "with"; key = IDENT ->
+ { VernacDelimiters (sc, Some key) }
+ | IDENT "Undelimit"; IDENT "Scope"; sc = IDENT ->
+ { VernacDelimiters (sc, None) }
+
+ | IDENT "Bind"; IDENT "Scope"; sc = IDENT; "with";
+ refl = LIST1 class_rawexpr -> { VernacBindScope (sc,refl) }
+
+ | IDENT "Infix"; op = ne_lstring; ":="; p = constr;
+ modl = [ "("; l = LIST1 syntax_modifier SEP ","; ")" -> { l } | -> { [] } ];
+ sc = OPT [ ":"; sc = IDENT -> { sc } ] ->
+ { VernacInfix ((op,modl),p,sc) }
+ | IDENT "Notation"; id = identref;
+ idl = LIST0 ident; ":="; c = constr; b = only_parsing ->
+ { VernacSyntacticDefinition
+ (id,(idl,c),b) }
+ | IDENT "Notation"; s = lstring; ":=";
+ c = constr;
+ modl = [ "("; l = LIST1 syntax_modifier SEP ","; ")" -> { l } | -> { [] } ];
+ sc = OPT [ ":"; sc = IDENT -> { sc } ] ->
+ { VernacNotation (c,(s,modl),sc) }
+ | IDENT "Format"; IDENT "Notation"; n = STRING; s = STRING; fmt = STRING ->
+ { VernacNotationAddFormat (n,s,fmt) }
+
+ | IDENT "Reserved"; IDENT "Infix"; s = ne_lstring;
+ l = [ "("; l = LIST1 syntax_modifier SEP ","; ")" -> { l } | -> { [] } ] ->
+ { let s = CAst.map (fun s -> "x '"^s^"' y") s in
+ VernacSyntaxExtension (true,(s,l)) }
+
+ | IDENT "Reserved"; IDENT "Notation";
+ s = ne_lstring;
+ l = [ "("; l = LIST1 syntax_modifier SEP ","; ")" -> { l } | -> { [] } ]
+ -> { VernacSyntaxExtension (false, (s,l)) }
+
+ (* "Print" "Grammar" should be here but is in "command" entry in order
+ to factorize with other "Print"-based vernac entries *)
+ ] ]
+ ;
+ only_parsing:
+ [ [ "("; IDENT "only"; IDENT "parsing"; ")" ->
+ { Some Flags.Current }
+ | "("; IDENT "compat"; s = STRING; ")" ->
+ { Some (parse_compat_version s) }
+ | -> { None } ] ]
+ ;
+ level:
+ [ [ IDENT "level"; n = natural -> { NumLevel n }
+ | IDENT "next"; IDENT "level" -> { NextLevel } ] ]
+ ;
+ syntax_modifier:
+ [ [ "at"; IDENT "level"; n = natural -> { SetLevel n }
+ | IDENT "left"; IDENT "associativity" -> { SetAssoc LeftA }
+ | IDENT "right"; IDENT "associativity" -> { SetAssoc RightA }
+ | IDENT "no"; IDENT "associativity" -> { SetAssoc NonA }
+ | IDENT "only"; IDENT "printing" -> { SetOnlyPrinting }
+ | IDENT "only"; IDENT "parsing" -> { SetOnlyParsing }
+ | IDENT "compat"; s = STRING ->
+ { SetCompatVersion (parse_compat_version s) }
+ | IDENT "format"; s1 = [s = STRING -> { CAst.make ~loc s } ];
+ s2 = OPT [s = STRING -> { CAst.make ~loc s } ] ->
+ { begin match s1, s2 with
+ | { CAst.v = k }, Some s -> SetFormat(k,s)
+ | s, None -> SetFormat ("text",s) end }
+ | x = IDENT; ","; l = LIST1 [id = IDENT -> { id } ] SEP ","; "at";
+ lev = level -> { SetItemLevel (x::l,lev) }
+ | x = IDENT; "at"; lev = level -> { SetItemLevel ([x],lev) }
+ | x = IDENT; "at"; lev = level; b = constr_as_binder_kind -> { SetItemLevelAsBinder ([x],b,Some lev) }
+ | x = IDENT; b = constr_as_binder_kind -> { SetItemLevelAsBinder ([x],b,None) }
+ | x = IDENT; typ = syntax_extension_type -> { SetEntryType (x,typ) }
+ ] ]
+ ;
+ syntax_extension_type:
+ [ [ IDENT "ident" -> { ETName } | IDENT "global" -> { ETReference }
+ | IDENT "bigint" -> { ETBigint }
+ | IDENT "binder" -> { ETBinder true }
+ | IDENT "constr"; n = OPT at_level; b = constr_as_binder_kind -> { ETConstrAsBinder (b,n) }
+ | IDENT "pattern" -> { ETPattern (false,None) }
+ | IDENT "pattern"; "at"; IDENT "level"; n = natural -> { ETPattern (false,Some n) }
+ | IDENT "strict"; IDENT "pattern" -> { ETPattern (true,None) }
+ | IDENT "strict"; IDENT "pattern"; "at"; IDENT "level"; n = natural -> { ETPattern (true,Some n) }
+ | IDENT "closed"; IDENT "binder" -> { ETBinder false }
+ ] ]
+ ;
+ at_level:
+ [ [ "at"; n = level -> { n } ] ]
+ ;
+ constr_as_binder_kind:
+ [ [ "as"; IDENT "ident" -> { Notation_term.AsIdent }
+ | "as"; IDENT "pattern" -> { Notation_term.AsIdentOrPattern }
+ | "as"; IDENT "strict"; IDENT "pattern" -> { Notation_term.AsStrictPattern } ] ]
+ ;
+END
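+(* Examples for syntax_modifier, as it appears in the Notation/Infix commands
+   above (notation text chosen for illustration): the modifier list
+   (at level 50, left associativity, only parsing) parses as
+   [SetLevel 50; SetAssoc LeftA; SetOnlyParsing]; "x ident" gives
+   SetEntryType (x, ETName); a single-string "format" gives SetFormat ("text", s). *)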
diff --git a/vernac/metasyntax.ml b/vernac/metasyntax.ml
index da14358ef..240147c8d 100644
--- a/vernac/metasyntax.ml
+++ b/vernac/metasyntax.ml
@@ -487,6 +487,15 @@ and check_no_ldots_in_box = function
let error_not_same ?loc () =
user_err ?loc Pp.(str "The format is not the same on the right- and left-hand sides of the special token \"..\".")
+let find_prod_list_loc sfmt fmt =
+ (* [fmt] is some [UnpTerminal x :: sfmt @ UnpTerminal ".." :: sfmt @ UnpTerminal y :: rest] *)
+ if List.is_empty sfmt then
+ (* No separators; we highlight the sequence "x .." *)
+ Loc.merge_opt (fst (List.hd fmt)) (fst (List.hd (List.tl fmt)))
+ else
+ (* A separator; we highlight the separating sequence *)
+ Loc.merge_opt (fst (List.hd sfmt)) (fst (List.last sfmt))
+
let skip_var_in_recursive_format = function
| (_,UnpTerminal s) :: sl (* skip first var *) when not (List.for_all (fun c -> c = " ") (String.explode s)) ->
(* To do, though not so important: check that the names match
@@ -496,6 +505,8 @@ let skip_var_in_recursive_format = function
| [] -> assert false
let read_recursive_format sl fmt =
+ (* Turn [UnpTerminal s :: some-list @ UnpTerminal ".." :: same-some-list @ UnpTerminal s' :: rest] *)
+ (* into [(some-list,rest)] *)
let get_head fmt =
let sl = skip_var_in_recursive_format fmt in
try split_format_at_ldots [] sl with Exit -> error_not_same ?loc:(fst (List.last (if sl = [] then fmt else sl))) () in
@@ -528,10 +539,10 @@ let hunks_of_format (from,(vars,typs)) symfmt =
let i = index_id m vars in
let typ = List.nth typs (i-1) in
let _,prec = precedence_of_entry_type from typ in
- let slfmt,fmt = read_recursive_format sl fmt in
- let sl, slfmt = aux (sl,slfmt) in
- if not (List.is_empty sl) then error_format ?loc:(fst (List.last fmt)) ();
- let symbs, l = aux (symbs,fmt) in
+ let loc_slfmt,rfmt = read_recursive_format sl fmt in
+ let sl, slfmt = aux (sl,loc_slfmt) in
+ if not (List.is_empty sl) then error_format ?loc:(find_prod_list_loc loc_slfmt fmt) ();
+ let symbs, l = aux (symbs,rfmt) in
let hunk = match typ with
| ETConstr _ -> UnpListMetaVar (i,prec,slfmt)
| ETBinder isopen ->
@@ -1312,8 +1323,15 @@ let make_pa_rule level (typs,symbols) ntn need_squash =
let make_pp_rule level (typs,symbols) fmt =
match fmt with
- | None -> [UnpBox (PpHOVB 0, make_hunks typs symbols level)]
- | Some fmt -> hunks_of_format (level, List.split typs) (symbols, parse_format fmt)
+ | None ->
+ let hunks = make_hunks typs symbols level in
+ if List.exists (function _,(UnpCut (PpBrk _) | UnpListMetaVar _) -> true | _ -> false) hunks then
+ [UnpBox (PpHOVB 0,hunks)]
+ else
+ (* Optimization to work around what seems to be an OCaml Format bug (see Mantis #7804/#7807) *)
+ List.map snd hunks (* drop locations which are dummy *)
+ | Some fmt ->
+ hunks_of_format (level, List.split typs) (symbols, parse_format fmt)
(* let make_syntax_rules i_typs (ntn,prec,need_squash) sy_data fmt extra onlyprint compat = *)
let make_syntax_rules (sd : SynData.syn_data) = let open SynData in
diff --git a/vernac/misctypes.ml b/vernac/misctypes.ml
index ae725efaa..ef9cd3c35 100644
--- a/vernac/misctypes.ml
+++ b/vernac/misctypes.ml
@@ -17,10 +17,10 @@ type 'a or_by_notation = 'a Constrexpr.or_by_notation
[@@ocaml.deprecated "use [Constrexpr.or_by_notation]"]
type intro_pattern_naming_expr = Namegen.intro_pattern_naming_expr =
- | IntroIdentifier of Id.t [@ocaml.deprecated "Use version in [Evarutil]"]
- | IntroFresh of Id.t [@ocaml.deprecated "Use version in [Evarutil]"]
- | IntroAnonymous [@ocaml.deprecated "Use version in [Evarutil]"]
-[@@ocaml.deprecated "use [Evarutil.intro_pattern_naming_expr]"]
+ | IntroIdentifier of Id.t [@ocaml.deprecated "Use version in [Namegen]"]
+ | IntroFresh of Id.t [@ocaml.deprecated "Use version in [Namegen]"]
+ | IntroAnonymous [@ocaml.deprecated "Use version in [Namegen]"]
+[@@ocaml.deprecated "use [Namegen.intro_pattern_naming_expr]"]
type 'a or_var = 'a Locus.or_var =
| ArgArg of 'a [@ocaml.deprecated "Use version in [Locus]"]
diff --git a/vernac/ppvernac.ml b/vernac/ppvernac.ml
index 56dfaa54a..5fbe1f4e4 100644
--- a/vernac/ppvernac.ml
+++ b/vernac/ppvernac.ml
@@ -210,7 +210,10 @@ open Pputils
| HintsTransparency (l, b) ->
keyword (if b then "Transparent" else "Opaque")
++ spc ()
- ++ prlist_with_sep sep pr_qualid l
+ ++ (match l with
+ | HintsVariables -> keyword "Variables"
+ | HintsConstants -> keyword "Constants"
+ | HintsReferences l -> prlist_with_sep sep pr_qualid l)
| HintsMode (m, l) ->
keyword "Mode"
++ spc ()
diff --git a/vernac/vernacentries.ml b/vernac/vernacentries.ml
index 6d1abeca1..5fda1a0da 100644
--- a/vernac/vernacentries.ml
+++ b/vernac/vernacentries.ml
@@ -537,6 +537,13 @@ let should_treat_as_cumulative cum poly =
else user_err Pp.(str "The NonCumulative prefix can only be used in a polymorphic context.")
| None -> poly && Flags.is_polymorphic_inductive_cumulativity ()
+let uniform_inductive_parameters = ref false
+
+let should_treat_as_uniform () =
+ if !uniform_inductive_parameters
+ then ComInductive.UniformParameters
+ else ComInductive.NonUniformParameters
+
let vernac_record cum k poly finite records =
let is_cumulative = should_treat_as_cumulative cum poly in
let map ((coe, (id, pl)), binders, sort, nameopt, cfs) =
@@ -642,7 +649,8 @@ let vernac_inductive ~atts cum lo finite indl =
in
let indl = List.map unpack indl in
let is_cumulative = should_treat_as_cumulative cum atts.polymorphic in
- ComInductive.do_mutual_inductive indl is_cumulative atts.polymorphic lo finite
+ let uniform = should_treat_as_uniform () in
+ ComInductive.do_mutual_inductive indl is_cumulative atts.polymorphic lo ~uniform finite
else
user_err (str "Mixed record-inductive definitions are not allowed")
(*
@@ -1471,6 +1479,14 @@ let _ =
optwrite = Flags.make_polymorphic_inductive_cumulativity }
let _ =
+ declare_bool_option
+ { optdepr = false;
+ optname = "Uniform inductive parameters";
+ optkey = ["Uniform"; "Inductive"; "Parameters"];
+ optread = (fun () -> !uniform_inductive_parameters);
+ optwrite = (fun b -> uniform_inductive_parameters := b) }
+
+let _ =
declare_int_option
{ optdepr = false;
optname = "the level of inlining during functor application";
@@ -2089,7 +2105,7 @@ let interp ?proof ~atts ~st c =
| VernacExactProof c -> vernac_exact_proof c
| VernacAssumption ((discharge,kind),nl,l) ->
vernac_assumption ~atts discharge kind l nl
- | VernacInductive (cum, priv,finite,l) -> vernac_inductive ~atts cum priv finite l
+ | VernacInductive (cum, priv, finite, l) -> vernac_inductive ~atts cum priv finite l
| VernacFixpoint (discharge, l) -> vernac_fixpoint ~atts discharge l
| VernacCoFixpoint (discharge, l) -> vernac_cofixpoint ~atts discharge l
| VernacScheme l -> vernac_scheme l
diff --git a/vernac/vernacexpr.ml b/vernac/vernacexpr.ml
index f74383b02..f5f37339c 100644
--- a/vernac/vernacexpr.ml
+++ b/vernac/vernacexpr.ml
@@ -121,12 +121,17 @@ type 'a hint_info_gen = 'a Typeclasses.hint_info_gen =
type hint_info_expr = Hints.hint_info_expr
[@@ocaml.deprecated "Please use [Hints.hint_info_expr]"]
+type 'a hints_transparency_target = 'a Hints.hints_transparency_target =
+ | HintsVariables
+ | HintsConstants
+ | HintsReferences of 'a list
+
type hints_expr = Hints.hints_expr =
| HintsResolve of (Hints.hint_info_expr * bool * Hints.reference_or_constr) list
| HintsResolveIFF of bool * qualid list * int option
| HintsImmediate of Hints.reference_or_constr list
| HintsUnfold of qualid list
- | HintsTransparency of qualid list * bool
+ | HintsTransparency of qualid hints_transparency_target * bool
| HintsMode of qualid * Hints.hint_mode list
| HintsConstructors of qualid list
| HintsExtern of int * constr_expr option * Genarg.raw_generic_argument