diff options
author | Benjamin Barenblat <bbaren@debian.org> | 2019-02-02 19:29:23 -0500 |
---|---|---|
committer | Benjamin Barenblat <bbaren@debian.org> | 2019-02-02 19:29:23 -0500 |
commit | 9ebf44d84754adc5b64fcf612c6816c02c80462d (patch) | |
tree | bf5e06a28488e0e06a2f2011ff0d110e2e02f8fc /dev/tools | |
parent | 9043add656177eeac1491a73d2f3ab92bec0013c (diff) |
Imported Upstream version 8.9.0upstream/8.9.0upstream
Diffstat (limited to 'dev/tools')
-rwxr-xr-x | dev/tools/backport-pr.sh | 53 | ||||
-rwxr-xr-x | dev/tools/check-overlays.sh | 11 | ||||
-rwxr-xr-x | dev/tools/check-owners-pr.sh | 32 | ||||
-rwxr-xr-x | dev/tools/check-owners.sh | 138 | ||||
-rw-r--r-- | dev/tools/coqdev.el | 47 | ||||
-rwxr-xr-x | dev/tools/merge-pr.sh | 218 | ||||
-rwxr-xr-x | dev/tools/pre-commit | 2 | ||||
-rwxr-xr-x | dev/tools/update-compat.py | 341 |
8 files changed, 800 insertions, 42 deletions
diff --git a/dev/tools/backport-pr.sh b/dev/tools/backport-pr.sh index e4359f70..9864fd4d 100755 --- a/dev/tools/backport-pr.sh +++ b/dev/tools/backport-pr.sh @@ -1,11 +1,34 @@ #!/usr/bin/env bash -# Usage: dev/tools/backport-pr.sh <PR number> [--stop-before-merging] - set -e -PRNUM=$1 -OPTION=$2 +if [[ $# == 0 ]]; then + echo "Usage: $0 [--no-conflict] [--no-signature-check] [--stop-before-merging] prnum" + exit 1 +fi + +while [[ $# -gt 0 ]]; do + case "$1" in + --no-conflict) + NO_CONFLICTS="true" + shift + ;; + --no-signature-check) + NO_SIGNATURE_CHECK="true" + shift + ;; + --stop-before-merging) + STOP_BEFORE_MERGING="true" + shift + ;; + *) + if [[ "$PRNUM" != "" ]]; then + echo "PRNUM was already set to $PRNUM and is now being overridden with $1." + fi + PRNUM="$1" + shift + esac +done if ! git log master --grep "Merge PR #${PRNUM}" | grep "." > /dev/null; then echo "PR #${PRNUM} does not exist." @@ -14,7 +37,7 @@ fi SIGNATURE_STATUS=$(git log master --grep "Merge PR #${PRNUM}" --format="%G?") git log master --grep "Merge PR #${PRNUM}" --format="%GG" -if [[ "${SIGNATURE_STATUS}" != "G" ]]; then +if [[ "$NO_SIGNATURE_CHECK" != "true" && "$SIGNATURE_STATUS" != "G" ]]; then echo read -p "Merge commit does not have a good (valid) signature. Bypass? [y/N] " -n 1 -r echo @@ -27,9 +50,17 @@ BRANCH=backport-pr-${PRNUM} RANGE=$(git log master --grep "Merge PR #${PRNUM}" --format="%P" | sed 's/ /../') MESSAGE=$(git log master --grep "Merge PR #${PRNUM}" --format="%s" | sed 's/Merge/Backport/') -if git checkout -b ${BRANCH}; then +if git checkout -b "${BRANCH}"; then - if ! git cherry-pick -x ${RANGE}; then + if ! git cherry-pick -x "${RANGE}"; then + if [[ "$NO_CONFLICTS" == "true" ]]; then + git status + echo "Conflicts! Aborting..." + git cherry-pick --abort + git checkout - + git branch -d "$BRANCH" + exit 1 + fi echo "Please fix the conflicts, then exit." bash while ! git cherry-pick --continue; do @@ -50,7 +81,7 @@ else fi -if ! 
git diff --exit-code HEAD ${BRANCH} -- "*.mli"; then +if ! git diff --exit-code HEAD "${BRANCH}" -- "*.mli"; then echo read -p "Some mli files are modified. Bypass? [y/N] " -n 1 -r echo @@ -59,12 +90,12 @@ if ! git diff --exit-code HEAD ${BRANCH} -- "*.mli"; then fi fi -if [[ "${OPTION}" == "--stop-before-merging" ]]; then +if [[ "$STOP_BEFORE_MERGING" == "true" ]]; then exit 0 fi -git merge -S --no-ff ${BRANCH} -m "${MESSAGE}" -git branch -d ${BRANCH} +git merge -S --no-ff "${BRANCH}" -m "${MESSAGE}" +git branch -d "${BRANCH}" # To-Do: # - Support for backporting a PR before it is merged diff --git a/dev/tools/check-overlays.sh b/dev/tools/check-overlays.sh new file mode 100755 index 00000000..33a9ff05 --- /dev/null +++ b/dev/tools/check-overlays.sh @@ -0,0 +1,11 @@ +#!/usr/bin/env bash + +for f in $(git ls-files "dev/ci/user-overlays/") +do + if ! ([[ "$f" = dev/ci/user-overlays/README.md ]] || [[ "$f" == *.sh ]]) + then + >&2 echo "Bad overlay '$f'." + >&2 echo "User overlays need to have extension .sh to be picked up!" + exit 1 + fi +done diff --git a/dev/tools/check-owners-pr.sh b/dev/tools/check-owners-pr.sh new file mode 100755 index 00000000..d2910279 --- /dev/null +++ b/dev/tools/check-owners-pr.sh @@ -0,0 +1,32 @@ +#!/usr/bin/env sh + +usage() { + { echo "usage: $0 PR [ARGS]..." + echo "A wrapper around check-owners.sh to check owners for a PR." + echo "Assumes upstream is the canonical Coq repository." + echo "Assumes the PR is against master." 
+ echo + echo " PR: PR number" + echo " ARGS: passed through to check-owners.sh" + } >&2 +} + +case "$1" in + "--help"|"-h") + usage + if [ $# = 1 ]; then exit 0; else exit 1; fi;; + "") + usage + exit 1;; +esac + +PR="$1" +shift + +# this puts both refs in the FETCH_HEAD file but git rev-parse will use the first +git fetch upstream "+refs/pull/$PR/head" master + +head=$(git rev-parse FETCH_HEAD) +base=$(git merge-base upstream/master "$head") + +git diff --name-only -z "$base" "$head" | xargs -0 dev/tools/check-owners.sh "$@" diff --git a/dev/tools/check-owners.sh b/dev/tools/check-owners.sh new file mode 100755 index 00000000..1a97508a --- /dev/null +++ b/dev/tools/check-owners.sh @@ -0,0 +1,138 @@ +#!/usr/bin/env bash + +# Determine CODEOWNERS of the files given in argument +# For a given commit range: +# git diff --name-only -z COMMIT1 COMMIT2 | xargs -0 dev/tools/check-owners.sh [opts] + +# NB: gitignore files will be messed up if you interrupt the script. +# You should be able to just move the .gitignore.bak files back manually. + +usage() { + { echo "usage: $0 [--show-patterns] [--owner OWNER] [FILE]..." + echo " --show-patterns: instead of printing file names print the matching patterns (more compact)" + echo " --owner: show only files/patterns owned by OWNER (use Nobody to see only non-owned files)" + } >&2 +} + +case "$1" in + "--help"|"-h") + usage + if [ $# = 1 ]; then exit 0; else exit 1; fi +esac + +if ! [ -e .github/CODEOWNERS ]; then + >&2 echo "No CODEOWNERS set up or calling from wrong directory." 
+ exit 1 +fi + +files=() +show_patterns=false + +target_owner="" + +while [[ "$#" -gt 0 ]]; do + case "$1" in + "--show-patterns") + show_patterns=true + shift;; + "--owner") + if [[ "$#" = 1 ]]; then + >&2 echo "Missing argument to --owner" + usage + exit 1 + elif [[ "$target_owner" != "" ]]; then + >&2 echo "Only one --owner allowed" + usage + exit 1 + fi + target_owner="$2" + shift 2;; + *) + files+=("$@") + break;; + esac +done + +# CODEOWNERS uses .gitignore patterns so we want to use git to parse it +# The only available tool for that is git check-ignore +# However it provides no way to use alternate .gitignore files +# so we rename them temporarily + +find . -name .gitignore -print0 | while IFS= read -r -d '' f; do + if [ -e "$f.bak" ]; then + >&2 echo "$f.bak exists!" + exit 1 + else + mv "$f" "$f.bak" + fi +done + +# CODEOWNERS is not quite .gitignore patterns: +# after the pattern is the owner (space separated) +# git would interpret that as a big pattern containing spaces +# so we create a valid .gitignore by removing all but the first field + +while read -r pat _; do + printf '%s\n' "$pat" >> .gitignore +done < .github/CODEOWNERS + +# associative array [file => owner] +declare -A owners + +for f in "${files[@]}"; do + data=$(git check-ignore --verbose --no-index "./$f") + code=$? + + if [[ "$code" = 1 ]] || ! 
[[ "$data" =~ .gitignore:.* ]] ; then + # no match, or match from non tracked gitignore (eg global gitignore) + if [ "$target_owner" != "" ] && [ "$target_owner" != Nobody ] ; then + owner="" + else + owner="Nobody" + pat="$f" # no patterns for unowned files + fi + else + # data looks like [.gitignore:$line:$pattern $file] + # extract the line to look it up in CODEOWNERS + data=${data#'.gitignore:'} + line=${data%%:*} + + # NB: supports multiple owners + # Does not support secondary owners declared in comment + read -r pat fowners < <(sed "${line}q;d" .github/CODEOWNERS) + + owner="" + if [ "$target_owner" != "" ]; then + for o in $fowners; do # do not quote: multiple owners possible + if [ "$o" = "$target_owner" ]; then + owner="$o" + fi + done + else + owner="$fowners" + fi + fi + + if [ "$owner" != "" ]; then + if $show_patterns; then + owners[$pat]="$owner" + else + owners[$f]="$owner" + fi + fi +done + +for f in "${!owners[@]}"; do + printf '%s: %s\n' "$f" "${owners[$f]}" +done | sort -k 2 -k 1 # group by owner + +# restore gitignore files +rm .gitignore +find . -name .gitignore.bak -print0 | while IFS= read -r -d '' f; do + base=${f%.bak} + if [ -e "$base" ]; then + >&2 echo "$base exists!" + else + mv "$f" "$base" + fi +done diff --git a/dev/tools/coqdev.el b/dev/tools/coqdev.el index 62fdaec8..ec72f965 100644 --- a/dev/tools/coqdev.el +++ b/dev/tools/coqdev.el @@ -23,7 +23,7 @@ ;; If you load this file from a git repository, checking out an old ;; commit will make it disappear and cause errors for your Emacs -;; startup. To ignore those errors use (require 'coqdev nil t). If you +;; startup. To ignore those errors use (require 'coqdev nil t). 
If you ;; check out a malicious commit Emacs startup would allow it to run ;; arbitrary code, to avoid this you can copy coqdev.el to any ;; location and adjust the load path accordingly (of course if you run @@ -33,7 +33,7 @@ (defun coqdev-default-directory () "Return the Coq repository containing `default-directory'." - (let ((dir (locate-dominating-file default-directory "META.coq"))) + (let ((dir (locate-dominating-file default-directory "META.coq.in"))) (when dir (expand-file-name dir)))) (defun coqdev-setup-compile-command () @@ -103,5 +103,48 @@ Note that this function is executed before _Coqproject is read if it exists." 2 (3 . 4) (5 . 6))) (add-to-list 'compilation-error-regexp-alist 'coq-backtrace)) +(defvar bug-reference-bug-regexp) +(defvar bug-reference-url-format) +(defun coqdev-setup-bug-reference-mode () + "Setup `bug-reference-bug-regexp' and `bug-reference-url-format' for Coq. + +This does not enable `bug-reference-mode'." + (let ((dir (coqdev-default-directory))) + (when dir + (setq-local bug-reference-bug-regexp "#\\(?2:[0-9]+\\)") + (setq-local bug-reference-url-format "https://github.com/coq/coq/issues/%s")))) +(add-hook 'hack-local-variables-hook #'coqdev-setup-bug-reference-mode) + +(defun coqdev-sphinx-quote-coq-refman-region (left right &optional offset beg end) + "Add LEFT and RIGHT around the BEG..END. +Leave the point after RIGHT. BEG and END default to the bounds +of the current region. Leave point OFFSET characters after the +left quote (if OFFSET is nil, leave the point after the right +quote)." 
+ (unless beg + (if (region-active-p) + (setq beg (region-beginning) end (region-end)) + (setq beg (point) end nil))) + (save-excursion + (goto-char (or end beg)) + (insert right)) + (save-excursion + (goto-char beg) + (insert left)) + (if (and end (not offset)) ;; Second test handles the ::`` case + (goto-char (+ end (length left) (length right))) + (goto-char (+ beg (or offset (length left)))))) + +(defun coqdev-sphinx-rst-coq-action () + "Insert a Sphinx role template or quote the current region." + (interactive) + (pcase (read-char "Command [gntm:`]?") + (?g (coqdev-sphinx-quote-coq-refman-region ":g:`" "`")) + (?n (coqdev-sphinx-quote-coq-refman-region ":n:`" "`")) + (?t (coqdev-sphinx-quote-coq-refman-region ":token:`" "`")) + (?m (coqdev-sphinx-quote-coq-refman-region ":math:`" "`")) + (?: (coqdev-sphinx-quote-coq-refman-region "::`" "`" 1)) + (?` (coqdev-sphinx-quote-coq-refman-region "``" "``")))) + (provide 'coqdev) ;;; coqdev ends here diff --git a/dev/tools/merge-pr.sh b/dev/tools/merge-pr.sh index 9f24960f..320ef6ed 100755 --- a/dev/tools/merge-pr.sh +++ b/dev/tools/merge-pr.sh @@ -1,50 +1,210 @@ #!/usr/bin/env bash set -e +set -o pipefail -# This script depends (at least) on git and jq. -# It should be used like this: dev/tools/merge-pr.sh /PR number/ - -#TODO: check arguments and show usage if relevant - -PR=$1 +API=https://api.github.com/repos/coq/coq +OFFICIAL_REMOTE_GIT_URL="git@github.com:coq/coq" +OFFICIAL_REMOTE_HTTPS_URL="github.com/coq/coq" -CURRENT_LOCAL_BRANCH=$(git rev-parse --abbrev-ref HEAD) -REMOTE=$(git config --get "branch.$CURRENT_LOCAL_BRANCH.remote") -git fetch "$REMOTE" "refs/pull/$PR/head" +# This script depends (at least) on git (>= 2.7) and jq. +# It should be used like this: dev/tools/merge-pr.sh /PR number/ -API=https://api.github.com/repos/coq/coq +# Set SLOW_CONF to have the confirmation output wait for a newline +# E.g. 
call $ SLOW_CONF= dev/tools/merge-pr.sh /PR number/ +if [ -z ${SLOW_CONF+x} ]; then + QUICK_CONF="-n 1" +else + QUICK_CONF="" +fi -BASE_BRANCH=$(curl -s "$API/pulls/$PR" | jq -r '.base.label') +RED="\033[31m" +RESET="\033[0m" +GREEN="\033[32m" +BLUE="\033[34m" +YELLOW="\033[33m" +info() { + echo -e "${GREEN}info:${RESET} $1 ${RESET}" +} +error() { + echo -e "${RED}error:${RESET} $1 ${RESET}" +} +warning() { + echo -e "${YELLOW}warning:${RESET} $1 ${RESET}" +} -COMMIT=$(git rev-parse FETCH_HEAD) -STATUS=$(curl -s "$API/commits/$COMMIT/status" | jq -r '.state') +check_util() { +if ! command -v "$1" > /dev/null 2>&1; then + error "this script requires the $1 command line utility" + exit 1 +fi +} -if [ "$BASE_BRANCH" != "coq:$CURRENT_LOCAL_BRANCH" ]; then - echo "Wrong base branch" - read -p "Bypass? [y/N] " -n 1 -r +ask_confirmation() { + read -p "Continue anyway? [y/N] " $QUICK_CONF -r echo if [[ ! $REPLY =~ ^[Yy]$ ]] then exit 1 fi +} + +check_util jq +check_util curl +check_util git +check_util gpg + +# command line parsing + +if [ $# != 1 ]; then + error "usage: $0 PR-number" + exit 1 +fi + +if [[ "$1" =~ ^[1-9][0-9]*$ ]]; then + PR=$1 +else + error "$1 is not a number" + exit 1 +fi + +# Fetching PR metadata + +PRDATA=$(curl -s "$API/pulls/$PR") + +TITLE=$(echo "$PRDATA" | jq -r '.title') +info "title for PR $PR is ${BLUE}$TITLE" + +BASE_BRANCH=$(echo "$PRDATA" | jq -r '.base.label') +info "PR $PR targets branch ${BLUE}$BASE_BRANCH" + +CURRENT_LOCAL_BRANCH=$(git rev-parse --abbrev-ref HEAD) +info "you are merging in ${BLUE}$CURRENT_LOCAL_BRANCH" + +REMOTE=$(git config --get "branch.$CURRENT_LOCAL_BRANCH.remote") +if [ -z "$REMOTE" ]; then + error "branch ${BLUE}$CURRENT_LOCAL_BRANCH${RESET} has not associated remote" + error "don't know where to fetch the PR from" + error "please run: git branch --set-upstream-to=THE_REMOTE/$CURRENT_LOCAL_BRANCH" + exit 1 +fi +REMOTE_URL=$(git remote get-url "$REMOTE" --all) +if [ "$REMOTE_URL" != "${OFFICIAL_REMOTE_GIT_URL}" ] && 
\ + [ "$REMOTE_URL" != "${OFFICIAL_REMOTE_GIT_URL}.git" ] && \ + [ "$REMOTE_URL" != "https://${OFFICIAL_REMOTE_HTTPS_URL}" ] && \ + [ "$REMOTE_URL" != "https://${OFFICIAL_REMOTE_HTTPS_URL}.git" ] && \ + [[ "$REMOTE_URL" != "https://"*"@${OFFICIAL_REMOTE_HTTPS_URL}" ]] && \ + [[ "$REMOTE_URL" != "https://"*"@${OFFICIAL_REMOTE_HTTPS_URL}.git" ]] ; then + error "remote ${BLUE}$REMOTE${RESET} does not point to the official Coq repo" + error "that is ${BLUE}$OFFICIAL_REMOTE_GIT_URL" + error "it points to ${BLUE}$REMOTE_URL${RESET} instead" + ask_confirmation +fi +info "remote for $CURRENT_LOCAL_BRANCH is ${BLUE}$REMOTE" + +info "fetching from $REMOTE the PR" +git remote update "$REMOTE" +if ! git ls-remote "$REMOTE" | grep pull >/dev/null; then + error "remote $REMOTE is not configured to fetch pull requests" + error "run: git config remote.$REMOTE.fetch +refs/pull/*/head:refs/remotes/$REMOTE/pr/*" + exit 1 +fi +git fetch "$REMOTE" "refs/pull/$PR/head" +COMMIT=$(git rev-parse FETCH_HEAD) +info "commit for PR $PR is ${BLUE}$COMMIT" + +# Sanity check: merge to a different branch + +if [ "$BASE_BRANCH" != "coq:$CURRENT_LOCAL_BRANCH" ]; then + error "PR requests merge in ${BLUE}$BASE_BRANCH${RESET} but you are merging in ${BLUE}$CURRENT_LOCAL_BRANCH" + ask_confirmation fi; -if [ "$STATUS" != "success" ]; then - echo "CI status is \"$STATUS\"" - read -p "Bypass? [y/N] " -n 1 -r - echo - if [[ ! $REPLY =~ ^[Yy]$ ]] - then - exit 1 - fi +# Sanity check: the local branch is up-to-date with upstream + +LOCAL_BRANCH_COMMIT=$(git rev-parse HEAD) +UPSTREAM_COMMIT=$(git rev-parse @{u}) +if [ "$LOCAL_BRANCH_COMMIT" != "$UPSTREAM_COMMIT" ]; then + + # Is it just that the upstream branch is behind? + # It could just be that we merged other PRs and we didn't push yet + + if git merge-base --is-ancestor -- "$UPSTREAM_COMMIT" "$LOCAL_BRANCH_COMMIT"; then + warning "Your branch is ahead of ${REMOTE}." 
+ warning "You should see this warning only if you've just merged another PR and did not push yet." + ask_confirmation + else + error "Local branch is not up-to-date with ${REMOTE}." + error "Pull before merging." + ask_confirmation + fi +fi + +# Sanity check: PR has an outdated version of CI + +BASE_COMMIT=$(echo "$PRDATA" | jq -r '.base.sha') +CI_FILES=(".travis.yml" ".gitlab-ci.yml" "appveyor.yml") + +if ! git diff --quiet "$BASE_COMMIT" "$LOCAL_BRANCH_COMMIT" -- "${CI_FILES[@]}" +then + warning "This PR didn't run with the latest version of CI." + warning "It is probably a good idea to ask for a rebase." + read -p "Do you want to see the diff? [Y/n] " $QUICK_CONF -r + echo + if [[ ! $REPLY =~ ^[Nn]$ ]] + then + git diff "$BASE_COMMIT" "$LOCAL_BRANCH_COMMIT" -- "${CI_FILES[@]}" + fi + ask_confirmation +fi + +# Sanity check: CI failed + +STATUS=$(curl -s "$API/commits/$COMMIT/status") + +if [ "$(echo "$STATUS" | jq -r '.state')" != "success" ]; then + error "CI unsuccessful on ${BLUE}$(echo "$STATUS" | + jq -r -c '.statuses|map(select(.state != "success"))|map(.context)')" + ask_confirmation fi; -git merge -S --no-ff FETCH_HEAD -m "Merge PR #$PR: $(curl -s "$API/pulls/$PR" | jq -r '.title')" -e +# Sanity check: has labels named "needs:" + +NEEDS_LABELS=$(echo "$PRDATA" | jq -rc '.labels | map(select(.name | match("needs:"))) | map(.name)') +if [ "$NEEDS_LABELS" != "[]" ]; then + error "needs:something labels still present: ${BLUE}$NEEDS_LABELS" + ask_confirmation +fi + +# Sanity check: has milestone + +MILESTONE=$(echo "$PRDATA" | jq -rc '.milestone.title') +if [ "$MILESTONE" = "null" ]; then + error "no milestone set, please set one" + ask_confirmation +fi + +# Sanity check: has kind + +KIND=$(echo "$PRDATA" | jq -rc '.labels | map(select(.name | match("kind:"))) | map(.name)') +if [ "$KIND" = "[]" ]; then + error "no kind:something label set, please set one" + ask_confirmation +fi + +# Sanity check: user.signingkey +if [ -z "$(git config user.signingkey)" ]; 
then + warning "git config user.signingkey is empty" + warning "gpg will guess a key out of your git config user.* data" +fi + +info "merging" +git merge -v -S --no-ff FETCH_HEAD -m "Merge PR #$PR: $TITLE" -e # TODO: improve this check -if ! git diff --quiet "$REMOTE/$CURRENT_LOCAL_BRANCH" -- dev/ci; then - echo "******************************************" - echo "** WARNING: does this PR have overlays? **" - echo "******************************************" +if ! git diff --quiet "$REMOTE/$CURRENT_LOCAL_BRANCH" -- dev/ci/user-overlays; then + warning "this PR may have overlays (sorry the check is not perfect)" + warning "if it has overlays please check the following:" + warning "- each overlay has a corresponding open PR on the upstream repo" + warning "- after merging please notify the upstream they can merge the PR" fi diff --git a/dev/tools/pre-commit b/dev/tools/pre-commit index dfa95bac..ad2f2f93 100755 --- a/dev/tools/pre-commit +++ b/dev/tools/pre-commit @@ -5,6 +5,8 @@ set -e +dev/tools/check-overlays.sh + if ! git diff --cached --name-only -z | xargs -0 dev/tools/check-eof-newline.sh || ! git diff-index --check --cached HEAD >/dev/null 2>&1 ; then diff --git a/dev/tools/update-compat.py b/dev/tools/update-compat.py new file mode 100755 index 00000000..7c8b9f02 --- /dev/null +++ b/dev/tools/update-compat.py @@ -0,0 +1,341 @@ +#!/usr/bin/env python +from __future__ import with_statement +import os, re, sys + +# Obtain the absolute path of the script being run. By assuming that +# the script lives in dev/tools/, and basing all calls on the path of +# the script, rather than the current working directory, we can be +# robust to users who choose to run the script from any location. 
+SCRIPT_PATH = os.path.dirname(os.path.realpath(__file__)) +ROOT_PATH = os.path.realpath(os.path.join(SCRIPT_PATH, '..', '..')) +CONFIGURE_PATH = os.path.join(ROOT_PATH, 'configure.ml') +HEADER_PATH = os.path.join(ROOT_PATH, 'dev', 'header.ml') +DEFAULT_NUMBER_OF_OLD_VERSIONS = 2 +EXTRA_HEADER = '\n(** Compatibility file for making Coq act similar to Coq v%s *)\n' +FLAGS_MLI_PATH = os.path.join(ROOT_PATH, 'lib', 'flags.mli') +FLAGS_ML_PATH = os.path.join(ROOT_PATH, 'lib', 'flags.ml') +COQARGS_ML_PATH = os.path.join(ROOT_PATH, 'toplevel', 'coqargs.ml') +G_VERNAC_PATH = os.path.join(ROOT_PATH, 'vernac', 'g_vernac.mlg') +DOC_INDEX_PATH = os.path.join(ROOT_PATH, 'doc', 'stdlib', 'index-list.html.template') +BUG_4798_PATH = os.path.join(ROOT_PATH, 'test-suite', 'bugs', 'closed', '4798.v') +TEST_SUITE_PATHS = tuple(os.path.join(ROOT_PATH, 'test-suite', 'success', i) + for i in ('CompatOldOldFlag.v', 'CompatOldFlag.v', 'CompatPreviousFlag.v', 'CompatCurrentFlag.v')) +TEST_SUITE_DESCRIPTIONS = ('current-minus-three', 'current-minus-two', 'current-minus-one', 'current') +# sanity check that we are where we think we are +assert(os.path.normpath(os.path.realpath(SCRIPT_PATH)) == os.path.normpath(os.path.realpath(os.path.join(ROOT_PATH, 'dev', 'tools')))) +assert(os.path.exists(CONFIGURE_PATH)) + +def get_header(): + with open(HEADER_PATH, 'r') as f: return f.read() + +HEADER = get_header() + +def get_version(cur_version=None): + if cur_version is not None: return cur_version + with open(CONFIGURE_PATH, 'r') as f: + for line in f.readlines(): + found = re.findall(r'let coq_version = "([0-9]+\.[0-9]+)', line) + if len(found) > 0: + return found[0] + raise Exception("No line 'let coq_version = \"X.X' found in %s" % os.path.relpath(CONFIGURE_PATH, ROOT_PATH)) + +def compat_name_to_version_name(compat_file_name): + assert(compat_file_name.startswith('Coq') and compat_file_name.endswith('.v')) + v = compat_file_name[len('Coq'):][:-len('.v')] + assert(len(v) == 2 or (len(v) >= 2 and 
v[0] in ('8', '9'))) # we'll have to change this scheme when we hit Coq 10.* + return '%s.%s' % (v[0], v[1:]) + +def version_name_to_compat_name(v, ext='.v'): + return 'Coq%s%s%s' % tuple(v.split('.') + [ext]) + +# returns (lines of compat files, lines of not compat files +def get_doc_index_lines(): + with open(DOC_INDEX_PATH, 'r') as f: + lines = f.readlines() + return (tuple(line for line in lines if 'theories/Compat/Coq' in line), + tuple(line for line in lines if 'theories/Compat/Coq' not in line)) + +COMPAT_INDEX_LINES, DOC_INDEX_LINES = get_doc_index_lines() + +def version_to_int_pair(v): + return tuple(map(int, v.split('.'))) + +def get_known_versions(): + # We could either get the files from the doc index, or from the + # directory list. We assume that the doc index is more + # representative. If we wanted to use the directory list, we + # would do: + # compat_files = os.listdir(os.path.join(ROOT_PATH, 'theories', 'Compat')) + compat_files = re.findall(r'Coq[^\.]+\.v', '\n'.join(COMPAT_INDEX_LINES)) + return tuple(sorted((compat_name_to_version_name(i) for i in compat_files if i.startswith('Coq') and i.endswith('.v')), key=version_to_int_pair)) + +def get_new_versions(known_versions, **args): + if args['cur_version'] in known_versions: + assert(known_versions[-1] == args['cur_version']) + assert(len(known_versions) == args['number_of_compat_versions']) + return known_versions + assert(len(known_versions) >= args['number_of_old_versions']) + return tuple(list(known_versions[-args['number_of_old_versions']:]) + [args['cur_version']]) + +def update_compat_files(old_versions, new_versions, assert_unchanged=False, **args): + for v in old_versions: + if v not in new_versions: + compat_file = os.path.join('theories', 'Compat', version_name_to_compat_name(v)) + if not assert_unchanged: + print('Removing %s...' % compat_file) + compat_path = os.path.join(ROOT_PATH, compat_file) + os.rename(compat_path, compat_path + '.bak') + else: + raise Exception('%s exists!' 
% compat_file) + for v, next_v in zip(new_versions, list(new_versions[1:]) + [None]): + compat_file = os.path.join('theories', 'Compat', version_name_to_compat_name(v)) + compat_path = os.path.join(ROOT_PATH, compat_file) + if not os.path.exists(compat_path): + print('Creating %s...' % compat_file) + contents = HEADER + (EXTRA_HEADER % v) + if next_v is not None: + contents += '\nRequire Export Coq.Compat.%s.\n' % version_name_to_compat_name(next_v, ext='') + if not assert_unchanged: + with open(compat_path, 'w') as f: + f.write(contents) + print(r"Don't forget to 'git add %s'!" % compat_file) + else: + raise Exception('%s does not exist!' % compat_file) + else: + # print('Checking %s...' % compat_file) + with open(compat_path, 'r') as f: + contents = f.read() + header = HEADER + (EXTRA_HEADER % v) + if not contents.startswith(HEADER): + raise Exception("Invalid header in %s; does not match %s" % (compat_file, os.path.relpath(HEADER_PATH, ROOT_PATH))) + if not contents.startswith(header): + raise Exception("Invalid header in %s; missing line %s" % (compat_file, EXTRA_HEADER.strip('\n') % v)) + if next_v is not None: + line = 'Require Export Coq.Compat.%s.' % version_name_to_compat_name(next_v, ext='') + if ('\n%s\n' % line) not in contents: + if not contents.startswith(header + '\n'): + contents = contents.replace(header, header + '\n') + contents = contents.replace(header, '%s\n%s' % (header, line)) + if not assert_unchanged: + print('Updating %s...' 
% compat_file) + with open(compat_path, 'w') as f: + f.write(contents) + else: + raise Exception('Compat file %s is missing line %s' % (compat_file, line)) + +def update_compat_versions_type_line(new_versions, contents, relpath): + compat_version_string = ' | '.join(['V%s_%s' % tuple(v.split('.')) for v in new_versions[:-1]] + ['Current']) + new_compat_line = 'type compat_version = %s' % compat_version_string + new_contents = re.sub(r'^type compat_version = .*$', new_compat_line, contents, flags=re.MULTILINE) + if new_compat_line not in new_contents: + raise Exception("Could not find 'type compat_version =' in %s" % relpath) + return new_contents + +def update_version_compare(new_versions, contents, relpath): + first_line = 'let version_compare v1 v2 = match v1, v2 with' + new_lines = [first_line] + for v in new_versions[:-1]: + V = 'V%s_%s' % tuple(v.split('.')) + new_lines.append(' | %s, %s -> 0' % (V, V)) + new_lines.append(' | %s, _ -> -1' % V) + new_lines.append(' | _, %s -> 1' % V) + new_lines.append(' | Current, Current -> 0') + new_lines = '\n'.join(new_lines) + new_contents = re.sub(first_line + '.*' + 'Current, Current -> 0', new_lines, contents, flags=re.DOTALL|re.MULTILINE) + if new_lines not in new_contents: + raise Exception('Could not find version_compare in %s' % relpath) + return new_contents + +def update_pr_version(new_versions, contents, relpath): + first_line = 'let pr_version = function' + new_lines = [first_line] + for v in new_versions[:-1]: + V = 'V%s_%s' % tuple(v.split('.')) + new_lines.append(' | %s -> "%s"' % (V, v)) + new_lines.append(' | Current -> "current"') + new_lines = '\n'.join(new_lines) + new_contents = re.sub(first_line + '.*' + 'Current -> "current"', new_lines, contents, flags=re.DOTALL|re.MULTILINE) + if new_lines not in new_contents: + raise Exception('Could not find pr_version in %s' % relpath) + return new_contents + +def update_add_compat_require(new_versions, contents, relpath): + first_line = 'let add_compat_require 
opts v =' + new_lines = [first_line, ' match v with'] + for v in new_versions[:-1]: + V = 'V%s_%s' % tuple(v.split('.')) + new_lines.append(' | Flags.%s -> add_vo_require opts "Coq.Compat.%s" None (Some false)' % (V, version_name_to_compat_name(v, ext=''))) + new_lines.append(' | Flags.Current -> add_vo_require opts "Coq.Compat.%s" None (Some false)' % version_name_to_compat_name(new_versions[-1], ext='')) + new_lines = '\n'.join(new_lines) + new_contents = re.sub(first_line + '.*' + 'Flags.Current -> add_vo_require opts "Coq.Compat.[^"]+" None .Some false.', new_lines, contents, flags=re.DOTALL|re.MULTILINE) + if new_lines not in new_contents: + raise Exception('Could not find add_compat_require in %s' % relpath) + return new_contents + +def update_parse_compat_version(new_versions, contents, relpath, **args): + line_count = args['number_of_compat_versions']+2 # 1 for the first line, 1 for the invalid flags + first_line = 'let parse_compat_version = let open Flags in function' + old_function_lines = contents[contents.index(first_line):].split('\n')[:line_count] + if re.match(r'^ \| \([0-9 "\.\|]*\) as s ->$', old_function_lines[-1]) is None: + raise Exception('Could not recognize line %d of parse_compat_version in %s as a list of invalid versions' % (line_count, relpath)) + all_versions = re.findall(r'"([0-9\.]+)"', ''.join(old_function_lines)) + invalid_versions = tuple(i for i in all_versions if i not in new_versions) + new_function_lines = [first_line] + for v, V in reversed(list(zip(new_versions, ['V%s_%s' % tuple(v.split('.')) for v in new_versions[:-1]] + ['Current']))): + new_function_lines.append(' | "%s" -> %s' % (v, V)) + new_function_lines.append(' | (%s) as s ->' % ' | '.join('"%s"' % v for v in invalid_versions)) + new_lines = '\n'.join(new_function_lines) + new_contents = contents.replace('\n'.join(old_function_lines), new_lines) + if new_lines not in new_contents: + raise Exception('Could not find parse_compat_version in %s' % relpath) + return 
new_contents

def check_no_old_versions(old_versions, new_versions, contents, relpath):
    """Raise if a dropped version's VX_Y flag name still appears in contents.

    old_versions/new_versions are version strings like '8.8'; any version
    present in old_versions but absent from new_versions must no longer be
    referenced as V8_8 (etc.) in the updated file at relpath.
    """
    for v in old_versions:
        if v not in new_versions:
            V = 'V%s_%s' % tuple(v.split('.'))
            if V in contents:
                raise Exception('Unreplaced usage of %s remaining in %s' % (V, relpath))

def update_if_changed(contents, new_contents, path, assert_unchanged=False, **args):
    """Write new_contents to path when it differs from contents.

    With assert_unchanged=True (the --assert-unchanged CI mode), raise
    instead of writing so stale files fail the build.
    """
    if contents == new_contents:
        return
    if assert_unchanged:
        raise Exception('%s changed!' % os.path.relpath(path, ROOT_PATH))
    print('Updating %s...' % os.path.relpath(path, ROOT_PATH))
    with open(path, 'w') as f:
        f.write(new_contents)

def update_flags_mli(old_versions, new_versions, **args):
    """Regenerate the compat-version type declaration in flags.mli."""
    relpath = os.path.relpath(FLAGS_MLI_PATH, ROOT_PATH)
    with open(FLAGS_MLI_PATH, 'r') as f:
        contents = f.read()
    new_contents = update_compat_versions_type_line(new_versions, contents, relpath)
    check_no_old_versions(old_versions, new_versions, new_contents, relpath)
    update_if_changed(contents, new_contents, FLAGS_MLI_PATH, **args)

def update_flags_ml(old_versions, new_versions, **args):
    """Regenerate the compat-version type, comparison, and parser in flags.ml."""
    relpath = os.path.relpath(FLAGS_ML_PATH, ROOT_PATH)
    with open(FLAGS_ML_PATH, 'r') as f:
        contents = f.read()
    new_contents = update_compat_versions_type_line(new_versions, contents, relpath)
    new_contents = update_version_compare(new_versions, new_contents, relpath)
    new_contents = update_pr_version(new_versions, new_contents, relpath)
    check_no_old_versions(old_versions, new_versions, new_contents, relpath)
    update_if_changed(contents, new_contents, FLAGS_ML_PATH, **args)

def update_coqargs_ml(old_versions, new_versions, **args):
    """Regenerate the -compat Require handling in coqargs.ml."""
    relpath = os.path.relpath(COQARGS_ML_PATH, ROOT_PATH)
    with open(COQARGS_ML_PATH, 'r') as f:
        contents = f.read()
    new_contents = update_add_compat_require(new_versions, contents, relpath)
    check_no_old_versions(old_versions, new_versions, new_contents, relpath)
    update_if_changed(contents, new_contents, COQARGS_ML_PATH, **args)

def update_g_vernac(old_versions, new_versions, **args):
    """Regenerate the compat-version parsing rule in g_vernac.mlg."""
    relpath = os.path.relpath(G_VERNAC_PATH, ROOT_PATH)
    with open(G_VERNAC_PATH, 'r') as f:
        contents = f.read()
    new_contents = update_parse_compat_version(new_versions, contents, relpath, **args)
    check_no_old_versions(old_versions, new_versions, new_contents, relpath)
    update_if_changed(contents, new_contents, G_VERNAC_PATH, **args)

def update_flags(old_versions, new_versions, **args):
    """Update every OCaml source file that hard-codes the compat version list."""
    update_flags_mli(old_versions, new_versions, **args)
    update_flags_ml(old_versions, new_versions, **args)
    update_coqargs_ml(old_versions, new_versions, **args)
    update_g_vernac(old_versions, new_versions, **args)

def update_test_suite(new_versions, assert_unchanged=False, test_suite_paths=TEST_SUITE_PATHS, test_suite_descriptions=TEST_SUITE_DESCRIPTIONS, **args):
    """Regenerate the per-version -compat test files.

    new_versions, test_suite_paths, and test_suite_descriptions are parallel
    sequences; each test file checks that its compat flag transitively
    imports every newer Compat module.
    """
    assert len(new_versions) == len(test_suite_paths)
    assert len(new_versions) == len(test_suite_descriptions)
    for i, (v, path, descr) in enumerate(zip(new_versions, test_suite_paths, test_suite_descriptions)):
        if not os.path.exists(path):
            raise Exception('Could not find existing file %s' % os.path.relpath(path, ROOT_PATH))
        # Descriptions may carry a %s placeholder for the version number.
        if '%s' in descr:
            descr = descr % v
        with open(path, 'r') as f:
            contents = f.read()
        lines = ['(* -*- coq-prog-args: ("-compat" "%s") -*- *)' % v,
                 '(** Check that the %s compatibility flag actually requires the relevant modules. *)' % descr]
        # Newest compat module first, down to this test's own version.
        for imp_v in reversed(new_versions[i:]):
            lines.append('Import Coq.Compat.%s.' % version_name_to_compat_name(imp_v, ext=''))
        lines.append('')
        new_contents = '\n'.join(lines)
        update_if_changed(contents, new_contents, path, **args)

def update_doc_index(new_versions, **args):
    """Regenerate the doc index, listing one Compat theory per supported version."""
    with open(DOC_INDEX_PATH, 'r') as f:
        contents = f.read()
    # AdmitAxiom.v is the fixed anchor line after which the Compat entries go.
    firstline = '     theories/Compat/AdmitAxiom.v'
    new_contents = ''.join(DOC_INDEX_LINES)
    if firstline not in new_contents:
        raise Exception("Could not find line '%s' in %s" % (firstline, os.path.relpath(DOC_INDEX_PATH, ROOT_PATH)))
    extra_lines = ['     theories/Compat/%s' % version_name_to_compat_name(v) for v in new_versions]
    new_contents = new_contents.replace(firstline, '\n'.join([firstline] + extra_lines))
    update_if_changed(contents, new_contents, DOC_INDEX_PATH, **args)

def update_bug_4789(new_versions, **args):
    # we always update this compat notation to oldest
    # currently-supported compat version, which should never be the
    # current version
    # NOTE(review): function name says 4789 but the path constant is
    # BUG_4798_PATH — confirm which bug number is intended.
    with open(BUG_4798_PATH, 'r') as f:
        contents = f.read()
    new_contents = r"""Check match 2 with 0 => 0 | S n => n end.
Notation "|" := 1 (compat "%s").
Check match 2 with 0 => 0 | S n => n end. (* fails *)
""" % new_versions[0]
    update_if_changed(contents, new_contents, BUG_4798_PATH, **args)

def update_compat_notations_in(old_versions, new_versions, contents):
    """Strip Notation/Infix lines tagged with a dropped compat version."""
    for v in old_versions:
        if v not in new_versions:
            # Compile once per version and reuse (the original compiled a
            # regex it never used, then recompiled the pattern in re.sub).
            reg = re.compile(r'^[ \t]*(?:Notation|Infix)[^\n]*?compat "%s"[^\n]*?\n' % v, flags=re.MULTILINE)
            contents = reg.sub('', contents)
    return contents

def update_compat_notations(old_versions, new_versions, **args):
    """Remove dropped-version compat notations from every .v file under theories/."""
    for root, dirs, files in os.walk(os.path.join(ROOT_PATH, 'theories')):
        for fname in files:
            if not fname.endswith('.v'):
                continue
            fpath = os.path.join(root, fname)
            with open(fpath, 'r') as f:
                contents = f.read()
            new_contents = update_compat_notations_in(old_versions, new_versions, contents)
            update_if_changed(contents, new_contents, fpath, **args)

def display_git_grep(old_versions, new_versions):
    """Print git-grep commands that locate remaining manual-update sites."""
    dropped = [v for v in old_versions if v not in new_versions]
    Vs = ['V%s_%s' % tuple(v.split('.')) for v in dropped]
    compat_vs = ['compat "%s"' % v for v in dropped]
    all_options = tuple(Vs + compat_vs)
    options = (['"-compat" "%s"' % v for v in dropped]
               + [version_name_to_compat_name(v, ext='') for v in dropped])
    if options or all_options:
        print('To discover what files require manual updating, run:')
        if options:
            print("git grep -- '%s' test-suite/" % r'\|'.join(options))
        if all_options:
            print("git grep -- '%s'" % r'\|'.join(all_options))

def parse_args(argv):
    """Parse command-line flags into the args dict used throughout.

    Recognized flags: --assert-unchanged, --cur-version=NN.NN,
    --number-of-old-versions=NN. Exits with a usage message on anything else.
    """
    args = {
        'assert_unchanged': False,
        'cur_version': None,
        'number_of_old_versions': DEFAULT_NUMBER_OF_OLD_VERSIONS
    }
    for arg in argv[1:]:
        if arg == '--assert-unchanged':
            args['assert_unchanged'] = True
        elif arg.startswith('--cur-version='):
            args['cur_version'] = arg[len('--cur-version='):]
            # Explicit validation instead of assert: asserts vanish under
            # python -O and would show a traceback, not a usage error.
            parts = args['cur_version'].split('.')
            if len(parts) != 2 or not all(p.isdigit() for p in parts):
                print('USAGE: %s [--assert-unchanged] [--cur-version=NN.NN] [--number-of-old-versions=NN]' % argv[0])
                print('')
                print('ERROR: Unrecognized argument: %s' % arg)
                sys.exit(1)
        elif arg.startswith('--number-of-old-versions='):
            args['number_of_old_versions'] = int(arg[len('--number-of-old-versions='):])
        else:
            print('USAGE: %s [--assert-unchanged] [--cur-version=NN.NN] [--number-of-old-versions=NN]' % argv[0])
            print('')
            print('ERROR: Unrecognized argument: %s' % arg)
            sys.exit(1)
    return args

if __name__ == '__main__':
    args = parse_args(sys.argv)
    args['cur_version'] = get_version(args['cur_version'])
    args['number_of_compat_versions'] = args['number_of_old_versions'] + 1
    known_versions = get_known_versions()
    new_versions = get_new_versions(known_versions, **args)
    assert len(TEST_SUITE_PATHS) >= args['number_of_compat_versions']
    args['test_suite_paths'] = tuple(TEST_SUITE_PATHS[-args['number_of_compat_versions']:])
    args['test_suite_descriptions'] = tuple(TEST_SUITE_DESCRIPTIONS[-args['number_of_compat_versions']:])
    update_compat_files(known_versions, new_versions, **args)
    update_flags(known_versions, new_versions, **args)
    update_test_suite(new_versions, **args)
    update_doc_index(new_versions, **args)
    update_bug_4789(new_versions, **args)
    update_compat_notations(known_versions, new_versions, **args)
    display_git_grep(known_versions, new_versions)