Diffstat (limited to 'scripts')
-rw-r--r--   scripts/coccinelle/ctrl-reply-cleanup.cocci       43
-rw-r--r--   scripts/coccinelle/ctrl-reply.cocci                87
-rw-r--r--   scripts/coccinelle/tor-coccinelle.h                 3
-rwxr-xr-x   scripts/git/git-merge-forward.sh                  236
-rwxr-xr-x   scripts/git/git-pull-all.sh                       224
-rwxr-xr-x   scripts/git/git-push-all.sh                        11
-rwxr-xr-x   scripts/git/post-merge.git-hook                    45
-rwxr-xr-x   scripts/git/pre-commit.git-hook                    45
-rwxr-xr-x   scripts/git/pre-push.git-hook                     108
-rwxr-xr-x   scripts/maint/add_c_file.py                       251
-rwxr-xr-x   scripts/maint/checkIncludes.py                      2
-rwxr-xr-x   scripts/maint/checkSpace.pl                        32
-rw-r--r--   scripts/maint/practracker/exceptions.txt          289
-rw-r--r--   scripts/maint/practracker/metrics.py               50
-rwxr-xr-x   scripts/maint/practracker/practracker.py          216
-rwxr-xr-x   scripts/maint/practracker/practracker_tests.py     50
-rw-r--r--   scripts/maint/practracker/problem.py              158
-rw-r--r--   scripts/maint/practracker/util.py                  28
-rwxr-xr-x   scripts/maint/pre-commit.git-hook                  26
-rwxr-xr-x   scripts/maint/pre-push.git-hook                    61
-rwxr-xr-x   scripts/maint/rectify_include_paths.py             15
-rwxr-xr-x   scripts/maint/updateCopyright.pl                    6
-rwxr-xr-x   scripts/test/cov-diff                               3
-rwxr-xr-x   scripts/test/cov-test-determinism.sh               51
24 files changed, 1945 insertions(+), 95 deletions(-)
diff --git a/scripts/coccinelle/ctrl-reply-cleanup.cocci b/scripts/coccinelle/ctrl-reply-cleanup.cocci
new file mode 100644
index 0000000000..f085cd4684
--- /dev/null
+++ b/scripts/coccinelle/ctrl-reply-cleanup.cocci
@@ -0,0 +1,43 @@
+// Script to clean up after ctrl-reply.cocci -- run as a separate step
+// because cleanup_write2 (even when disabled) somehow prevents the
+// match rule in ctrl-reply.cocci from matching.
+
+// If it doesn't have to be a printf, turn it into a write
+
+@ cleanup_write @
+expression E;
+constant code, s;
+@@
+-control_printf_endreply(E, code, s)
++control_write_endreply(E, code, s)
+
+// Use send_control_done() instead of explicitly writing it out
+@ cleanup_send_done @
+type T;
+identifier f != send_control_done;
+expression E;
+@@
+ T f(...) {
+<...
+-control_write_endreply(E, 250, "OK")
++send_control_done(E)
+ ...>
+ }
+
+// Clean up more printfs that could be writes
+//
+// For some reason, including this rule, even disabled, causes the
+// match rule in ctrl-reply.cocci to fail to match some code that has
+// %s in its format strings
+
+@ cleanup_write2 @
+expression E1, E2;
+constant code;
+@@
+(
+-control_printf_endreply(E1, code, "%s", E2)
++control_write_endreply(E1, code, E2)
+|
+-control_printf_midreply(E1, code, "%s", E2)
++control_write_midreply(E1, code, E2)
+)
diff --git a/scripts/coccinelle/ctrl-reply.cocci b/scripts/coccinelle/ctrl-reply.cocci
new file mode 100644
index 0000000000..d6e9aeedd7
--- /dev/null
+++ b/scripts/coccinelle/ctrl-reply.cocci
@@ -0,0 +1,87 @@
+// Script to edit control_*.c for refactored control reply output functions
+
+@ initialize:python @
+@@
+import re
+from coccilib.report import *
+
+# reply strings "NNN-foo", "NNN+foo", "NNN foo", etc.
+r = re.compile(r'^"(\d+)([ +-])(.*)\\r\\n"$')
+
+# Generate name of function to call based on which separator character
+# comes between the numeric code and the text
+def idname(sep, base):
+ if sep == '+':
+ return base + "datareply"
+ elif sep == '-':
+ return base + "midreply"
+ else:
+ return base + "endreply"
+
+# Generate the actual replacements used by the rules
+def gen(s, base, p):
+ pos = p[0]
+ print_report(pos, "%s %s" % (base, s))
+ m = r.match(s)
+ if m is None:
+ # String not correct format, so fail match
+ cocci.include_match(False)
+ print_report(pos, "BAD STRING %s" % s)
+ return
+
+ code, sep, s1 = m.groups()
+
+ if r'\r\n' in s1:
+ # Extra CRLF in string, so fail match
+ cocci.include_match(False)
+ print_report(pos, "extra CRLF in string %s" % s)
+ return
+
+ coccinelle.code = code
+ # Need a string that is a single C token, because Coccinelle only allows
+ # "identifiers" to be output from Python scripts?
+ coccinelle.body = '"%s"' % s1
+ coccinelle.id = idname(sep, base)
+ return
+
+@ match @
+identifier f;
+position p;
+expression E;
+constant s;
+@@
+(
+ connection_printf_to_buf@f@p(E, s, ...)
+|
+ connection_write_str_to_buf@f@p(s, E)
+)
+
+@ script:python sc1 @
+s << match.s;
+p << match.p;
+f << match.f;
+id;
+body;
+code;
+@@
+if f == 'connection_printf_to_buf':
+ gen(s, 'control_printf_', p)
+elif f == 'connection_write_str_to_buf':
+ gen(s, 'control_write_', p)
+else:
+ raise(ValueError("%s: %s" % (f, s)))
+
+@ replace @
+constant match.s;
+expression match.E;
+identifier match.f;
+identifier sc1.body, sc1.id, sc1.code;
+@@
+(
+-connection_write_str_to_buf@f(s, E)
++id(E, code, body)
+|
+-connection_printf_to_buf@f(E, s
++id(E, code, body
+ , ...)
+)
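
Note: the Python rules above boil down to a small string-to-function mapping. The following is a minimal standalone sketch of the same regex and idname() logic outside Coccinelle; the reply string used here is a hypothetical example, not one taken from control_*.c.

    import re

    # Same pattern as ctrl-reply.cocci: "NNN-foo", "NNN+foo", "NNN foo", etc.
    reply_re = re.compile(r'^"(\d+)([ +-])(.*)\\r\\n"$')

    def idname(sep, base):
        # '+' -> datareply, '-' -> midreply, anything else -> endreply
        return base + {"+": "datareply", "-": "midreply"}.get(sep, "endreply")

    # A hypothetical C string literal, as Coccinelle would hand it to gen():
    s = r'"250+ONIONS_CURRENT\r\n"'
    code, sep, body = reply_re.match(s).groups()
    print(idname(sep, "control_write_"), code, body)
    # -> control_write_datareply 250 ONIONS_CURRENT
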
diff --git a/scripts/coccinelle/tor-coccinelle.h b/scripts/coccinelle/tor-coccinelle.h
new file mode 100644
index 0000000000..8f625dcee4
--- /dev/null
+++ b/scripts/coccinelle/tor-coccinelle.h
@@ -0,0 +1,3 @@
+#define MOCK_IMPL(a, b, c) a b c
+#define CHECK_PRINTF(a, b)
+#define STATIC static
diff --git a/scripts/git/git-merge-forward.sh b/scripts/git/git-merge-forward.sh
new file mode 100755
index 0000000000..67af7e98bf
--- /dev/null
+++ b/scripts/git/git-merge-forward.sh
@@ -0,0 +1,236 @@
+#!/bin/bash
+
+##############################
+# Configuration (change me!) #
+##############################
+
+# The general setup that is suggested here is:
+#
+# GIT_PATH = /home/<user>/git/
+# ... where the git repository directories reside.
+# TOR_MASTER_NAME = "tor"
+# ... which means that tor.git was cloned in /home/<user>/git/tor
+# TOR_WKT_NAME = "tor-wkt"
+# ... which means that the tor worktrees are in /home/<user>/git/tor-wkt
+
+# Where are all those git repositories?
+GIT_PATH="FULL_PATH_TO_GIT_REPOSITORY_DIRECTORY"
+# The tor master git repository directory from which all the worktrees have
+# been created.
+TOR_MASTER_NAME="tor"
+# The worktrees location (directory).
+TOR_WKT_NAME="tor-wkt"
+
+#########################
+# End of configuration. #
+#########################
+
+# Configuration of the branches that need merging. The values are, in order:
+# (1) Branch name that we merge onto.
+# (2) Branch name to merge from. In other words, (2) is merged into (1).
+# (3) Full path of the git worktree.
+#
+# As an example:
+# $ cd <PATH/TO/WORKTREE> (3)
+# $ git checkout maint-0.3.5 (1)
+# $ git pull
+# $ git merge maint-0.3.4 (2)
+#
+# The first set of arrays are the maint-* branches, followed by the release-*
+# branches. New arrays must be added to the WORKTREE= array, or they aren't
+# considered.
+MAINT_034=( "maint-0.3.4" "maint-0.2.9" "$GIT_PATH/$TOR_WKT_NAME/maint-0.3.4" )
+MAINT_035=( "maint-0.3.5" "maint-0.3.4" "$GIT_PATH/$TOR_WKT_NAME/maint-0.3.5" )
+MAINT_040=( "maint-0.4.0" "maint-0.3.5" "$GIT_PATH/$TOR_WKT_NAME/maint-0.4.0" )
+MAINT_MASTER=( "master" "maint-0.4.0" "$GIT_PATH/$TOR_MASTER_NAME" )
+
+RELEASE_029=( "release-0.2.9" "maint-0.2.9" "$GIT_PATH/$TOR_WKT_NAME/release-0.2.9" )
+RELEASE_034=( "release-0.3.4" "maint-0.3.4" "$GIT_PATH/$TOR_WKT_NAME/release-0.3.4" )
+RELEASE_035=( "release-0.3.5" "maint-0.3.5" "$GIT_PATH/$TOR_WKT_NAME/release-0.3.5" )
+RELEASE_040=( "release-0.4.0" "maint-0.4.0" "$GIT_PATH/$TOR_WKT_NAME/release-0.4.0" )
+
+# The master branch path has to be the main repository; it thus contains the
+# origin that will be used to fetch updates. All the worktrees are created
+# from that repository.
+ORIGIN_PATH="$GIT_PATH/$TOR_MASTER_NAME"
+
+# SC2034 -- shellcheck thinks that these are unused. We know better.
+ACTUALLY_THESE_ARE_USED=<<EOF
+${MAINT_034[0]}
+${MAINT_035[0]}
+${MAINT_040[0]}
+${MAINT_MASTER[0]}
+${RELEASE_029[0]}
+${RELEASE_034[0]}
+${RELEASE_035[0]}
+${RELEASE_040[0]}
+EOF
+
+##########################
+# Git Worktree to manage #
+##########################
+
+# List of all worktrees to work on. All defined above. Ordering is important.
+# Always the maint-* branch first, then the release-*.
+WORKTREE=(
+ RELEASE_029[@]
+
+ MAINT_034[@]
+ RELEASE_034[@]
+
+ MAINT_035[@]
+ RELEASE_035[@]
+
+ MAINT_040[@]
+ RELEASE_040[@]
+
+ MAINT_MASTER[@]
+)
+COUNT=${#WORKTREE[@]}
+
+# Controlled by the -n option. The dry run option will just output the command
+# that would have been executed for each worktree.
+DRY_RUN=0
+
+# Control characters
+CNRM=$'\x1b[0;0m' # Clear color
+
+# Bright color
+BGRN=$'\x1b[1;32m'
+BBLU=$'\x1b[1;34m'
+BRED=$'\x1b[1;31m'
+BYEL=$'\x1b[1;33m'
+IWTH=$'\x1b[3;37m'
+
+# Strings for the pretty print.
+MARKER="${BBLU}[${BGRN}+${BBLU}]${CNRM}"
+SUCCESS="${BGRN}success${CNRM}"
+FAILED="${BRED}failed${CNRM}"
+
+####################
+# Helper functions #
+####################
+
+# Validate the given return value (error code) and print success or failed.
+# The second argument is the error output; it is printed out on failure.
+# On failure, this function exits.
+function validate_ret
+{
+ if [ "$1" -eq 0 ]; then
+ printf "%s\\n" "$SUCCESS"
+ else
+ printf "%s\\n" "$FAILED"
+ printf " %s" "$2"
+ exit 1
+ fi
+}
+
+# Switch to the given branch name.
+function switch_branch
+{
+ local cmd="git checkout $1"
+ printf " %s Switching branch to %s..." "$MARKER" "$1"
+ if [ $DRY_RUN -eq 0 ]; then
+ msg=$( eval "$cmd" 2>&1 )
+ validate_ret $? "$msg"
+ else
+ printf "\\n %s\\n" "${IWTH}$cmd${CNRM}"
+ fi
+}
+
+# Pull the given branch name.
+function pull_branch
+{
+ local cmd="git pull"
+ printf " %s Pulling branch %s..." "$MARKER" "$1"
+ if [ $DRY_RUN -eq 0 ]; then
+ msg=$( eval "$cmd" 2>&1 )
+ validate_ret $? "$msg"
+ else
+ printf "\\n %s\\n" "${IWTH}$cmd${CNRM}"
+ fi
+}
+
+# Merge the given branch name ($1) into the current branch ($2).
+function merge_branch
+{
+ local cmd="git merge --no-edit $1"
+ printf " %s Merging branch %s into %s..." "$MARKER" "$1" "$2"
+ if [ $DRY_RUN -eq 0 ]; then
+ msg=$( eval "$cmd" 2>&1 )
+ validate_ret $? "$msg"
+ else
+ printf "\\n %s\\n" "${IWTH}$cmd${CNRM}"
+ fi
+}
+
+# Update the given branch by fast-forward merging from origin.
+function merge_branch_origin
+{
+ local cmd="git merge --ff-only origin/$1"
+ printf " %s Merging branch origin/%s..." "$MARKER" "$1"
+ if [ $DRY_RUN -eq 0 ]; then
+ msg=$( eval "$cmd" 2>&1 )
+ validate_ret $? "$msg"
+ else
+ printf "\\n %s\\n" "${IWTH}$cmd${CNRM}"
+ fi
+}
+
+# Go into the worktree repository.
+function goto_repo
+{
+ if [ ! -d "$1" ]; then
+ echo " $1: Not found. Stopping."
+ exit 1
+ fi
+ cd "$1" || exit
+}
+
+# Fetch the origin. No arguments.
+function fetch_origin
+{
+ local cmd="git fetch origin"
+ printf " %s Fetching origin..." "$MARKER"
+ if [ $DRY_RUN -eq 0 ]; then
+ msg=$( eval "$cmd" 2>&1 )
+ validate_ret $? "$msg"
+ else
+ printf "\\n %s\\n" "${IWTH}$cmd${CNRM}"
+ fi
+}
+
+###############
+# Entry point #
+###############
+
+while getopts "n" opt; do
+ case "$opt" in
+ n) DRY_RUN=1
+      echo "    *** DRY RUN MODE ***"
+ ;;
+ *)
+ ;;
+ esac
+done
+
+# First, fetch the origin.
+goto_repo "$ORIGIN_PATH"
+fetch_origin
+
+# Go over all configured worktree.
+for ((i=0; i<COUNT; i++)); do
+ current=${!WORKTREE[$i]:0:1}
+ previous=${!WORKTREE[$i]:1:1}
+ repo_path=${!WORKTREE[$i]:2:1}
+
+    printf "%s Handling branch %s\\n" "$MARKER" "${BYEL}$current${CNRM}"
+
+ # Go into the worktree to start merging.
+ goto_repo "$repo_path"
+ # Checkout the current branch
+ switch_branch "$current"
+ # Update the current branch with an origin merge to get the latest.
+ merge_branch_origin "$current"
+    # Merge the previous branch. Ex: merge maint-0.2.9 into maint-0.3.4.
+ merge_branch "$previous" "$current"
+done
diff --git a/scripts/git/git-pull-all.sh b/scripts/git/git-pull-all.sh
new file mode 100755
index 0000000000..5d1d58e4bf
--- /dev/null
+++ b/scripts/git/git-pull-all.sh
@@ -0,0 +1,224 @@
+#!/bin/bash
+
+##################################
+# User configuration (change me) #
+##################################
+
+# The general setup that is suggested here is:
+#
+# GIT_PATH = /home/<user>/git/
+# ... where the git repository directories reside.
+# TOR_MASTER_NAME = "tor"
+# ... which means that tor.git was cloned in /home/<user>/git/tor
+# TOR_WKT_NAME = "tor-wkt"
+# ... which means that the tor worktrees are in /home/<user>/git/tor-wkt
+
+# Where are all those git repositories?
+GIT_PATH="FULL_PATH_TO_GIT_REPOSITORY_DIRECTORY"
+# The tor master git repository directory from which all the worktrees have
+# been created.
+TOR_MASTER_NAME="tor"
+# The worktrees location (directory).
+TOR_WKT_NAME="tor-wkt"
+
+#########################
+# End of configuration. #
+#########################
+
+# Configuration of the branches that need updating. The values are, in order:
+# (1) Branch name to pull (update).
+# (2) Full path of the git worktree.
+#
+# As an example:
+#   $ cd <PATH/TO/WORKTREE>  (2)
+# $ git checkout maint-0.3.5 (1)
+# $ git pull
+#
+# The first set of arrays are the maint-* branches, followed by the release-*
+# branches. New arrays must be added to the WORKTREE= array, or they aren't
+# considered.
+MAINT_029=( "maint-0.2.9" "$GIT_PATH/$TOR_WKT_NAME/maint-0.2.9" )
+MAINT_034=( "maint-0.3.4" "$GIT_PATH/$TOR_WKT_NAME/maint-0.3.4" )
+MAINT_035=( "maint-0.3.5" "$GIT_PATH/$TOR_WKT_NAME/maint-0.3.5" )
+MAINT_040=( "maint-0.4.0" "$GIT_PATH/$TOR_WKT_NAME/maint-0.4.0" )
+MAINT_MASTER=( "master" "$GIT_PATH/$TOR_MASTER_NAME" )
+
+RELEASE_029=( "release-0.2.9" "$GIT_PATH/$TOR_WKT_NAME/release-0.2.9" )
+RELEASE_034=( "release-0.3.4" "$GIT_PATH/$TOR_WKT_NAME/release-0.3.4" )
+RELEASE_035=( "release-0.3.5" "$GIT_PATH/$TOR_WKT_NAME/release-0.3.5" )
+RELEASE_040=( "release-0.4.0" "$GIT_PATH/$TOR_WKT_NAME/release-0.4.0" )
+
+# The master branch path has to be the main repository; it thus contains the
+# origin that will be used to fetch updates. All the worktrees are created
+# from that repository.
+ORIGIN_PATH="$GIT_PATH/$TOR_MASTER_NAME"
+
+# SC2034 -- shellcheck thinks that these are unused. We know better.
+ACTUALLY_THESE_ARE_USED=<<EOF
+${MAINT_029[0]}
+${MAINT_034[0]}
+${MAINT_035[0]}
+${MAINT_040[0]}
+${MAINT_MASTER[0]}
+${RELEASE_029[0]}
+${RELEASE_034[0]}
+${RELEASE_035[0]}
+${RELEASE_040[0]}
+EOF
+
+##########################
+# Git Worktree to manage #
+##########################
+
+# List of all worktrees to work on. All defined above. Ordering is important.
+# Always the maint-* branch first then the release-*.
+WORKTREE=(
+ MAINT_029[@]
+ RELEASE_029[@]
+
+ MAINT_034[@]
+ RELEASE_034[@]
+
+ MAINT_035[@]
+ RELEASE_035[@]
+
+ MAINT_040[@]
+ RELEASE_040[@]
+
+ MAINT_MASTER[@]
+)
+COUNT=${#WORKTREE[@]}
+
+# Controlled by the -n option. The dry run option will just output the command
+# that would have been executed for each worktree.
+DRY_RUN=0
+
+# Control characters
+CNRM=$'\x1b[0;0m' # Clear color
+
+# Bright color
+BGRN=$'\x1b[1;32m'
+BBLU=$'\x1b[1;34m'
+BRED=$'\x1b[1;31m'
+BYEL=$'\x1b[1;33m'
+IWTH=$'\x1b[3;37m'
+
+# Strings for the pretty print.
+MARKER="${BBLU}[${BGRN}+${BBLU}]${CNRM}"
+SUCCESS="${BGRN}ok${CNRM}"
+FAILED="${BRED}failed${CNRM}"
+
+####################
+# Helper functions #
+####################
+
+# Validate the given return value (error code) and print success or failed.
+# The second argument is the error output; it is printed out on failure.
+# On failure, this function exits.
+function validate_ret
+{
+ if [ "$1" -eq 0 ]; then
+ printf "%s\\n" "$SUCCESS"
+ else
+ printf "%s\\n" "$FAILED"
+ printf " %s" "$2"
+ exit 1
+ fi
+}
+
+# Switch to the given branch name.
+function switch_branch
+{
+ local cmd="git checkout $1"
+ printf " %s Switching branch to %s..." "$MARKER" "$1"
+ if [ $DRY_RUN -eq 0 ]; then
+ msg=$( eval "$cmd" 2>&1 )
+ validate_ret $? "$msg"
+ else
+ printf "\\n %s\\n" "${IWTH}$cmd${CNRM}"
+ fi
+}
+
+# Pull the given branch name.
+function merge_branch
+{
+ local cmd="git merge --ff-only origin/$1"
+ printf " %s Merging branch origin/%s..." "$MARKER" "$1"
+ if [ $DRY_RUN -eq 0 ]; then
+ msg=$( eval "$cmd" 2>&1 )
+ validate_ret $? "$msg"
+ else
+ printf "\\n %s\\n" "${IWTH}$cmd${CNRM}"
+ fi
+}
+
+# Go into the worktree repository.
+function goto_repo
+{
+ if [ ! -d "$1" ]; then
+ echo " $1: Not found. Stopping."
+ exit 1
+ fi
+ cd "$1" || exit
+}
+
+# Fetch the origin. No arguments.
+function fetch_origin
+{
+ local cmd="git fetch origin"
+ printf " %s Fetching origin..." "$MARKER"
+ if [ $DRY_RUN -eq 0 ]; then
+ msg=$( eval "$cmd" 2>&1 )
+ validate_ret $? "$msg"
+ else
+ printf "\\n %s\\n" "${IWTH}$cmd${CNRM}"
+ fi
+}
+
+# Fetch tor-github pull requests. No arguments.
+function fetch_tor_github
+{
+ local cmd="git fetch tor-github"
+ printf " %s Fetching tor-github..." "$MARKER"
+ if [ $DRY_RUN -eq 0 ]; then
+ msg=$( eval "$cmd" 2>&1 )
+ validate_ret $? "$msg"
+ else
+ printf "\\n %s\\n" "${IWTH}$cmd${CNRM}"
+ fi
+}
+
+###############
+# Entry point #
+###############
+
+while getopts "n" opt; do
+ case "$opt" in
+ n) DRY_RUN=1
+      echo "    *** DRY RUN MODE ***"
+ ;;
+ *)
+ ;;
+ esac
+done
+
+# First, fetch tor-github.
+goto_repo "$ORIGIN_PATH"
+fetch_tor_github
+
+# Then, fetch the origin.
+fetch_origin
+
+# Go over all configured worktree.
+for ((i=0; i<COUNT; i++)); do
+ current=${!WORKTREE[$i]:0:1}
+ repo_path=${!WORKTREE[$i]:1:1}
+
+ printf "%s Handling branch %s\\n" "$MARKER" "${BYEL}$current${CNRM}"
+
+ # Go into the worktree to start merging.
+ goto_repo "$repo_path"
+ # Checkout the current branch
+ switch_branch "$current"
+ # Update the current branch by merging the origin to get the latest.
+ merge_branch "$current"
+done
diff --git a/scripts/git/git-push-all.sh b/scripts/git/git-push-all.sh
new file mode 100755
index 0000000000..0ce951d4bd
--- /dev/null
+++ b/scripts/git/git-push-all.sh
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+# The upstream remote which git.torproject.org/tor.git points to.
+UPSTREAM_BRANCH="upstream"
+
+git push $UPSTREAM_BRANCH \
+ master \
+ {release,maint}-0.4.0 \
+ {release,maint}-0.3.5 \
+ {release,maint}-0.3.4 \
+ {release,maint}-0.2.9
diff --git a/scripts/git/post-merge.git-hook b/scripts/git/post-merge.git-hook
new file mode 100755
index 0000000000..176b7c9bbd
--- /dev/null
+++ b/scripts/git/post-merge.git-hook
@@ -0,0 +1,45 @@
+#!/bin/sh
+
+# This is a post-merge git hook script that checks for changes in:
+# * git hook scripts
+# * helper scripts for using git efficiently.
+# If any changes are detected, a diff of them is printed.
+#
+# To install this script, copy it to .git/hooks/post-merge in your local copy
+# of the tor git repo, and make sure it has permission to execute.
+
+git_toplevel=$(git rev-parse --show-toplevel)
+
+check_for_diffs() {
+ installed="$git_toplevel/.git/hooks/$1"
+ latest="$git_toplevel/scripts/git/$1.git-hook"
+
+ if [ -e "$installed" ]
+ then
+ if ! cmp "$installed" "$latest" >/dev/null 2>&1
+ then
+ echo "ATTENTION: $1 hook has changed:"
+ echo "==============================="
+ diff -u "$installed" "$latest"
+ fi
+ fi
+}
+
+check_for_script_update() {
+ fullpath="$1"
+
+ if ! git diff ORIG_HEAD HEAD --exit-code -- "$fullpath" >/dev/null
+ then
+ echo "ATTENTION: $1 has changed:"
+ git --no-pager diff ORIG_HEAD HEAD -- "$fullpath"
+ fi
+}
+
+check_for_diffs "pre-push"
+check_for_diffs "pre-commit"
+check_for_diffs "post-merge"
+
+for file in "$git_toplevel"/scripts/git/* ; do
+ check_for_script_update "$file"
+done
+
diff --git a/scripts/git/pre-commit.git-hook b/scripts/git/pre-commit.git-hook
new file mode 100755
index 0000000000..b285776c04
--- /dev/null
+++ b/scripts/git/pre-commit.git-hook
@@ -0,0 +1,45 @@
+#!/bin/bash
+#
+# To install this script, copy it to .git/hooks/pre-commit in your local copy
+# of the tor git repo, and make sure it has permission to execute.
+#
+# This is a pre-commit git hook script that prevents committing your changeset
+# if it fails our code formatting or changelog entry formatting checkers.
+
+workdir=$(git rev-parse --show-toplevel)
+
+cd "$workdir" || exit 1
+
+set -e
+
+if [ -n "$(ls ./changes/)" ]; then
+ python scripts/maint/lintChanges.py ./changes/*
+fi
+
+if [ -d src/lib ]; then
+ # This is the layout in 0.3.5
+ perl scripts/maint/checkSpace.pl -C \
+ src/lib/*/*.[ch] \
+ src/core/*/*.[ch] \
+ src/feature/*/*.[ch] \
+ src/app/*/*.[ch] \
+ src/test/*.[ch] \
+ src/test/*/*.[ch] \
+ src/tools/*.[ch]
+elif [ -d src/common ]; then
+ # This was the layout before 0.3.5
+ perl scripts/maint/checkSpace.pl -C \
+ src/common/*/*.[ch] \
+ src/or/*/*.[ch] \
+ src/test/*.[ch] \
+ src/test/*/*.[ch] \
+ src/tools/*.[ch]
+fi
+
+if test -e scripts/maint/checkIncludes.py; then
+ python scripts/maint/checkIncludes.py
+fi
+
+if [ -e scripts/maint/practracker/practracker.py ]; then
+ python3 ./scripts/maint/practracker/practracker.py "$workdir"
+fi
diff --git a/scripts/git/pre-push.git-hook b/scripts/git/pre-push.git-hook
new file mode 100755
index 0000000000..c9e72a4d43
--- /dev/null
+++ b/scripts/git/pre-push.git-hook
@@ -0,0 +1,108 @@
+#!/bin/bash
+
+# git pre-push hook script to:
+# 1) prevent "fixup!" and "squash!" commits from ending up in master, release-*
+# or maint-*
+# 2) Disallow pushing branches other than master, release-*
+# and maint-* to origin (e.g. gitweb.torproject.org).
+#
+# To install this script, copy it to .git/hooks/pre-push in your local copy of
+# the git repository, and make sure it has permission to execute.
+# Furthermore, make sure that the TOR_UPSTREAM_REMOTE_NAME environment
+# variable is set to the local name of the git remote that corresponds to the
+# upstream repository on e.g. git.torproject.org.
+#
+# The following sample script was used as starting point:
+# https://github.com/git/git/blob/master/templates/hooks--pre-push.sample
+
+echo "Running pre-push hook"
+
+z40=0000000000000000000000000000000000000000
+
+upstream_name=${TOR_UPSTREAM_REMOTE_NAME:-"upstream"}
+
+workdir=$(git rev-parse --show-toplevel)
+if [ -x "$workdir/.git/hooks/pre-commit" ]; then
+ if ! "$workdir"/.git/hooks/pre-commit; then
+ exit 1
+ fi
+fi
+
+if [ -e scripts/maint/practracker/practracker.py ]; then
+ if ! python3 ./scripts/maint/practracker/practracker.py "$workdir"; then
+ exit 1
+ fi
+fi
+
+remote="$1"
+remote_loc="$2"
+
+remote_name=$(git remote --verbose | grep "$2" | awk '{print $1}' | head -n 1)
+
+if [[ "$remote_name" != "$upstream_name" ]]; then
+ echo "Not pushing to upstream - refraining from further checks"
+ exit 0
+fi
+
+ref_is_upstream_branch() {
+    if [ "$1" == "refs/heads/master" ] ||
+        [[ "$1" == refs/heads/release-* ]] ||
+        [[ "$1" == refs/heads/maint-* ]]
+    then
+        return 0
+    fi
+    return 1
+}
+
+# shellcheck disable=SC2034
+while read -r local_ref local_sha remote_ref remote_sha
+do
+ if [ "$local_sha" = $z40 ]
+ then
+ # Handle delete
+ :
+ else
+ if [ "$remote_sha" = $z40 ]
+ then
+ # New branch, examine all commits
+ range="$local_sha"
+ else
+ # Update to existing branch, examine new commits
+ range="$remote_sha..$local_sha"
+ fi
+
+        if (! ref_is_upstream_branch "$local_ref" ||
+            ! ref_is_upstream_branch "$remote_ref") &&
+           [ "$local_ref" != "$remote_ref" ]
+ then
+ if [ "$remote" == "origin" ]
+ then
+ echo >&2 "Not pushing: $local_ref to $remote_ref"
+ echo >&2 "If you really want to push this, use --no-verify."
+ exit 1
+ else
+ continue
+ fi
+ fi
+
+ # Check for fixup! commit
+ commit=$(git rev-list -n 1 --grep '^fixup!' "$range")
+ if [ -n "$commit" ]
+ then
+ echo >&2 "Found fixup! commit in $local_ref, not pushing"
+ echo >&2 "If you really want to push this, use --no-verify."
+ exit 1
+ fi
+
+ # Check for squash! commit
+ commit=$(git rev-list -n 1 --grep '^squash!' "$range")
+ if [ -n "$commit" ]
+ then
+ echo >&2 "Found squash! commit in $local_ref, not pushing"
+ echo >&2 "If you really want to push this, use --no-verify."
+ exit 1
+ fi
+ fi
+done
+
+exit 0
+
diff --git a/scripts/maint/add_c_file.py b/scripts/maint/add_c_file.py
new file mode 100755
index 0000000000..499415974f
--- /dev/null
+++ b/scripts/maint/add_c_file.py
@@ -0,0 +1,251 @@
+#!/usr/bin/env python3
+
+"""
+ Add a C file with matching header to the Tor codebase. Creates
+ both files from templates, and adds them to the right include.am file.
+
+ Example usage:
+
+ % add_c_file.py ./src/feature/dirauth/ocelot.c
+"""
+
+import os
+import re
+import time
+
+def topdir_file(name):
+ """Strip opening "src" from a filename"""
+ if name.startswith("src/"):
+ name = name[4:]
+ return name
+
+def guard_macro(name):
+ """Return the guard macro that should be used for the header file 'name'.
+ """
+ td = topdir_file(name).replace(".", "_").replace("/", "_").upper()
+ return "TOR_{}".format(td)
+
+def makeext(name, new_extension):
+ """Replace the extension for the file called 'name' with 'new_extension'.
+ """
+ base = os.path.splitext(name)[0]
+ return base + "." + new_extension
+
+def instantiate_template(template, output_fname):
+ """
+ Fill in a template with string using the fields that should be used
+ for 'output_fname'.
+ """
+ names = {
+ # The relative location of the header file.
+ 'header_path' : makeext(topdir_file(output_fname), "h"),
+        # The relative location of the C file.
+ 'c_file_path' : makeext(topdir_file(output_fname), "c"),
+ # The truncated name of the file.
+ 'short_name' : os.path.basename(output_fname),
+ # The current year, for the copyright notice
+ 'this_year' : time.localtime().tm_year,
+ # An appropriate guard macro, for the header.
+ 'guard_macro' : guard_macro(output_fname),
+ }
+
+ return template.format(**names)
+
+HEADER_TEMPLATE = """\
+/* Copyright (c) 2001 Matej Pfajfar.
+ * Copyright (c) 2001-2004, Roger Dingledine.
+ * Copyright (c) 2004-2006, Roger Dingledine, Nick Mathewson.
+ * Copyright (c) 2007-{this_year}, The Tor Project, Inc. */
+/* See LICENSE for licensing information */
+
+/**
+ * @file {short_name}
+ * @brief Header for {c_file_path}
+ **/
+
+#ifndef {guard_macro}
+#define {guard_macro}
+
+#endif /* !defined({guard_macro}) */
+"""
+
+C_FILE_TEMPLATE = """\
+/* Copyright (c) 2001 Matej Pfajfar.
+ * Copyright (c) 2001-2004, Roger Dingledine.
+ * Copyright (c) 2004-2006, Roger Dingledine, Nick Mathewson.
+ * Copyright (c) 2007-{this_year}, The Tor Project, Inc. */
+/* See LICENSE for licensing information */
+
+/**
+ * @file {short_name}
+ * @brief DOCDOC
+ **/
+
+#include "orconfig.h"
+#include "{header_path}"
+"""
+
+class AutomakeChunk:
+ """
+ Represents part of an automake file. If it is decorated with
+ an ADD_C_FILE comment, it has a "kind" based on what to add to it.
+ Otherwise, it only has a bunch of lines in it.
+ """
+ pat = re.compile(r'# ADD_C_FILE: INSERT (\S*) HERE', re.I)
+
+ def __init__(self):
+ self.lines = []
+ self.kind = ""
+
+ def addLine(self, line):
+ """
+ Insert a line into this chunk while parsing the automake file.
+ """
+ m = self.pat.match(line)
+ if m:
+ if self.lines:
+ raise ValueError("control line not preceded by a blank line")
+ self.kind = m.group(1)
+
+ self.lines.append(line)
+ if line.strip() == "":
+ return True
+
+ return False
+
+ def insertMember(self, member):
+ """
+ Add a new member to this chunk. Try to insert it in alphabetical
+ order with matching indentation, but don't freak out too much if the
+ source isn't consistent.
+
+ Assumes that this chunk is of the form:
+ FOOBAR = \
+ X \
+ Y \
+ Z
+ """
+ self.prespace = "\t"
+ self.postspace = "\t\t"
+ for lineno, line in enumerate(self.lines):
+ m = re.match(r'(\s+)(\S+)(\s+)\\', line)
+ if not m:
+ continue
+ prespace, fname, postspace = m.groups()
+ if fname > member:
+ self.insert_before(lineno, member, prespace, postspace)
+ return
+ self.insert_at_end(member)
+
+ def insert_before(self, lineno, member, prespace, postspace):
+ self.lines.insert(lineno,
+ "{}{}{}\\\n".format(prespace, member, postspace))
+
+    def insert_at_end(self, member):
+        # Turn the last member line into a continuation line, then append
+        # the new member after it.
+        lastline = self.lines[-1].rstrip("\n")
+        self.lines[-1] = lastline + "{}\\\n".format(self.postspace)
+        self.lines.append("{}{}\n".format(self.prespace, member))
+
+ def dump(self, f):
+ """Write all the lines in this chunk to the file 'f'."""
+ for line in self.lines:
+ f.write(line)
+ if not line.endswith("\n"):
+ f.write("\n")
+
+class ParsedAutomake:
+ """A sort-of-parsed automake file, with identified chunks into which
+ headers and c files can be inserted.
+ """
+ def __init__(self):
+ self.chunks = []
+ self.by_type = {}
+
+ def addChunk(self, chunk):
+ """Add a newly parsed AutomakeChunk to this file."""
+ self.chunks.append(chunk)
+ self.by_type[chunk.kind.lower()] = chunk
+
+ def add_file(self, fname, kind):
+ """Insert a file of kind 'kind' to the appropriate section of this
+ file. Return True if we added it.
+ """
+ if kind.lower() in self.by_type:
+ self.by_type[kind.lower()].insertMember(fname)
+ return True
+ else:
+ return False
+
+ def dump(self, f):
+ """Write this file into a file 'f'."""
+ for chunk in self.chunks:
+ chunk.dump(f)
+
+def get_include_am_location(fname):
+ """Find the right include.am file for introducing a new file. Return None
+ if we can't guess one.
+
+ Note that this function is imperfect because our include.am layout is
+ not (yet) consistent.
+ """
+ td = topdir_file(fname)
+ m = re.match(r'^lib/([a-z0-9_]*)/', td)
+ if m:
+ return "src/lib/{}/include.am".format(m.group(1))
+
+ if re.match(r'^(core|feature|app)/', td):
+ return "src/core/include.am"
+
+ if re.match(r'^test/', td):
+ return "src/test/include.am"
+
+ return None
+
+def run(fn):
+ """
+ Create a new C file and H file corresponding to the filename "fn", and
+ add them to include.am.
+ """
+
+ cf = makeext(fn, "c")
+ hf = makeext(fn, "h")
+
+ if os.path.exists(cf):
+ print("{} already exists".format(cf))
+ return 1
+ if os.path.exists(hf):
+ print("{} already exists".format(hf))
+ return 1
+
+ with open(cf, 'w') as f:
+ f.write(instantiate_template(C_FILE_TEMPLATE, cf))
+
+ with open(hf, 'w') as f:
+ f.write(instantiate_template(HEADER_TEMPLATE, hf))
+
+ iam = get_include_am_location(cf)
+ if iam is None or not os.path.exists(iam):
+ print("Made files successfully but couldn't identify include.am for {}"
+ .format(cf))
+ return 1
+
+ amfile = ParsedAutomake()
+ cur_chunk = AutomakeChunk()
+ with open(iam) as f:
+ for line in f:
+ if cur_chunk.addLine(line):
+ amfile.addChunk(cur_chunk)
+ cur_chunk = AutomakeChunk()
+ amfile.addChunk(cur_chunk)
+
+ amfile.add_file(cf, "sources")
+ amfile.add_file(hf, "headers")
+
+ with open(iam+".tmp", 'w') as f:
+ amfile.dump(f)
+
+ os.rename(iam+".tmp", iam)
+
+if __name__ == '__main__':
+ import sys
+ sys.exit(run(sys.argv[1]))
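
Note: as a quick check of the naming logic above, here is guard_macro() restated as a minimal standalone function and applied to the ocelot.c example from the docstring. This is an illustration only, not additional behavior.

    # Minimal restatement of add_c_file.py's guard_macro(), for illustration.
    def guard_macro(name):
        if name.startswith("src/"):
            name = name[4:]
        return "TOR_" + name.replace(".", "_").replace("/", "_").upper()

    print(guard_macro("src/feature/dirauth/ocelot.h"))
    # -> TOR_FEATURE_DIRAUTH_OCELOT_H
    # This is the value substituted for {guard_macro} in HEADER_TEMPLATE's
    # #ifndef/#define pair. get_include_am_location() would file the new
    # sources under src/core/include.am, since feature/* shares core's
    # include.am in this layout.
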
diff --git a/scripts/maint/checkIncludes.py b/scripts/maint/checkIncludes.py
index 3afd9bbebe..ec9350b9b1 100755
--- a/scripts/maint/checkIncludes.py
+++ b/scripts/maint/checkIncludes.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python3
+#!/usr/bin/python
# Copyright 2018 The Tor Project, Inc. See LICENSE file for licensing info.
"""This script looks through all the directories for files matching *.c or
diff --git a/scripts/maint/checkSpace.pl b/scripts/maint/checkSpace.pl
index 633b47e314..433ae62807 100755
--- a/scripts/maint/checkSpace.pl
+++ b/scripts/maint/checkSpace.pl
@@ -18,6 +18,8 @@ if ($ARGV[0] =~ /^-/) {
our %basenames = ();
+our %guardnames = ();
+
for my $fn (@ARGV) {
open(F, "$fn");
my $lastnil = 0;
@@ -31,6 +33,10 @@ for my $fn (@ARGV) {
} else {
$basenames{$basename} = $fn;
}
+        my $isheader = ($fn =~ /\.h$/);
+ my $seenguard = 0;
+ my $guardname = "<none>";
+
while (<F>) {
## Warn about windows-style newlines.
# (We insist on lines that end with a single LF character, not
@@ -112,6 +118,23 @@ for my $fn (@ARGV) {
next;
}
}
+
+ if ($isheader) {
+ if ($seenguard == 0) {
+ if (/ifndef\s+(\S+)/) {
+ ++$seenguard;
+ $guardname = $1;
+ }
+ } elsif ($seenguard == 1) {
+ if (/^\#define (\S+)/) {
+ ++$seenguard;
+ if ($1 ne $guardname) {
+ msg "GUARD:$fn:$.: Header guard macro mismatch.\n";
+ }
+ }
+ }
+ }
+
if (m!/\*.*?\*/!) {
s!\s*/\*.*?\*/!!;
} elsif (m!/\*!) {
@@ -201,6 +224,15 @@ for my $fn (@ARGV) {
}
}
}
+ if ($isheader && $C) {
+ if ($seenguard < 2) {
+ msg "$fn:No #ifndef/#define header guard pair found.\n";
+ } elsif ($guardnames{$guardname}) {
+ msg "$fn:Guard macro $guardname also used in $guardnames{$guardname}\n";
+ } else {
+ $guardnames{$guardname} = $fn;
+ }
+ }
close(F);
}
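
Note: the guard check added above keeps a small per-header state machine: it records the macro named in the first #ifndef, then requires the next #define to use the same name. A rough Python restatement of that idea (not the Perl code itself, and using a hypothetical TOR_OCELOT_H guard) looks like this:

    import re

    def check_guard_pair(lines):
        """Rough restatement of the new checkSpace.pl header guard check."""
        seenguard, guardname = 0, "<none>"
        for line in lines:
            if seenguard == 0:
                m = re.search(r'ifndef\s+(\S+)', line)
                if m:
                    seenguard, guardname = 1, m.group(1)
            elif seenguard == 1:
                m = re.match(r'#define (\S+)', line)
                if m:
                    seenguard = 2
                    if m.group(1) != guardname:
                        return "guard macro mismatch"
        return None if seenguard == 2 else "no #ifndef/#define guard pair"

    print(check_guard_pair(["#ifndef TOR_OCELOT_H", "#define TOR_OCELOT_H"]))
    # -> None  (guard pair found and consistent)
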
diff --git a/scripts/maint/practracker/exceptions.txt b/scripts/maint/practracker/exceptions.txt
new file mode 100644
index 0000000000..726dc9c3ef
--- /dev/null
+++ b/scripts/maint/practracker/exceptions.txt
@@ -0,0 +1,289 @@
+# Welcome to the exceptions file for Tor's best-practices tracker!
+#
+# Each line of this file represents a single violation of Tor's best
+# practices -- typically, a violation that we had before practracker.py
+# first existed.
+#
+# There are three kinds of problems that we recognize right now:
+# function-size -- a function of more than 100 lines.
+# file-size -- a file of more than 3000 lines.
+# include-count -- a file with more than 50 #includes.
+#
+# Each line below represents a single exception that practracker should
+# _ignore_. Each line has four parts:
+# 1. The word "problem".
+# 2. The kind of problem.
+# 3. The location of the problem: either a filename, or a
+# filename:functionname pair.
+# 4. The magnitude of the problem to ignore.
+#
+# So for example, consider this line:
+# problem file-size /src/core/or/connection_or.c 3200
+#
+# It tells practracker to allow the mentioned file to be up to 3200 lines
+# long, even though ordinarily it would warn about any file with more than
+# 3000 lines.
+#
+# You can either edit this file by hand, or regenerate it completely by
+# running `make practracker-regen`.
+#
+# Remember: It is better to fix the problem than to add a new exception!
+
+problem file-size /src/app/config/config.c 8518
+problem include-count /src/app/config/config.c 87
+problem function-size /src/app/config/config.c:options_act_reversible() 296
+problem function-size /src/app/config/config.c:options_act() 589
+problem function-size /src/app/config/config.c:resolve_my_address() 192
+problem function-size /src/app/config/config.c:options_validate() 1217
+problem function-size /src/app/config/config.c:options_init_from_torrc() 207
+problem function-size /src/app/config/config.c:options_init_from_string() 173
+problem function-size /src/app/config/config.c:options_init_logs() 146
+problem function-size /src/app/config/config.c:parse_bridge_line() 104
+problem function-size /src/app/config/config.c:parse_transport_line() 191
+problem function-size /src/app/config/config.c:parse_dir_authority_line() 151
+problem function-size /src/app/config/config.c:parse_dir_fallback_line() 102
+problem function-size /src/app/config/config.c:parse_port_config() 452
+problem function-size /src/app/config/config.c:parse_ports() 170
+problem function-size /src/app/config/config.c:getinfo_helper_config() 116
+problem function-size /src/app/config/confparse.c:config_assign_value() 205
+problem function-size /src/app/config/confparse.c:config_get_assigned_option() 129
+problem include-count /src/app/main/main.c 67
+problem function-size /src/app/main/main.c:dumpstats() 102
+problem function-size /src/app/main/main.c:tor_init() 137
+problem function-size /src/app/main/main.c:sandbox_init_filter() 291
+problem function-size /src/app/main/main.c:run_tor_main_loop() 105
+problem function-size /src/app/main/ntmain.c:nt_service_install() 125
+problem file-size /src/core/mainloop/connection.c 5569
+problem include-count /src/core/mainloop/connection.c 62
+problem function-size /src/core/mainloop/connection.c:connection_free_minimal() 185
+problem function-size /src/core/mainloop/connection.c:connection_listener_new() 328
+problem function-size /src/core/mainloop/connection.c:connection_handle_listener_read() 161
+problem function-size /src/core/mainloop/connection.c:connection_connect_sockaddr() 103
+problem function-size /src/core/mainloop/connection.c:connection_proxy_connect() 148
+problem function-size /src/core/mainloop/connection.c:connection_read_proxy_handshake() 153
+problem function-size /src/core/mainloop/connection.c:retry_listener_ports() 116
+problem function-size /src/core/mainloop/connection.c:connection_handle_read_impl() 111
+problem function-size /src/core/mainloop/connection.c:connection_buf_read_from_socket() 181
+problem function-size /src/core/mainloop/connection.c:connection_handle_write_impl() 241
+problem function-size /src/core/mainloop/connection.c:assert_connection_ok() 143
+problem include-count /src/core/mainloop/mainloop.c 63
+problem function-size /src/core/mainloop/mainloop.c:conn_close_if_marked() 108
+problem function-size /src/core/mainloop/mainloop.c:run_connection_housekeeping() 123
+problem file-size /src/core/or/channel.c 3487
+problem function-size /src/core/or/channeltls.c:channel_tls_handle_var_cell() 160
+problem function-size /src/core/or/channeltls.c:channel_tls_process_versions_cell() 170
+problem function-size /src/core/or/channeltls.c:channel_tls_process_netinfo_cell() 214
+problem function-size /src/core/or/channeltls.c:channel_tls_process_certs_cell() 246
+problem function-size /src/core/or/channeltls.c:channel_tls_process_authenticate_cell() 202
+problem include-count /src/core/or/circuitbuild.c 54
+problem function-size /src/core/or/circuitbuild.c:get_unique_circ_id_by_chan() 128
+problem function-size /src/core/or/circuitbuild.c:circuit_extend() 147
+problem function-size /src/core/or/circuitbuild.c:choose_good_exit_server_general() 206
+problem include-count /src/core/or/circuitlist.c 55
+problem function-size /src/core/or/circuitlist.c:HT_PROTOTYPE() 128
+problem function-size /src/core/or/circuitlist.c:circuit_free_() 143
+problem function-size /src/core/or/circuitlist.c:circuit_find_to_cannibalize() 102
+problem function-size /src/core/or/circuitlist.c:circuit_about_to_free() 120
+problem function-size /src/core/or/circuitlist.c:circuits_handle_oom() 117
+problem function-size /src/core/or/circuitmux.c:circuitmux_set_policy() 110
+problem function-size /src/core/or/circuitmux.c:circuitmux_attach_circuit() 114
+problem file-size /src/core/or/circuitpadding.c 3040
+problem function-size /src/core/or/circuitpadding.c:circpad_machine_schedule_padding() 107
+problem function-size /src/core/or/circuitpadding.c:circpad_machine_schedule_padding() 113
+problem function-size /src/core/or/circuitpadding_machines.c:circpad_machine_relay_hide_intro_circuits() 104
+problem function-size /src/core/or/circuitpadding_machines.c:circpad_machine_client_hide_rend_circuits() 112
+problem function-size /src/core/or/circuitstats.c:circuit_build_times_parse_state() 124
+problem file-size /src/core/or/circuituse.c 3162
+problem function-size /src/core/or/circuituse.c:circuit_is_acceptable() 132
+problem function-size /src/core/or/circuituse.c:circuit_expire_building() 394
+problem function-size /src/core/or/circuituse.c:circuit_log_ancient_one_hop_circuits() 126
+problem function-size /src/core/or/circuituse.c:circuit_build_failed() 149
+problem function-size /src/core/or/circuituse.c:circuit_launch_by_extend_info() 110
+problem function-size /src/core/or/circuituse.c:circuit_get_open_circ_or_launch() 354
+problem function-size /src/core/or/circuituse.c:connection_ap_handshake_attach_circuit() 244
+problem function-size /src/core/or/command.c:command_process_create_cell() 156
+problem function-size /src/core/or/command.c:command_process_relay_cell() 132
+problem file-size /src/core/or/connection_edge.c 4595
+problem include-count /src/core/or/connection_edge.c 65
+problem function-size /src/core/or/connection_edge.c:connection_ap_expire_beginning() 117
+problem function-size /src/core/or/connection_edge.c:connection_ap_handshake_rewrite() 192
+problem function-size /src/core/or/connection_edge.c:connection_ap_handle_onion() 188
+problem function-size /src/core/or/connection_edge.c:connection_ap_handshake_rewrite_and_attach() 423
+problem function-size /src/core/or/connection_edge.c:connection_ap_handshake_send_begin() 111
+problem function-size /src/core/or/connection_edge.c:connection_ap_handshake_socks_resolved() 106
+problem function-size /src/core/or/connection_edge.c:connection_exit_begin_conn() 184
+problem function-size /src/core/or/connection_edge.c:connection_exit_connect() 102
+problem file-size /src/core/or/connection_or.c 3124
+problem include-count /src/core/or/connection_or.c 51
+problem function-size /src/core/or/connection_or.c:connection_or_group_set_badness_() 105
+problem function-size /src/core/or/connection_or.c:connection_or_client_learned_peer_id() 144
+problem function-size /src/core/or/connection_or.c:connection_or_compute_authenticate_cell_body() 235
+problem file-size /src/core/or/policies.c 3249
+problem function-size /src/core/or/policies.c:policy_summarize() 107
+problem function-size /src/core/or/protover.c:protover_all_supported() 117
+problem file-size /src/core/or/relay.c 3244
+problem function-size /src/core/or/relay.c:circuit_receive_relay_cell() 127
+problem function-size /src/core/or/relay.c:relay_send_command_from_edge_() 112
+problem function-size /src/core/or/relay.c:connection_ap_process_end_not_open() 194
+problem function-size /src/core/or/relay.c:connection_edge_process_relay_cell_not_open() 139
+problem function-size /src/core/or/relay.c:connection_edge_process_relay_cell() 430
+problem function-size /src/core/or/relay.c:connection_edge_package_raw_inbuf() 129
+problem function-size /src/core/or/relay.c:circuit_resume_edge_reading_helper() 148
+problem function-size /src/core/or/scheduler_kist.c:kist_scheduler_run() 171
+problem function-size /src/core/or/scheduler_vanilla.c:vanilla_scheduler_run() 109
+problem function-size /src/core/or/versions.c:tor_version_parse() 104
+problem function-size /src/core/proto/proto_socks.c:parse_socks_client() 112
+problem function-size /src/feature/client/addressmap.c:addressmap_rewrite() 112
+problem function-size /src/feature/client/bridges.c:rewrite_node_address_for_bridge() 126
+problem function-size /src/feature/client/circpathbias.c:pathbias_measure_close_rate() 108
+problem function-size /src/feature/client/dnsserv.c:evdns_server_callback() 153
+problem file-size /src/feature/client/entrynodes.c 3824
+problem function-size /src/feature/client/entrynodes.c:entry_guards_upgrade_waiting_circuits() 157
+problem function-size /src/feature/client/entrynodes.c:entry_guard_parse_from_state() 246
+problem function-size /src/feature/client/transports.c:handle_proxy_line() 108
+problem function-size /src/feature/client/transports.c:parse_method_line_helper() 112
+problem function-size /src/feature/client/transports.c:create_managed_proxy_environment() 109
+problem function-size /src/feature/control/control.c:connection_control_process_inbuf() 136
+problem function-size /src/feature/control/control_auth.c:handle_control_authchallenge() 103
+problem function-size /src/feature/control/control_auth.c:handle_control_authenticate() 187
+problem function-size /src/feature/control/control_cmd.c:handle_control_extendcircuit() 151
+problem function-size /src/feature/control/control_cmd.c:handle_control_add_onion() 269
+problem function-size /src/feature/control/control_cmd.c:add_onion_helper_keyarg() 125
+problem function-size /src/feature/control/control_events.c:control_event_stream_status() 119
+problem include-count /src/feature/control/control_getinfo.c 54
+problem function-size /src/feature/control/control_getinfo.c:getinfo_helper_misc() 109
+problem function-size /src/feature/control/control_getinfo.c:getinfo_helper_dir() 304
+problem function-size /src/feature/control/control_getinfo.c:getinfo_helper_events() 236
+problem function-size /src/feature/dirauth/bwauth.c:dirserv_read_measured_bandwidths() 124
+problem file-size /src/feature/dirauth/dirvote.c 4726
+problem include-count /src/feature/dirauth/dirvote.c 53
+problem function-size /src/feature/dirauth/dirvote.c:format_networkstatus_vote() 249
+problem function-size /src/feature/dirauth/dirvote.c:networkstatus_compute_bw_weights_v10() 235
+problem function-size /src/feature/dirauth/dirvote.c:networkstatus_compute_consensus() 962
+problem function-size /src/feature/dirauth/dirvote.c:networkstatus_add_detached_signatures() 123
+problem function-size /src/feature/dirauth/dirvote.c:dirvote_add_vote() 162
+problem function-size /src/feature/dirauth/dirvote.c:dirvote_compute_consensuses() 164
+problem function-size /src/feature/dirauth/dirvote.c:dirserv_generate_networkstatus_vote_obj() 293
+problem function-size /src/feature/dirauth/dsigs_parse.c:networkstatus_parse_detached_signatures() 196
+problem function-size /src/feature/dirauth/guardfraction.c:dirserv_read_guardfraction_file_from_str() 110
+problem function-size /src/feature/dirauth/process_descs.c:dirserv_add_descriptor() 125
+problem function-size /src/feature/dirauth/shared_random.c:should_keep_commit() 110
+problem function-size /src/feature/dirauth/voteflags.c:dirserv_compute_performance_thresholds() 172
+problem function-size /src/feature/dircache/consdiffmgr.c:consdiffmgr_cleanup() 115
+problem function-size /src/feature/dircache/consdiffmgr.c:consdiffmgr_rescan_flavor_() 111
+problem function-size /src/feature/dircache/consdiffmgr.c:consensus_diff_worker_threadfn() 132
+problem function-size /src/feature/dircache/dircache.c:handle_get_current_consensus() 166
+problem function-size /src/feature/dircache/dircache.c:directory_handle_command_post() 120
+problem file-size /src/feature/dirclient/dirclient.c 3215
+problem include-count /src/feature/dirclient/dirclient.c 51
+problem function-size /src/feature/dirclient/dirclient.c:directory_get_from_dirserver() 131
+problem function-size /src/feature/dirclient/dirclient.c:directory_initiate_request() 201
+problem function-size /src/feature/dirclient/dirclient.c:directory_send_command() 241
+problem function-size /src/feature/dirclient/dirclient.c:dir_client_decompress_response_body() 114
+problem function-size /src/feature/dirclient/dirclient.c:connection_dir_client_reached_eof() 189
+problem function-size /src/feature/dirclient/dirclient.c:handle_response_fetch_consensus() 105
+problem function-size /src/feature/dircommon/consdiff.c:gen_ed_diff() 204
+problem function-size /src/feature/dircommon/consdiff.c:apply_ed_diff() 159
+problem function-size /src/feature/dirparse/authcert_parse.c:authority_cert_parse_from_string() 182
+problem function-size /src/feature/dirparse/microdesc_parse.c:microdescs_parse_from_string() 169
+problem function-size /src/feature/dirparse/ns_parse.c:routerstatus_parse_entry_from_string() 286
+problem function-size /src/feature/dirparse/ns_parse.c:networkstatus_verify_bw_weights() 389
+problem function-size /src/feature/dirparse/ns_parse.c:networkstatus_parse_vote_from_string() 638
+problem function-size /src/feature/dirparse/parsecommon.c:tokenize_string() 103
+problem function-size /src/feature/dirparse/parsecommon.c:get_next_token() 159
+problem function-size /src/feature/dirparse/routerparse.c:router_parse_entry_from_string() 557
+problem function-size /src/feature/dirparse/routerparse.c:extrainfo_parse_entry_from_string() 210
+problem function-size /src/feature/hibernate/hibernate.c:accounting_parse_options() 109
+problem function-size /src/feature/hs/hs_cell.c:hs_cell_build_establish_intro() 115
+problem function-size /src/feature/hs/hs_cell.c:hs_cell_parse_introduce2() 154
+problem function-size /src/feature/hs/hs_client.c:send_introduce1() 104
+problem function-size /src/feature/hs/hs_client.c:hs_config_client_authorization() 108
+problem function-size /src/feature/hs/hs_common.c:hs_get_responsible_hsdirs() 104
+problem function-size /src/feature/hs/hs_config.c:config_generic_service() 140
+problem function-size /src/feature/hs/hs_descriptor.c:desc_encode_v3() 104
+problem function-size /src/feature/hs/hs_descriptor.c:decrypt_desc_layer() 110
+problem function-size /src/feature/hs/hs_descriptor.c:decode_introduction_point() 122
+problem function-size /src/feature/hs/hs_descriptor.c:desc_decode_superencrypted_v3() 109
+problem function-size /src/feature/hs/hs_descriptor.c:desc_decode_encrypted_v3() 109
+problem file-size /src/feature/hs/hs_service.c 4109
+problem function-size /src/feature/keymgt/loadkey.c:ed_key_init_from_file() 333
+problem function-size /src/feature/nodelist/authcert.c:trusted_dirs_load_certs_from_string() 124
+problem function-size /src/feature/nodelist/authcert.c:authority_certs_fetch_missing() 296
+problem function-size /src/feature/nodelist/fmt_routerstatus.c:routerstatus_format_entry() 166
+problem function-size /src/feature/nodelist/microdesc.c:microdesc_cache_rebuild() 134
+problem include-count /src/feature/nodelist/networkstatus.c 62
+problem function-size /src/feature/nodelist/networkstatus.c:networkstatus_check_consensus_signature() 176
+problem function-size /src/feature/nodelist/networkstatus.c:networkstatus_set_current_consensus() 293
+problem function-size /src/feature/nodelist/node_select.c:router_pick_directory_server_impl() 123
+problem function-size /src/feature/nodelist/node_select.c:compute_weighted_bandwidths() 206
+problem function-size /src/feature/nodelist/node_select.c:router_pick_trusteddirserver_impl() 114
+problem function-size /src/feature/nodelist/nodelist.c:compute_frac_paths_available() 193
+problem file-size /src/feature/nodelist/routerlist.c 3238
+problem function-size /src/feature/nodelist/routerlist.c:router_rebuild_store() 148
+problem function-size /src/feature/nodelist/routerlist.c:router_add_to_routerlist() 169
+problem function-size /src/feature/nodelist/routerlist.c:routerlist_remove_old_routers() 121
+problem function-size /src/feature/nodelist/routerlist.c:update_consensus_router_descriptor_downloads() 136
+problem function-size /src/feature/nodelist/routerlist.c:update_extrainfo_downloads() 103
+problem function-size /src/feature/relay/dns.c:dns_resolve_impl() 134
+problem function-size /src/feature/relay/dns.c:configure_nameservers() 161
+problem function-size /src/feature/relay/dns.c:evdns_callback() 109
+problem file-size /src/feature/relay/router.c 3407
+problem include-count /src/feature/relay/router.c 56
+problem function-size /src/feature/relay/router.c:init_keys() 252
+problem function-size /src/feature/relay/router.c:get_my_declared_family() 114
+problem function-size /src/feature/relay/router.c:router_build_fresh_unsigned_routerinfo() 136
+problem function-size /src/feature/relay/router.c:router_dump_router_to_string() 371
+problem function-size /src/feature/relay/router.c:extrainfo_dump_to_string() 206
+problem function-size /src/feature/relay/routerkeys.c:load_ed_keys() 294
+problem function-size /src/feature/rend/rendcache.c:rend_cache_store_v2_desc_as_client() 193
+problem function-size /src/feature/rend/rendclient.c:rend_client_send_introduction() 220
+problem function-size /src/feature/rend/rendcommon.c:rend_encode_v2_descriptors() 225
+problem function-size /src/feature/rend/rendmid.c:rend_mid_establish_intro_legacy() 104
+problem function-size /src/feature/rend/rendparse.c:rend_parse_v2_service_descriptor() 187
+problem function-size /src/feature/rend/rendparse.c:rend_decrypt_introduction_points() 104
+problem function-size /src/feature/rend/rendparse.c:rend_parse_introduction_points() 131
+problem file-size /src/feature/rend/rendservice.c 4511
+problem function-size /src/feature/rend/rendservice.c:rend_service_prune_list_impl_() 107
+problem function-size /src/feature/rend/rendservice.c:rend_config_service() 164
+problem function-size /src/feature/rend/rendservice.c:rend_service_load_auth_keys() 178
+problem function-size /src/feature/rend/rendservice.c:rend_service_receive_introduction() 332
+problem function-size /src/feature/rend/rendservice.c:rend_service_parse_intro_for_v3() 115
+problem function-size /src/feature/rend/rendservice.c:rend_service_decrypt_intro() 115
+problem function-size /src/feature/rend/rendservice.c:rend_service_intro_has_opened() 126
+problem function-size /src/feature/rend/rendservice.c:rend_service_rendezvous_has_opened() 117
+problem function-size /src/feature/rend/rendservice.c:directory_post_to_hs_dir() 108
+problem function-size /src/feature/rend/rendservice.c:upload_service_descriptor() 111
+problem function-size /src/feature/rend/rendservice.c:rend_consider_services_intro_points() 170
+problem function-size /src/feature/stats/rephist.c:rep_hist_load_mtbf_data() 185
+problem function-size /src/feature/stats/rephist.c:rep_hist_format_exit_stats() 148
+problem function-size /src/lib/compress/compress.c:tor_compress_impl() 133
+problem function-size /src/lib/compress/compress_zstd.c:tor_zstd_compress_process() 126
+problem function-size /src/lib/container/smartlist.c:smartlist_bsearch_idx() 109
+problem function-size /src/lib/crypt_ops/crypto_rand.c:crypto_strongest_rand_syscall() 102
+problem function-size /src/lib/encoding/binascii.c:base64_encode() 107
+problem function-size /src/lib/encoding/confline.c:parse_config_line_from_str_verbose() 119
+problem function-size /src/lib/encoding/cstring.c:unescape_string() 108
+problem function-size /src/lib/fs/dir.c:check_private_dir() 231
+problem function-size /src/lib/log/log.c:parse_log_severity_config() 101
+problem function-size /src/lib/math/prob_distr.c:sample_uniform_interval() 145
+problem function-size /src/lib/net/address.c:tor_addr_parse_mask_ports() 198
+problem function-size /src/lib/net/address.c:tor_addr_compare_masked() 111
+problem function-size /src/lib/net/inaddr.c:tor_inet_pton() 107
+problem function-size /src/lib/net/resolve.c:tor_addr_lookup() 110
+problem function-size /src/lib/net/socketpair.c:tor_ersatz_socketpair() 102
+problem function-size /src/lib/osinfo/uname.c:get_uname() 116
+problem function-size /src/lib/process/process_unix.c:process_unix_exec() 220
+problem function-size /src/lib/process/process_win32.c:process_win32_exec() 133
+problem function-size /src/lib/process/process_win32.c:process_win32_create_pipe() 112
+problem function-size /src/lib/process/restrict.c:set_max_file_descriptors() 102
+problem function-size /src/lib/process/setuid.c:switch_id() 156
+problem function-size /src/lib/sandbox/sandbox.c:prot_strings() 104
+problem function-size /src/lib/string/scanf.c:tor_vsscanf() 112
+problem function-size /src/lib/tls/tortls_nss.c:tor_tls_context_new() 153
+problem function-size /src/lib/tls/tortls_openssl.c:tor_tls_context_new() 171
+problem function-size /src/lib/tls/x509_nss.c:tor_tls_create_certificate_internal() 126
+problem function-size /src/tools/tor-gencert.c:parse_commandline() 111
+problem function-size /src/tools/tor-resolve.c:build_socks5_resolve_request() 104
+problem function-size /src/tools/tor-resolve.c:do_resolve() 174
+problem function-size /src/tools/tor-resolve.c:main() 112
+
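
Note: the 'problem <kind> <location> <magnitude>' format documented at the top of this file is simple to consume. The sketch below is a hypothetical standalone parser for a single line, shown for illustration; it is not the code in practracker's problem.py.

    def parse_exception_line(line):
        """Split a 'problem <kind> <location> <magnitude>' line into fields."""
        line = line.strip()
        if not line or line.startswith("#"):
            return None  # blank line or comment
        keyword, kind, location, magnitude = line.split()
        assert keyword == "problem"
        return kind, location, int(magnitude)

    print(parse_exception_line(
        "problem file-size /src/core/or/connection_or.c 3200"))
    # -> ('file-size', '/src/core/or/connection_or.c', 3200)
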
diff --git a/scripts/maint/practracker/metrics.py b/scripts/maint/practracker/metrics.py
new file mode 100644
index 0000000000..5fa305a868
--- /dev/null
+++ b/scripts/maint/practracker/metrics.py
@@ -0,0 +1,50 @@
+#!/usr/bin/python
+
+# Implementation of various source code metrics.
+# These are currently ad-hoc string operations and regexps.
+# We might want to use a proper static analysis library in the future, if we want to get more advanced metrics.
+
+import re
+
+def get_file_len(f):
+    """Return the number of lines in file 'f'."""
+    i = -1
+    for i, l in enumerate(f):
+        pass
+    return i + 1
+
+def get_include_count(f):
+ """Get number of #include statements in the file"""
+ include_count = 0
+ for line in f:
+ if re.match(r' *# *include', line):
+ include_count += 1
+ return include_count
+
+def get_function_lines(f):
+ """
+ Return iterator which iterates over functions and returns (function name, function lines)
+ """
+
+ # Skip lines that look like they are defining functions with these
+ # names: they aren't real function definitions.
+ REGEXP_CONFUSE_TERMS = {"MOCK_IMPL", "ENABLE_GCC_WARNINGS", "ENABLE_GCC_WARNING", "DUMMY_TYPECHECK_INSTANCE",
+ "DISABLE_GCC_WARNING", "DISABLE_GCC_WARNINGS"}
+
+ in_function = False
+ for lineno, line in enumerate(f):
+ if not in_function:
+ # find the start of a function
+ m = re.match(r'^([a-zA-Z_][a-zA-Z_0-9]*),?\(', line)
+ if m:
+ func_name = m.group(1)
+ if func_name in REGEXP_CONFUSE_TERMS:
+ continue
+ func_start = lineno
+ in_function = True
+
+ else:
+ # Find the end of a function
+ if line.startswith("}"):
+ n_lines = lineno - func_start
+ in_function = False
+ yield (func_name, n_lines)
diff --git a/scripts/maint/practracker/practracker.py b/scripts/maint/practracker/practracker.py
new file mode 100755
index 0000000000..febb14639d
--- /dev/null
+++ b/scripts/maint/practracker/practracker.py
@@ -0,0 +1,216 @@
+#!/usr/bin/python
+
+"""
+Best-practices tracker for Tor source code.
+
+Go through the various .c files and collect metrics about them. If the metrics
+violate some of our best practices and they are not found in the optional
+exceptions file, then log a problem about them.
+
+We currently do metrics about file size, function size and number of includes.
+
+practracker.py should be run with the Tor top-level source directory as its
+argument, like this:
+  $ python3 ./scripts/maint/practracker/practracker.py .
+
+To regenerate the exceptions file so that it allows all current
+problems in the Tor source, use the --regen flag:
+  $ python3 ./scripts/maint/practracker/practracker.py --regen .
+"""
+
+from __future__ import print_function
+
+import os, sys
+
+import metrics
+import util
+import problem
+
+# The filename of the exceptions file (it should be placed in the practracker directory)
+EXCEPTIONS_FNAME = "./exceptions.txt"
+
+# Recommended file size
+MAX_FILE_SIZE = 3000 # lines
+# Recommended function size
+MAX_FUNCTION_SIZE = 100 # lines
+# Recommended number of #includes
+MAX_INCLUDE_COUNT = 50
+
+#######################################################
+
+# ProblemVault singleton
+ProblemVault = None
+
+# The Tor source code topdir
+TOR_TOPDIR = None
+
+#######################################################
+
+if sys.version_info[0] <= 2:
+ def open_file(fname):
+ return open(fname, 'r')
+else:
+ def open_file(fname):
+ return open(fname, 'r', encoding='utf-8')
+
+def consider_file_size(fname, f):
+ """Consider file size issues for 'f' and return True if a new issue was found"""
+ file_size = metrics.get_file_len(f)
+ if file_size > MAX_FILE_SIZE:
+ p = problem.FileSizeProblem(fname, file_size)
+ return ProblemVault.register_problem(p)
+ return False
+
+def consider_includes(fname, f):
+ """Consider #include issues for 'f' and return True if a new issue was found"""
+ include_count = metrics.get_include_count(f)
+
+ if include_count > MAX_INCLUDE_COUNT:
+ p = problem.IncludeCountProblem(fname, include_count)
+ return ProblemVault.register_problem(p)
+ return False
+
+def consider_function_size(fname, f):
+ """Consider the function sizes for 'f' and return True if a new issue was found"""
+ found_new_issues = False
+
+ for name, lines in metrics.get_function_lines(f):
+ # Don't worry about functions within our limits
+ if lines <= MAX_FUNCTION_SIZE:
+ continue
+
+ # That's a big function! Issue a problem!
+ canonical_function_name = "%s:%s()" % (fname, name)
+ p = problem.FunctionSizeProblem(canonical_function_name, lines)
+ found_new_issues |= ProblemVault.register_problem(p)
+
+ return found_new_issues
+
+#######################################################
+
+def consider_all_metrics(files_list):
+ """Consider metrics for all files, and return True if new issues were found"""
+ found_new_issues = False
+ for fname in files_list:
+ with open_file(fname) as f:
+ found_new_issues |= consider_metrics_for_file(fname, f)
+ return found_new_issues
+
+def consider_metrics_for_file(fname, f):
+ """
+ Consider the various metrics for file with filename 'fname' and file descriptor 'f'.
+ Return True if we found new issues.
+ """
+ # Strip the useless part of the path
+ if fname.startswith(TOR_TOPDIR):
+ fname = fname[len(TOR_TOPDIR):]
+
+ found_new_issues = False
+
+ # Get file length
+ found_new_issues |= consider_file_size(fname, f)
+
+ # Consider number of #includes
+ f.seek(0)
+ found_new_issues |= consider_includes(fname, f)
+
+ # Get function length
+ f.seek(0)
+ found_new_issues |= consider_function_size(fname, f)
+
+ return found_new_issues
+
+HEADER="""\
+# Welcome to the exceptions file for Tor's best-practices tracker!
+#
+# Each line of this file represents a single violation of Tor's best
+# practices -- typically, a violation that we had before practracker.py
+# first existed.
+#
+# There are three kinds of problems that we recognize right now:
+# function-size -- a function of more than {MAX_FUNCTION_SIZE} lines.
+# file-size -- a file of more than {MAX_FILE_SIZE} lines.
+# include-count -- a file with more than {MAX_INCLUDE_COUNT} #includes.
+#
+# Each line below represents a single exception that practracker should
+# _ignore_. Each line has four parts:
+# 1. The word "problem".
+# 2. The kind of problem.
+# 3. The location of the problem: either a filename, or a
+# filename:functionname pair.
+# 4. The magnitude of the problem to ignore.
+#
+# So for example, consider this line:
+# problem file-size /src/core/or/connection_or.c 3200
+#
+# It tells practracker to allow the mentioned file to be up to 3200 lines
+# long, even though ordinarily it would warn about any file with more than
+# {MAX_FILE_SIZE} lines.
+#
+# You can either edit this file by hand, or regenerate it completely by
+# running `make practracker-regen`.
+#
+# Remember: It is better to fix the problem than to add a new exception!
+
+""".format(**globals())
+
+def main(argv):
+ import argparse
+
+ progname = argv[0]
+ parser = argparse.ArgumentParser(prog=progname)
+ parser.add_argument("--regen", action="store_true",
+ help="Regenerate the exceptions file")
+ parser.add_argument("--exceptions",
+ help="Override the location for the exceptions file")
+ parser.add_argument("topdir", default=".", nargs="?",
+ help="Top-level directory for the tor source")
+ args = parser.parse_args(argv[1:])
+
+ global TOR_TOPDIR
+ TOR_TOPDIR = args.topdir
+ if args.exceptions:
+ exceptions_file = args.exceptions
+ else:
+ exceptions_file = os.path.join(TOR_TOPDIR, "scripts/maint/practracker", EXCEPTIONS_FNAME)
+
+ # 1) Get all the .c files we care about
+ files_list = util.get_tor_c_files(TOR_TOPDIR)
+
+ # 2) Initialize problem vault and load an optional exceptions file so that
+ # we don't warn about the past
+ global ProblemVault
+
+ if args.regen:
+ tmpname = exceptions_file + ".tmp"
+ tmpfile = open(tmpname, "w")
+ sys.stdout = tmpfile
+ sys.stdout.write(HEADER)
+ ProblemVault = problem.ProblemVault()
+ else:
+ ProblemVault = problem.ProblemVault(exceptions_file)
+
+ # 3) Go through all the files and report problems if they are not exceptions
+ found_new_issues = consider_all_metrics(files_list)
+
+ if args.regen:
+ tmpfile.close()
+ os.rename(tmpname, exceptions_file)
+ sys.exit(0)
+
+    # If new issues were found, give the developer some advice on how to resolve them.
+ if found_new_issues and not args.regen:
+ new_issues_str = """\
+FAILURE: practracker found new problems in the code: see warnings above.
+
+Please fix the problems if you can, and update the exceptions file
+({}) if you can't.
+
+See doc/HACKING/HelpfulTools.md for more information on using practracker.\
+""".format(exceptions_file)
+ print(new_issues_str)
+
+ sys.exit(found_new_issues)
+
+if __name__ == '__main__':
+ main(sys.argv)
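
For completeness, a minimal sketch of driving the per-file checks from Python
instead of the command line (the path below is only an example, and poking the
module-level globals like this is purely illustrative):

    import problem
    import practracker

    practracker.TOR_TOPDIR = "."
    practracker.ProblemVault = problem.ProblemVault()   # no exceptions loaded

    fname = "./src/core/or/connection_or.c"             # example path
    with practracker.open_file(fname) as f:
        found = practracker.consider_metrics_for_file(fname, f)
    print("new issues found:", found)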
diff --git a/scripts/maint/practracker/practracker_tests.py b/scripts/maint/practracker/practracker_tests.py
new file mode 100755
index 0000000000..cdbab2908e
--- /dev/null
+++ b/scripts/maint/practracker/practracker_tests.py
@@ -0,0 +1,50 @@
+"""Some simple tests for practracker metrics"""
+
+import unittest
+
+try:
+    from StringIO import StringIO   # Python 2
+except ImportError:
+    from io import StringIO        # Python 3
+
+import metrics
+
+function_file = """static void
+fun(directory_request_t *req, const char *resource)
+{
+ time_t if_modified_since = 0;
+ uint8_t or_diff_from[DIGEST256_LEN];
+}
+
+static void
+fun(directory_request_t *req,
+ const char *resource)
+{
+ time_t if_modified_since = 0;
+ uint8_t or_diff_from[DIGEST256_LEN];
+}
+
+MOCK_IMPL(void,
+fun,(
+ uint8_t dir_purpose,
+ uint8_t router_purpose,
+ const char *resource,
+ int pds_flags,
+ download_want_authority_t want_authority))
+{
+ const routerstatus_t *rs = NULL;
+ const or_options_t *options = get_options();
+}
+"""
+
+class TestFunctionLength(unittest.TestCase):
+    def test_function_length(self):
+        funcs = StringIO(function_file)
+        # Every definition above is of a function named "fun"
+        for name, lines in metrics.get_function_lines(funcs):
+            self.assertEqual(name, "fun")
+
+        funcs.seek(0)
+
+        # The metric counts lines from the line naming the function up to
+        # (but not including) its closing brace.
+        lengths = [lines for _, lines in metrics.get_function_lines(funcs)]
+        self.assertEqual(lengths, [4, 5, 9])
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/scripts/maint/practracker/problem.py b/scripts/maint/practracker/problem.py
new file mode 100644
index 0000000000..c82c5db572
--- /dev/null
+++ b/scripts/maint/practracker/problem.py
@@ -0,0 +1,158 @@
+"""
+In this file we define a ProblemVault class where we store all the
+exceptions and all the problems we find with the code.
+
+The ProblemVault is capable of registering problems and also figuring out if a
+problem is worse than a registered exception so that it only warns when things
+get worse.
+"""
+
+from __future__ import print_function
+
+import os.path
+import re
+import sys
+
+class ProblemVault(object):
+ """
+ Singleton where we store the various new problems we
+ found in the code, and also the old problems we read from the exception
+ file.
+ """
+ def __init__(self, exception_fname=None):
+ # Exception dictionary: { problem.key() : Problem object }
+ self.exceptions = {}
+
+        if exception_fname is None:
+ return
+
+ try:
+ with open(exception_fname, 'r') as exception_f:
+ self.register_exceptions(exception_f)
+ except IOError:
+ print("No exception file provided", file=sys.stderr)
+
+ def register_exceptions(self, exception_file):
+ # Register exceptions
+ for lineno, line in enumerate(exception_file, 1):
+ try:
+ problem = get_old_problem_from_exception_str(line)
+ except ValueError as v:
+ print("Exception file line {} not recognized: {}"
+ .format(lineno,v),
+ file=sys.stderr)
+ continue
+
+ if problem is None:
+ continue
+
+            # Fail if we see duplicate exceptions: there is no good reason to have them.
+ if problem.key() in self.exceptions:
+ print("Duplicate exceptions lines found in exception file:\n\t{}\n\t{}\nAborting...".format(problem, self.exceptions[problem.key()]),
+ file=sys.stderr)
+ sys.exit(1)
+
+ self.exceptions[problem.key()] = problem
+ #print "Registering exception: %s" % problem
+
+ def register_problem(self, problem):
+ """
+        Register this problem in the problem vault. Return True if it is a new
+        problem or if it worsens an already existing problem.
+ """
+ # This is a new problem, print it
+ if problem.key() not in self.exceptions:
+ print(problem)
+ return True
+
+ # If it's an old problem, we don't warn if the situation got better
+ # (e.g. we went from 4k LoC to 3k LoC), but we do warn if the
+ # situation worsened (e.g. we went from 60 includes to 80).
+ if problem.is_worse_than(self.exceptions[problem.key()]):
+ print(problem)
+ return True
+
+ return False
+
+class Problem(object):
+ """
+ A generic problem in our source code. See the subclasses below for the
+ specific problems we are trying to tackle.
+ """
+ def __init__(self, problem_type, problem_location, metric_value):
+ self.problem_location = problem_location
+ self.metric_value = int(metric_value)
+ self.problem_type = problem_type
+
+ def is_worse_than(self, other_problem):
+ """Return True if this is a worse problem than other_problem"""
+ if self.metric_value > other_problem.metric_value:
+ return True
+ return False
+
+ def key(self):
+ """Generate a unique key that describes this problem that can be used as a dictionary key"""
+        # Problem location is a filesystem path, so we need to normalize it
+        # across platforms; otherwise identical paths would not match.
+ canonical_location = os.path.normcase(self.problem_location)
+ return "%s:%s" % (canonical_location, self.problem_type)
+
+ def __str__(self):
+ return "problem %s %s %s" % (self.problem_type, self.problem_location, self.metric_value)
+
+class FileSizeProblem(Problem):
+ """
+ Denotes a problem with the size of a .c file.
+
+ The 'problem_location' is the filesystem path of the .c file, and the
+ 'metric_value' is the number of lines in the .c file.
+ """
+ def __init__(self, problem_location, metric_value):
+ super(FileSizeProblem, self).__init__("file-size", problem_location, metric_value)
+
+class IncludeCountProblem(Problem):
+ """
+ Denotes a problem with the number of #includes in a .c file.
+
+ The 'problem_location' is the filesystem path of the .c file, and the
+ 'metric_value' is the number of #includes in the .c file.
+ """
+ def __init__(self, problem_location, metric_value):
+ super(IncludeCountProblem, self).__init__("include-count", problem_location, metric_value)
+
+class FunctionSizeProblem(Problem):
+ """
+    Denotes a problem with the size of a function in a .c file.
+
+ The 'problem_location' is "<path>:<function>()" where <path> is the
+ filesystem path of the .c file and <function> is the name of the offending
+ function.
+
+ The 'metric_value' is the size of the offending function in lines.
+ """
+ def __init__(self, problem_location, metric_value):
+ super(FunctionSizeProblem, self).__init__("function-size", problem_location, metric_value)
+
+comment_re = re.compile(r'#.*$')
+
+def get_old_problem_from_exception_str(exception_str):
+ orig_str = exception_str
+ exception_str = comment_re.sub("", exception_str)
+ fields = exception_str.split()
+ if len(fields) == 0:
+ # empty line or comment
+ return None
+ elif len(fields) == 4:
+ # valid line
+ _, problem_type, problem_location, metric_value = fields
+ else:
+ raise ValueError("Misformatted line {!r}".format(orig_str))
+
+ if problem_type == "file-size":
+ return FileSizeProblem(problem_location, metric_value)
+ elif problem_type == "include-count":
+ return IncludeCountProblem(problem_location, metric_value)
+ elif problem_type == "function-size":
+ return FunctionSizeProblem(problem_location, metric_value)
+ else:
+ raise ValueError("Unknown exception type {!r}".format(orig_str))
diff --git a/scripts/maint/practracker/util.py b/scripts/maint/practracker/util.py
new file mode 100644
index 0000000000..b0ca73b997
--- /dev/null
+++ b/scripts/maint/practracker/util.py
@@ -0,0 +1,28 @@
+import os
+
+# We don't want to run metrics for unittests, automatically-generated C files,
+# external libraries or git leftovers.
+EXCLUDE_SOURCE_DIRS = {"/src/test/", "/src/trunnel/", "/src/ext/", "/.git/"}
+
+def get_tor_c_files(tor_topdir):
+ """
+    Return a list of the .c filenames we want to compute metrics for.
+ """
+ files_list = []
+
+ for root, directories, filenames in os.walk(tor_topdir):
+ directories.sort()
+ filenames.sort()
+ for filename in filenames:
+ # We only care about .c files
+ if not filename.endswith(".c"):
+ continue
+
+ # Exclude the excluded paths
+ full_path = os.path.join(root,filename)
+ if any(os.path.normcase(exclude_dir) in full_path for exclude_dir in EXCLUDE_SOURCE_DIRS):
+ continue
+
+ files_list.append(full_path)
+
+ return files_list
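
A quick way to sanity-check the selection from a Tor checkout (the "." topdir
and the assertion are just an example of how the exclusion list is expected to
behave on a POSIX path layout):

    import util

    files = util.get_tor_c_files(".")
    print(len(files), "C files will be scanned")
    # Unit tests live under /src/test/ and should have been filtered out.
    assert not any("/src/test/" in f for f in files)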
diff --git a/scripts/maint/pre-commit.git-hook b/scripts/maint/pre-commit.git-hook
deleted file mode 100755
index b4c4ce2061..0000000000
--- a/scripts/maint/pre-commit.git-hook
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/bin/bash
-#
-# To install this script, copy it to .git/hooks/pre-commit in local copy of
-# tor git repo and make sure it has permission to execute.
-#
-# This is pre-commit git hook script that prevents commiting your changeset if
-# it fails our code formatting or changelog entry formatting checkers.
-
-workdir=$(git rev-parse --show-toplevel)
-
-cd "$workdir" || exit 1
-
-python scripts/maint/lintChanges.py ./changes/*
-
-perl scripts/maint/checkSpace.pl -C \
-src/lib/*/*.[ch] \
-src/core/*/*.[ch] \
-src/feature/*/*.[ch] \
-src/app/*/*.[ch] \
-src/test/*.[ch] \
-src/test/*/*.[ch] \
-src/tools/*.[ch]
-
-if test -e scripts/maint/checkIncludes.py; then
- python scripts/maint/checkIncludes.py
-fi
diff --git a/scripts/maint/pre-push.git-hook b/scripts/maint/pre-push.git-hook
deleted file mode 100755
index 26296023fb..0000000000
--- a/scripts/maint/pre-push.git-hook
+++ /dev/null
@@ -1,61 +0,0 @@
-#!/bin/bash
-
-# To install this script, copy it into .git/hooks/pre-push path in your
-# local copy of git repository. Make sure it has permission to execute.
-#
-# This is git pre-push hook script to prevent "fixup!" and "squash!" commits
-# from ending up in upstream branches (master, release-* or maint-*).
-#
-# The following sample script was used as starting point:
-# https://github.com/git/git/blob/master/templates/hooks--pre-push.sample
-
-z40=0000000000000000000000000000000000000000
-
-CUR_BRANCH=$(git rev-parse --abbrev-ref HEAD)
-if [ "$CUR_BRANCH" != "master" ] && [[ $CUR_BRANCH != release-* ]] &&
- [[ $CUR_BRANCH != maint-* ]]
-then
- exit 0
-fi
-
-echo "Running pre-push hook"
-
-# shellcheck disable=SC2034
-while read -r local_ref local_sha remote_ref remote_sha
-do
- if [ "$local_sha" = $z40 ]
- then
- # Handle delete
- :
- else
- if [ "$remote_sha" = $z40 ]
- then
- # New branch, examine all commits
- range="$local_sha"
- else
- # Update to existing branch, examine new commits
- range="$remote_sha..$local_sha"
- fi
-
- # Check for fixup! commit
- commit=$(git rev-list -n 1 --grep '^fixup!' "$range")
- if [ -n "$commit" ]
- then
- echo >&2 "Found fixup! commit in $local_ref, not pushing"
- echo >&2 "If you really want to push this, use --no-verify."
- exit 1
- fi
-
- # Check for squash! commit
- commit=$(git rev-list -n 1 --grep '^squash!' "$range")
- if [ -n "$commit" ]
- then
- echo >&2 "Found squash! commit in $local_ref, not pushing"
- echo >&2 "If you really want to push this, use --no-verify."
- exit 1
- fi
- fi
-done
-
-exit 0
-
diff --git a/scripts/maint/rectify_include_paths.py b/scripts/maint/rectify_include_paths.py
index 401fadae6d..1140e8cd22 100755
--- a/scripts/maint/rectify_include_paths.py
+++ b/scripts/maint/rectify_include_paths.py
@@ -1,8 +1,12 @@
-#!/usr/bin/python3
+#!/usr/bin/python
import os
import os.path
import re
+import sys
+
+def warn(msg):
+ sys.stderr.write("WARNING: %s\n"%msg)
# Find all the include files, map them to their real names.
@@ -11,6 +15,8 @@ def exclude(paths, dirnames):
if p in dirnames:
dirnames.remove(p)
+DUPLICATE = object()
+
def get_include_map():
includes = { }
@@ -19,7 +25,10 @@ def get_include_map():
for fname in fnames:
if fname.endswith(".h"):
- assert fname not in includes
+ if fname in includes:
+ warn("Multiple headers named %s"%fname)
+ includes[fname] = DUPLICATE
+ continue
include = os.path.join(dirpath, fname)
assert include.startswith("src/")
includes[fname] = include[4:]
@@ -37,7 +46,7 @@ def fix_includes(inp, out, mapping):
if m:
include,hdr,rest = m.groups()
basehdr = get_base_header_name(hdr)
- if basehdr in mapping:
+ if basehdr in mapping and mapping[basehdr] is not DUPLICATE:
out.write('{}{}{}\n'.format(include,mapping[basehdr],rest))
continue
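
The net effect of the DUPLICATE sentinel is that an #include whose basename is
ambiguous is left untouched instead of being rewritten to an arbitrary match.
A tiny sketch of just that decision (the mapping below is invented; only the
DUPLICATE-skipping test mirrors fix_includes()):

    DUPLICATE = object()
    mapping = {
        "or.h": DUPLICATE,                  # two headers share this basename
        "buffers.h": "lib/buf/buffers.h",   # unambiguous
    }

    def rewrite(basehdr, line):
        # Rewrite only when the basename maps to exactly one known header.
        if basehdr in mapping and mapping[basehdr] is not DUPLICATE:
            return '#include "%s"' % mapping[basehdr]
        return line

    print(rewrite("buffers.h", '#include "buffers.h"'))  # path gets rewritten
    print(rewrite("or.h", '#include "or.h"'))            # ambiguous: unchanged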
diff --git a/scripts/maint/updateCopyright.pl b/scripts/maint/updateCopyright.pl
index 36894b1baf..6800032f87 100755
--- a/scripts/maint/updateCopyright.pl
+++ b/scripts/maint/updateCopyright.pl
@@ -1,7 +1,9 @@
#!/usr/bin/perl -i -w -p
-$NEWYEAR=2019;
+@now = gmtime();
-s/Copyright(.*) (201[^9]), The Tor Project/Copyright$1 $2-${NEWYEAR}, The Tor Project/;
+$NEWYEAR=$now[5]+1900;
+
+s/Copyright([^-]*) (20[^-]*), The Tor Project/Copyright$1 $2-${NEWYEAR}, The Tor Project/;
s/Copyright(.*)-(20..), The Tor Project/Copyright$1-${NEWYEAR}, The Tor Project/;
diff --git a/scripts/test/cov-diff b/scripts/test/cov-diff
index f3ca856888..8751800966 100755
--- a/scripts/test/cov-diff
+++ b/scripts/test/cov-diff
@@ -16,6 +16,5 @@ for B in "$DIRB"/*; do
fi
perl -pe 's/^\s*\!*\d+(\*?):/ 1$1:/; s/^([^:]+:)[\d\s]+:/$1/; s/^ *-:(Runs|Programs):.*//;' "$B" > "$B.tmp"
diff -u "$A.tmp" "$B.tmp" |perl -pe 's/^((?:\+\+\+|---)(?:.*tmp))\s+.*/$1/;'
- rm "$A.tmp" "$B.tmp"
+ rm -f "$A.tmp" "$B.tmp"
done
-
diff --git a/scripts/test/cov-test-determinism.sh b/scripts/test/cov-test-determinism.sh
new file mode 100755
index 0000000000..3458f96968
--- /dev/null
+++ b/scripts/test/cov-test-determinism.sh
@@ -0,0 +1,51 @@
+#!/bin/sh
+
+# To use this script, build Tor with coverage enabled, and then say:
+# ./scripts/test/cov-test-determinism.sh run
+#
+# Let it run for a long time so it can run the tests over and over. It
+# will put their coverage outputs in coverage-raw/coverage-*/.
+#
+# Then say:
+# ./scripts/test/cov-test-determinism.sh check
+#
+# It will diff the other coverage outputs to the first one, and put their
+# diffs in coverage-raw/diff-coverage-*.
+
+run=0
+check=0
+
+if test "$1" = run; then
+ run=1
+elif test "$1" = check; then
+ check=1
+else
+ echo "First use 'run' with this script, then use 'check'."
+ exit 1
+fi
+
+if test "$run" = 1; then
+ # same seed as in travis.yml
+ TOR_TEST_RNG_SEED="636f766572616765"
+ export TOR_TEST_RNG_SEED
+ while true; do
+ make reset-gcov
+ CD=coverage-raw/coverage-$(date +%s)
+ make -j5 check
+ mkdir -p "$CD"
+ ./scripts/test/coverage "$CD"
+ done
+fi
+
+if test "$check" = 1; then
+ cd coverage-raw || exit 1
+
+ FIRST="$(find . -name "coverage-*" -type d | head -1)"
+ rm -f A
+ ln -sf "$FIRST" A
+ for dir in coverage-*; do
+ rm -f B
+ ln -sf "$dir" B
+ ../scripts/test/cov-diff A B > "diff-$dir"
+ done
+fi