Diffstat (limited to 'scripts')
-rw-r--r--  scripts/coccinelle/debugmm.cocci | 29
-rwxr-xr-x  scripts/git/git-merge-forward.sh | 330
-rwxr-xr-x  scripts/git/git-pull-all.sh | 114
-rwxr-xr-x  scripts/git/git-push-all.sh | 299
-rwxr-xr-x  scripts/git/post-merge.git-hook | 6
-rwxr-xr-x  scripts/git/pre-commit.git-hook | 26
-rwxr-xr-x  scripts/git/pre-push.git-hook | 18
-rwxr-xr-x  scripts/maint/add_c_file.py | 6
-rwxr-xr-x  scripts/maint/annotate_ifdef_directives | 74
-rwxr-xr-x  scripts/maint/annotate_ifdef_directives.py | 317
-rwxr-xr-x  scripts/maint/checkIncludes.py | 183
-rwxr-xr-x  scripts/maint/checkShellScripts.sh | 61
-rwxr-xr-x  scripts/maint/checkSpace.pl | 2
-rw-r--r--  scripts/maint/practracker/README | 21
-rw-r--r--  scripts/maint/practracker/exceptions.txt | 331
-rwxr-xr-x  scripts/maint/practracker/includes.py | 285
-rw-r--r--  scripts/maint/practracker/metrics.py | 15
-rwxr-xr-x  scripts/maint/practracker/practracker.py | 181
-rwxr-xr-x  scripts/maint/practracker/practracker_tests.py | 27
-rw-r--r--  scripts/maint/practracker/problem.py | 132
-rwxr-xr-x  scripts/maint/practracker/test_practracker.sh | 70
-rw-r--r--  scripts/maint/practracker/testdata/.may_include | 3
-rw-r--r--  scripts/maint/practracker/testdata/a.c | 38
-rw-r--r--  scripts/maint/practracker/testdata/b.c | 15
-rw-r--r--  scripts/maint/practracker/testdata/ex.txt | 0
-rw-r--r--  scripts/maint/practracker/testdata/ex0-expected.txt | 11
-rw-r--r--  scripts/maint/practracker/testdata/ex0.txt | 0
-rw-r--r--  scripts/maint/practracker/testdata/ex1-expected.txt | 3
-rw-r--r--  scripts/maint/practracker/testdata/ex1-overbroad-expected.txt | 2
-rw-r--r--  scripts/maint/practracker/testdata/ex1.txt | 18
-rw-r--r--  scripts/maint/practracker/testdata/header.h | 8
-rw-r--r--  scripts/maint/practracker/testdata/not_c_file | 2
-rw-r--r--  scripts/maint/practracker/util.py | 52
33 files changed, 2056 insertions, 623 deletions
diff --git a/scripts/coccinelle/debugmm.cocci b/scripts/coccinelle/debugmm.cocci
new file mode 100644
index 0000000000..dbd308df33
--- /dev/null
+++ b/scripts/coccinelle/debugmm.cocci
@@ -0,0 +1,29 @@
+// Look for use of expressions with side-effects inside of debug logs.
+//
+// This script detects expressions like ++E, --E, E++, and E-- inside of
+// calls to log_debug().
+//
+// The log_debug() macro exits early if debug logging is not enabled,
+// potentially causing problems if its arguments have side-effects.
+
+@@
+expression E;
+@@
+*log_debug(... , <+... --E ...+>, ... );
+
+
+@@
+expression E;
+@@
+*log_debug(... , <+... ++E ...+>, ... );
+
+@@
+expression E;
+@@
+*log_debug(... , <+... E-- ...+>, ... );
+
+
+@@
+expression E;
+@@
+*log_debug(... , <+... E++ ...+>, ... );
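A usage sketch for the new rule, assuming Coccinelle's spatch tool is installed (the target directory is illustrative):

  $ spatch --sp-file scripts/coccinelle/debugmm.cocci --dir src/core

The four rules flag log arguments with side effects, such as log_debug("seen %d", n++), where the increment would be skipped whenever debug logging is disabled.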
diff --git a/scripts/git/git-merge-forward.sh b/scripts/git/git-merge-forward.sh
index 67af7e98bf..bdd0da5b75 100755
--- a/scripts/git/git-merge-forward.sh
+++ b/scripts/git/git-merge-forward.sh
@@ -1,52 +1,110 @@
-#!/bin/bash
+#!/usr/bin/env bash
-##############################
-# Configuration (change me!) #
-##############################
+SCRIPT_NAME=$(basename "$0")
-# The general setup that is suggested here is:
-#
-# GIT_PATH = /home/<user>/git/
-# ... where the git repository directories resides.
-# TOR_MASTER_NAME = "tor"
-# ... which means that tor.git was cloned in /home/<user>/git/tor
-# TOR_WKT_NAME = "tor-wkt"
-# ... which means that the tor worktrees are in /home/<user>/git/tor-wkt
+function usage()
+{
+ echo "$SCRIPT_NAME [-h] [-n] [-t <test-branch-prefix> [-u]]"
+ echo
+ echo " arguments:"
+ echo " -h: show this help text"
+ echo " -n: dry run mode"
+ echo " (default: run commands)"
+ echo " -t: test branch mode: create new branches from the commits checked"
+ echo " out in each maint directory. Call these branches prefix_029,"
+ echo " prefix_035, ... , prefix_master."
+ echo " (default: merge forward maint-*, release-*, and master)"
+ echo " -u: in test branch mode, if a prefix_* branch already exists,"
+ echo " skip creating that branch. Use after a merge error, to"
+ echo " restart the merge forward at the first unmerged branch."
+ echo " (default: if a prefix_* branch already exists, fail and exit)"
+ echo
+ echo " env vars:"
+ echo " required:"
+ echo " TOR_FULL_GIT_PATH: where the git repository directories reside."
+ echo " You must set this env var, we recommend \$HOME/git/"
+ echo " (default: fail if this env var is not set;"
+ echo " current: $GIT_PATH)"
+ echo
+ echo " optional:"
+ echo " TOR_MASTER: the name of the directory containing the tor.git clone"
+ echo " The tor master git directory is \$GIT_PATH/\$TOR_MASTER"
+ echo " (default: tor; current: $TOR_MASTER_NAME)"
+ echo " TOR_WKT_NAME: the name of the directory containing the tor"
+ echo " worktrees. The tor worktrees are:"
+ echo " \$GIT_PATH/\$TOR_WKT_NAME/{maint-*,release-*}"
+ echo " (default: tor-wkt; current: $TOR_WKT_NAME)"
+ echo " we recommend that you set these env vars in your ~/.profile"
+}
+
+#################
+# Configuration #
+#################
+
+# Don't change this configuration - set the env vars in your .profile
# Where are all those git repositories?
-GIT_PATH="FULL_PATH_TO_GIT_REPOSITORY_DIRECTORY"
+GIT_PATH=${TOR_FULL_GIT_PATH:-"FULL_PATH_TO_GIT_REPOSITORY_DIRECTORY"}
# The tor master git repository directory from which all the worktree have
# been created.
-TOR_MASTER_NAME="tor"
+TOR_MASTER_NAME=${TOR_MASTER_NAME:-"tor"}
# The worktrees location (directory).
-TOR_WKT_NAME="tor-wkt"
+TOR_WKT_NAME=${TOR_WKT_NAME:-"tor-wkt"}
-#########################
-# End of configuration. #
-#########################
+##########################
+# Git branches to manage #
+##########################
+
+# The branches and worktrees need to be modified when there is a new branch,
+# and when an old branch is no longer supported.
# Configuration of the branches that needs merging. The values are in order:
-# (1) Branch name that we merge onto.
-# (2) Branch name to merge from. In other words, this is merge into (1)
-# (3) Full path of the git worktree.
+# (0) current maint/release branch name
+# (1) previous maint/release name to merge into (0)
+# (only used in merge forward mode)
+# (2) Full path of the git worktree
+# (3) current branch suffix
+# (maint branches only, only used in test branch mode)
+# (4) previous test branch suffix to merge into (3)
+# (maint branches only, only used in test branch mode)
+#
+# Merge forward example:
+# $ cd <PATH/TO/WORKTREE> (2)
+# $ git checkout maint-0.3.5 (0)
+# $ git pull
+# $ git merge maint-0.3.4 (1)
#
-# As an example:
-# $ cd <PATH/TO/WORKTREE> (3)
-# $ git checkout maint-0.3.5 (1)
+# Test branch example:
+# $ cd <PATH/TO/WORKTREE> (2)
+# $ git checkout -b ticket99999_035 (3)
+# $ git checkout maint-0.3.5 (0)
# $ git pull
-# $ git merge maint-0.3.4 (2)
+# $ git checkout ticket99999_035
+# $ git merge maint-0.3.5
+# $ git merge ticket99999_034 (4)
#
# First set of arrays are the maint-* branch and then the release-* branch.
# New arrays need to be in the WORKTREE= array else they aren't considered.
-MAINT_034=( "maint-0.3.4" "maint-0.2.9" "$GIT_PATH/$TOR_WKT_NAME/maint-0.3.4" )
-MAINT_035=( "maint-0.3.5" "maint-0.3.4" "$GIT_PATH/$TOR_WKT_NAME/maint-0.3.5" )
-MAINT_040=( "maint-0.4.0" "maint-0.3.5" "$GIT_PATH/$TOR_WKT_NAME/maint-0.4.0" )
-MAINT_MASTER=( "master" "maint-0.4.0" "$GIT_PATH/$TOR_MASTER_NAME" )
+#
+# Only used in test branch mode
+# There is no previous branch to merge forward, so the second and fifth items
+# must be blank ("")
+MAINT_029_TB=( "maint-0.2.9" "" "$GIT_PATH/$TOR_WKT_NAME/maint-0.2.9" \
+ "_029" "")
+# Used in maint/release merge and test branch modes
+MAINT_035=( "maint-0.3.5" "maint-0.2.9" "$GIT_PATH/$TOR_WKT_NAME/maint-0.3.5" \
+ "_035" "_029")
+MAINT_040=( "maint-0.4.0" "maint-0.3.5" "$GIT_PATH/$TOR_WKT_NAME/maint-0.4.0" \
+ "_040" "_035")
+MAINT_041=( "maint-0.4.1" "maint-0.4.0" "$GIT_PATH/$TOR_WKT_NAME/maint-0.4.1" \
+ "_041" "_040")
+MAINT_MASTER=( "master" "maint-0.4.1" "$GIT_PATH/$TOR_MASTER_NAME" \
+ "_master" "_041")
RELEASE_029=( "release-0.2.9" "maint-0.2.9" "$GIT_PATH/$TOR_WKT_NAME/release-0.2.9" )
-RELEASE_034=( "release-0.3.4" "maint-0.3.4" "$GIT_PATH/$TOR_WKT_NAME/release-0.3.4" )
RELEASE_035=( "release-0.3.5" "maint-0.3.5" "$GIT_PATH/$TOR_WKT_NAME/release-0.3.5" )
RELEASE_040=( "release-0.4.0" "maint-0.4.0" "$GIT_PATH/$TOR_WKT_NAME/release-0.4.0" )
+RELEASE_041=( "release-0.4.1" "maint-0.4.1" "$GIT_PATH/$TOR_WKT_NAME/release-0.4.1" )
# The master branch path has to be the main repository thus contains the
# origin that will be used to fetch the updates. All the worktrees are created
@@ -55,41 +113,104 @@ ORIGIN_PATH="$GIT_PATH/$TOR_MASTER_NAME"
# SC2034 -- shellcheck thinks that these are unused. We know better.
ACTUALLY_THESE_ARE_USED=<<EOF
-${MAINT_034[0]}
+${MAINT_029_TB[0]}
${MAINT_035[0]}
${MAINT_040[0]}
+${MAINT_041[0]}
${MAINT_MASTER[0]}
${RELEASE_029[0]}
-${RELEASE_034[0]}
${RELEASE_035[0]}
${RELEASE_040[0]}
+${RELEASE_041[0]}
EOF
-##########################
-# Git Worktree to manage #
-##########################
+#######################
+# Argument processing #
+#######################
+
+# Controlled by the -n option. The dry run option will just output the command
+# that would have been executed for each worktree.
+DRY_RUN=0
+
+# Controlled by the -t <test-branch-prefix> option. The test branch base
+# name option makes git-merge-forward.sh create new test branches:
+# <tbbn>_029, <tbbn>_035, ... , <tbbn>_master, and merge forward.
+TEST_BRANCH_PREFIX=
+
+# Controlled by the -u option. The use existing option checks for existing
+# branches with the <test-branch-prefix>, and checks them out, rather than
+# creating a new branch.
+USE_EXISTING=0
+
+while getopts "hnt:u" opt; do
+ case "$opt" in
+ h) usage
+ exit 0
+ ;;
+ n) DRY_RUN=1
+ echo " *** DRY RUN MODE ***"
+ ;;
+ t) TEST_BRANCH_PREFIX="$OPTARG"
+ echo " *** CREATING TEST BRANCHES: ${TEST_BRANCH_PREFIX}_nnn ***"
+ ;;
+ u) USE_EXISTING=1
+ echo " *** USE EXISTING TEST BRANCHES MODE ***"
+ ;;
+ *)
+ echo
+ usage
+ exit 1
+ ;;
+ esac
+done
+
+###########################
+# Git worktrees to manage #
+###########################
+
+if [ -z "$TEST_BRANCH_PREFIX" ]; then
+
+ # maint/release merge mode
+ #
+ # List of all worktrees to work on. All defined above. Ordering is important.
+ # Always the maint-* branch BEFORE the release-*.
+ WORKTREE=(
+ RELEASE_029[@]
+
+ MAINT_035[@]
+ RELEASE_035[@]
+
+ MAINT_040[@]
+ RELEASE_040[@]
+
+ MAINT_041[@]
+ RELEASE_041[@]
-# List of all worktrees to work on. All defined above. Ordering is important.
-# Always the maint-* branch BEFORE then the release-*.
-WORKTREE=(
- RELEASE_029[@]
+ MAINT_MASTER[@]
+ )
- MAINT_034[@]
- RELEASE_034[@]
+else
- MAINT_035[@]
- RELEASE_035[@]
+ # Test branch mode: merge to maint only, and create a new branch for 0.2.9
+ WORKTREE=(
+ MAINT_029_TB[@]
- MAINT_040[@]
- RELEASE_040[@]
+ MAINT_035[@]
+
+ MAINT_040[@]
+
+ MAINT_041[@]
+
+ MAINT_MASTER[@]
+ )
+
+fi
- MAINT_MASTER[@]
-)
COUNT=${#WORKTREE[@]}
-# Controlled by the -n option. The dry run option will just output the command
-# that would have been executed for each worktree.
-DRY_RUN=0
+#############
+# Constants #
+#############
# Control characters
CNRM=$'\x1b[0;0m' # Clear color
@@ -127,7 +248,7 @@ function validate_ret
# Switch to the given branch name.
function switch_branch
{
- local cmd="git checkout $1"
+ local cmd="git checkout '$1'"
printf " %s Switching branch to %s..." "$MARKER" "$1"
if [ $DRY_RUN -eq 0 ]; then
msg=$( eval "$cmd" 2>&1 )
@@ -137,6 +258,45 @@ function switch_branch
fi
}
+# Checkout a new branch with the given branch name.
+function new_branch
+{
+ local cmd="git checkout -b '$1'"
+ printf " %s Creating new branch %s..." "$MARKER" "$1"
+ if [ $DRY_RUN -eq 0 ]; then
+ msg=$( eval "$cmd" 2>&1 )
+ validate_ret $? "$msg"
+ else
+ printf "\\n %s\\n" "${IWTH}$cmd${CNRM}"
+ fi
+}
+
+# Switch to an existing branch, or checkout a new branch with the given
+# branch name.
+function switch_or_new_branch
+{
+ local cmd="git rev-parse --verify '$1'"
+ if [ $DRY_RUN -eq 0 ]; then
+ # Call switch_branch if there is a branch, or new_branch if there is not
+ msg=$( eval "$cmd" 2>&1 )
+ RET=$?
+ if [ $RET -eq 0 ]; then
+ # Branch: (commit id)
+ switch_branch "$1"
+ elif [ $RET -eq 128 ]; then
+ # Not a branch: "fatal: Needed a single revision"
+ new_branch "$1"
+ else
+ # Unexpected return value
+ validate_ret $RET "$msg"
+ fi
+ else
+ printf "\\n %s\\n" "${IWTH}$cmd${CNRM}, then depending on the result:"
+ switch_branch "$1"
+ new_branch "$1"
+ fi
+}
+
# Pull the given branch name.
function pull_branch
{
@@ -150,10 +310,10 @@ function pull_branch
fi
}
-# Merge the given branch name ($2) into the current branch ($1).
+# Merge the given branch name ($1) into the current branch ($2).
function merge_branch
{
- local cmd="git merge --no-edit $1"
+ local cmd="git merge --no-edit '$1'"
printf " %s Merging branch %s into %s..." "$MARKER" "$1" "$2"
if [ $DRY_RUN -eq 0 ]; then
msg=$( eval "$cmd" 2>&1 )
@@ -166,7 +326,7 @@ function merge_branch
# Pull the given branch name.
function merge_branch_origin
{
- local cmd="git merge --ff-only origin/$1"
+ local cmd="git merge --ff-only 'origin/$1'"
printf " %s Merging branch origin/%s..." "$MARKER" "$1"
if [ $DRY_RUN -eq 0 ]; then
msg=$( eval "$cmd" 2>&1 )
@@ -203,16 +363,6 @@ function fetch_origin
# Entry point #
###############
-while getopts "n" opt; do
- case "$opt" in
- n) DRY_RUN=1
- echo " *** DRY DRUN MODE ***"
- ;;
- *)
- ;;
- esac
-done
-
# First, fetch the origin.
goto_repo "$ORIGIN_PATH"
fetch_origin
@@ -222,15 +372,57 @@ for ((i=0; i<COUNT; i++)); do
current=${!WORKTREE[$i]:0:1}
previous=${!WORKTREE[$i]:1:1}
repo_path=${!WORKTREE[$i]:2:1}
+ # default to merge forward mode
+ test_current=
+ test_previous=
+ target_current="$current"
+ target_previous="$previous"
+ if [ "$TEST_BRANCH_PREFIX" ]; then
+ test_current_suffix=${!WORKTREE[$i]:3:1}
+ test_current=${TEST_BRANCH_PREFIX}${test_current_suffix}
+ # the current test branch, if present, or maint/release branch, if not
+ target_current="$test_current"
+ test_previous_suffix=${!WORKTREE[$i]:4:1}
+ if [ "$test_previous_suffix" ]; then
+ test_previous=${TEST_BRANCH_PREFIX}${test_previous_suffix}
+ # the previous test branch, if present, or maint/release branch, if not
+ target_previous="$test_previous"
+ fi
+ fi
- printf "%s Handling branch \\n" "$MARKER" "${BYEL}$current${CNRM}"
+ printf "%s Handling branch \\n" "$MARKER" "${BYEL}$target_current${CNRM}"
# Go into the worktree to start merging.
goto_repo "$repo_path"
- # Checkout the current branch
+ if [ "$test_current" ]; then
+ if [ $USE_EXISTING -eq 0 ]; then
+ # Create a test branch from the currently checked-out branch/commit
+ # Fail if it already exists
+ new_branch "$test_current"
+ else
+ # Switch if it exists, or create if it does not
+ switch_or_new_branch "$test_current"
+ fi
+ fi
+ # Checkout the current maint/release branch
switch_branch "$current"
- # Update the current branch with an origin merge to get the latest.
+ # Update the current maint/release branch with an origin merge to get the
+ # latest updates
merge_branch_origin "$current"
- # Merge the previous branch. Ex: merge maint-0.2.5 into maint-0.2.9.
- merge_branch "$previous" "$current"
+ if [ "$test_current" ]; then
+ # Checkout the test branch
+ switch_branch "$test_current"
+ # Merge the updated maint branch into the test branch
+ merge_branch "$current" "$test_current"
+ fi
+ # Merge the previous branch into the target branch
+ # Merge Forward Example:
+ # merge maint-0.2.9 into maint-0.3.5.
+ # Test Branch Example:
+ # merge bug99999_029 into bug99999_035.
+ # Skip the merge if the previous branch does not exist
+ # (there's nothing to merge forward into the oldest test branch)
+ if [ "$target_previous" ]; then
+ merge_branch "$target_previous" "$target_current"
+ fi
done
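A usage sketch for the reworked script, assuming TOR_FULL_GIT_PATH is set as described in usage(); the ticket99999 prefix is a placeholder:

  $ export TOR_FULL_GIT_PATH="$HOME/git"
  $ ./scripts/git/git-merge-forward.sh -n                 # dry run of the maint/release merges
  $ ./scripts/git/git-merge-forward.sh -t ticket99999     # create ticket99999_029 ... ticket99999_master
  $ ./scripts/git/git-merge-forward.sh -t ticket99999 -u  # restart after fixing a failed merge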
diff --git a/scripts/git/git-pull-all.sh b/scripts/git/git-pull-all.sh
index 5d1d58e4bf..dc16066388 100755
--- a/scripts/git/git-pull-all.sh
+++ b/scripts/git/git-pull-all.sh
@@ -1,31 +1,53 @@
-#!/bin/bash
+#!/usr/bin/env bash
-##################################
-# User configuration (change me) #
-##################################
+SCRIPT_NAME=$(basename "$0")
-# The general setup that is suggested here is:
-#
-# GIT_PATH = /home/<user>/git/
-# ... where the git repository directories resides.
-# TOR_MASTER_NAME = "tor"
-# ... which means that tor.git was cloned in /home/<user>/git/tor
-# TOR_WKT_NAME = "tor-wkt"
-# ... which means that the tor worktrees are in /home/<user>/git/tor-wkt
+function usage()
+{
+ echo "$SCRIPT_NAME [-h] [-n]"
+ echo
+ echo " arguments:"
+ echo " -h: show this help text"
+ echo " -n: dry run mode"
+ echo " (default: run commands)"
+ echo
+ echo " env vars:"
+ echo " required:"
+ echo " TOR_FULL_GIT_PATH: where the git repository directories reside."
+ echo " You must set this env var, we recommend \$HOME/git/"
+ echo " (default: fail if this env var is not set;"
+ echo " current: $GIT_PATH)"
+ echo
+ echo " optional:"
+ echo " TOR_MASTER: the name of the directory containing the tor.git clone"
+ echo " The tor master git directory is \$GIT_PATH/\$TOR_MASTER"
+ echo " (default: tor; current: $TOR_MASTER_NAME)"
+ echo " TOR_WKT_NAME: the name of the directory containing the tor"
+ echo " worktrees. The tor worktrees are:"
+ echo " \$GIT_PATH/\$TOR_WKT_NAME/{maint-*,release-*}"
+ echo " (default: tor-wkt; current: $TOR_WKT_NAME)"
+ echo " we recommend that you set these env vars in your ~/.profile"
+}
+
+#################
+# Configuration #
+#################
+
+# Don't change this configuration - set the env vars in your .profile
# Where are all those git repositories?
-GIT_PATH="FULL_PATH_TO_GIT_REPOSITORY_DIRECTORY"
+GIT_PATH=${TOR_FULL_GIT_PATH:-"FULL_PATH_TO_GIT_REPOSITORY_DIRECTORY"}
# The tor master git repository directory from which all the worktree have
# been created.
-TOR_MASTER_NAME="tor"
+TOR_MASTER_NAME=${TOR_MASTER_NAME:-"tor"}
# The worktrees location (directory).
-TOR_WKT_NAME="tor-wkt"
+TOR_WKT_NAME=${TOR_WKT_NAME:-"tor-wkt"}
-#########################
-# End of configuration. #
-#########################
+##########################
+# Git branches to manage #
+##########################
-# Configuration of the branches that needs merging. The values are in order:
+# Configuration of the branches that need pulling. The values are in order:
# (1) Branch name to pull (update).
# (2) Full path of the git worktree.
#
@@ -37,15 +59,15 @@ TOR_WKT_NAME="tor-wkt"
# First set of arrays are the maint-* branch and then the release-* branch.
# New arrays need to be in the WORKTREE= array else they aren't considered.
MAINT_029=( "maint-0.2.9" "$GIT_PATH/$TOR_WKT_NAME/maint-0.2.9" )
-MAINT_034=( "maint-0.3.4" "$GIT_PATH/$TOR_WKT_NAME/maint-0.3.4" )
MAINT_035=( "maint-0.3.5" "$GIT_PATH/$TOR_WKT_NAME/maint-0.3.5" )
MAINT_040=( "maint-0.4.0" "$GIT_PATH/$TOR_WKT_NAME/maint-0.4.0" )
+MAINT_041=( "maint-0.4.1" "$GIT_PATH/$TOR_WKT_NAME/maint-0.4.1" )
MAINT_MASTER=( "master" "$GIT_PATH/$TOR_MASTER_NAME" )
RELEASE_029=( "release-0.2.9" "$GIT_PATH/$TOR_WKT_NAME/release-0.2.9" )
-RELEASE_034=( "release-0.3.4" "$GIT_PATH/$TOR_WKT_NAME/release-0.3.4" )
RELEASE_035=( "release-0.3.5" "$GIT_PATH/$TOR_WKT_NAME/release-0.3.5" )
RELEASE_040=( "release-0.4.0" "$GIT_PATH/$TOR_WKT_NAME/release-0.4.0" )
+RELEASE_041=( "release-0.4.1" "$GIT_PATH/$TOR_WKT_NAME/release-0.4.1" )
# The master branch path has to be the main repository thus contains the
# origin that will be used to fetch the updates. All the worktrees are created
@@ -55,19 +77,19 @@ ORIGIN_PATH="$GIT_PATH/$TOR_MASTER_NAME"
# SC2034 -- shellcheck thinks that these are unused. We know better.
ACTUALLY_THESE_ARE_USED=<<EOF
${MAINT_029[0]}
-${MAINT_034[0]}
${MAINT_035[0]}
${MAINT_040[0]}
+${MAINT_041[0]}
${MAINT_MASTER[0]}
${RELEASE_029[0]}
-${RELEASE_034[0]}
${RELEASE_035[0]}
${RELEASE_040[0]}
+${RELEASE_041[0]}
EOF
-##########################
-# Git Worktree to manage #
-##########################
+###########################
+# Git worktrees to manage #
+###########################
# List of all worktrees to work on. All defined above. Ordering is important.
# Always the maint-* branch first then the release-*.
@@ -75,23 +97,47 @@ WORKTREE=(
MAINT_029[@]
RELEASE_029[@]
- MAINT_034[@]
- RELEASE_034[@]
-
MAINT_035[@]
RELEASE_035[@]
MAINT_040[@]
RELEASE_040[@]
+ MAINT_041[@]
+ RELEASE_041[@]
+
MAINT_MASTER[@]
)
COUNT=${#WORKTREE[@]}
+#######################
+# Argument processing #
+#######################
+
# Controlled by the -n option. The dry run option will just output the command
# that would have been executed for each worktree.
DRY_RUN=0
+while getopts "hn" opt; do
+ case "$opt" in
+ h) usage
+ exit 0
+ ;;
+ n) DRY_RUN=1
+ echo " *** DRY DRUN MODE ***"
+ ;;
+ *)
+ echo
+ usage
+ exit 1
+ ;;
+ esac
+done
+
+#############
+# Constants #
+#############
+
# Control characters
CNRM=$'\x1b[0;0m' # Clear color
@@ -191,16 +237,6 @@ function fetch_tor_github
# Entry point #
###############
-while getopts "n" opt; do
- case "$opt" in
- n) DRY_RUN=1
- echo " *** DRY DRUN MODE ***"
- ;;
- *)
- ;;
- esac
-done
-
# First, fetch tor-github.
goto_repo "$ORIGIN_PATH"
fetch_tor_github
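As the usage() text suggests, the configuration can live in ~/.profile; a minimal sketch using the default directory names:

  $ echo 'export TOR_FULL_GIT_PATH="$HOME/git"' >> ~/.profile
  $ ./scripts/git/git-pull-all.sh -n   # dry run: print the fetch/pull commands per worktree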
diff --git a/scripts/git/git-push-all.sh b/scripts/git/git-push-all.sh
index 0ce951d4bd..7c43fe24d8 100755
--- a/scripts/git/git-push-all.sh
+++ b/scripts/git/git-push-all.sh
@@ -1,11 +1,292 @@
-#!/bin/bash
+#!/usr/bin/env bash
-# The remote upstream branch on which git.torproject.org/tor.git points to.
-UPSTREAM_BRANCH="upstream"
+SCRIPT_NAME=$(basename "$0")
-git push $UPSTREAM_BRANCH \
- master \
- {release,maint}-0.4.0 \
- {release,maint}-0.3.5 \
- {release,maint}-0.3.4 \
- {release,maint}-0.2.9
+function usage()
+{
+ if [ "$TOR_PUSH_SAME" ]; then
+ CURRENT_PUSH_SAME="push"
+ else
+ CURRENT_PUSH_SAME="skip"
+ fi
+
+ echo "$SCRIPT_NAME [-h] [-r <remote-name> [-t <test-branch-prefix>]] [-s]"
+ # The next line looks misaligned, but it lines up in the output
+ echo " [-- [-n] [--no-atomic] <git push options>]"
+ echo
+ echo " arguments:"
+ echo " -h: show this help text"
+ echo " -n: dry run mode"
+ echo " (default: run commands)"
+ echo " -r: push to remote-name, rather than the default upstream remote."
+ echo " (default: $DEFAULT_UPSTREAM_REMOTE, current: $UPSTREAM_REMOTE)"
+ echo " -t: test branch mode: push test branches to remote-name. Pushes"
+ echo " branches prefix_029, prefix_035, ... , prefix_master."
+ echo " (default: push maint-*, release-*, and master)"
+ echo " -s: push branches whose tips match upstream maint, release, or"
+ echo " master branches. The default is to skip these branches,"
+ echo " because they do not contain any new code. Use -s to test for"
+ echo " CI environment failures, using code that previously passed CI."
+ echo " (default: skip; current: $CURRENT_PUSH_SAME matching branches)"
+ echo " --: pass further arguments to git push."
+ echo " (default: git push --atomic, current: $GIT_PUSH)"
+ echo
+ echo " env vars:"
+ echo " optional:"
+ echo " TOR_GIT_PUSH_PATH: change to this directory before pushing."
+ echo " (default: if \$TOR_FULL_GIT_PATH is set,"
+ echo " use \$TOR_FULL_GIT_PATH/\$TOR_MASTER;"
+ echo " Otherwise, use the current directory for pushes;"
+ echo " current: $TOR_GIT_PUSH_PATH)"
+ echo " TOR_FULL_GIT_PATH: where the git repository directories reside."
+ echo " We recommend using \$HOME/git/."
+ echo " (default: use the current directory for pushes;"
+ echo " current: $TOR_FULL_GIT_PATH)"
+ echo " TOR_MASTER: the name of the directory containing the tor.git clone"
+ echo " The tor master git directory is \$GIT_PATH/\$TOR_MASTER"
+ echo " (default: tor; current: $TOR_MASTER_NAME)"
+ echo
+ echo " TOR_UPSTREAM_REMOTE_NAME: the default upstream remote."
+ echo " Overridden by -r."
+ echo " (default: upstream; current: $UPSTREAM_REMOTE)"
+ echo " TOR_GIT_PUSH: the git push command and default arguments."
+ echo " Overridden by <git push options> after --."
+ echo " (default: git push --atomic; current: $GIT_PUSH)"
+ echo " TOR_PUSH_SAME: push branches whose tips match upstream maint,"
+ echo " release, or master branches. Inverted by -s."
+ echo " (default: skip; current: $CURRENT_PUSH_SAME matching branches)"
+ echo " TOR_PUSH_DELAY: pushes the master and maint branches separately,"
+ echo " so that CI runs in a sensible order."
+ echo " (default: push all branches immediately; current: $PUSH_DELAY)"
+ echo " we recommend that you set these env vars in your ~/.profile"
+}
+
+set -e
+
+#################
+# Configuration #
+#################
+
+# Don't change this configuration - set the env vars in your .profile
+#
+# The tor master git repository directory from which all the worktree have
+# been created.
+TOR_MASTER_NAME=${TOR_MASTER_NAME:-"tor"}
+# Which directory do we push from?
+if [ "$TOR_FULL_GIT_PATH" ]; then
+ TOR_GIT_PUSH_PATH=${TOR_GIT_PUSH_PATH:-"$TOR_FULL_GIT_PATH/$TOR_MASTER_NAME"}
+fi
+# git push command and default arguments
+GIT_PUSH=${TOR_GIT_PUSH:-"git push --atomic"}
+# The upstream remote which git.torproject.org/tor.git points to.
+DEFAULT_UPSTREAM_REMOTE=${TOR_UPSTREAM_REMOTE_NAME:-"upstream"}
+# Push to a different upstream remote using -r <remote-name>
+UPSTREAM_REMOTE=${DEFAULT_UPSTREAM_REMOTE}
+# Add a delay between pushes, so CI runs on the most important branches first
+PUSH_DELAY=${TOR_PUSH_DELAY:-0}
+# Push (1) or skip (0) test branches that are the same as an upstream
+# maint/master branch. Push if you are testing that the CI environment still
+# works on old code, skip if you are testing new code in the branch.
+# Default: skip unchanged branches.
+# Inverted by the -s option.
+PUSH_SAME=${TOR_PUSH_SAME:-0}
+
+#######################
+# Argument processing #
+#######################
+
+# Controlled by the -t <test-branch-prefix> option. The test branch base
+# name option makes git-merge-forward.sh create new test branches:
+# <tbbn>_029, <tbbn>_035, ... , <tbbn>_master, and merge forward.
+TEST_BRANCH_PREFIX=
+
+while getopts ":hr:st:" opt; do
+ case "$opt" in
+ h) usage
+ exit 0
+ ;;
+ r) UPSTREAM_REMOTE="$OPTARG"
+ echo " *** PUSHING TO REMOTE: ${UPSTREAM_REMOTE} ***"
+ shift
+ shift
+ OPTIND=$((OPTIND - 2))
+ ;;
+ s) PUSH_SAME=$((! PUSH_SAME))
+ if [ "$PUSH_SAME" -eq 0 ]; then
+ echo " *** SKIPPING UNCHANGED TEST BRANCHES ***"
+ else
+ echo " *** PUSHING UNCHANGED TEST BRANCHES ***"
+ fi
+ shift
+ OPTIND=$((OPTIND - 1))
+ ;;
+ t) TEST_BRANCH_PREFIX="$OPTARG"
+ echo " *** PUSHING TEST BRANCHES: ${TEST_BRANCH_PREFIX}_nnn ***"
+ shift
+ shift
+ OPTIND=$((OPTIND - 2))
+ ;;
+ *)
+ # Assume we're done with script arguments,
+ # and git push will handle the option
+ break
+ ;;
+ esac
+done
+
+# getopts doesn't allow "-" as an option character,
+# so we have to handle -- manually
+if [ "$1" = "--" ]; then
+ shift
+fi
+
+if [ "$TEST_BRANCH_PREFIX" ]; then
+ if [ "$UPSTREAM_REMOTE" = "$DEFAULT_UPSTREAM_REMOTE" ]; then
+ echo "Pushing test branches ${TEST_BRANCH_PREFIX}_nnn to " \
+ "the default remote $DEFAULT_UPSTREAM_REMOTE is not allowed."
+ echo
+ usage
+ exit 1
+ fi
+fi
+
+if [ "$TOR_GIT_PUSH_PATH" ]; then
+ echo "Changing to $GIT_PUSH_PATH before pushing"
+ cd "$TOR_GIT_PUSH_PATH"
+else
+ echo "Pushing from the current directory"
+fi
+
+echo "Calling $GIT_PUSH" "$@" "<branches>"
+
+################################
+# Git upstream remote branches #
+################################
+
+DEFAULT_UPSTREAM_BRANCHES=
+if [ "$DEFAULT_UPSTREAM_REMOTE" != "$UPSTREAM_REMOTE" ]; then
+ DEFAULT_UPSTREAM_BRANCHES=$(echo \
+ "$DEFAULT_UPSTREAM_REMOTE"/master \
+ "$DEFAULT_UPSTREAM_REMOTE"/{release,maint}-0.4.1 \
+ "$DEFAULT_UPSTREAM_REMOTE"/{release,maint}-0.4.0 \
+ "$DEFAULT_UPSTREAM_REMOTE"/{release,maint}-0.3.5 \
+ "$DEFAULT_UPSTREAM_REMOTE"/{release,maint}-0.2.9 \
+ )
+fi
+
+UPSTREAM_BRANCHES=$(echo \
+ "$UPSTREAM_REMOTE"/master \
+ "$UPSTREAM_REMOTE"/{release,maint}-0.4.1 \
+ "$UPSTREAM_REMOTE"/{release,maint}-0.4.0 \
+ "$UPSTREAM_REMOTE"/{release,maint}-0.3.5 \
+ "$UPSTREAM_REMOTE"/{release,maint}-0.2.9 \
+ )
+
+########################
+# Git branches to push #
+########################
+
+PUSH_BRANCHES=$(echo \
+ master \
+ {release,maint}-0.4.1 \
+ {release,maint}-0.4.0 \
+ {release,maint}-0.3.5 \
+ {release,maint}-0.2.9 \
+ )
+
+if [ -z "$TEST_BRANCH_PREFIX" ]; then
+
+ # maint/release push mode
+ #
+ # List of branches to push. Ordering is not important.
+ PUSH_BRANCHES=$(echo \
+ master \
+ {release,maint}-0.4.1 \
+ {release,maint}-0.4.0 \
+ {release,maint}-0.3.5 \
+ {release,maint}-0.2.9 \
+ )
+else
+
+ # Test branch mode: push the test branches created by git-merge-forward.sh
+ #
+ # List of branches to push. Ordering is not important.
+ PUSH_BRANCHES=" \
+ ${TEST_BRANCH_PREFIX}_master \
+ ${TEST_BRANCH_PREFIX}_041 \
+ ${TEST_BRANCH_PREFIX}_040 \
+ ${TEST_BRANCH_PREFIX}_035 \
+ ${TEST_BRANCH_PREFIX}_029 \
+ "
+fi
+
+###############
+# Entry point #
+###############
+
+# Skip the test branches that are the same as the upstream branches
+if [ "$PUSH_SAME" -eq 0 ] && [ "$TEST_BRANCH_PREFIX" ]; then
+ NEW_PUSH_BRANCHES=
+ for b in $PUSH_BRANCHES; do
+ PUSH_COMMIT=$(git rev-parse "$b")
+ SKIP_UPSTREAM=
+ for u in $DEFAULT_UPSTREAM_BRANCHES $UPSTREAM_BRANCHES; do
+ UPSTREAM_COMMIT=$(git rev-parse "$u")
+ if [ "$PUSH_COMMIT" = "$UPSTREAM_COMMIT" ]; then
+ SKIP_UPSTREAM="$u"
+ fi
+ done
+ if [ "$SKIP_UPSTREAM" ]; then
+ printf "Skipping unchanged: %s remote: %s\\n" \
+ "$b" "$SKIP_UPSTREAM"
+ else
+ if [ "$NEW_PUSH_BRANCHES" ]; then
+ NEW_PUSH_BRANCHES="${NEW_PUSH_BRANCHES} ${b}"
+ else
+ NEW_PUSH_BRANCHES="${b}"
+ fi
+ fi
+ done
+ PUSH_BRANCHES=${NEW_PUSH_BRANCHES}
+fi
+
+if [ "$PUSH_DELAY" -le 0 ]; then
+ echo "Pushing $PUSH_BRANCHES"
+ # We know that there are no spaces in any branch within $PUSH_BRANCHES, so
+ # it is safe to use it unquoted. (This also applies to the other shellcheck
+ # exceptions below.)
+ #
+ # Push all the branches at the same time
+ # shellcheck disable=SC2086
+ $GIT_PUSH "$@" "$UPSTREAM_REMOTE" $PUSH_BRANCHES
+else
+ # Push the branches in optimal CI order, with a delay between each push
+ PUSH_BRANCHES=$(echo "$PUSH_BRANCHES" | tr " " "\\n" | sort -V)
+ MASTER_BRANCH=$(echo "$PUSH_BRANCHES" | tr " " "\\n" | grep master)
+ if [ -z "$TEST_BRANCH_PREFIX" ]; then
+ MAINT_BRANCHES=$(echo "$PUSH_BRANCHES" | tr " " "\\n" | grep maint)
+ RELEASE_BRANCHES=$(echo "$PUSH_BRANCHES" | tr " " "\\n" | grep release | \
+ tr "\\n" " ")
+ printf \
+ "Pushing with %ss delays, so CI runs in this order:\\n%s\\n%s\\n%s\\n" \
+ "$PUSH_DELAY" "$MASTER_BRANCH" "$MAINT_BRANCHES" "$RELEASE_BRANCHES"
+ else
+ # Actually test branches based on maint branches
+ MAINT_BRANCHES=$(echo "$PUSH_BRANCHES" | tr " " "\\n" | grep -v master)
+ printf "Pushing with %ss delays, so CI runs in this order:\\n%s\\n%s\\n" \
+ "$PUSH_DELAY" "$MASTER_BRANCH" "$MAINT_BRANCHES"
+ # No release branches
+ RELEASE_BRANCHES=
+ fi
+ $GIT_PUSH "$@" "$UPSTREAM_REMOTE" "$MASTER_BRANCH"
+ sleep "$PUSH_DELAY"
+ # shellcheck disable=SC2086
+ for b in $MAINT_BRANCHES; do
+ $GIT_PUSH "$@" "$UPSTREAM_REMOTE" "$b"
+ sleep "$PUSH_DELAY"
+ done
+ if [ "$RELEASE_BRANCHES" ]; then
+ # shellcheck disable=SC2086
+ $GIT_PUSH "$@" "$UPSTREAM_REMOTE" $RELEASE_BRANCHES
+ fi
+fi
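Illustrative invocations based on the usage() text; the remote name mygithub and the ticket99999 prefix are placeholders:

  $ ./scripts/git/git-push-all.sh                             # push master, maint-*, and release-* upstream
  $ ./scripts/git/git-push-all.sh -r mygithub -t ticket99999  # push test branches to a personal remote
  $ ./scripts/git/git-push-all.sh -- -n --no-atomic           # pass extra options through to git push
  $ TOR_PUSH_DELAY=90 ./scripts/git/git-push-all.sh           # stagger pushes so CI runs master and maint first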
diff --git a/scripts/git/post-merge.git-hook b/scripts/git/post-merge.git-hook
index 176b7c9bbd..eae4f999e7 100755
--- a/scripts/git/post-merge.git-hook
+++ b/scripts/git/post-merge.git-hook
@@ -35,6 +35,12 @@ check_for_script_update() {
fi
}
+cur_branch=$(git rev-parse --abbrev-ref HEAD)
+if [ "$cur_branch" != "master" ]; then
+ echo "post-merge: Not a master branch. Skipping."
+ exit 0
+fi
+
check_for_diffs "pre-push"
check_for_diffs "pre-commit"
check_for_diffs "post-merge"
diff --git a/scripts/git/pre-commit.git-hook b/scripts/git/pre-commit.git-hook
index b285776c04..1c381ec60a 100755
--- a/scripts/git/pre-commit.git-hook
+++ b/scripts/git/pre-commit.git-hook
@@ -1,10 +1,11 @@
-#!/bin/bash
+#!/usr/bin/env bash
#
# To install this script, copy it to .git/hooks/pre-commit in local copy of
# tor git repo and make sure it has permission to execute.
#
# This is pre-commit git hook script that prevents commiting your changeset if
-# it fails our code formatting or changelog entry formatting checkers.
+# it fails our code formatting, changelog entry formatting, module include
+# rules, or best practices tracker.
workdir=$(git rev-parse --show-toplevel)
@@ -36,10 +37,23 @@ elif [ -d src/common ]; then
src/tools/*.[ch]
fi
-if test -e scripts/maint/checkIncludes.py; then
- python scripts/maint/checkIncludes.py
+if test -e scripts/maint/practracker/includes.py; then
+ python scripts/maint/practracker/includes.py
fi
-if [ -e scripts/maint/practracker/practracker.py ]; then
- python3 ./scripts/maint/practracker/practracker.py "$workdir"
+# Only call practracker if ${PT_DIR}/.enable_practracker_in_hooks exists
+# We do this check so that we can enable practracker in hooks in master, and
+# disable it on maint branches
+PT_DIR=scripts/maint/practracker
+
+if [ -e "${PT_DIR}/practracker.py" ]; then
+ if [ -e "${PT_DIR}/.enable_practracker_in_hooks" ]; then
+ if ! python3 "${PT_DIR}/practracker.py" "$workdir"; then
+ exit 1
+ fi
+ fi
+fi
+
+if [ -e scripts/maint/checkShellScripts.sh ]; then
+ scripts/maint/checkShellScripts.sh
fi
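With this gating, the hook only runs practracker on branches that carry the marker file; a maintainer could enable it with an empty file (a sketch; the file's contents are never read):

  $ touch scripts/maint/practracker/.enable_practracker_in_hooks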
diff --git a/scripts/git/pre-push.git-hook b/scripts/git/pre-push.git-hook
index c9e72a4d43..f4504c4215 100755
--- a/scripts/git/pre-push.git-hook
+++ b/scripts/git/pre-push.git-hook
@@ -1,10 +1,11 @@
-#!/bin/bash
+#!/usr/bin/env bash
# git pre-push hook script to:
+# 0) Call the pre-commit hook, if it is available
# 1) prevent "fixup!" and "squash!" commit from ending up in master, release-*
# or maint-*
# 2) Disallow pushing branches other than master, release-*
-# and maint-* to origin (e.g. gitweb.torproject.org).
+# and maint-* to origin (e.g. gitweb.torproject.org)
#
# To install this script, copy it into .git/hooks/pre-push path in your
# local copy of git repository. Make sure it has permission to execute.
@@ -21,6 +22,11 @@ z40=0000000000000000000000000000000000000000
upstream_name=${TOR_UPSTREAM_REMOTE_NAME:-"upstream"}
+# Are you adding a new check to the git hooks?
+# - Common checks belong in the pre-commit hook
+# - Push-only checks belong in the pre-push hook
+#
+# Call the pre-commit hook for the common checks, if it is executable.
workdir=$(git rev-parse --show-toplevel)
if [ -x "$workdir/.git/hooks/pre-commit" ]; then
if ! "$workdir"/.git/hooks/pre-commit; then
@@ -28,14 +34,7 @@ if [ -x "$workdir/.git/hooks/pre-commit" ]; then
fi
fi
-if [ -e scripts/maint/practracker/practracker.py ]; then
- if ! python3 ./scripts/maint/practracker/practracker.py "$workdir"; then
- exit 1
- fi
-fi
-
remote="$1"
-remote_loc="$2"
remote_name=$(git remote --verbose | grep "$2" | awk '{print $1}' | head -n 1)
@@ -105,4 +104,3 @@ do
done
exit 0
-
diff --git a/scripts/maint/add_c_file.py b/scripts/maint/add_c_file.py
index 499415974f..adf7ce79bb 100755
--- a/scripts/maint/add_c_file.py
+++ b/scripts/maint/add_c_file.py
@@ -125,8 +125,8 @@ class AutomakeChunk:
Y \
Z
"""
- self.prespace = "\t"
- self.postspace = "\t\t"
+ prespace = "\t"
+ postspace = "\t\t"
for lineno, line in enumerate(self.lines):
m = re.match(r'(\s+)(\S+)(\s+)\\', line)
if not m:
@@ -135,7 +135,7 @@ class AutomakeChunk:
if fname > member:
self.insert_before(lineno, member, prespace, postspace)
return
- self.insert_at_end(member)
+ self.insert_at_end(member, prespace, postspace)
def insert_before(self, lineno, member, prespace, postspace):
self.lines.insert(lineno,
diff --git a/scripts/maint/annotate_ifdef_directives b/scripts/maint/annotate_ifdef_directives
deleted file mode 100755
index ca267a865e..0000000000
--- a/scripts/maint/annotate_ifdef_directives
+++ /dev/null
@@ -1,74 +0,0 @@
-#!/usr/bin/python
-# Copyright (c) 2017-2019, The Tor Project, Inc.
-# See LICENSE for licensing information
-
-import re
-
-LINE_OBVIOUSNESS_LIMIT = 4
-
-class Problem(Exception):
- pass
-
-def uncomment(s):
- s = re.sub(r'//.*','',s)
- s = re.sub(r'/\*.*','',s)
- return s.strip()
-
-def translate(f_in, f_out):
- whole_file = []
- stack = []
- cur_level = whole_file
- lineno = 0
- for line in f_in:
- lineno += 1
- m = re.match(r'\s*#\s*(if|ifdef|ifndef|else|endif|elif)\b\s*(.*)',
- line)
- if not m:
- f_out.write(line)
- continue
- command,rest = m.groups()
- if command in ("if", "ifdef", "ifndef"):
- # The #if directive pushes us one level lower on the stack.
- if command == 'ifdef':
- rest = "defined(%s)"%uncomment(rest)
- elif command == 'ifndef':
- rest = "!defined(%s)"%uncomment(rest)
- elif rest.endswith("\\"):
- rest = rest[:-1]+"..."
-
- rest = uncomment(rest)
-
- new_level = [ (command, rest, lineno) ]
- stack.append(cur_level)
- cur_level = new_level
- f_out.write(line)
- elif command in ("else", "elif"):
- if len(cur_level) == 0 or cur_level[-1][0] == 'else':
- raise Problem("Unexpected #%s on %d"% (command,lineno))
- if (len(cur_level) == 1 and command == 'else' and
- lineno > cur_level[0][2] + LINE_OBVIOUSNESS_LIMIT):
- f_out.write("#else /* !(%s) */\n"%cur_level[0][1])
- else:
- f_out.write(line)
- cur_level.append((command, rest, lineno))
- else:
- assert command == 'endif'
- if len(stack) == 0:
- raise Problem("Unmatched #%s on %s"% (command,lineno))
- if lineno <= cur_level[0][2] + LINE_OBVIOUSNESS_LIMIT:
- f_out.write(line)
- elif len(cur_level) == 1 or (
- len(cur_level) == 2 and cur_level[1][0] == 'else'):
- f_out.write("#endif /* %s */\n"%cur_level[0][1])
- else:
- f_out.write("#endif /* %s || ... */\n"%cur_level[0][1])
- cur_level = stack.pop()
- if len(stack) or cur_level != whole_file:
- raise Problem("Missing #endif")
-
-import sys,os
-for fn in sys.argv[1:]:
- with open(fn+"_OUT", 'w') as output_file:
- translate(open(fn, 'r'), output_file)
- os.rename(fn+"_OUT", fn)
-
diff --git a/scripts/maint/annotate_ifdef_directives.py b/scripts/maint/annotate_ifdef_directives.py
new file mode 100755
index 0000000000..102128bfa0
--- /dev/null
+++ b/scripts/maint/annotate_ifdef_directives.py
@@ -0,0 +1,317 @@
+#!/usr/bin/python
+# Copyright (c) 2017-2019, The Tor Project, Inc.
+# See LICENSE for licensing information
+
+r"""
+This script iterates over a list of C files. For each file, it looks at the
+#if/#else C macros, and annotates them with comments explaining what they
+match.
+
+For example, it replaces this kind of input...
+
+>>> INPUT = '''
+... #ifdef HAVE_OCELOT
+... C code here
+... #if MIMSY == BOROGROVE
+... block 1
+... block 1
+... block 1
+... block 1
+... #else
+... block 2
+... block 2
+... block 2
+... block 2
+... #endif
+... #endif
+... '''
+
+With this kind of output:
+>>> EXPECTED_OUTPUT = '''
+... #ifdef HAVE_OCELOT
+... C code here
+... #if MIMSY == BOROGROVE
+... block 1
+... block 1
+... block 1
+... block 1
+... #else /* !(MIMSY == BOROGROVE) */
+... block 2
+... block 2
+... block 2
+... block 2
+... #endif /* MIMSY == BOROGROVE */
+... #endif /* defined(HAVE_OCELOT) */
+... '''
+
+Here's how to use it:
+>>> import sys
+>>> if sys.version_info.major < 3: from cStringIO import StringIO
+>>> if sys.version_info.major >= 3: from io import StringIO
+
+>>> OUTPUT = StringIO()
+>>> translate(StringIO(INPUT), OUTPUT)
+>>> assert OUTPUT.getvalue() == EXPECTED_OUTPUT
+
+Note that only #else and #endif lines are annotated. Existing comments
+on those lines are removed.
+"""
+
+import re
+
+# Any block with fewer than this many lines does not need annotations.
+LINE_OBVIOUSNESS_LIMIT = 4
+
+# Maximum line width. This includes a terminating newline character.
+#
+# (This is the maximum before encoding, so that if the operating system
+# uses multiple characters to encode newline, that's still okay.)
+LINE_WIDTH=80
+
+class Problem(Exception):
+ pass
+
+def close_parens_needed(expr):
+ """Return the number of left-parentheses needed to make 'expr'
+ balanced.
+
+ >>> close_parens_needed("1+2")
+ 0
+ >>> close_parens_needed("(1 + 2)")
+ 0
+ >>> close_parens_needed("(1 + 2")
+ 1
+ >>> close_parens_needed("(1 + (2 *")
+ 2
+ >>> close_parens_needed("(1 + (2 * 3) + (4")
+ 2
+ """
+ return expr.count("(") - expr.count(")")
+
+def truncate_expression(expr, new_width):
+ """Given a parenthesized C expression in 'expr', try to return a new
+ expression that is similar to 'expr', but no more than 'new_width'
+ characters long.
+
+ Try to return an expression with balanced parentheses.
+
+ >>> truncate_expression("1+2+3", 8)
+ '1+2+3'
+ >>> truncate_expression("1+2+3+4+5", 8)
+ '1+2+3...'
+ >>> truncate_expression("(1+2+3+4)", 8)
+ '(1+2...)'
+ >>> truncate_expression("(1+(2+3+4))", 8)
+ '(1+...)'
+ >>> truncate_expression("(((((((((", 8)
+ '((...))'
+ """
+ if len(expr) <= new_width:
+ # The expression is already short enough.
+ return expr
+
+ ellipsis = "..."
+
+ # Start this at the minimum that we might truncate.
+ n_to_remove = len(expr) + len(ellipsis) - new_width
+
+ # Try removing characters, one by one, until we get something where
+ # re-balancing the parentheses still fits within the limit.
+ while n_to_remove < len(expr):
+ truncated = expr[:-n_to_remove] + ellipsis
+ truncated += ")" * close_parens_needed(truncated)
+ if len(truncated) <= new_width:
+ return truncated
+ n_to_remove += 1
+
+ return ellipsis
+
+def commented_line(fmt, argument, maxwidth=LINE_WIDTH):
+ # (This is a raw docstring so that our doctests can use \.)
+ r"""
+ Return fmt%argument, for use as a commented line. If the line would
+ be longer than maxwidth, truncate argument but try to keep its
+ parentheses balanced.
+
+ Requires that fmt%"..." will fit into maxwidth characters.
+
+ Requires that fmt ends with a newline.
+
+ >>> commented_line("/* %s */\n", "hello world", 32)
+ '/* hello world */\n'
+ >>> commented_line("/* %s */\n", "hello world", 15)
+ '/* hello... */\n'
+ >>> commented_line("#endif /* %s */\n", "((1+2) && defined(FOO))", 32)
+ '#endif /* ((1+2) && defi...) */\n'
+
+
+ The default line limit is 80 characters including the newline:
+
+ >>> long_argument = "long " * 100
+ >>> long_line = commented_line("#endif /* %s */\n", long_argument)
+ >>> len(long_line)
+ 80
+
+ >>> long_line[:40]
+ '#endif /* long long long long long long '
+ >>> long_line[40:]
+ 'long long long long long long lon... */\n'
+
+ If a line works out to being 80 characters naturally, it isn't truncated,
+ and no ellipsis is added.
+
+ >>> medium_argument = "a"*66
+ >>> medium_line = commented_line("#endif /* %s */\n", medium_argument)
+ >>> len(medium_line)
+ 80
+ >>> "..." in medium_line
+ False
+ >>> medium_line[:40]
+ '#endif /* aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
+ >>> medium_line[40:]
+ 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa */\n'
+
+
+ """
+ assert fmt.endswith("\n")
+ result = fmt % argument
+ if len(result) <= maxwidth:
+ return result
+ else:
+ # How long can we let the argument be? Try filling in the
+ # format with an empty argument to find out.
+ max_arg_width = maxwidth - len(fmt % "")
+ result = fmt % truncate_expression(argument, max_arg_width)
+ assert len(result) <= maxwidth
+ return result
+
+def negate(expr):
+ """Return a negated version of expr; try to avoid double-negation.
+
+ We usually wrap expressions in parentheses and add a "!".
+ >>> negate("A && B")
+ '!(A && B)'
+
+ But if we recognize the expression as negated, we can restore it.
+ >>> negate(negate("A && B"))
+ 'A && B'
+
+ The same applies for defined(FOO).
+ >>> negate("defined(FOO)")
+ '!defined(FOO)'
+ >>> negate(negate("defined(FOO)"))
+ 'defined(FOO)'
+
+ Internal parentheses don't confuse us:
+ >>> negate("!(FOO) && !(BAR)")
+ '!(!(FOO) && !(BAR))'
+
+ """
+ expr = expr.strip()
+ # See whether we match !(...), with no intervening close-parens.
+ m = re.match(r'^!\s*\(([^\)]*)\)$', expr)
+ if m:
+ return m.group(1)
+
+
+ # See whether we match !?defined(...), with no intervening close-parens.
+ m = re.match(r'^(!?)\s*(defined\([^\)]*\))$', expr)
+ if m:
+ if m.group(1) == "!":
+ prefix = ""
+ else:
+ prefix = "!"
+ return prefix + m.group(2)
+
+ return "!(%s)" % expr
+
+def uncomment(s):
+ """
+ Remove existing trailing comments from an #else or #endif line.
+ """
+ s = re.sub(r'//.*','',s)
+ s = re.sub(r'/\*.*','',s)
+ return s.strip()
+
+def translate(f_in, f_out):
+ """
+ Read a file from f_in, and write its annotated version to f_out.
+ """
+ # A stack listing our current if/else state. Each member of the stack
+ # is a list of directives. Each directive is a 3-tuple of
+ # (command, rest, lineno)
+ # where "command" is one of if/ifdef/ifndef/else/elif, and where
+ # "rest" is an expression in a format suitable for use with #if, and where
+ # lineno is the line number where the directive occurred.
+ stack = []
+ # the stack element corresponding to the top level of the file.
+ whole_file = []
+ cur_level = whole_file
+ lineno = 0
+ for line in f_in:
+ lineno += 1
+ m = re.match(r'\s*#\s*(if|ifdef|ifndef|else|endif|elif)\b\s*(.*)',
+ line)
+ if not m:
+ # no directive, so we can just write it out.
+ f_out.write(line)
+ continue
+ command,rest = m.groups()
+ if command in ("if", "ifdef", "ifndef"):
+ # The #if directive pushes us one level lower on the stack.
+ if command == 'ifdef':
+ rest = "defined(%s)"%uncomment(rest)
+ elif command == 'ifndef':
+ rest = "!defined(%s)"%uncomment(rest)
+ elif rest.endswith("\\"):
+ rest = rest[:-1]+"..."
+
+ rest = uncomment(rest)
+
+ new_level = [ (command, rest, lineno) ]
+ stack.append(cur_level)
+ cur_level = new_level
+ f_out.write(line)
+ elif command in ("else", "elif"):
+ # We stay at the same level on the stack. If we have an #else,
+ # we comment it.
+ if len(cur_level) == 0 or cur_level[-1][0] == 'else':
+ raise Problem("Unexpected #%s on %d"% (command,lineno))
+ if (len(cur_level) == 1 and command == 'else' and
+ lineno > cur_level[0][2] + LINE_OBVIOUSNESS_LIMIT):
+ f_out.write(commented_line("#else /* %s */\n",
+ negate(cur_level[0][1])))
+ else:
+ f_out.write(line)
+ cur_level.append((command, rest, lineno))
+ else:
+ # We pop one element on the stack, and comment an endif.
+ assert command == 'endif'
+ if len(stack) == 0:
+ raise Problem("Unmatched #%s on %s"% (command,lineno))
+ if lineno <= cur_level[0][2] + LINE_OBVIOUSNESS_LIMIT:
+ f_out.write(line)
+ elif len(cur_level) == 1 or (
+ len(cur_level) == 2 and cur_level[1][0] == 'else'):
+ f_out.write(commented_line("#endif /* %s */\n",
+ cur_level[0][1]))
+ else:
+ f_out.write(commented_line("#endif /* %s || ... */\n",
+ cur_level[0][1]))
+ cur_level = stack.pop()
+ if len(stack) or cur_level != whole_file:
+ raise Problem("Missing #endif")
+
+if __name__ == '__main__':
+
+ import sys,os
+
+ if sys.argv[1] == "--self-test":
+ import doctest
+ doctest.testmod()
+ sys.exit(0)
+
+ for fn in sys.argv[1:]:
+ with open(fn+"_OUT", 'w') as output_file:
+ translate(open(fn, 'r'), output_file)
+ os.rename(fn+"_OUT", fn)
diff --git a/scripts/maint/checkIncludes.py b/scripts/maint/checkIncludes.py
index ec9350b9b1..926b201b35 100755
--- a/scripts/maint/checkIncludes.py
+++ b/scripts/maint/checkIncludes.py
@@ -1,181 +1,14 @@
#!/usr/bin/python
# Copyright 2018 The Tor Project, Inc. See LICENSE file for licensing info.
-"""This script looks through all the directories for files matching *.c or
- *.h, and checks their #include directives to make sure that only "permitted"
- headers are included.
+# This file is no longer here; see practracker/includes.py for this
+# functionality. This is a stub file that exists so that older git
+# hooks will know where to look.
- Any #include directives with angle brackets (like #include <stdio.h>) are
- ignored -- only directives with quotes (like #include "foo.h") are
- considered.
+import sys, os
- To decide what includes are permitted, this script looks at a .may_include
- file in each directory. This file contains empty lines, #-prefixed
- comments, filenames (like "lib/foo/bar.h") and file globs (like lib/*/*.h)
- for files that are permitted.
-"""
+dirname = os.path.split(sys.argv[0])[0]
+new_location = os.path.join(dirname, "practracker", "includes.py")
+python = sys.executable
-
-from __future__ import print_function
-
-import fnmatch
-import os
-import re
-import sys
-
-# Global: Have there been any errors?
-trouble = False
-
-if sys.version_info[0] <= 2:
- def open_file(fname):
- return open(fname, 'r')
-else:
- def open_file(fname):
- return open(fname, 'r', encoding='utf-8')
-
-def warn(msg):
- print(msg, file=sys.stderr)
-
-def err(msg):
- """ Declare that an error has happened, and remember that there has
- been an error. """
- global trouble
- trouble = True
- print(msg, file=sys.stderr)
-
-def fname_is_c(fname):
- """ Return true iff 'fname' is the name of a file that we should
- search for possibly disallowed #include directives. """
- return fname.endswith(".h") or fname.endswith(".c")
-
-INCLUDE_PATTERN = re.compile(r'\s*#\s*include\s+"([^"]*)"')
-RULES_FNAME = ".may_include"
-
-ALLOWED_PATTERNS = [
- re.compile(r'^.*\*\.(h|inc)$'),
- re.compile(r'^.*/.*\.h$'),
- re.compile(r'^ext/.*\.c$'),
- re.compile(r'^orconfig.h$'),
- re.compile(r'^micro-revision.i$'),
-]
-
-def pattern_is_normal(s):
- for p in ALLOWED_PATTERNS:
- if p.match(s):
- return True
- return False
-
-class Rules(object):
- """ A 'Rules' object is the parsed version of a .may_include file. """
- def __init__(self, dirpath):
- self.dirpath = dirpath
- if dirpath.startswith("src/"):
- self.incpath = dirpath[4:]
- else:
- self.incpath = dirpath
- self.patterns = []
- self.usedPatterns = set()
-
- def addPattern(self, pattern):
- if not pattern_is_normal(pattern):
- warn("Unusual pattern {} in {}".format(pattern, self.dirpath))
- self.patterns.append(pattern)
-
- def includeOk(self, path):
- for pattern in self.patterns:
- if fnmatch.fnmatchcase(path, pattern):
- self.usedPatterns.add(pattern)
- return True
- return False
-
- def applyToLines(self, lines, context=""):
- lineno = 0
- for line in lines:
- lineno += 1
- m = INCLUDE_PATTERN.match(line)
- if m:
- include = m.group(1)
- if not self.includeOk(include):
- err("Forbidden include of {} on line {}{}".format(
- include, lineno, context))
-
- def applyToFile(self, fname):
- with open_file(fname) as f:
- #print(fname)
- self.applyToLines(iter(f), " of {}".format(fname))
-
- def noteUnusedRules(self):
- for p in self.patterns:
- if p not in self.usedPatterns:
- print("Pattern {} in {} was never used.".format(p, self.dirpath))
-
- def getAllowedDirectories(self):
- allowed = []
- for p in self.patterns:
- m = re.match(r'^(.*)/\*\.(h|inc)$', p)
- if m:
- allowed.append(m.group(1))
- continue
- m = re.match(r'^(.*)/[^/]*$', p)
- if m:
- allowed.append(m.group(1))
- continue
-
- return allowed
-
-def load_include_rules(fname):
- """ Read a rules file from 'fname', and return it as a Rules object. """
- result = Rules(os.path.split(fname)[0])
- with open_file(fname) as f:
- for line in f:
- line = line.strip()
- if line.startswith("#") or not line:
- continue
- result.addPattern(line)
- return result
-
-list_unused = False
-log_sorted_levels = False
-
-uses_dirs = { }
-
-for dirpath, dirnames, fnames in os.walk("src"):
- if ".may_include" in fnames:
- rules = load_include_rules(os.path.join(dirpath, RULES_FNAME))
- for fname in fnames:
- if fname_is_c(fname):
- rules.applyToFile(os.path.join(dirpath,fname))
- if list_unused:
- rules.noteUnusedRules()
-
- uses_dirs[rules.incpath] = rules.getAllowedDirectories()
-
-if trouble:
- err(
-"""To change which includes are allowed in a C file, edit the {}
-files in its enclosing directory.""".format(RULES_FNAME))
- sys.exit(1)
-
-all_levels = []
-
-n = 0
-while uses_dirs:
- n += 0
- cur_level = []
- for k in list(uses_dirs):
- uses_dirs[k] = [ d for d in uses_dirs[k]
- if (d in uses_dirs and d != k)]
- if uses_dirs[k] == []:
- cur_level.append(k)
- for k in cur_level:
- del uses_dirs[k]
- n += 1
- if cur_level and log_sorted_levels:
- print(n, cur_level)
- if n > 100:
- break
-
-if uses_dirs:
- print("There are circular .may_include dependencies in here somewhere:",
- uses_dirs)
- sys.exit(1)
+os.execl(python, python, new_location, *sys.argv[1:])
diff --git a/scripts/maint/checkShellScripts.sh b/scripts/maint/checkShellScripts.sh
new file mode 100755
index 0000000000..4c872c7ee0
--- /dev/null
+++ b/scripts/maint/checkShellScripts.sh
@@ -0,0 +1,61 @@
+#!/usr/bin/env bash
+#
+# Copyright (c) 2019 The Tor Project, Inc.
+# See LICENSE for license information
+#
+# checkShellScripts.sh
+# --------------------
+# If shellcheck is installed, check all the shell scripts that we can fix.
+
+set -e
+
+# Only run this script if shellcheck is installed
+# command echoes the path to shellcheck, which is a useful diagnostic log
+if ! command -v shellcheck; then
+ printf "%s: Install shellcheck to check shell scripts.\\n" "$0"
+ exit 0
+fi
+
+# Some platforms don't have realpath
+if command -v realpath ; then
+ HERE=$(dirname "$(realpath "$0")")
+else
+ HERE=$(dirname "$0")
+ if [ ! -d "$HERE" ] || [ "$HERE" = "." ]; then
+ HERE=$(dirname "$PWD/$0")
+ fi
+fi
+TOPLEVEL=$(dirname "$(dirname "$HERE")")
+
+# Check we actually have a tor/src directory
+if [ ! -d "$TOPLEVEL/src" ]; then
+ printf "Error: Couldn't find src directory in expected location: %s\\n" \
+ "$TOPLEVEL/src"
+ exit 1
+fi
+
+# Check *.sh scripts, but ignore the ones that we can't fix
+find "$TOPLEVEL/contrib" "$TOPLEVEL/doc" "$TOPLEVEL/scripts" "$TOPLEVEL/src" \
+ -name "*.sh" \
+ -not -path "$TOPLEVEL/src/ext/*" \
+ -not -path "$TOPLEVEL/src/rust/registry/*" \
+ -exec shellcheck {} +
+
+# Check scripts that aren't named *.sh
+if [ -d "$TOPLEVEL/scripts/test" ]; then
+ shellcheck \
+ "$TOPLEVEL/scripts/test/cov-diff" \
+ "$TOPLEVEL/scripts/test/coverage"
+fi
+if [ -e \
+ "$TOPLEVEL/contrib/dirauth-tools/nagios-check-tor-authority-cert" \
+ ]; then
+ shellcheck \
+ "$TOPLEVEL/contrib/dirauth-tools/nagios-check-tor-authority-cert"
+fi
+if [ -e "$TOPLEVEL/contrib/client-tools/torify" ]; then
+ shellcheck "$TOPLEVEL/contrib/client-tools/torify"
+fi
+if [ -d "$TOPLEVEL/scripts/git" ]; then
+ shellcheck "$TOPLEVEL/scripts/git/"*.git-hook
+fi
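The new pre-commit hook runs this check automatically; it can also be invoked by hand from the top of the working tree:

  $ ./scripts/maint/checkShellScripts.sh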
diff --git a/scripts/maint/checkSpace.pl b/scripts/maint/checkSpace.pl
index 433ae62807..9c9b68ff9d 100755
--- a/scripts/maint/checkSpace.pl
+++ b/scripts/maint/checkSpace.pl
@@ -177,7 +177,7 @@ for my $fn (@ARGV) {
$1 ne "elsif" and $1 ne "WINAPI" and $2 ne "WINAPI" and
$1 ne "void" and $1 ne "__attribute__" and $1 ne "op" and
$1 ne "size_t" and $1 ne "double" and $1 ne "uint64_t" and
- $1 ne "workqueue_reply_t") {
+ $1 ne "workqueue_reply_t" and $1 ne "bool") {
msg " fn ():$fn:$.\n";
}
}
diff --git a/scripts/maint/practracker/README b/scripts/maint/practracker/README
new file mode 100644
index 0000000000..d978b39806
--- /dev/null
+++ b/scripts/maint/practracker/README
@@ -0,0 +1,21 @@
+Practracker is a simple python tool that keeps track of places where
+our code is ugly, and tries to warn us about new ones or ones that
+get worse.
+
+Right now, practracker looks for the following kinds of
+best-practices violations:
+
+ .c files greater than 3000 lines long
+ .h files greater than 500 lines long
+ .c files with more than 50 includes
+ .h files with more than 15 includes
+
+ All files that include a local header not listed in a .may_include
+ file in the same directory, when that .may_include file has an
+ "!advisory" marker.
+
+The list of current violations is tracked in exceptions.txt; slight
+deviations of the current exceptions cause warnings, whereas large
+ones cause practracker to fail.
+
+For usage information, run "practracker.py --help".
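A minimal invocation, mirroring how the git hooks call it (the argument is the top of the tor working tree):

  $ python3 scripts/maint/practracker/practracker.py "$(git rev-parse --show-toplevel)"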
diff --git a/scripts/maint/practracker/exceptions.txt b/scripts/maint/practracker/exceptions.txt
index eaa98f0f2e..7b15b37f8c 100644
--- a/scripts/maint/practracker/exceptions.txt
+++ b/scripts/maint/practracker/exceptions.txt
@@ -30,259 +30,306 @@
# Remember: It is better to fix the problem than to add a new exception!
problem file-size /src/app/config/config.c 8518
-problem include-count /src/app/config/config.c 87
+problem include-count /src/app/config/config.c 89
problem function-size /src/app/config/config.c:options_act_reversible() 296
problem function-size /src/app/config/config.c:options_act() 589
-problem function-size /src/app/config/config.c:resolve_my_address() 192
-problem function-size /src/app/config/config.c:options_validate() 1217
+problem function-size /src/app/config/config.c:resolve_my_address() 190
+problem function-size /src/app/config/config.c:options_validate() 1209
problem function-size /src/app/config/config.c:options_init_from_torrc() 207
-problem function-size /src/app/config/config.c:options_init_from_string() 173
-problem function-size /src/app/config/config.c:options_init_logs() 146
+problem function-size /src/app/config/config.c:options_init_from_string() 171
+problem function-size /src/app/config/config.c:options_init_logs() 145
problem function-size /src/app/config/config.c:parse_bridge_line() 104
-problem function-size /src/app/config/config.c:parse_transport_line() 191
-problem function-size /src/app/config/config.c:parse_dir_authority_line() 151
-problem function-size /src/app/config/config.c:parse_dir_fallback_line() 102
-problem function-size /src/app/config/config.c:parse_port_config() 452
-problem function-size /src/app/config/config.c:parse_ports() 170
-problem function-size /src/app/config/config.c:getinfo_helper_config() 116
-problem function-size /src/app/config/confparse.c:config_assign_value() 205
-problem function-size /src/app/config/confparse.c:config_get_assigned_option() 129
-problem include-count /src/app/main/main.c 67
+problem function-size /src/app/config/config.c:parse_transport_line() 189
+problem function-size /src/app/config/config.c:parse_dir_authority_line() 150
+problem function-size /src/app/config/config.c:parse_dir_fallback_line() 101
+problem function-size /src/app/config/config.c:parse_port_config() 446
+problem function-size /src/app/config/config.c:parse_ports() 168
+problem file-size /src/app/config/or_options_st.h 1112
+problem include-count /src/app/main/main.c 68
problem function-size /src/app/main/main.c:dumpstats() 102
problem function-size /src/app/main/main.c:tor_init() 137
problem function-size /src/app/main/main.c:sandbox_init_filter() 291
problem function-size /src/app/main/main.c:run_tor_main_loop() 105
-problem function-size /src/app/main/ntmain.c:nt_service_install() 125
+problem function-size /src/app/main/ntmain.c:nt_service_install() 126
+problem dependency-violation /src/core/crypto/hs_ntor.c 1
+problem dependency-violation /src/core/crypto/onion_crypto.c 5
+problem dependency-violation /src/core/crypto/onion_fast.c 1
+problem dependency-violation /src/core/crypto/onion_tap.c 3
+problem dependency-violation /src/core/crypto/relay_crypto.c 9
problem file-size /src/core/mainloop/connection.c 5569
problem include-count /src/core/mainloop/connection.c 62
problem function-size /src/core/mainloop/connection.c:connection_free_minimal() 185
-problem function-size /src/core/mainloop/connection.c:connection_listener_new() 328
+problem function-size /src/core/mainloop/connection.c:connection_listener_new() 324
problem function-size /src/core/mainloop/connection.c:connection_handle_listener_read() 161
-problem function-size /src/core/mainloop/connection.c:connection_connect_sockaddr() 103
problem function-size /src/core/mainloop/connection.c:connection_proxy_connect() 148
problem function-size /src/core/mainloop/connection.c:connection_read_proxy_handshake() 153
-problem function-size /src/core/mainloop/connection.c:retry_listener_ports() 116
+problem function-size /src/core/mainloop/connection.c:retry_listener_ports() 112
problem function-size /src/core/mainloop/connection.c:connection_handle_read_impl() 111
-problem function-size /src/core/mainloop/connection.c:connection_buf_read_from_socket() 181
+problem function-size /src/core/mainloop/connection.c:connection_buf_read_from_socket() 180
problem function-size /src/core/mainloop/connection.c:connection_handle_write_impl() 241
problem function-size /src/core/mainloop/connection.c:assert_connection_ok() 143
+problem dependency-violation /src/core/mainloop/connection.c 44
+problem dependency-violation /src/core/mainloop/cpuworker.c 12
problem include-count /src/core/mainloop/mainloop.c 63
problem function-size /src/core/mainloop/mainloop.c:conn_close_if_marked() 108
problem function-size /src/core/mainloop/mainloop.c:run_connection_housekeeping() 123
+problem dependency-violation /src/core/mainloop/mainloop.c 49
+problem dependency-violation /src/core/mainloop/mainloop_pubsub.c 1
+problem dependency-violation /src/core/mainloop/mainloop_sys.c 1
+problem dependency-violation /src/core/mainloop/netstatus.c 4
+problem dependency-violation /src/core/mainloop/periodic.c 2
+problem dependency-violation /src/core/or/address_set.c 1
problem file-size /src/core/or/channel.c 3487
+problem dependency-violation /src/core/or/channel.c 9
+problem file-size /src/core/or/channel.h 780
+problem dependency-violation /src/core/or/channelpadding.c 6
problem function-size /src/core/or/channeltls.c:channel_tls_handle_var_cell() 160
problem function-size /src/core/or/channeltls.c:channel_tls_process_versions_cell() 170
problem function-size /src/core/or/channeltls.c:channel_tls_process_netinfo_cell() 214
problem function-size /src/core/or/channeltls.c:channel_tls_process_certs_cell() 246
problem function-size /src/core/or/channeltls.c:channel_tls_process_authenticate_cell() 202
+problem dependency-violation /src/core/or/channeltls.c 10
problem include-count /src/core/or/circuitbuild.c 54
problem function-size /src/core/or/circuitbuild.c:get_unique_circ_id_by_chan() 128
problem function-size /src/core/or/circuitbuild.c:circuit_extend() 147
problem function-size /src/core/or/circuitbuild.c:choose_good_exit_server_general() 206
+problem dependency-violation /src/core/or/circuitbuild.c 25
problem include-count /src/core/or/circuitlist.c 55
-problem function-size /src/core/or/circuitlist.c:HT_PROTOTYPE() 128
+problem function-size /src/core/or/circuitlist.c:HT_PROTOTYPE() 109
problem function-size /src/core/or/circuitlist.c:circuit_free_() 143
-problem function-size /src/core/or/circuitlist.c:circuit_find_to_cannibalize() 102
+problem function-size /src/core/or/circuitlist.c:circuit_find_to_cannibalize() 101
problem function-size /src/core/or/circuitlist.c:circuit_about_to_free() 120
problem function-size /src/core/or/circuitlist.c:circuits_handle_oom() 117
-problem function-size /src/core/or/circuitmux.c:circuitmux_set_policy() 110
-problem function-size /src/core/or/circuitmux.c:circuitmux_attach_circuit() 114
-problem file-size /src/core/or/circuitpadding.c 3040
-problem function-size /src/core/or/circuitpadding.c:circpad_machine_schedule_padding() 113
-problem function-size /src/core/or/circuitpadding_machines.c:circpad_machine_relay_hide_intro_circuits() 104
+problem dependency-violation /src/core/or/circuitlist.c 19
+problem function-size /src/core/or/circuitmux.c:circuitmux_set_policy() 109
+problem function-size /src/core/or/circuitmux.c:circuitmux_attach_circuit() 113
+problem dependency-violation /src/core/or/circuitmux_ewma.c 2
+problem file-size /src/core/or/circuitpadding.c 3096
+problem function-size /src/core/or/circuitpadding.c:circpad_machine_schedule_padding() 113
+problem dependency-violation /src/core/or/circuitpadding.c 6
+problem file-size /src/core/or/circuitpadding.h 813
+problem function-size /src/core/or/circuitpadding_machines.c:circpad_machine_relay_hide_intro_circuits() 103
problem function-size /src/core/or/circuitpadding_machines.c:circpad_machine_client_hide_rend_circuits() 112
-problem function-size /src/core/or/circuitstats.c:circuit_build_times_parse_state() 124
+problem dependency-violation /src/core/or/circuitpadding_machines.c 1
+problem function-size /src/core/or/circuitstats.c:circuit_build_times_parse_state() 123
+problem dependency-violation /src/core/or/circuitstats.c 11
problem file-size /src/core/or/circuituse.c 3162
-problem function-size /src/core/or/circuituse.c:circuit_is_acceptable() 132
+problem function-size /src/core/or/circuituse.c:circuit_is_acceptable() 128
problem function-size /src/core/or/circuituse.c:circuit_expire_building() 394
problem function-size /src/core/or/circuituse.c:circuit_log_ancient_one_hop_circuits() 126
problem function-size /src/core/or/circuituse.c:circuit_build_failed() 149
-problem function-size /src/core/or/circuituse.c:circuit_launch_by_extend_info() 110
-problem function-size /src/core/or/circuituse.c:circuit_get_open_circ_or_launch() 354
+problem function-size /src/core/or/circuituse.c:circuit_launch_by_extend_info() 108
+problem function-size /src/core/or/circuituse.c:circuit_get_open_circ_or_launch() 352
problem function-size /src/core/or/circuituse.c:connection_ap_handshake_attach_circuit() 244
+problem dependency-violation /src/core/or/circuituse.c 23
problem function-size /src/core/or/command.c:command_process_create_cell() 156
problem function-size /src/core/or/command.c:command_process_relay_cell() 132
-problem file-size /src/core/or/connection_edge.c 4595
+problem dependency-violation /src/core/or/command.c 8
+problem file-size /src/core/or/connection_edge.c 4596
problem include-count /src/core/or/connection_edge.c 65
problem function-size /src/core/or/connection_edge.c:connection_ap_expire_beginning() 117
-problem function-size /src/core/or/connection_edge.c:connection_ap_handshake_rewrite() 192
-problem function-size /src/core/or/connection_edge.c:connection_ap_handle_onion() 188
-problem function-size /src/core/or/connection_edge.c:connection_ap_handshake_rewrite_and_attach() 423
+problem function-size /src/core/or/connection_edge.c:connection_ap_handshake_rewrite() 191
+problem function-size /src/core/or/connection_edge.c:connection_ap_handle_onion() 185
+problem function-size /src/core/or/connection_edge.c:connection_ap_handshake_rewrite_and_attach() 421
problem function-size /src/core/or/connection_edge.c:connection_ap_handshake_send_begin() 111
-problem function-size /src/core/or/connection_edge.c:connection_ap_handshake_socks_resolved() 106
-problem function-size /src/core/or/connection_edge.c:connection_exit_begin_conn() 184
+problem function-size /src/core/or/connection_edge.c:connection_ap_handshake_socks_resolved() 101
+problem function-size /src/core/or/connection_edge.c:connection_exit_begin_conn() 185
problem function-size /src/core/or/connection_edge.c:connection_exit_connect() 102
-problem file-size /src/core/or/connection_or.c 3124
+problem dependency-violation /src/core/or/connection_edge.c 27
+problem file-size /src/core/or/connection_or.c 3122
problem include-count /src/core/or/connection_or.c 51
problem function-size /src/core/or/connection_or.c:connection_or_group_set_badness_() 105
-problem function-size /src/core/or/connection_or.c:connection_or_client_learned_peer_id() 144
-problem function-size /src/core/or/connection_or.c:connection_or_compute_authenticate_cell_body() 235
+problem function-size /src/core/or/connection_or.c:connection_or_client_learned_peer_id() 142
+problem function-size /src/core/or/connection_or.c:connection_or_compute_authenticate_cell_body() 231
+problem dependency-violation /src/core/or/connection_or.c 20
+problem dependency-violation /src/core/or/dos.c 5
+problem dependency-violation /src/core/or/onion.c 2
+problem file-size /src/core/or/or.h 1107
+problem include-count /src/core/or/or.h 49
+problem dependency-violation /src/core/or/or_periodic.c 1
problem file-size /src/core/or/policies.c 3249
problem function-size /src/core/or/policies.c:policy_summarize() 107
+problem dependency-violation /src/core/or/policies.c 14
problem function-size /src/core/or/protover.c:protover_all_supported() 117
-problem file-size /src/core/or/relay.c 3244
+problem dependency-violation /src/core/or/reasons.c 2
+problem file-size /src/core/or/relay.c 3264
problem function-size /src/core/or/relay.c:circuit_receive_relay_cell() 127
-problem function-size /src/core/or/relay.c:relay_send_command_from_edge_() 112
-problem function-size /src/core/or/relay.c:connection_ap_process_end_not_open() 194
-problem function-size /src/core/or/relay.c:connection_edge_process_relay_cell_not_open() 139
-problem function-size /src/core/or/relay.c:connection_edge_process_relay_cell() 430
-problem function-size /src/core/or/relay.c:connection_edge_package_raw_inbuf() 129
-problem function-size /src/core/or/relay.c:circuit_resume_edge_reading_helper() 148
+problem function-size /src/core/or/relay.c:relay_send_command_from_edge_() 109
+problem function-size /src/core/or/relay.c:connection_ap_process_end_not_open() 192
+problem function-size /src/core/or/relay.c:connection_edge_process_relay_cell_not_open() 137
+problem function-size /src/core/or/relay.c:handle_relay_cell_command() 369
+problem function-size /src/core/or/relay.c:connection_edge_package_raw_inbuf() 128
+problem function-size /src/core/or/relay.c:circuit_resume_edge_reading_helper() 146
+problem dependency-violation /src/core/or/relay.c 16
+problem dependency-violation /src/core/or/scheduler.c 1
problem function-size /src/core/or/scheduler_kist.c:kist_scheduler_run() 171
+problem dependency-violation /src/core/or/scheduler_kist.c 2
problem function-size /src/core/or/scheduler_vanilla.c:vanilla_scheduler_run() 109
+problem dependency-violation /src/core/or/scheduler_vanilla.c 1
+problem dependency-violation /src/core/or/sendme.c 2
+problem dependency-violation /src/core/or/status.c 12
problem function-size /src/core/or/versions.c:tor_version_parse() 104
-problem function-size /src/core/proto/proto_socks.c:parse_socks_client() 112
-problem function-size /src/feature/client/addressmap.c:addressmap_rewrite() 112
+problem dependency-violation /src/core/proto/proto_cell.c 3
+problem dependency-violation /src/core/proto/proto_control0.c 1
+problem dependency-violation /src/core/proto/proto_ext_or.c 2
+problem dependency-violation /src/core/proto/proto_http.c 1
+problem function-size /src/core/proto/proto_socks.c:parse_socks_client() 110
+problem dependency-violation /src/core/proto/proto_socks.c 8
+problem function-size /src/feature/client/addressmap.c:addressmap_rewrite() 109
problem function-size /src/feature/client/bridges.c:rewrite_node_address_for_bridge() 126
problem function-size /src/feature/client/circpathbias.c:pathbias_measure_close_rate() 108
problem function-size /src/feature/client/dnsserv.c:evdns_server_callback() 153
problem file-size /src/feature/client/entrynodes.c 3824
-problem function-size /src/feature/client/entrynodes.c:entry_guards_upgrade_waiting_circuits() 157
+problem function-size /src/feature/client/entrynodes.c:entry_guards_upgrade_waiting_circuits() 155
problem function-size /src/feature/client/entrynodes.c:entry_guard_parse_from_state() 246
+problem file-size /src/feature/client/entrynodes.h 639
problem function-size /src/feature/client/transports.c:handle_proxy_line() 108
-problem function-size /src/feature/client/transports.c:parse_method_line_helper() 112
+problem function-size /src/feature/client/transports.c:parse_method_line_helper() 110
problem function-size /src/feature/client/transports.c:create_managed_proxy_environment() 109
-problem function-size /src/feature/control/control.c:connection_control_process_inbuf() 136
-problem function-size /src/feature/control/control_auth.c:handle_control_authchallenge() 103
-problem function-size /src/feature/control/control_auth.c:handle_control_authenticate() 187
-problem function-size /src/feature/control/control_cmd.c:handle_control_extendcircuit() 151
-problem function-size /src/feature/control/control_cmd.c:handle_control_add_onion() 269
-problem function-size /src/feature/control/control_cmd.c:add_onion_helper_keyarg() 125
-problem function-size /src/feature/control/control_events.c:control_event_stream_status() 119
+problem function-size /src/feature/control/control.c:connection_control_process_inbuf() 113
+problem function-size /src/feature/control/control_auth.c:handle_control_authenticate() 186
+problem function-size /src/feature/control/control_cmd.c:handle_control_extendcircuit() 150
+problem function-size /src/feature/control/control_cmd.c:handle_control_add_onion() 256
+problem function-size /src/feature/control/control_cmd.c:add_onion_helper_keyarg() 116
+problem function-size /src/feature/control/control_events.c:control_event_stream_status() 118
problem include-count /src/feature/control/control_getinfo.c 54
-problem function-size /src/feature/control/control_getinfo.c:getinfo_helper_misc() 109
-problem function-size /src/feature/control/control_getinfo.c:getinfo_helper_dir() 304
-problem function-size /src/feature/control/control_getinfo.c:getinfo_helper_events() 236
-problem function-size /src/feature/dirauth/bwauth.c:dirserv_read_measured_bandwidths() 124
-problem file-size /src/feature/dirauth/dirvote.c 4726
+problem function-size /src/feature/control/control_getinfo.c:getinfo_helper_misc() 108
+problem function-size /src/feature/control/control_getinfo.c:getinfo_helper_dir() 302
+problem function-size /src/feature/control/control_getinfo.c:getinfo_helper_events() 234
+problem function-size /src/feature/dirauth/bwauth.c:dirserv_read_measured_bandwidths() 121
+problem file-size /src/feature/dirauth/dirvote.c 4700
problem include-count /src/feature/dirauth/dirvote.c 53
-problem function-size /src/feature/dirauth/dirvote.c:format_networkstatus_vote() 249
-problem function-size /src/feature/dirauth/dirvote.c:networkstatus_compute_bw_weights_v10() 235
-problem function-size /src/feature/dirauth/dirvote.c:networkstatus_compute_consensus() 962
-problem function-size /src/feature/dirauth/dirvote.c:networkstatus_add_detached_signatures() 123
+problem function-size /src/feature/dirauth/dirvote.c:format_networkstatus_vote() 231
+problem function-size /src/feature/dirauth/dirvote.c:networkstatus_compute_bw_weights_v10() 233
+problem function-size /src/feature/dirauth/dirvote.c:networkstatus_compute_consensus() 956
+problem function-size /src/feature/dirauth/dirvote.c:networkstatus_add_detached_signatures() 119
problem function-size /src/feature/dirauth/dirvote.c:dirvote_add_vote() 162
problem function-size /src/feature/dirauth/dirvote.c:dirvote_compute_consensuses() 164
-problem function-size /src/feature/dirauth/dirvote.c:dirserv_generate_networkstatus_vote_obj() 293
+problem function-size /src/feature/dirauth/dirvote.c:dirserv_generate_networkstatus_vote_obj() 283
problem function-size /src/feature/dirauth/dsigs_parse.c:networkstatus_parse_detached_signatures() 196
-problem function-size /src/feature/dirauth/guardfraction.c:dirserv_read_guardfraction_file_from_str() 110
+problem function-size /src/feature/dirauth/guardfraction.c:dirserv_read_guardfraction_file_from_str() 109
problem function-size /src/feature/dirauth/process_descs.c:dirserv_add_descriptor() 125
-problem function-size /src/feature/dirauth/shared_random.c:should_keep_commit() 110
+problem function-size /src/feature/dirauth/shared_random.c:should_keep_commit() 109
problem function-size /src/feature/dirauth/voteflags.c:dirserv_compute_performance_thresholds() 172
problem function-size /src/feature/dircache/consdiffmgr.c:consdiffmgr_cleanup() 115
problem function-size /src/feature/dircache/consdiffmgr.c:consdiffmgr_rescan_flavor_() 111
problem function-size /src/feature/dircache/consdiffmgr.c:consensus_diff_worker_threadfn() 132
-problem function-size /src/feature/dircache/dircache.c:handle_get_current_consensus() 166
-problem function-size /src/feature/dircache/dircache.c:directory_handle_command_post() 120
+problem function-size /src/feature/dircache/dircache.c:handle_get_current_consensus() 165
+problem function-size /src/feature/dircache/dircache.c:directory_handle_command_post() 119
problem file-size /src/feature/dirclient/dirclient.c 3215
problem include-count /src/feature/dirclient/dirclient.c 51
-problem function-size /src/feature/dirclient/dirclient.c:directory_get_from_dirserver() 131
+problem function-size /src/feature/dirclient/dirclient.c:directory_get_from_dirserver() 126
problem function-size /src/feature/dirclient/dirclient.c:directory_initiate_request() 201
-problem function-size /src/feature/dirclient/dirclient.c:directory_send_command() 241
-problem function-size /src/feature/dirclient/dirclient.c:dir_client_decompress_response_body() 114
+problem function-size /src/feature/dirclient/dirclient.c:directory_send_command() 239
+problem function-size /src/feature/dirclient/dirclient.c:dir_client_decompress_response_body() 111
problem function-size /src/feature/dirclient/dirclient.c:connection_dir_client_reached_eof() 189
-problem function-size /src/feature/dirclient/dirclient.c:handle_response_fetch_consensus() 105
-problem function-size /src/feature/dircommon/consdiff.c:gen_ed_diff() 204
-problem function-size /src/feature/dircommon/consdiff.c:apply_ed_diff() 159
-problem function-size /src/feature/dirparse/authcert_parse.c:authority_cert_parse_from_string() 182
-problem function-size /src/feature/dirparse/microdesc_parse.c:microdescs_parse_from_string() 169
-problem function-size /src/feature/dirparse/ns_parse.c:routerstatus_parse_entry_from_string() 286
+problem function-size /src/feature/dirclient/dirclient.c:handle_response_fetch_consensus() 104
+problem function-size /src/feature/dircommon/consdiff.c:gen_ed_diff() 203
+problem function-size /src/feature/dircommon/consdiff.c:apply_ed_diff() 158
+problem function-size /src/feature/dirparse/authcert_parse.c:authority_cert_parse_from_string() 181
+problem function-size /src/feature/dirparse/ns_parse.c:routerstatus_parse_entry_from_string() 280
problem function-size /src/feature/dirparse/ns_parse.c:networkstatus_verify_bw_weights() 389
-problem function-size /src/feature/dirparse/ns_parse.c:networkstatus_parse_vote_from_string() 638
-problem function-size /src/feature/dirparse/parsecommon.c:tokenize_string() 103
-problem function-size /src/feature/dirparse/parsecommon.c:get_next_token() 159
-problem function-size /src/feature/dirparse/routerparse.c:router_parse_entry_from_string() 557
-problem function-size /src/feature/dirparse/routerparse.c:extrainfo_parse_entry_from_string() 210
+problem function-size /src/feature/dirparse/ns_parse.c:networkstatus_parse_vote_from_string() 635
+problem function-size /src/feature/dirparse/parsecommon.c:tokenize_string() 101
+problem function-size /src/feature/dirparse/parsecommon.c:get_next_token() 158
+problem function-size /src/feature/dirparse/routerparse.c:router_parse_entry_from_string() 554
+problem function-size /src/feature/dirparse/routerparse.c:extrainfo_parse_entry_from_string() 208
problem function-size /src/feature/hibernate/hibernate.c:accounting_parse_options() 109
problem function-size /src/feature/hs/hs_cell.c:hs_cell_build_establish_intro() 115
-problem function-size /src/feature/hs/hs_cell.c:hs_cell_parse_introduce2() 154
-problem function-size /src/feature/hs/hs_client.c:send_introduce1() 104
-problem function-size /src/feature/hs/hs_client.c:hs_config_client_authorization() 108
-problem function-size /src/feature/hs/hs_common.c:hs_get_responsible_hsdirs() 104
-problem function-size /src/feature/hs/hs_config.c:config_generic_service() 140
-problem function-size /src/feature/hs/hs_descriptor.c:desc_encode_v3() 104
-problem function-size /src/feature/hs/hs_descriptor.c:decrypt_desc_layer() 110
+problem function-size /src/feature/hs/hs_cell.c:hs_cell_parse_introduce2() 152
+problem function-size /src/feature/hs/hs_client.c:send_introduce1() 103
+problem function-size /src/feature/hs/hs_client.c:hs_config_client_authorization() 107
+problem function-size /src/feature/hs/hs_common.c:hs_get_responsible_hsdirs() 102
+problem function-size /src/feature/hs/hs_config.c:config_service_v3() 107
+problem function-size /src/feature/hs/hs_config.c:config_generic_service() 138
+problem function-size /src/feature/hs/hs_descriptor.c:desc_encode_v3() 101
+problem function-size /src/feature/hs/hs_descriptor.c:decrypt_desc_layer() 111
problem function-size /src/feature/hs/hs_descriptor.c:decode_introduction_point() 122
-problem function-size /src/feature/hs/hs_descriptor.c:desc_decode_superencrypted_v3() 109
-problem function-size /src/feature/hs/hs_descriptor.c:desc_decode_encrypted_v3() 109
-problem file-size /src/feature/hs/hs_service.c 4109
-problem function-size /src/feature/keymgt/loadkey.c:ed_key_init_from_file() 333
-problem function-size /src/feature/nodelist/authcert.c:trusted_dirs_load_certs_from_string() 124
-problem function-size /src/feature/nodelist/authcert.c:authority_certs_fetch_missing() 296
-problem function-size /src/feature/nodelist/fmt_routerstatus.c:routerstatus_format_entry() 166
+problem function-size /src/feature/hs/hs_descriptor.c:desc_decode_superencrypted_v3() 107
+problem function-size /src/feature/hs/hs_descriptor.c:desc_decode_encrypted_v3() 107
+problem file-size /src/feature/hs/hs_service.c 4116
+problem function-size /src/feature/keymgt/loadkey.c:ed_key_init_from_file() 326
+problem function-size /src/feature/nodelist/authcert.c:trusted_dirs_load_certs_from_string() 123
+problem function-size /src/feature/nodelist/authcert.c:authority_certs_fetch_missing() 295
+problem function-size /src/feature/nodelist/fmt_routerstatus.c:routerstatus_format_entry() 162
problem function-size /src/feature/nodelist/microdesc.c:microdesc_cache_rebuild() 134
-problem include-count /src/feature/nodelist/networkstatus.c 62
-problem function-size /src/feature/nodelist/networkstatus.c:networkstatus_check_consensus_signature() 176
-problem function-size /src/feature/nodelist/networkstatus.c:networkstatus_set_current_consensus() 293
-problem function-size /src/feature/nodelist/node_select.c:router_pick_directory_server_impl() 123
-problem function-size /src/feature/nodelist/node_select.c:compute_weighted_bandwidths() 206
-problem function-size /src/feature/nodelist/node_select.c:router_pick_trusteddirserver_impl() 114
-problem function-size /src/feature/nodelist/nodelist.c:compute_frac_paths_available() 193
-problem file-size /src/feature/nodelist/routerlist.c 3238
+problem include-count /src/feature/nodelist/networkstatus.c 63
+problem function-size /src/feature/nodelist/networkstatus.c:networkstatus_check_consensus_signature() 175
+problem function-size /src/feature/nodelist/networkstatus.c:networkstatus_set_current_consensus() 289
+problem function-size /src/feature/nodelist/node_select.c:router_pick_directory_server_impl() 122
+problem function-size /src/feature/nodelist/node_select.c:compute_weighted_bandwidths() 203
+problem function-size /src/feature/nodelist/node_select.c:router_pick_trusteddirserver_impl() 112
+problem function-size /src/feature/nodelist/nodelist.c:compute_frac_paths_available() 190
+problem file-size /src/feature/nodelist/routerlist.c 3241
problem function-size /src/feature/nodelist/routerlist.c:router_rebuild_store() 148
-problem function-size /src/feature/nodelist/routerlist.c:router_add_to_routerlist() 169
+problem function-size /src/feature/nodelist/routerlist.c:router_add_to_routerlist() 168
problem function-size /src/feature/nodelist/routerlist.c:routerlist_remove_old_routers() 121
-problem function-size /src/feature/nodelist/routerlist.c:update_consensus_router_descriptor_downloads() 136
+problem function-size /src/feature/nodelist/routerlist.c:update_consensus_router_descriptor_downloads() 135
problem function-size /src/feature/nodelist/routerlist.c:update_extrainfo_downloads() 103
-problem function-size /src/feature/relay/dns.c:dns_resolve_impl() 134
+problem function-size /src/feature/relay/dns.c:dns_resolve_impl() 131
problem function-size /src/feature/relay/dns.c:configure_nameservers() 161
-problem function-size /src/feature/relay/dns.c:evdns_callback() 109
-problem file-size /src/feature/relay/router.c 3407
+problem function-size /src/feature/relay/dns.c:evdns_callback() 108
+problem file-size /src/feature/relay/router.c 3522
problem include-count /src/feature/relay/router.c 56
problem function-size /src/feature/relay/router.c:init_keys() 252
problem function-size /src/feature/relay/router.c:get_my_declared_family() 114
problem function-size /src/feature/relay/router.c:router_build_fresh_unsigned_routerinfo() 136
-problem function-size /src/feature/relay/router.c:router_dump_router_to_string() 371
-problem function-size /src/feature/relay/router.c:extrainfo_dump_to_string() 206
+problem function-size /src/feature/relay/router.c:router_dump_router_to_string() 367
problem function-size /src/feature/relay/routerkeys.c:load_ed_keys() 294
-problem function-size /src/feature/rend/rendcache.c:rend_cache_store_v2_desc_as_client() 193
-problem function-size /src/feature/rend/rendclient.c:rend_client_send_introduction() 220
-problem function-size /src/feature/rend/rendcommon.c:rend_encode_v2_descriptors() 225
+problem function-size /src/feature/rend/rendcache.c:rend_cache_store_v2_desc_as_client() 190
+problem function-size /src/feature/rend/rendclient.c:rend_client_send_introduction() 219
+problem function-size /src/feature/rend/rendcommon.c:rend_encode_v2_descriptors() 221
problem function-size /src/feature/rend/rendmid.c:rend_mid_establish_intro_legacy() 104
-problem function-size /src/feature/rend/rendparse.c:rend_parse_v2_service_descriptor() 187
-problem function-size /src/feature/rend/rendparse.c:rend_decrypt_introduction_points() 104
-problem function-size /src/feature/rend/rendparse.c:rend_parse_introduction_points() 131
+problem function-size /src/feature/rend/rendparse.c:rend_parse_v2_service_descriptor() 181
+problem function-size /src/feature/rend/rendparse.c:rend_parse_introduction_points() 129
problem file-size /src/feature/rend/rendservice.c 4511
problem function-size /src/feature/rend/rendservice.c:rend_service_prune_list_impl_() 107
-problem function-size /src/feature/rend/rendservice.c:rend_config_service() 164
+problem function-size /src/feature/rend/rendservice.c:rend_config_service() 162
problem function-size /src/feature/rend/rendservice.c:rend_service_load_auth_keys() 178
-problem function-size /src/feature/rend/rendservice.c:rend_service_receive_introduction() 332
-problem function-size /src/feature/rend/rendservice.c:rend_service_parse_intro_for_v3() 115
-problem function-size /src/feature/rend/rendservice.c:rend_service_decrypt_intro() 115
+problem function-size /src/feature/rend/rendservice.c:rend_service_receive_introduction() 330
+problem function-size /src/feature/rend/rendservice.c:rend_service_parse_intro_for_v3() 111
+problem function-size /src/feature/rend/rendservice.c:rend_service_decrypt_intro() 112
problem function-size /src/feature/rend/rendservice.c:rend_service_intro_has_opened() 126
problem function-size /src/feature/rend/rendservice.c:rend_service_rendezvous_has_opened() 117
-problem function-size /src/feature/rend/rendservice.c:directory_post_to_hs_dir() 108
+problem function-size /src/feature/rend/rendservice.c:directory_post_to_hs_dir() 106
problem function-size /src/feature/rend/rendservice.c:upload_service_descriptor() 111
problem function-size /src/feature/rend/rendservice.c:rend_consider_services_intro_points() 170
problem function-size /src/feature/stats/rephist.c:rep_hist_load_mtbf_data() 185
problem function-size /src/feature/stats/rephist.c:rep_hist_format_exit_stats() 148
-problem function-size /src/lib/compress/compress.c:tor_compress_impl() 133
-problem function-size /src/lib/compress/compress_zstd.c:tor_zstd_compress_process() 126
-problem function-size /src/lib/container/smartlist.c:smartlist_bsearch_idx() 109
+problem function-size /src/lib/compress/compress.c:tor_compress_impl() 127
+problem function-size /src/lib/compress/compress_zstd.c:tor_zstd_compress_process() 123
+problem function-size /src/lib/container/smartlist.c:smartlist_bsearch_idx() 107
problem function-size /src/lib/crypt_ops/crypto_rand.c:crypto_strongest_rand_syscall() 102
-problem function-size /src/lib/encoding/binascii.c:base64_encode() 107
-problem function-size /src/lib/encoding/confline.c:parse_config_line_from_str_verbose() 119
+problem function-size /src/lib/encoding/binascii.c:base64_encode() 106
+problem function-size /src/lib/encoding/confline.c:parse_config_line_from_str_verbose() 117
problem function-size /src/lib/encoding/cstring.c:unescape_string() 108
-problem function-size /src/lib/fs/dir.c:check_private_dir() 231
-problem function-size /src/lib/log/log.c:parse_log_severity_config() 101
+problem function-size /src/lib/fs/dir.c:check_private_dir() 230
problem function-size /src/lib/math/prob_distr.c:sample_uniform_interval() 145
-problem function-size /src/lib/net/address.c:tor_addr_parse_mask_ports() 198
-problem function-size /src/lib/net/address.c:tor_addr_compare_masked() 111
+problem function-size /src/lib/net/address.c:tor_addr_parse_mask_ports() 194
+problem function-size /src/lib/net/address.c:tor_addr_compare_masked() 110
problem function-size /src/lib/net/inaddr.c:tor_inet_pton() 107
-problem function-size /src/lib/net/resolve.c:tor_addr_lookup() 110
problem function-size /src/lib/net/socketpair.c:tor_ersatz_socketpair() 102
problem function-size /src/lib/osinfo/uname.c:get_uname() 116
problem function-size /src/lib/process/process_unix.c:process_unix_exec() 220
-problem function-size /src/lib/process/process_win32.c:process_win32_exec() 133
-problem function-size /src/lib/process/process_win32.c:process_win32_create_pipe() 112
+problem function-size /src/lib/process/process_win32.c:process_win32_exec() 151
+problem function-size /src/lib/process/process_win32.c:process_win32_create_pipe() 109
problem function-size /src/lib/process/restrict.c:set_max_file_descriptors() 102
problem function-size /src/lib/process/setuid.c:switch_id() 156
problem function-size /src/lib/sandbox/sandbox.c:prot_strings() 104
problem function-size /src/lib/string/scanf.c:tor_vsscanf() 112
-problem function-size /src/lib/tls/tortls_nss.c:tor_tls_context_new() 153
-problem function-size /src/lib/tls/tortls_openssl.c:tor_tls_context_new() 171
-problem function-size /src/lib/tls/x509_nss.c:tor_tls_create_certificate_internal() 126
+problem function-size /src/lib/tls/tortls_nss.c:tor_tls_context_new() 152
+problem function-size /src/lib/tls/tortls_openssl.c:tor_tls_context_new() 170
+problem function-size /src/lib/tls/x509_nss.c:tor_tls_create_certificate_internal() 121
problem function-size /src/tools/tor-gencert.c:parse_commandline() 111
-problem function-size /src/tools/tor-resolve.c:build_socks5_resolve_request() 104
-problem function-size /src/tools/tor-resolve.c:do_resolve() 174
+problem function-size /src/tools/tor-resolve.c:build_socks5_resolve_request() 102
+problem function-size /src/tools/tor-resolve.c:do_resolve() 171
problem function-size /src/tools/tor-resolve.c:main() 112
+problem dependency-violation /scripts/maint/practracker/testdata/a.c 3
+problem dependency-violation /scripts/maint/practracker/testdata/header.h 3
+problem dependency-violation /src/core/crypto/hs_ntor.h 1
+problem dependency-violation /src/core/or/cell_queue_st.h 1
+problem dependency-violation /src/core/or/channel.h 1
+problem dependency-violation /src/core/or/circuitlist.h 1
+problem dependency-violation /src/core/or/connection_edge.h 1
+problem dependency-violation /src/core/or/or.h 1
diff --git a/scripts/maint/practracker/includes.py b/scripts/maint/practracker/includes.py
new file mode 100755
index 0000000000..397439b4ef
--- /dev/null
+++ b/scripts/maint/practracker/includes.py
@@ -0,0 +1,285 @@
+#!/usr/bin/python
+# Copyright 2018 The Tor Project, Inc. See LICENSE file for licensing info.
+
+"""This script looks through all the directories for files matching *.c or
+ *.h, and checks their #include directives to make sure that only "permitted"
+ headers are included.
+
+ Any #include directives with angle brackets (like #include <stdio.h>) are
+ ignored -- only directives with quotes (like #include "foo.h") are
+ considered.
+
+ To decide what includes are permitted, this script looks at a .may_include
+ file in each directory. This file contains empty lines, #-prefixed
+ comments, filenames (like "lib/foo/bar.h") and file globs (like lib/*/*.h)
+ for files that are permitted.
+"""
+
+
+from __future__ import print_function
+
+import fnmatch
+import os
+import re
+import sys
+
+if sys.version_info[0] <= 2:
+ def open_file(fname):
+ return open(fname, 'r')
+else:
+ def open_file(fname):
+ return open(fname, 'r', encoding='utf-8')
+
+def warn(msg):
+ print(msg, file=sys.stderr)
+
+def fname_is_c(fname):
+ """ Return true iff 'fname' is the name of a file that we should
+ search for possibly disallowed #include directives. """
+ return fname.endswith(".h") or fname.endswith(".c")
+
+INCLUDE_PATTERN = re.compile(r'\s*#\s*include\s+"([^"]*)"')
+RULES_FNAME = ".may_include"
+
+ALLOWED_PATTERNS = [
+ re.compile(r'^.*\*\.(h|inc)$'),
+ re.compile(r'^.*/.*\.h$'),
+ re.compile(r'^ext/.*\.c$'),
+ re.compile(r'^orconfig.h$'),
+ re.compile(r'^micro-revision.i$'),
+]
+
+def pattern_is_normal(s):
+ for p in ALLOWED_PATTERNS:
+ if p.match(s):
+ return True
+ return False
+
+class Error(object):
+ def __init__(self, location, msg, is_advisory=False):
+ self.location = location
+ self.msg = msg
+ self.is_advisory = is_advisory
+
+ def __str__(self):
+ return "{} at {}".format(self.msg, self.location)
+
+class Rules(object):
+ """ A 'Rules' object is the parsed version of a .may_include file. """
+ def __init__(self, dirpath):
+ self.dirpath = dirpath
+ if dirpath.startswith("src/"):
+ self.incpath = dirpath[4:]
+ else:
+ self.incpath = dirpath
+ self.patterns = []
+ self.usedPatterns = set()
+ self.is_advisory = False
+
+ def addPattern(self, pattern):
+ if pattern == "!advisory":
+ self.is_advisory = True
+ return
+ if not pattern_is_normal(pattern):
+ warn("Unusual pattern {} in {}".format(pattern, self.dirpath))
+ self.patterns.append(pattern)
+
+ def includeOk(self, path):
+ for pattern in self.patterns:
+ if fnmatch.fnmatchcase(path, pattern):
+ self.usedPatterns.add(pattern)
+ return True
+ return False
+
+ def applyToLines(self, lines, loc_prefix=""):
+ lineno = 0
+ for line in lines:
+ lineno += 1
+ m = INCLUDE_PATTERN.match(line)
+ if m:
+ include = m.group(1)
+ if not self.includeOk(include):
+ yield Error("{}{}".format(loc_prefix,str(lineno)),
+ "Forbidden include of {}".format(include),
+ is_advisory=self.is_advisory)
+
+ def applyToFile(self, fname, f):
+ for error in self.applyToLines(iter(f), "{}:".format(fname)):
+ yield error
+
+ def noteUnusedRules(self):
+ for p in self.patterns:
+ if p not in self.usedPatterns:
+ warn("Pattern {} in {} was never used.".format(p, self.dirpath))
+
+ def getAllowedDirectories(self):
+ allowed = []
+ for p in self.patterns:
+ m = re.match(r'^(.*)/\*\.(h|inc)$', p)
+ if m:
+ allowed.append(m.group(1))
+ continue
+ m = re.match(r'^(.*)/[^/]*$', p)
+ if m:
+ allowed.append(m.group(1))
+ continue
+
+ return allowed
+
+include_rules_cache = {}
+
+def load_include_rules(fname):
+ """ Read a rules file from 'fname', and return it as a Rules object.
+ Return 'None' if fname does not exist.
+ """
+ if fname in include_rules_cache:
+ return include_rules_cache[fname]
+ if not os.path.exists(fname):
+ include_rules_cache[fname] = None
+ return None
+ result = Rules(os.path.split(fname)[0])
+ with open_file(fname) as f:
+ for line in f:
+ line = line.strip()
+ if line.startswith("#") or not line:
+ continue
+ result.addPattern(line)
+ include_rules_cache[fname] = result
+ return result
+
+def get_all_include_rules():
+ """Return a list of all the Rules objects we have loaded so far,
+ sorted by their directory names."""
+ return [ rules for (fname,rules) in
+ sorted(include_rules_cache.items())
+ if rules is not None ]
+
+def remove_self_edges(graph):
+ """Takes a directed graph in as an adjacency mapping (a mapping from
+ node to a list of the nodes to which it connects).
+
+ Remove all edges from a node to itself."""
+
+ for k in list(graph):
+ graph[k] = [ d for d in graph[k] if d != k ]
+
+def toposort(graph, limit=100):
+ """Takes a directed graph in as an adjacency mapping (a mapping from
+ node to a list of the nodes to which it connects). Tries to
+ perform a topological sort on the graph, arranging the nodes into
+ "levels", such that every member of each level is only reachable
+ by members of later levels.
+
+ Returns a list of the members of each level.
+
+ Modifies the input graph, removing every member that could be
+ sorted. If the graph does not become empty, then it contains a
+ cycle.
+
+ "limit" is the max depth of the graph after which we give up trying
+ to sort it and conclude we have a cycle.
+ """
+ all_levels = []
+
+ n = 0
+ while graph:
+ n += 0
+ cur_level = []
+ all_levels.append(cur_level)
+ for k in list(graph):
+ graph[k] = [ d for d in graph[k] if d in graph ]
+ if graph[k] == []:
+ cur_level.append(k)
+ for k in cur_level:
+ del graph[k]
+ n += 1
+ if n > limit:
+ break
+
+ return all_levels
+
+def consider_include_rules(fname, f):
+ dirpath = os.path.split(fname)[0]
+ rules_fname = os.path.join(dirpath, RULES_FNAME)
+    rules = load_include_rules(rules_fname)
+ if rules is None:
+ return
+
+ for err in rules.applyToFile(fname, f):
+ yield err
+
+ list_unused = False
+ log_sorted_levels = False
+
+def walk_c_files(topdir="src"):
+ """Run through all .c and .h files under topdir, looking for
+ include-rule violations. Yield those violations."""
+
+ for dirpath, dirnames, fnames in os.walk(topdir):
+ for fname in fnames:
+ if fname_is_c(fname):
+ fullpath = os.path.join(dirpath,fname)
+ with open(fullpath) as f:
+ for err in consider_include_rules(fullpath, f):
+ yield err
+
+def run_check_includes(topdir, list_unused=False, log_sorted_levels=False,
+ list_advisories=False):
+ trouble = False
+
+ for err in walk_c_files(topdir):
+ if err.is_advisory and not list_advisories:
+ continue
+ print(err, file=sys.stderr)
+ if not err.is_advisory:
+ trouble = True
+
+ if trouble:
+        warn(
+ """To change which includes are allowed in a C file, edit the {}
+ files in its enclosing directory.""".format(RULES_FNAME))
+ sys.exit(1)
+
+ if list_unused:
+ for rules in get_all_include_rules():
+ rules.noteUnusedRules()
+
+ uses_dirs = { }
+ for rules in get_all_include_rules():
+ uses_dirs[rules.incpath] = rules.getAllowedDirectories()
+
+ remove_self_edges(uses_dirs)
+ all_levels = toposort(uses_dirs)
+
+ if log_sorted_levels:
+ for (n, cur_level) in enumerate(all_levels):
+ if cur_level:
+ print(n, cur_level)
+
+ if uses_dirs:
+ print("There are circular .may_include dependencies in here somewhere:",
+ uses_dirs)
+ sys.exit(1)
+
+def main(argv):
+ import argparse
+
+ progname = argv[0]
+ parser = argparse.ArgumentParser(prog=progname)
+ parser.add_argument("--toposort", action="store_true",
+ help="Print a topologically sorted list of modules")
+ parser.add_argument("--list-unused", action="store_true",
+ help="List unused lines in .may_include files.")
+ parser.add_argument("--list-advisories", action="store_true",
+ help="List advisories as well as forbidden includes")
+ parser.add_argument("topdir", default="src", nargs="?",
+ help="Top-level directory for the tor source")
+ args = parser.parse_args(argv[1:])
+
+ run_check_includes(topdir=args.topdir,
+ log_sorted_levels=args.toposort,
+ list_unused=args.list_unused,
+ list_advisories=args.list_advisories)
+
+if __name__ == '__main__':
+ main(sys.argv)
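
To illustrate the rule format that includes.py consumes, here is a
hypothetical .may_include file (the directory and header names are made up,
not taken from the Tor tree); per the docstring above, it may contain blank
lines, "#" comments, exact filenames, globs, and an optional "!advisory"
marker that downgrades violations to advisories:

    # Allowed includes for this (hypothetical) directory
    orconfig.h
    lib/log/*.h
    lib/err/torerr.h
    !advisory

The toposort() helper can also be exercised on its own; a minimal sketch,
run from scripts/maint/practracker/, with a made-up three-module graph:

    from includes import remove_self_edges, toposort

    graph = {
        "lib/err": [],
        "lib/log": ["lib/err", "lib/log"],   # self-edge, stripped below
        "core/or": ["lib/log", "lib/err"],
    }
    remove_self_edges(graph)
    print(toposort(graph))
    # [['lib/err'], ['lib/log'], ['core/or']]
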
diff --git a/scripts/maint/practracker/metrics.py b/scripts/maint/practracker/metrics.py
index 5fa305a868..4c62bc2425 100644
--- a/scripts/maint/practracker/metrics.py
+++ b/scripts/maint/practracker/metrics.py
@@ -8,6 +8,7 @@ import re
def get_file_len(f):
"""Get file length of file"""
+ i = -1
for i, l in enumerate(f):
pass
return i + 1
@@ -16,7 +17,7 @@ def get_include_count(f):
"""Get number of #include statements in the file"""
include_count = 0
for line in f:
- if re.match(r' *# *include', line):
+ if re.match(r'\s*#\s*include', line):
include_count += 1
return include_count
@@ -27,10 +28,13 @@ def get_function_lines(f):
# Skip lines that look like they are defining functions with these
# names: they aren't real function definitions.
- REGEXP_CONFUSE_TERMS = {"MOCK_IMPL", "ENABLE_GCC_WARNINGS", "ENABLE_GCC_WARNING", "DUMMY_TYPECHECK_INSTANCE",
+ REGEXP_CONFUSE_TERMS = {"MOCK_IMPL", "MOCK_DECL", "HANDLE_DECL",
+ "ENABLE_GCC_WARNINGS", "ENABLE_GCC_WARNING",
+ "DUMMY_TYPECHECK_INSTANCE",
"DISABLE_GCC_WARNING", "DISABLE_GCC_WARNINGS"}
in_function = False
+ found_openbrace = False
for lineno, line in enumerate(f):
if not in_function:
# find the start of a function
@@ -41,10 +45,13 @@ def get_function_lines(f):
continue
func_start = lineno
in_function = True
-
+ elif not found_openbrace and line.startswith("{"):
+ found_openbrace = True
+ func_start = lineno
else:
# Find the end of a function
if line.startswith("}"):
- n_lines = lineno - func_start
+ n_lines = lineno - func_start + 1
in_function = False
+ found_openbrace = False
yield (func_name, n_lines)
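
The effect of the two small metrics.py fixes above can be checked directly;
a minimal sketch, run from scripts/maint/practracker/, using made-up input:

    from io import StringIO
    import metrics

    # The "i = -1" initialization makes empty files report length 0 instead
    # of raising UnboundLocalError.
    print(metrics.get_file_len(StringIO("")))        # 0
    print(metrics.get_file_len(StringIO("a\nb\n")))  # 2

    # The \s* pattern now also accepts tabs around "#" and "include".
    src = StringIO('#include "x.h"\n\t#\tinclude "y.h"\n')
    print(metrics.get_include_count(src))            # 2
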
diff --git a/scripts/maint/practracker/practracker.py b/scripts/maint/practracker/practracker.py
index febb14639d..71741265f6 100755
--- a/scripts/maint/practracker/practracker.py
+++ b/scripts/maint/practracker/practracker.py
@@ -7,7 +7,8 @@ Go through the various .c files and collect metrics about them. If the metrics
violate some of our best practices and they are not found in the optional
exceptions file, then log a problem about them.
-We currently do metrics about file size, function size and number of includes.
+We currently compute metrics for file size, function size, and number of
+includes, for C source files and headers.
practracker.py should be run with its second argument pointing to the Tor
top-level source directory like this:
@@ -25,6 +26,7 @@ import os, sys
import metrics
import util
import problem
+import includes
# The filename of the exceptions file (it should be placed in the practracker directory)
EXCEPTIONS_FNAME = "./exceptions.txt"
@@ -35,12 +37,23 @@ MAX_FILE_SIZE = 3000 # lines
MAX_FUNCTION_SIZE = 100 # lines
# Recommended number of #includes
MAX_INCLUDE_COUNT = 50
+# Recommended file size for headers
+MAX_H_FILE_SIZE = 500
+# Recommended include count for headers
+MAX_H_INCLUDE_COUNT = 15
+# Recommended number of dependency violations
+MAX_DEP_VIOLATIONS = 0
+
+# Map from problem type to functions that adjust for tolerance
+TOLERANCE_FNS = {
+ 'include-count': lambda n: int(n*1.1),
+ 'function-size': lambda n: int(n*1.1),
+ 'file-size': lambda n: int(n*1.02),
+ 'dependency-violation': lambda n: (n+2)
+}
#######################################################
-# ProblemVault singleton
-ProblemVault = None
-
# The Tor source code topdir
TOR_TOPDIR = None
@@ -54,71 +67,73 @@ else:
return open(fname, 'r', encoding='utf-8')
def consider_file_size(fname, f):
- """Consider file size issues for 'f' and return True if a new issue was found"""
+ """Consider the size of 'f' and yield an FileSizeItem for it.
+ """
file_size = metrics.get_file_len(f)
- if file_size > MAX_FILE_SIZE:
- p = problem.FileSizeProblem(fname, file_size)
- return ProblemVault.register_problem(p)
- return False
+ yield problem.FileSizeItem(fname, file_size)
def consider_includes(fname, f):
- """Consider #include issues for 'f' and return True if a new issue was found"""
+ """Consider the #include count in for 'f' and yield an IncludeCountItem
+ for it.
+ """
include_count = metrics.get_include_count(f)
- if include_count > MAX_INCLUDE_COUNT:
- p = problem.IncludeCountProblem(fname, include_count)
- return ProblemVault.register_problem(p)
- return False
+ yield problem.IncludeCountItem(fname, include_count)
def consider_function_size(fname, f):
- """Consider the function sizes for 'f' and return True if a new issue was found"""
- found_new_issues = False
+ """yield a FunctionSizeItem for every function in f.
+ """
for name, lines in metrics.get_function_lines(f):
- # Don't worry about functions within our limits
- if lines <= MAX_FUNCTION_SIZE:
- continue
-
- # That's a big function! Issue a problem!
canonical_function_name = "%s:%s()" % (fname, name)
- p = problem.FunctionSizeProblem(canonical_function_name, lines)
- found_new_issues |= ProblemVault.register_problem(p)
+ yield problem.FunctionSizeItem(canonical_function_name, lines)
+
+def consider_include_violations(fname, real_fname, f):
+ n = 0
+ for item in includes.consider_include_rules(real_fname, f):
+ n += 1
+ if n:
+ yield problem.DependencyViolationItem(fname, n)
- return found_new_issues
#######################################################
def consider_all_metrics(files_list):
- """Consider metrics for all files, and return True if new issues were found"""
- found_new_issues = False
+ """Consider metrics for all files, and yield a sequence of problem.Item
+    objects for those issues."""
for fname in files_list:
with open_file(fname) as f:
- found_new_issues |= consider_metrics_for_file(fname, f)
- return found_new_issues
+ for item in consider_metrics_for_file(fname, f):
+ yield item
def consider_metrics_for_file(fname, f):
"""
- Consider the various metrics for file with filename 'fname' and file descriptor 'f'.
- Return True if we found new issues.
+ Yield a sequence of problem.Item objects for all of the metrics in
+ 'f'.
"""
+ real_fname = fname
# Strip the useless part of the path
if fname.startswith(TOR_TOPDIR):
fname = fname[len(TOR_TOPDIR):]
- found_new_issues = False
-
# Get file length
- found_new_issues |= consider_file_size(fname, f)
+ for item in consider_file_size(fname, f):
+ yield item
# Consider number of #includes
f.seek(0)
- found_new_issues |= consider_includes(fname, f)
+ for item in consider_includes(fname, f):
+ yield item
# Get function length
f.seek(0)
- found_new_issues |= consider_function_size(fname, f)
+ for item in consider_function_size(fname, f):
+ yield item
- return found_new_issues
+ # Check for "upward" includes
+ f.seek(0)
+ for item in consider_include_violations(fname, real_fname, f):
+ yield item
HEADER="""\
# Welcome to the exceptions file for Tor's best-practices tracker!
@@ -129,8 +144,12 @@ HEADER="""\
#
# There are three kinds of problems that we recognize right now:
# function-size -- a function of more than {MAX_FUNCTION_SIZE} lines.
-# file-size -- a file of more than {MAX_FILE_SIZE} lines.
-# include-count -- a file with more than {MAX_INCLUDE_COUNT} #includes.
+# file-size -- a .c file of more than {MAX_FILE_SIZE} lines, or a .h
+# file with more than {MAX_H_FILE_SIZE} lines.
+# include-count -- a .c file with more than {MAX_INCLUDE_COUNT} #includes,
+#     or a .h file with more than {MAX_H_INCLUDE_COUNT} #includes.
+# dependency-violation -- a file includes a header that it should
+# not, according to an advisory .may_include file.
#
# Each line below represents a single exception that practracker should
# _ignore_. Each line has four parts:
@@ -161,8 +180,29 @@ def main(argv):
parser = argparse.ArgumentParser(prog=progname)
parser.add_argument("--regen", action="store_true",
help="Regenerate the exceptions file")
+ parser.add_argument("--list-overbroad", action="store_true",
+ help="List over-broad exceptions")
parser.add_argument("--exceptions",
help="Override the location for the exceptions file")
+ parser.add_argument("--strict", action="store_true",
+ help="Make all warnings into errors")
+ parser.add_argument("--terse", action="store_true",
+ help="Do not emit helpful instructions.")
+ parser.add_argument("--max-h-file-size", default=MAX_H_FILE_SIZE,
+ help="Maximum lines per .h file")
+ parser.add_argument("--max-h-include-count", default=MAX_H_INCLUDE_COUNT,
+ help="Maximum includes per .h file")
+ parser.add_argument("--max-file-size", default=MAX_FILE_SIZE,
+ help="Maximum lines per .c file")
+ parser.add_argument("--max-include-count", default=MAX_INCLUDE_COUNT,
+ help="Maximum includes per .c file")
+ parser.add_argument("--max-function-size", default=MAX_FUNCTION_SIZE,
+ help="Maximum lines per function")
+ parser.add_argument("--max-dependency-violations", default=MAX_DEP_VIOLATIONS,
+ help="Maximum number of dependency violations to allow")
+ parser.add_argument("--include-dir", action="append",
+ default=["src"],
+ help="A directory (under topdir) to search for source")
parser.add_argument("topdir", default=".", nargs="?",
help="Top-level directory for the tor source")
args = parser.parse_args(argv[1:])
@@ -174,24 +214,55 @@ def main(argv):
else:
exceptions_file = os.path.join(TOR_TOPDIR, "scripts/maint/practracker", EXCEPTIONS_FNAME)
+ # 0) Configure our thresholds of "what is a problem actually"
+ filt = problem.ProblemFilter()
+ filt.addThreshold(problem.FileSizeItem("*.c", int(args.max_file_size)))
+ filt.addThreshold(problem.IncludeCountItem("*.c", int(args.max_include_count)))
+ filt.addThreshold(problem.FileSizeItem("*.h", int(args.max_h_file_size)))
+ filt.addThreshold(problem.IncludeCountItem("*.h", int(args.max_h_include_count)))
+ filt.addThreshold(problem.FunctionSizeItem("*.c", int(args.max_function_size)))
+ filt.addThreshold(problem.DependencyViolationItem("*.c", int(args.max_dependency_violations)))
+ filt.addThreshold(problem.DependencyViolationItem("*.h", int(args.max_dependency_violations)))
+
+ if args.list_overbroad and args.regen:
+ print("Cannot use --regen with --list-overbroad",
+ file=sys.stderr)
+ sys.exit(1)
+
# 1) Get all the .c files we care about
- files_list = util.get_tor_c_files(TOR_TOPDIR)
+ files_list = util.get_tor_c_files(TOR_TOPDIR, args.include_dir)
# 2) Initialize problem vault and load an optional exceptions file so that
# we don't warn about the past
- global ProblemVault
-
if args.regen:
tmpname = exceptions_file + ".tmp"
tmpfile = open(tmpname, "w")
- sys.stdout = tmpfile
- sys.stdout.write(HEADER)
+ problem_file = tmpfile
+ problem_file.write(HEADER)
ProblemVault = problem.ProblemVault()
else:
ProblemVault = problem.ProblemVault(exceptions_file)
+ problem_file = sys.stdout
+
+ if args.list_overbroad:
+ # If we're listing overbroad exceptions, don't list problems.
+ problem_file = util.NullFile()
+
+ # 2.1) Adjust the exceptions so that we warn only about small problems,
+ # and produce errors on big ones.
+ if not (args.regen or args.list_overbroad or args.strict):
+ ProblemVault.set_tolerances(TOLERANCE_FNS)
# 3) Go through all the files and report problems if they are not exceptions
- found_new_issues = consider_all_metrics(files_list)
+ found_new_issues = 0
+ for item in filt.filter(consider_all_metrics(files_list)):
+ status = ProblemVault.register_problem(item)
+ if status == problem.STATUS_ERR:
+ print(item, file=problem_file)
+ found_new_issues += 1
+ elif status == problem.STATUS_WARN:
+ # warnings always go to stdout.
+ print("(warning) {}".format(item))
if args.regen:
tmpfile.close()
@@ -199,18 +270,34 @@ def main(argv):
sys.exit(0)
# If new issues were found, try to give out some advice to the developer on how to resolve it.
- if found_new_issues and not args.regen:
+ if found_new_issues and not args.regen and not args.terse:
new_issues_str = """\
-FAILURE: practracker found new problems in the code: see warnings above.
+FAILURE: practracker found {} new problem(s) in the code: see warnings above.
Please fix the problems if you can, and update the exceptions file
({}) if you can't.
See doc/HACKING/HelpfulTools.md for more information on using practracker.\
-""".format(exceptions_file)
+
+You can disable practracker entirely by setting the TOR_DISABLE_PRACTRACKER
+environment variable.
+""".format(found_new_issues, exceptions_file)
print(new_issues_str)
+ if args.list_overbroad:
+ def k_fn(tup):
+ return tup[0].key()
+ for (ex,p) in sorted(ProblemVault.list_overbroad_exceptions(), key=k_fn):
+ if p is None:
+ print(ex, "->", 0)
+ else:
+ print(ex, "->", p.metric_value)
+
sys.exit(found_new_issues)
if __name__ == '__main__':
+ if os.environ.get("TOR_DISABLE_PRACTRACKER"):
+ print("TOR_DISABLE_PRACTRACKER is set, skipping practracker tests.",
+ file=sys.stderr)
+ sys.exit(0)
main(sys.argv)
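
How the new warn-versus-error logic plays out: in a normal run (no --regen,
--list-overbroad, or --strict), set_tolerances() stretches each exception's
hard limit by the factors in TOLERANCE_FNS, while the value originally read
from exceptions.txt stays behind as the warning threshold. A minimal sketch,
run from scripts/maint/practracker/; "a.c:f()" is a made-up location, and it
assumes FunctionSizeItem keeps the (location, value) constructor used above:

    import problem

    ex = problem.FunctionSizeItem("a.c:f()", 100)  # existing exception
    ex.metric_value = int(ex.metric_value * 1.1)   # what set_tolerances() does
                                                   # for 'function-size';
                                                   # warning_threshold stays 100

    print(problem.FunctionSizeItem("a.c:f()", 95).is_worse_than(ex))   # 0 = STATUS_OK
    print(problem.FunctionSizeItem("a.c:f()", 105).is_worse_than(ex))  # 1 = STATUS_WARN
    print(problem.FunctionSizeItem("a.c:f()", 120).is_worse_than(ex))  # 2 = STATUS_ERR

In practracker.py, STATUS_WARN results are printed as "(warning) ..." on
stdout, while STATUS_ERR items are counted in found_new_issues and determine
the exit status.
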
diff --git a/scripts/maint/practracker/practracker_tests.py b/scripts/maint/practracker/practracker_tests.py
index cdbab2908e..45719d6cb7 100755
--- a/scripts/maint/practracker/practracker_tests.py
+++ b/scripts/maint/practracker/practracker_tests.py
@@ -1,8 +1,15 @@
+#!/usr/bin/python
+
"""Some simple tests for practracker metrics"""
import unittest
-import StringIO
+try:
+ # python 2 names the module this way...
+ from StringIO import StringIO
+except ImportError:
+ # python 3 names the module this way.
+ from io import StringIO
import metrics
@@ -36,15 +43,25 @@ fun,(
class TestFunctionLength(unittest.TestCase):
def test_function_length(self):
- funcs = StringIO.StringIO(function_file)
+ funcs = StringIO(function_file)
# All functions should have length 2
- for name, lines in metrics.function_lines(funcs):
+ for name, lines in metrics.get_function_lines(funcs):
self.assertEqual(name, "fun")
funcs.seek(0)
- for name, lines in metrics.function_lines(funcs):
- self.assertEqual(lines, 2)
+ for name, lines in metrics.get_function_lines(funcs):
+ self.assertEqual(lines, 4)
+
+class TestIncludeCount(unittest.TestCase):
+ def test_include_count(self):
+ f = StringIO("""
+ # include <abc.h>
+ # include "def.h"
+#include "ghi.h"
+\t#\t include "jkl.h"
+""")
+ self.assertEqual(metrics.get_include_count(f),4)
if __name__ == '__main__':
unittest.main()
diff --git a/scripts/maint/practracker/problem.py b/scripts/maint/practracker/problem.py
index c82c5db572..d21840a213 100644
--- a/scripts/maint/practracker/problem.py
+++ b/scripts/maint/practracker/problem.py
@@ -13,6 +13,10 @@ import os.path
import re
import sys
+STATUS_ERR = 2
+STATUS_WARN = 1
+STATUS_OK = 0
+
class ProblemVault(object):
"""
Singleton where we store the various new problems we
@@ -22,6 +26,9 @@ class ProblemVault(object):
def __init__(self, exception_fname=None):
# Exception dictionary: { problem.key() : Problem object }
self.exceptions = {}
+ # Exception dictionary: maps key to the problem it was used to
+ # suppress.
+ self.used_exception_for = {}
if exception_fname == None:
return
@@ -57,42 +64,93 @@ class ProblemVault(object):
def register_problem(self, problem):
"""
- Register this problem to the problem value. Return True if it was a new
- problem or it worsens an already existing problem.
+        Register this problem in the problem vault. Return true if it was a new
+ problem or it worsens an already existing problem. A true
+ value may be STATUS_ERR to indicate a hard violation, or STATUS_WARN
+ to indicate a warning.
"""
# This is a new problem, print it
if problem.key() not in self.exceptions:
- print(problem)
- return True
+ return STATUS_ERR
# If it's an old problem, we don't warn if the situation got better
# (e.g. we went from 4k LoC to 3k LoC), but we do warn if the
# situation worsened (e.g. we went from 60 includes to 80).
- if problem.is_worse_than(self.exceptions[problem.key()]):
- print(problem)
- return True
+ status = problem.is_worse_than(self.exceptions[problem.key()])
+
+ # Remember that we used this exception, so that we can later
+ # determine whether the exception was overbroad.
+ self.used_exception_for[problem.key()] = problem
- return False
+ return status
-class Problem(object):
+ def list_overbroad_exceptions(self):
+ """Return an iterator of tuples containing (ex,prob) where ex is an
+        exception in this vault that is broader than it needs to be, and
+ prob is the worst problem (if any) that it covered.
+ """
+ for k in self.exceptions:
+ e = self.exceptions[k]
+ p = self.used_exception_for.get(k)
+ if p is None or e.is_worse_than(p):
+ yield (e, p)
+
+ def set_tolerances(self, fns):
+ """Adjust the tolerances for the exceptions in this vault. Takes
+        a map from problem type to a function that adjusts the permitted
+        metric value to its new maximum."""
+ for k in self.exceptions:
+ ex = self.exceptions[k]
+ fn = fns.get(ex.problem_type)
+ if fn is not None:
+ ex.metric_value = fn(ex.metric_value)
+
+class ProblemFilter(object):
+ def __init__(self):
+ self.thresholds = dict()
+
+ def addThreshold(self, item):
+ self.thresholds[(item.get_type(),item.get_file_type())] = item
+
+ def matches(self, item):
+ key = (item.get_type(), item.get_file_type())
+ filt = self.thresholds.get(key, None)
+ if filt is None:
+ return False
+ return item.is_worse_than(filt)
+
+ def filter(self, sequence):
+ for item in iter(sequence):
+ if self.matches(item):
+ yield item
+
+class Item(object):
"""
- A generic problem in our source code. See the subclasses below for the
- specific problems we are trying to tackle.
+ A generic measurement about some aspect of our source code. See
+ the subclasses below for the specific problems we are trying to tackle.
"""
def __init__(self, problem_type, problem_location, metric_value):
self.problem_location = problem_location
self.metric_value = int(metric_value)
+ self.warning_threshold = self.metric_value
self.problem_type = problem_type
def is_worse_than(self, other_problem):
- """Return True if this is a worse problem than other_problem"""
+ """Return STATUS_ERR if this is a worse problem than other_problem.
+ Return STATUS_WARN if it is a little worse, but falls within the
+ warning threshold. Return STATUS_OK if this problem is not
+ at all worse than other_problem.
+ """
if self.metric_value > other_problem.metric_value:
- return True
- return False
+ return STATUS_ERR
+ elif self.metric_value > other_problem.warning_threshold:
+ return STATUS_WARN
+ else:
+ return STATUS_OK
def key(self):
"""Generate a unique key that describes this problem that can be used as a dictionary key"""
- # Problem location is a filesystem path, so we need to normalize this
+ # Item location is a filesystem path, so we need to normalize this
# across platforms otherwise same paths are not gonna match.
canonical_location = os.path.normcase(self.problem_location)
return "%s:%s" % (canonical_location, self.problem_type)
@@ -100,7 +158,16 @@ class Problem(object):
def __str__(self):
return "problem %s %s %s" % (self.problem_type, self.problem_location, self.metric_value)
-class FileSizeProblem(Problem):
+ def get_type(self):
+ return self.problem_type
+
+ def get_file_type(self):
+ if self.problem_location.endswith(".h"):
+ return "*.h"
+ else:
+ return "*.c"
+
+class FileSizeItem(Item):
"""
Denotes a problem with the size of a .c file.
@@ -108,9 +175,9 @@ class FileSizeProblem(Problem):
'metric_value' is the number of lines in the .c file.
"""
def __init__(self, problem_location, metric_value):
- super(FileSizeProblem, self).__init__("file-size", problem_location, metric_value)
+ super(FileSizeItem, self).__init__("file-size", problem_location, metric_value)
-class IncludeCountProblem(Problem):
+class IncludeCountItem(Item):
"""
Denotes a problem with the number of #includes in a .c file.
@@ -118,9 +185,9 @@ class IncludeCountProblem(Problem):
'metric_value' is the number of #includes in the .c file.
"""
def __init__(self, problem_location, metric_value):
- super(IncludeCountProblem, self).__init__("include-count", problem_location, metric_value)
+ super(IncludeCountItem, self).__init__("include-count", problem_location, metric_value)
-class FunctionSizeProblem(Problem):
+class FunctionSizeItem(Item):
"""
Denotes a problem with a size of a function in a .c file.
@@ -131,7 +198,22 @@ class FunctionSizeProblem(Problem):
The 'metric_value' is the size of the offending function in lines.
"""
def __init__(self, problem_location, metric_value):
- super(FunctionSizeProblem, self).__init__("function-size", problem_location, metric_value)
+ super(FunctionSizeItem, self).__init__("function-size", problem_location, metric_value)
+
+class DependencyViolationItem(Item):
+ """
+ Denotes a dependency violation in a .c or .h file. A dependency violation
+ occurs when a file includes a file from some module that is not listed
+ in its .may_include file.
+
+ The 'problem_location' is the file that contains the problem.
+
+ The 'metric_value' is the number of forbidden includes.
+ """
+ def __init__(self, problem_location, metric_value):
+ super(DependencyViolationItem, self).__init__("dependency-violation",
+ problem_location,
+ metric_value)
comment_re = re.compile(r'#.*$')
@@ -149,10 +231,12 @@ def get_old_problem_from_exception_str(exception_str):
raise ValueError("Misformatted line {!r}".format(orig_str))
if problem_type == "file-size":
- return FileSizeProblem(problem_location, metric_value)
+ return FileSizeItem(problem_location, metric_value)
elif problem_type == "include-count":
- return IncludeCountProblem(problem_location, metric_value)
+ return IncludeCountItem(problem_location, metric_value)
elif problem_type == "function-size":
- return FunctionSizeProblem(problem_location, metric_value)
+ return FunctionSizeItem(problem_location, metric_value)
+ elif problem_type == "dependency-violation":
+ return DependencyViolationItem(problem_location, metric_value)
else:
raise ValueError("Unknown exception type {!r}".format(orig_str))
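
The ProblemFilter added above is how practracker turns its --max-* options into per-metric, per-file-type thresholds: one threshold Item per (type, file type) pair, with filter() yielding only the measurements that are worse than the matching threshold. A small usage sketch; the file names and limits here are illustrative, not values taken from the exceptions file:

from problem import ProblemFilter, FileSizeItem

filt = ProblemFilter()
filt.addThreshold(FileSizeItem("dummy.c", 3000))  # report .c files over 3000 lines
filt.addThreshold(FileSizeItem("dummy.h", 500))   # report .h files over 500 lines

measurements = [FileSizeItem("src/core/example.c", 3400),
                FileSizeItem("src/core/example.h", 120)]
for item in filt.filter(measurements):
    print(item)   # only the oversized .c file is reported
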
diff --git a/scripts/maint/practracker/test_practracker.sh b/scripts/maint/practracker/test_practracker.sh
new file mode 100755
index 0000000000..207a5ceded
--- /dev/null
+++ b/scripts/maint/practracker/test_practracker.sh
@@ -0,0 +1,70 @@
+#!/bin/sh
+
+umask 077
+unset TOR_DISABLE_PRACTRACKER
+
+TMPDIR=""
+clean () {
+ if [ -n "$TMPDIR" ] && [ -d "$TMPDIR" ]; then
+ rm -rf "$TMPDIR"
+ fi
+}
+trap clean EXIT HUP INT TERM
+
+if test "${PRACTRACKER_DIR}" = "" ||
+ test ! -e "${PRACTRACKER_DIR}/practracker.py" ; then
+ PRACTRACKER_DIR=$(dirname "$0")
+fi
+
+TMPDIR="$(mktemp -d -t pracktracker.test.XXXXXX)"
+if test -z "${TMPDIR}" || test ! -d "${TMPDIR}" ; then
+ echo >&2 "mktemp failed."
+ exit 1;
+fi
+
+DATA="${PRACTRACKER_DIR}/testdata"
+
+run_practracker() {
+ "${PYTHON:-python}" "${PRACTRACKER_DIR}/practracker.py" \
+ --include-dir "" \
+ --max-file-size=0 \
+ --max-function-size=0 \
+ --max-h-file-size=0 \
+ --max-h-include-count=0 \
+ --max-include-count=0 \
+ --terse \
+ "${DATA}/" "$@";
+}
+compare() {
+ # we can't use cmp because we need to use -b for windows
+ diff -b -u "$@" > "${TMPDIR}/test-diff"
+ if test -z "$(cat "${TMPDIR}"/test-diff)"; then
+ echo "OK"
+ else
+ cat "${TMPDIR}/test-diff"
+ echo "FAILED"
+ exit 1
+ fi
+}
+
+echo "unit tests:"
+
+"${PYTHON:-python}" "${PRACTRACKER_DIR}/practracker_tests.py" || exit 1
+
+echo "ex0:"
+
+run_practracker --exceptions "${DATA}/ex0.txt" > "${TMPDIR}/ex0-received.txt"
+
+compare "${TMPDIR}/ex0-received.txt" "${DATA}/ex0-expected.txt"
+
+echo "ex1:"
+
+run_practracker --exceptions "${DATA}/ex1.txt" > "${TMPDIR}/ex1-received.txt"
+
+compare "${TMPDIR}/ex1-received.txt" "${DATA}/ex1-expected.txt"
+
+echo "ex1.overbroad:"
+
+run_practracker --exceptions "${DATA}/ex1.txt" --list-overbroad > "${TMPDIR}/ex1-overbroad-received.txt"
+
+compare "${TMPDIR}/ex1-overbroad-received.txt" "${DATA}/ex1-overbroad-expected.txt"
diff --git a/scripts/maint/practracker/testdata/.may_include b/scripts/maint/practracker/testdata/.may_include
new file mode 100644
index 0000000000..40bf8155d9
--- /dev/null
+++ b/scripts/maint/practracker/testdata/.may_include
@@ -0,0 +1,3 @@
+!advisory
+
+permitted.h
diff --git a/scripts/maint/practracker/testdata/a.c b/scripts/maint/practracker/testdata/a.c
new file mode 100644
index 0000000000..1939773f57
--- /dev/null
+++ b/scripts/maint/practracker/testdata/a.c
@@ -0,0 +1,38 @@
+
+#include "one.h"
+#include "two.h"
+#incldue "three.h"
+
+# include "permitted.h"
+
+int
+i_am_a_function(void)
+{
+ call();
+ call();
+ /* comment
+
+ another */
+
+ return 3;
+}
+
+# include "five.h"
+
+long
+another_function(long x,
+ long y)
+{
+ int abcd;
+
+ abcd = x+y;
+ abcd *= abcd;
+
+ /* comment here */
+
+ return abcd +
+ abcd +
+ abcd;
+}
+
+/* And a comment to grow! */
diff --git a/scripts/maint/practracker/testdata/b.c b/scripts/maint/practracker/testdata/b.c
new file mode 100644
index 0000000000..bef277aaae
--- /dev/null
+++ b/scripts/maint/practracker/testdata/b.c
@@ -0,0 +1,15 @@
+
+MOCK_IMPL(int,
+foo,(void))
+{
+ // blah1
+ return 0;
+}
+
+MOCK_IMPL(int,
+bar,( long z))
+{
+ // blah2
+
+ return (int)(z+2);
+}
diff --git a/scripts/maint/practracker/testdata/ex.txt b/scripts/maint/practracker/testdata/ex.txt
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/scripts/maint/practracker/testdata/ex.txt
diff --git a/scripts/maint/practracker/testdata/ex0-expected.txt b/scripts/maint/practracker/testdata/ex0-expected.txt
new file mode 100644
index 0000000000..5f3d9e5aec
--- /dev/null
+++ b/scripts/maint/practracker/testdata/ex0-expected.txt
@@ -0,0 +1,11 @@
+problem file-size a.c 38
+problem include-count a.c 4
+problem function-size a.c:i_am_a_function() 9
+problem function-size a.c:another_function() 12
+problem dependency-violation a.c 3
+problem file-size b.c 15
+problem function-size b.c:foo() 4
+problem function-size b.c:bar() 5
+problem file-size header.h 8
+problem include-count header.h 4
+problem dependency-violation header.h 3
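
The dependency-violation figures in this expected output come from the include checker (scripts/maint/practracker/includes.py, which is not part of this section). A hedged sketch of the rule it enforces, enough to explain why a.c scores 3: only includes matching a pattern in the directory's .may_include are allowed, so one.h, two.h and five.h are violations, permitted.h is not, and the misspelled "#incldue" line is not counted as an include at all:

import fnmatch
import re

INCLUDE_RE = re.compile(r'^\s*#\s*include\s+[<"]([^">]+)[">]')

def count_violations(lines, allowed_patterns):
    """Count the includes that match none of the .may_include patterns."""
    count = 0
    for line in lines:
        m = INCLUDE_RE.match(line)
        if m and not any(fnmatch.fnmatch(m.group(1), pat)
                         for pat in allowed_patterns):
            count += 1
    return count

# With allowed_patterns=["permitted.h"], a.c and header.h each score 3.
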
diff --git a/scripts/maint/practracker/testdata/ex0.txt b/scripts/maint/practracker/testdata/ex0.txt
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/scripts/maint/practracker/testdata/ex0.txt
diff --git a/scripts/maint/practracker/testdata/ex1-expected.txt b/scripts/maint/practracker/testdata/ex1-expected.txt
new file mode 100644
index 0000000000..58140a4d9a
--- /dev/null
+++ b/scripts/maint/practracker/testdata/ex1-expected.txt
@@ -0,0 +1,3 @@
+problem function-size a.c:i_am_a_function() 9
+(warning) problem function-size a.c:another_function() 12
+problem function-size b.c:foo() 4
diff --git a/scripts/maint/practracker/testdata/ex1-overbroad-expected.txt b/scripts/maint/practracker/testdata/ex1-overbroad-expected.txt
new file mode 100644
index 0000000000..f69c608f40
--- /dev/null
+++ b/scripts/maint/practracker/testdata/ex1-overbroad-expected.txt
@@ -0,0 +1,2 @@
+problem file-size a.c 40 -> 38
+problem file-size z.c 100 -> 0
diff --git a/scripts/maint/practracker/testdata/ex1.txt b/scripts/maint/practracker/testdata/ex1.txt
new file mode 100644
index 0000000000..c698005d07
--- /dev/null
+++ b/scripts/maint/practracker/testdata/ex1.txt
@@ -0,0 +1,18 @@
+
+problem file-size a.c 40
+problem include-count a.c 4
+# this problem will produce an error
+problem function-size a.c:i_am_a_function() 8
+# this problem will produce a warning
+problem function-size a.c:another_function() 11
+problem file-size b.c 15
+# This is removed, and so will produce an error.
+# problem function-size b.c:foo() 4
+# This exception isn't used.
+problem file-size z.c 100
+
+problem function-size b.c:bar() 5
+problem dependency-violation a.c 3
+problem dependency-violation header.h 3
+problem file-size header.h 8
+problem include-count header.h 4
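
The error/warning split promised by the comments in ex1.txt follows from the STATUS_* logic in problem.py together with the tolerance that practracker.py applies to each exception via set_tolerances(). The exact tolerance is defined in practracker.py (outside this section); assuming roughly 10%, the split in ex1-expected.txt can be reproduced like this:

from problem import get_old_problem_from_exception_str as parse

ex_warn = parse("problem function-size a.c:another_function() 11")
ex_err = parse("problem function-size a.c:i_am_a_function() 8")
for ex in (ex_warn, ex_err):
    # What set_tolerances() does: widen metric_value while warning_threshold
    # keeps the original value from the exceptions file.
    ex.metric_value = int(ex.metric_value * 1.1)   # 11 -> 12, 8 -> 8

print(parse("problem function-size a.c:another_function() 12")
      .is_worse_than(ex_warn))   # 1 == STATUS_WARN: over 11, but not over 12
print(parse("problem function-size a.c:i_am_a_function() 9")
      .is_worse_than(ex_err))    # 2 == STATUS_ERR: over even the widened 8
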
diff --git a/scripts/maint/practracker/testdata/header.h b/scripts/maint/practracker/testdata/header.h
new file mode 100644
index 0000000000..1183f5db9a
--- /dev/null
+++ b/scripts/maint/practracker/testdata/header.h
@@ -0,0 +1,8 @@
+
+// some forbidden includes
+#include "foo.h"
+#include "quux.h"
+#include "quup.h"
+
+// a permitted include
+#include "permitted.h"
diff --git a/scripts/maint/practracker/testdata/not_c_file b/scripts/maint/practracker/testdata/not_c_file
new file mode 100644
index 0000000000..e150962c02
--- /dev/null
+++ b/scripts/maint/practracker/testdata/not_c_file
@@ -0,0 +1,2 @@
+
+This isn't a C file, so practracker shouldn't care about it.
diff --git a/scripts/maint/practracker/util.py b/scripts/maint/practracker/util.py
index b0ca73b997..df629110c2 100644
--- a/scripts/maint/practracker/util.py
+++ b/scripts/maint/practracker/util.py
@@ -2,27 +2,49 @@ import os
# We don't want to run metrics for unittests, automatically-generated C files,
# external libraries or git leftovers.
-EXCLUDE_SOURCE_DIRS = {"/src/test/", "/src/trunnel/", "/src/ext/", "/.git/"}
+EXCLUDE_SOURCE_DIRS = {"src/test/", "src/trunnel/", "src/rust/",
+ "src/ext/" }
-def get_tor_c_files(tor_topdir):
+EXCLUDE_FILES = {"orconfig.h"}
+
+def _norm(p):
+ return os.path.normcase(os.path.normpath(p))
+
+def get_tor_c_files(tor_topdir, include_dirs=None):
"""
- Return a list with the .c filenames we want to get metrics of.
+ Return a list with the .c and .h filenames we want to get metrics of.
"""
files_list = []
+ exclude_dirs = { _norm(os.path.join(tor_topdir, p)) for p in EXCLUDE_SOURCE_DIRS }
- for root, directories, filenames in os.walk(tor_topdir):
- directories.sort()
- filenames.sort()
- for filename in filenames:
- # We only care about .c files
- if not filename.endswith(".c"):
- continue
+ if include_dirs is None:
+ topdirs = [ tor_topdir ]
+ else:
+ topdirs = [ os.path.join(tor_topdir, inc) for inc in include_dirs ]
- # Exclude the excluded paths
- full_path = os.path.join(root,filename)
- if any(os.path.normcase(exclude_dir) in full_path for exclude_dir in EXCLUDE_SOURCE_DIRS):
- continue
+ for topdir in topdirs:
+ for root, directories, filenames in os.walk(topdir):
+ # Remove all the directories that are excluded.
+ directories[:] = [ d for d in directories
+ if _norm(os.path.join(root,d)) not in exclude_dirs ]
+ directories.sort()
+ filenames.sort()
+ for filename in filenames:
+ # We only care about .c and .h files
+ if not (filename.endswith(".c") or filename.endswith(".h")):
+ continue
+ if filename in EXCLUDE_FILES:
+ continue
- files_list.append(full_path)
+ full_path = os.path.join(root,filename)
+
+ files_list.append(full_path)
return files_list
+
+class NullFile:
+    """A file-like object that we can use to suppress output."""
+ def __init__(self):
+ pass
+ def write(self, s):
+ pass
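
A short usage sketch for the updated util.py helpers; the checkout path and include directory below are illustrative arguments, not values this patch hard-codes:

import util

# Walk only src/core under a hypothetical checkout, skipping the excluded
# directories and orconfig.h, and collecting both .c and .h files.
for path in util.get_tor_c_files("/home/user/git/tor", include_dirs=["src/core"]):
    print(path)

# NullFile stands in for a real file object when output should be discarded.
sink = util.NullFile()
sink.write("this line goes nowhere")
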