diff options
Diffstat (limited to 'scripts')
-rwxr-xr-x [changed from -rw-r--r--]  scripts/codegen/get_mozilla_ciphers.py |  13
-rwxr-xr-x                            scripts/maint/updateFallbackDirs.py    | 122
2 files changed, 91 insertions(+), 44 deletions(-)
diff --git a/scripts/codegen/get_mozilla_ciphers.py b/scripts/codegen/get_mozilla_ciphers.py index e673ec7dc6..b07746c2e2 100644..100755 --- a/scripts/codegen/get_mozilla_ciphers.py +++ b/scripts/codegen/get_mozilla_ciphers.py @@ -127,9 +127,9 @@ for k, v in enabled_ciphers.items(): #oSSLinclude = ('/usr/include/openssl/ssl3.h', '/usr/include/openssl/ssl.h', # '/usr/include/openssl/ssl2.h', '/usr/include/openssl/ssl23.h', # '/usr/include/openssl/tls1.h') -oSSLinclude = ('ssl/ssl3.h', 'ssl/ssl.h', - 'ssl/ssl2.h', 'ssl/ssl23.h', - 'ssl/tls1.h') +oSSLinclude = ['ssl3.h', 'ssl.h' + 'ssl2.h', 'ssl23.h', + 'tls1.h'] ##### # This reads the hex code for the ciphers that are used by firefox. @@ -155,9 +155,12 @@ for x in used_ciphers: openssl_macro_by_hex = {} all_openssl_macros = {} for fl in oSSLinclude: - fp = open(ossl(fl), 'r') + fname = ossl("include/openssl/"+fl) + if not os.path.exists(fname): + continue + fp = open(fname, 'r') for line in fp.readlines(): - m = re.match('#define\s+(\S+)\s+(\S+)', line) + m = re.match('# *define\s+(\S+)\s+(\S+)', line) if m: value,key = m.groups() if key.startswith('0x') and "_CK_" in value: diff --git a/scripts/maint/updateFallbackDirs.py b/scripts/maint/updateFallbackDirs.py index e0bc93935c..117ac5cccb 100755 --- a/scripts/maint/updateFallbackDirs.py +++ b/scripts/maint/updateFallbackDirs.py @@ -1,6 +1,8 @@ #!/usr/bin/python -# Usage: scripts/maint/updateFallbackDirs.py > src/or/fallback_dirs.inc +# Usage: +# scripts/maint/updateFallbackDirs.py > src/or/fallback_dirs.inc +# scripts/maint/updateFallbackDirs.py check_existing > src/or/fallback_dirs.inc # # This script should be run from a stable, reliable network connection, # with no other network activity (and not over tor). 
@@ -37,17 +39,13 @@ import dateutil.parser # bson_lazy provides bson #from bson import json_util import copy +import re from stem.descriptor import DocumentHandler from stem.descriptor.remote import get_consensus import logging -# INFO tells you why each relay was included or excluded -# WARN tells you about potential misconfigurations and relay detail changes -logging.basicConfig(level=logging.WARNING) logging.root.name = '' -# INFO tells you about each consensus download attempt -logging.getLogger('stem').setLevel(logging.WARNING) HAVE_IPADDRESS = False try: @@ -148,6 +146,7 @@ BLACKLIST_EXCLUDES_WHITELIST_ENTRIES = True WHITELIST_FILE_NAME = 'scripts/maint/fallback.whitelist' BLACKLIST_FILE_NAME = 'scripts/maint/fallback.blacklist' +FALLBACK_FILE_NAME = 'src/or/fallback_dirs.inc' # The number of bytes we'll read from a filter file before giving up MAX_LIST_FILE_SIZE = 1024 * 1024 @@ -368,6 +367,15 @@ def read_from_file(file_name, max_len): ) return None +def parse_fallback_file(file_name): + file_data = read_from_file(file_name, MAX_LIST_FILE_SIZE) + file_data = cleanse_unprintable(file_data) + file_data = remove_bad_chars(file_data, '\n"\0') + file_data = re.sub('/\*.*?\*/', '', file_data) + file_data = file_data.replace(',', '\n') + file_data = file_data.replace(' weight=10', '') + return file_data + def load_possibly_compressed_response_json(response): if response.info().get('Content-Encoding') == 'gzip': buf = StringIO.StringIO( response.read() ) @@ -702,15 +710,15 @@ class Candidate(object): # # if the relay doesn't have a recommended version field, exclude the relay if not self._data.has_key('recommended_version'): - logging.info('%s not a candidate: no recommended_version field', + log_excluded('%s not a candidate: no recommended_version field', self._fpr) return False if not self._data['recommended_version']: - logging.info('%s not a candidate: version not recommended', self._fpr) + log_excluded('%s not a candidate: version not recommended', self._fpr) 
return False # if the relay doesn't have version field, exclude the relay if not self._data.has_key('version'): - logging.info('%s not a candidate: no version field', self._fpr) + log_excluded('%s not a candidate: no version field', self._fpr) return False if self._data['version'] in Candidate.STALE_CONSENSUS_VERSIONS: logging.warning('%s not a candidate: version delivers stale consensuses', @@ -864,36 +872,36 @@ class Candidate(object): def is_candidate(self): try: if (MUST_BE_RUNNING_NOW and not self.is_running()): - logging.info('%s not a candidate: not running now, unable to check ' + + log_excluded('%s not a candidate: not running now, unable to check ' + 'DirPort consensus download', self._fpr) return False if (self._data['last_changed_address_or_port'] > self.CUTOFF_ADDRESS_AND_PORT_STABLE): - logging.info('%s not a candidate: changed address/port recently (%s)', + log_excluded('%s not a candidate: changed address/port recently (%s)', self._fpr, self._data['last_changed_address_or_port']) return False if self._running < CUTOFF_RUNNING: - logging.info('%s not a candidate: running avg too low (%lf)', + log_excluded('%s not a candidate: running avg too low (%lf)', self._fpr, self._running) return False if self._v2dir < CUTOFF_V2DIR: - logging.info('%s not a candidate: v2dir avg too low (%lf)', + log_excluded('%s not a candidate: v2dir avg too low (%lf)', self._fpr, self._v2dir) return False if self._badexit is not None and self._badexit > PERMITTED_BADEXIT: - logging.info('%s not a candidate: badexit avg too high (%lf)', + log_excluded('%s not a candidate: badexit avg too high (%lf)', self._fpr, self._badexit) return False # this function logs a message depending on which check fails if not self.is_valid_version(): return False if self._guard < CUTOFF_GUARD: - logging.info('%s not a candidate: guard avg too low (%lf)', + log_excluded('%s not a candidate: guard avg too low (%lf)', self._fpr, self._guard) return False if (not 
self._data.has_key('consensus_weight') or self._data['consensus_weight'] < 1): - logging.info('%s not a candidate: consensus weight invalid', self._fpr) + log_excluded('%s not a candidate: consensus weight invalid', self._fpr) return False except BaseException as e: logging.warning("Exception %s when checking if fallback is a candidate", @@ -974,26 +982,26 @@ class Candidate(object): for key in entry: value = entry[key] if key == 'id' and value == self._fpr: - logging.info('%s is in the blacklist: fingerprint matches', + log_excluded('%s is in the blacklist: fingerprint matches', self._fpr) return True if key == 'ipv4' and value == self.dirip: # if the dirport is present, check it too if entry.has_key('dirport'): if int(entry['dirport']) == self.dirport: - logging.info('%s is in the blacklist: IPv4 (%s) and ' + + log_excluded('%s is in the blacklist: IPv4 (%s) and ' + 'DirPort (%d) match', self._fpr, self.dirip, self.dirport) return True # if the orport is present, check it too elif entry.has_key('orport'): if int(entry['orport']) == self.orport: - logging.info('%s is in the blacklist: IPv4 (%s) and ' + + log_excluded('%s is in the blacklist: IPv4 (%s) and ' + 'ORPort (%d) match', self._fpr, self.dirip, self.orport) return True else: - logging.info('%s is in the blacklist: IPv4 (%s) matches, and ' + + log_excluded('%s is in the blacklist: IPv4 (%s) matches, and ' + 'entry has no DirPort or ORPort', self._fpr, self.dirip) return True @@ -1007,19 +1015,19 @@ class Candidate(object): # if the dirport is present, check it too if entry.has_key('dirport'): if int(entry['dirport']) == self.dirport: - logging.info('%s is in the blacklist: IPv6 (%s) and ' + + log_excluded('%s is in the blacklist: IPv6 (%s) and ' + 'DirPort (%d) match', self._fpr, ipv6, self.dirport) return True # we've already checked the ORPort, it's part of entry['ipv6'] else: - logging.info('%s is in the blacklist: IPv6 (%s) matches, and' + + log_excluded('%s is in the blacklist: IPv6 (%s) matches, and' 
+ 'entry has no DirPort', self._fpr, ipv6) return True elif (key == 'ipv6' or self.has_ipv6()): # only log if the fingerprint matches but the IPv6 doesn't if entry.has_key('id') and entry['id'] == self._fpr: - logging.info('%s skipping IPv6 blacklist comparison: relay ' + + log_excluded('%s skipping IPv6 blacklist comparison: relay ' + 'has%s IPv6%s, but entry has%s IPv6%s', self._fpr, '' if self.has_ipv6() else ' no', (' (' + ipv6 + ')') if self.has_ipv6() else '', @@ -1187,7 +1195,7 @@ class Candidate(object): time_since_expiry = (end - consensus.valid_until).total_seconds() except Exception, stem_error: end = datetime.datetime.utcnow() - logging.info('Unable to retrieve a consensus from %s: %s', nickname, + log_excluded('Unable to retrieve a consensus from %s: %s', nickname, stem_error) status = 'error: "%s"' % (stem_error) level = logging.WARNING @@ -1431,7 +1439,7 @@ class CandidateList(dict): self.fallbacks.sort(key=lambda f: f._data[data_field]) @staticmethod - def load_relaylist(file_name): + def load_relaylist(file_obj): """ Read each line in the file, and parse it like a FallbackDir line: an IPv4 address and optional port: <IPv4 address>:<port> @@ -1446,8 +1454,9 @@ class CandidateList(dict): (of string -> string key/value pairs), and these dictionaries are placed in an array. 
comments start with # and are ignored """ + file_data = file_obj['data'] + file_name = file_obj['name'] relaylist = [] - file_data = read_from_file(file_name, MAX_LIST_FILE_SIZE) if file_data is None: return relaylist for line in file_data.split('\n'): @@ -1488,12 +1497,12 @@ class CandidateList(dict): return relaylist # apply the fallback whitelist and blacklist - def apply_filter_lists(self): + def apply_filter_lists(self, whitelist_obj, blacklist_obj): excluded_count = 0 logging.debug('Applying whitelist and blacklist.') # parse the whitelist and blacklist - whitelist = self.load_relaylist(WHITELIST_FILE_NAME) - blacklist = self.load_relaylist(BLACKLIST_FILE_NAME) + whitelist = self.load_relaylist(whitelist_obj) + blacklist = self.load_relaylist(blacklist_obj) filtered_fallbacks = [] for f in self.fallbacks: in_whitelist = f.is_in_whitelist(whitelist) @@ -1513,7 +1522,7 @@ class CandidateList(dict): elif in_blacklist: # exclude excluded_count += 1 - logging.info('Excluding %s: in blacklist.', f._fpr) + log_excluded('Excluding %s: in blacklist.', f._fpr) else: if INCLUDE_UNLISTED_ENTRIES: # include @@ -1521,7 +1530,7 @@ class CandidateList(dict): else: # exclude excluded_count += 1 - logging.info('Excluding %s: in neither blacklist nor whitelist.', + log_excluded('Excluding %s: in neither blacklist nor whitelist.', f._fpr) self.fallbacks = filtered_fallbacks return excluded_count @@ -1557,7 +1566,7 @@ class CandidateList(dict): # the bandwidth we log here is limited by the relay's consensus weight # as well as its adverttised bandwidth. 
See set_measured_bandwidth # for details - logging.info('%s not a candidate: bandwidth %.1fMByte/s too low, ' + + log_excluded('%s not a candidate: bandwidth %.1fMByte/s too low, ' + 'must be at least %.1fMByte/s', f._fpr, f._data['measured_bandwidth']/(1024.0*1024.0), MIN_BANDWIDTH/(1024.0*1024.0)) @@ -1661,13 +1670,13 @@ class CandidateList(dict): CandidateList.attribute_add(f.ipv6addr, ip_list) elif not CandidateList.attribute_allow(f.dirip, ip_list, MAX_FALLBACKS_PER_IPV4): - logging.info('Eliminated %s: already have %d fallback(s) on IPv4 %s' + log_excluded('Eliminated %s: already have %d fallback(s) on IPv4 %s' %(f._fpr, CandidateList.attribute_count(f.dirip, ip_list), f.dirip)) elif (f.has_ipv6() and not CandidateList.attribute_allow(f.ipv6addr, ip_list, MAX_FALLBACKS_PER_IPV6)): - logging.info('Eliminated %s: already have %d fallback(s) on IPv6 %s' + log_excluded('Eliminated %s: already have %d fallback(s) on IPv6 %s' %(f._fpr, CandidateList.attribute_count(f.ipv6addr, ip_list), f.ipv6addr)) @@ -1691,7 +1700,7 @@ class CandidateList(dict): contact_limit_fallbacks.append(f) CandidateList.attribute_add(f._data['contact'], contact_list) else: - logging.info( + log_excluded( 'Eliminated %s: already have %d fallback(s) on ContactInfo %s' %(f._fpr, CandidateList.attribute_count(f._data['contact'], contact_list), @@ -1720,7 +1729,7 @@ class CandidateList(dict): else: # we already have a fallback with this fallback in its effective # family - logging.info( + log_excluded( 'Eliminated %s: already have %d fallback(s) in effective family' %(f._fpr, CandidateList.attribute_count(f._fpr, fingerprint_list))) original_count = len(self.fallbacks) @@ -2064,9 +2073,44 @@ class CandidateList(dict): s += 'or setting INCLUDE_UNLISTED_ENTRIES = True.' 
return s +def process_existing(): + logging.basicConfig(level=logging.INFO) + logging.getLogger('stem').setLevel(logging.INFO) + whitelist = {'data': parse_fallback_file(FALLBACK_FILE_NAME), + 'name': FALLBACK_FILE_NAME} + blacklist = {'data': read_from_file(BLACKLIST_FILE_NAME, MAX_LIST_FILE_SIZE), + 'name': BLACKLIST_FILE_NAME} + list_fallbacks(whitelist, blacklist) + +def process_default(): + logging.basicConfig(level=logging.WARNING) + logging.getLogger('stem').setLevel(logging.WARNING) + whitelist = {'data': read_from_file(WHITELIST_FILE_NAME, MAX_LIST_FILE_SIZE), + 'name': WHITELIST_FILE_NAME} + blacklist = {'data': read_from_file(BLACKLIST_FILE_NAME, MAX_LIST_FILE_SIZE), + 'name': BLACKLIST_FILE_NAME} + list_fallbacks(whitelist, blacklist) + ## Main Function +def main(): + if get_command() == 'check_existing': + process_existing() + else: + process_default() + +def get_command(): + if len(sys.argv) == 2: + return sys.argv[1] + else: + return None + +def log_excluded(msg, *args): + if get_command() == 'check_existing': + logging.warning(msg, *args) + else: + logging.info(msg, *args) -def list_fallbacks(): +def list_fallbacks(whitelist, blacklist): """ Fetches required onionoo documents and evaluates the fallback directory criteria for each of the relays """ @@ -2099,7 +2143,7 @@ def list_fallbacks(): # warning that the details have changed from those in the whitelist. # instead, there will be an info-level log during the eligibility check. initial_count = len(candidates.fallbacks) - excluded_count = candidates.apply_filter_lists() + excluded_count = candidates.apply_filter_lists(whitelist, blacklist) print candidates.summarise_filters(initial_count, excluded_count) eligible_count = len(candidates.fallbacks) @@ -2170,4 +2214,4 @@ def list_fallbacks(): print x.fallbackdir_line(candidates.fallbacks, prefilter_fallbacks) if __name__ == "__main__": - list_fallbacks() + main() |