diff --git a/CVE-2020-7921.patch b/CVE-2020-7921.patch deleted file mode 100644 index 8bb520d513c3abd793311e4d6f8156fe1cb7bf09..0000000000000000000000000000000000000000 --- a/CVE-2020-7921.patch +++ /dev/null @@ -1,159 +0,0 @@ -From fb87cc88ecb5d300f14cda7bc238d7d5132118f5 Mon Sep 17 00:00:00 2001 -From: Spencer Jackson -Date: Wed, 15 Jan 2020 16:30:37 +0000 -Subject: [PATCH] SERVER-45472 Ensure RoleGraph can serialize authentication - restrictions to BSON - -(cherry picked from commit 521e56b407ac72bc69a97a24d1253f51a5b6e81b) -(cherry picked from commit a10d0a22d5d009d27664967181042933ec1bef36) ---- - .../auth/authentication_restrictions_role.js | 8 +++++ - src/mongo/db/auth/role_graph.cpp | 9 +++++ - src/mongo/db/auth/role_graph_test.cpp | 36 ++++++++++++++++++- - 3 files changed, 52 insertions(+), 1 deletion(-) - -diff --git a/jstests/auth/authentication_restrictions_role.js b/jstests/auth/authentication_restrictions_role.js -index 3f23cfdcb921..691491a0765c 100644 ---- a/jstests/auth/authentication_restrictions_role.js -+++ b/jstests/auth/authentication_restrictions_role.js -@@ -42,6 +42,14 @@ - assert.commandWorked(admin.runCommand({createRole: "role3", roles: [], privileges: []})); - - print("=== Role creation tests"); -+ print("When a role is updated, it retains authenticationRestrictions"); -+ assert.commandWorked(admin.runCommand({updateRole: "role2", roles: ["root"]})); -+ const role2Info = assert.commandWorked( -+ admin.runCommand({rolesInfo: "role2", showAuthenticationRestrictions: true})); -+ printjson(role2Info); -+ assert.eq(JSON.stringify([[{clientSource: ["127.0.0.1/32"]}]]), -+ JSON.stringify(role2Info.roles[0].authenticationRestrictions)); -+ - print( - "When a client creates roles with empty authenticationRestrictions, the operation succeeds, though it has no effect"); - assert.commandWorked(admin.runCommand( -diff --git a/src/mongo/db/auth/role_graph.cpp b/src/mongo/db/auth/role_graph.cpp -index fc55b6a1d43b..03776361e3ca 100644 ---- a/src/mongo/db/auth/role_graph.cpp -+++ b/src/mongo/db/auth/role_graph.cpp -@@ -590,6 +590,15 @@ Status RoleGraph::getBSONForRole(RoleGraph* graph, - uassertStatusOK(rolesArrayElement.pushBack(roleObj)); - } - -+ // Build authentication restrictions -+ auto restrictions = graph->getDirectAuthenticationRestrictions(roleName); -+ mutablebson::Element authenticationRestrictionsElement = -+ result.getDocument().makeElementArray("authenticationRestrictions"); -+ uassertStatusOK(result.pushBack(authenticationRestrictionsElement)); -+ if (restrictions) { -+ uassertStatusOK(authenticationRestrictionsElement.setValueArray(restrictions->toBSON())); -+ } -+ - return Status::OK(); - } catch (...) 
{
- return exceptionToStatus();
-diff --git a/src/mongo/db/auth/role_graph_test.cpp b/src/mongo/db/auth/role_graph_test.cpp
-index 0e22892f4ebf..e763b0929f99 100644
---- a/src/mongo/db/auth/role_graph_test.cpp
-+++ b/src/mongo/db/auth/role_graph_test.cpp
-@@ -35,6 +35,7 @@
- #include 
-
- #include "mongo/bson/mutable/document.h"
-+#include "mongo/db/auth/address_restriction.h"
- #include "mongo/db/auth/role_graph.h"
- #include "mongo/unittest/unittest.h"
- #include "mongo/util/mongoutils/str.h"
-@@ -48,16 +49,21 @@ TEST(RoleParsingTest, BuildRoleBSON) {
- RoleName roleA("roleA", "dbA");
- RoleName roleB("roleB", "dbB");
- RoleName roleC("roleC", "dbC");
-+ RoleName roleD("roleD", "dbD");
- ActionSet actions;
- actions.addAction(ActionType::find);
- actions.addAction(ActionType::insert);
-+ SharedRestrictionDocument restrictions = uassertStatusOK(parseAuthenticationRestriction(
-+ BSON_ARRAY(BSON("clientSource" << BSON_ARRAY("127.0.0.1")))));
-
- ASSERT_OK(graph.createRole(roleA));
- ASSERT_OK(graph.createRole(roleB));
- ASSERT_OK(graph.createRole(roleC));
-+ ASSERT_OK(graph.createRole(roleD));
-
- ASSERT_OK(graph.addRoleToRole(roleA, roleC));
- ASSERT_OK(graph.addRoleToRole(roleA, roleB));
-+ ASSERT_OK(graph.addRoleToRole(roleA, roleD));
- ASSERT_OK(graph.addRoleToRole(roleB, roleC));
-
- ASSERT_OK(graph.addPrivilegeToRole(
-@@ -66,6 +72,7 @@ TEST(RoleParsingTest, BuildRoleBSON) {
- roleB, Privilege(ResourcePattern::forExactNamespace(NamespaceString("dbB.foo")), actions)));
- ASSERT_OK(
- graph.addPrivilegeToRole(roleC, Privilege(ResourcePattern::forClusterResource(), actions)));
-+ ASSERT_OK(graph.replaceRestrictionsForRole(roleD, restrictions));
- ASSERT_OK(graph.recomputePrivilegeData());
-
-
-@@ -78,6 +85,8 @@ TEST(RoleParsingTest, BuildRoleBSON) {
- ASSERT_EQUALS("roleA", roleDoc["role"].String());
- ASSERT_EQUALS("dbA", roleDoc["db"].String());
-
-+ ASSERT_TRUE(roleDoc["authenticationRestrictions"].Array().empty());
-+
- std::vector<BSONElement> privs = roleDoc["privileges"].Array();
- ASSERT_EQUALS(1U, privs.size());
- ASSERT_EQUALS("", privs[0].Obj()["resource"].Obj()["db"].String());
-@@ -89,7 +98,7 @@ TEST(RoleParsingTest, BuildRoleBSON) {
- ASSERT_EQUALS("insert", actionElements[1].String());
-
- std::vector<BSONElement> roles = roleDoc["roles"].Array();
-- ASSERT_EQUALS(2U, roles.size());
-+ ASSERT_EQUALS(3U, roles.size());
- ASSERT_EQUALS("roleC", roles[0].Obj()["role"].String());
- ASSERT_EQUALS("dbC", roles[0].Obj()["db"].String());
- ASSERT_EQUALS("roleB", roles[1].Obj()["role"].String());
-@@ -104,6 +113,8 @@ TEST(RoleParsingTest, BuildRoleBSON) {
- ASSERT_EQUALS("roleB", roleDoc["role"].String());
- ASSERT_EQUALS("dbB", roleDoc["db"].String());
-
-+ ASSERT_TRUE(roleDoc["authenticationRestrictions"].Array().empty());
-+
- privs = roleDoc["privileges"].Array();
- ASSERT_EQUALS(1U, privs.size());
- ASSERT_EQUALS("dbB", privs[0].Obj()["resource"].Obj()["db"].String());
-@@ -128,6 +139,8 @@ TEST(RoleParsingTest, BuildRoleBSON) {
- ASSERT_EQUALS("roleC", roleDoc["role"].String());
- ASSERT_EQUALS("dbC", roleDoc["db"].String());
-
-+ ASSERT_TRUE(roleDoc["authenticationRestrictions"].Array().empty());
-+
- privs = roleDoc["privileges"].Array();
- ASSERT_EQUALS(1U, privs.size());
- ASSERT(privs[0].Obj()["resource"].Obj()["cluster"].Bool());
-@@ -140,6 +153,27 @@ TEST(RoleParsingTest, BuildRoleBSON) {
-
- roles = roleDoc["roles"].Array();
- ASSERT_EQUALS(0U, roles.size());
-+
-+ // Role D
-+ doc.reset();
-+ ASSERT_OK(RoleGraph::getBSONForRole(&graph, roleD, doc.root()));
-+ roleDoc = doc.getObject();
-+
-+ 
ASSERT_EQUALS("dbD.roleD", roleDoc["_id"].String()); -+ ASSERT_EQUALS("roleD", roleDoc["role"].String()); -+ ASSERT_EQUALS("dbD", roleDoc["db"].String()); -+ -+ ASSERT_FALSE(roleDoc["authenticationRestrictions"].Array().empty()); -+ auto restrictionObj = BSONArray(roleDoc["authenticationRestrictions"].Obj()); -+ SharedRestrictionDocument parsedRestrictions = -+ uassertStatusOK(parseAuthenticationRestriction(restrictionObj)); -+ ASSERT_EQ(restrictions->toString(), parsedRestrictions->toString()); -+ -+ privs = roleDoc["privileges"].Array(); -+ ASSERT_TRUE(privs.empty()); -+ -+ roles = roleDoc["roles"].Array(); -+ ASSERT_EQUALS(0U, roles.size()); - } - - // Tests adding and removing roles from other roles, the RoleNameIterator, and the diff --git a/inconsistent-tabs.patch b/inconsistent-tabs.patch deleted file mode 100644 index 2d86825814ef0ee93d2dab21b19da1f24d00e72b..0000000000000000000000000000000000000000 --- a/inconsistent-tabs.patch +++ /dev/null @@ -1,12 +0,0 @@ -diff -uprN mongo-r4.0.1_raw/SConstruct mongo-r4.0.1/SConstruct ---- mongo-r4.0.1_raw/SConstruct 2018-07-28 02:14:20.000000000 +0800 -+++ mongo-r4.0.1/SConstruct 2020-01-10 17:33:08.800735290 +0800 -@@ -3320,7 +3320,7 @@ def doConfigure(myenv): - - outputIndex = next((idx for idx in [0,1] if conf.CheckAltivecVbpermqOutput(idx)), None) - if outputIndex is not None: -- conf.env.SetConfigHeaderDefine("MONGO_CONFIG_ALTIVEC_VEC_VBPERMQ_OUTPUT_INDEX", outputIndex) -+ conf.env.SetConfigHeaderDefine("MONGO_CONFIG_ALTIVEC_VEC_VBPERMQ_OUTPUT_INDEX", outputIndex) - else: - myenv.ConfError("Running on ppc64le, but can't find a correct vec_vbpermq output index. Compiler or platform not supported") - diff --git a/mongodb.spec b/mongodb.spec index 1efa8610482e02dc1d276a5c1edd6c77f82511a7..fbd562696b7f181c1b0470b62c21800eeecadd4c 100644 --- a/mongodb.spec +++ b/mongodb.spec @@ -1,20 +1,18 @@ %global __python %{__python3} Name: mongodb -Version: 4.0.1 -Release: 4 +Version: 4.5.0 +Release: 1 Summary: The global cloud database service for modern applications License: AGPLv3 and zlib and ASL 2.0 URL: http://www.mongodb.org Source0: https://github.com/mongodb/mongo/archive/r%{version}.tar.gz Source1: mongod.conf Source2: mongod.service -Patch0000: inconsistent-tabs.patch -Patch0001: python3-buildscripts-tests.patch -Patch6000: CVE-2020-7921.patch BuildRequires: gcc-c++ >= 5.3.0 boost-devel >= 1.56 gperftools-devel libpcap-devel libstemmer-devel BuildRequires: openssl-devel pcre-devel python3-scons snappy-devel yaml-cpp-devel zlib-devel systemd BuildRequires: valgrind-devel libcurl-devel python3-devel python3-yaml python3-requests python3-cheetah +BuildRequires: python3-psutil ExclusiveArch: x86_64 aarch64 %description @@ -118,7 +116,7 @@ if test $1 -ge 1; then fi %files -%doc GNU-AGPL-3.0.txt APACHE-2.0.txt +%doc APACHE-2.0.txt %{_bindir}/{mongo,mongobridge} %files server @@ -139,6 +137,9 @@ fi %{_mandir}/man1/{mongo.1*,mongod.1*,mongos.1*} %changelog +* Fri Jun 5 2020 Captain Wei - 4.5.0-1 +- update package version + * Thu May 21 2020 yaokai13 - 4.0.1-4 - Type: cves - ID: CVE-2020-7921 diff --git a/python3-buildscripts-tests.patch b/python3-buildscripts-tests.patch deleted file mode 100644 index 17eeeb44dec8e3ba1dde577e8ec647dc06a2a29b..0000000000000000000000000000000000000000 --- a/python3-buildscripts-tests.patch +++ /dev/null @@ -1,1676 +0,0 @@ -diff --git a/SConstruct b/SConstruct -index b76f4876eb..1f6870284c 100644 ---- a/SConstruct -+++ b/SConstruct -@@ -425,7 +425,7 @@ win_version_min_choices = { - } - - add_option('win-version-min', -- 
choices=win_version_min_choices.keys(), -+ choices=list(win_version_min_choices.keys()), - default=None, - help='minimum Windows version to support', - type='choice', -@@ -551,7 +551,7 @@ except ValueError as e: - def variable_shlex_converter(val): - # If the argument is something other than a string, propogate - # it literally. -- if not isinstance(val, basestring): -+ if not isinstance(val, str): - return val - parse_mode = get_option('variable-parse-mode') - if parse_mode == 'auto': -@@ -887,7 +887,7 @@ SConsignFile(str(sconsDataDir.File('sconsign'))) - def printLocalInfo(): - import sys, SCons - print( "scons version: " + SCons.__version__ ) -- print( "python version: " + " ".join( [ `i` for i in sys.version_info ] ) ) -+ print( "python version: " + " ".join( [ str(i) for i in sys.version_info ] ) ) - - printLocalInfo() - -@@ -2029,7 +2029,7 @@ def doConfigure(myenv): - # to make them real errors. - cloned.Append(CCFLAGS=['-Werror']) - conf = Configure(cloned, help=False, custom_tests = { -- 'CheckFlag' : lambda(ctx) : CheckFlagTest(ctx, tool, extension, flag) -+ 'CheckFlag' : lambda ctx : CheckFlagTest(ctx, tool, extension, flag) - }) - available = conf.CheckFlag() - conf.Finish() -@@ -2503,7 +2503,7 @@ def doConfigure(myenv): - # Select those unique black files that are associated with the - # currently enabled sanitizers, but filter out those that are - # zero length. -- blackfiles = {v for (k, v) in blackfiles_map.iteritems() if k in sanitizer_list} -+ blackfiles = {v for (k, v) in blackfiles_map.items() if k in sanitizer_list} - blackfiles = [f for f in blackfiles if os.stat(f.path).st_size != 0] - - # Filter out any blacklist options that the toolchain doesn't support. -diff --git a/buildscripts/clang_format.py b/buildscripts/clang_format.py -index af3a53d29f..564f7a4171 100755 ---- a/buildscripts/clang_format.py -+++ b/buildscripts/clang_format.py -@@ -20,7 +20,7 @@ import sys - import tarfile - import tempfile - import threading --import urllib2 -+import urllib.request, urllib.error, urllib.parse - from distutils import spawn # pylint: disable=no-name-in-module - from optparse import OptionParser - from multiprocessing import cpu_count -@@ -96,11 +96,11 @@ def get_clang_format_from_cache_and_extract(url, tarball_ext): - num_tries = 5 - for attempt in range(num_tries): - try: -- resp = urllib2.urlopen(url) -+ resp = urllib.request.urlopen(url) - with open(temp_tar_file, 'wb') as fh: - fh.write(resp.read()) - break -- except urllib2.URLError: -+ except urllib.error.URLError: - if attempt == num_tries - 1: - raise - continue -diff --git a/buildscripts/cpplint.py b/buildscripts/cpplint.py -index 6979cbcd4e..bc9ff038fd 100755 ---- a/buildscripts/cpplint.py -+++ b/buildscripts/cpplint.py -@@ -835,7 +835,7 @@ class _CppLintState(object): - - def PrintErrorCounts(self): - """Print a summary of errors by category, and the total.""" -- for category, count in self.errors_by_category.iteritems(): -+ for category, count in self.errors_by_category.items(): - sys.stderr.write('Category \'%s\' errors found: %d\n' % - (category, count)) - sys.stderr.write('Total errors found: %d\n' % self.error_count) -@@ -1388,7 +1388,7 @@ def FindEndOfExpressionInLine(line, startpos, stack): - On finding an unclosed expression: (-1, None) - Otherwise: (-1, new stack at end of this line) - """ -- for i in xrange(startpos, len(line)): -+ for i in range(startpos, len(line)): - char = line[i] - if char in '([{': - # Found start of parenthesized expression, push to expression stack -@@ -1681,7 +1681,7 @@ def 
CheckForCopyright(filename, lines, error): - - # We'll say it should occur by line 10. Don't forget there's a - # dummy line at the front. -- for line in xrange(1, min(len(lines), 11)): -+ for line in range(1, min(len(lines), 11)): - if re.search(r'Copyright', lines[line], re.I): break - else: # means no copyright line was found - error(filename, 0, 'legal/copyright', 5, -@@ -1832,7 +1832,7 @@ def CheckForBadCharacters(filename, lines, error): - error: The function to call with any errors found. - """ - for linenum, line in enumerate(lines): -- if u'\ufffd' in line: -+ if '\ufffd' in line: - error(filename, linenum, 'readability/utf8', 5, - 'Line contains invalid UTF-8 (or Unicode replacement character).') - if '\0' in line: -@@ -2878,7 +2878,7 @@ def CheckForFunctionLengths(filename, clean_lines, linenum, - - if starting_func: - body_found = False -- for start_linenum in xrange(linenum, clean_lines.NumLines()): -+ for start_linenum in range(linenum, clean_lines.NumLines()): - start_line = lines[start_linenum] - joined_line += ' ' + start_line.lstrip() - if Search(r'(;|})', start_line): # Declarations and trivial functions -@@ -3355,7 +3355,7 @@ def CheckBracesSpacing(filename, clean_lines, linenum, error): - trailing_text = '' - if endpos > -1: - trailing_text = endline[endpos:] -- for offset in xrange(endlinenum + 1, -+ for offset in range(endlinenum + 1, - min(endlinenum + 3, clean_lines.NumLines() - 1)): - trailing_text += clean_lines.elided[offset] - if not Match(r'^[\s}]*[{.;,)<>\]:]', trailing_text): -@@ -3524,7 +3524,7 @@ def IsRValueType(clean_lines, nesting_state, linenum, column): - - # Look for the previous 'for(' in the previous lines. - before_text = match_symbol.group(1) -- for i in xrange(start - 1, max(start - 6, 0), -1): -+ for i in range(start - 1, max(start - 6, 0), -1): - before_text = clean_lines.elided[i] + before_text - if Search(r'for\s*\([^{};]*$', before_text): - # This is the condition inside a for-loop -@@ -3651,12 +3651,12 @@ def IsRValueAllowed(clean_lines, linenum): - True if line is within the region where RValue references are allowed. - """ - # Allow region marked by PUSH/POP macros -- for i in xrange(linenum, 0, -1): -+ for i in range(linenum, 0, -1): - line = clean_lines.elided[i] - if Match(r'GOOGLE_ALLOW_RVALUE_REFERENCES_(?:PUSH|POP)', line): - if not line.endswith('PUSH'): - return False -- for j in xrange(linenum, clean_lines.NumLines(), 1): -+ for j in range(linenum, clean_lines.NumLines(), 1): - line = clean_lines.elided[j] - if Match(r'GOOGLE_ALLOW_RVALUE_REFERENCES_(?:PUSH|POP)', line): - return line.endswith('POP') -@@ -4136,7 +4136,7 @@ def CheckCheck(filename, clean_lines, linenum, error): - expression = lines[linenum][start_pos + 1:end_pos - 1] - else: - expression = lines[linenum][start_pos + 1:] -- for i in xrange(linenum + 1, end_line): -+ for i in range(linenum + 1, end_line): - expression += lines[i] - expression += last_line[0:end_pos - 1] - -@@ -4264,7 +4264,7 @@ def GetLineWidth(line): - The width of the line in column positions, accounting for Unicode - combining characters and wide characters. - """ -- if isinstance(line, unicode): -+ if isinstance(line, str): - width = 0 - for uc in unicodedata.normalize('NFC', line): - if unicodedata.east_asian_width(uc) in ('W', 'F'): -@@ -4617,7 +4617,7 @@ def _GetTextInside(text, start_pattern): - - # Give opening punctuations to get the matching close-punctuations. 
- matching_punctuation = {'(': ')', '{': '}', '[': ']'}
-- closing_punctuation = set(matching_punctuation.itervalues())
-+ closing_punctuation = set(matching_punctuation.values())
-
- # Find the position to start extracting text.
- match = re.search(start_pattern, text, re.M)
-@@ -4943,7 +4943,7 @@ def IsDerivedFunction(clean_lines, linenum):
- virt-specifier.
- """
- # Scan back a few lines for start of current function
-- for i in xrange(linenum, max(-1, linenum - 10), -1):
-+ for i in range(linenum, max(-1, linenum - 10), -1):
- match = Match(r'^([^()]*\w+)\(', clean_lines.elided[i])
- if match:
- # Look for "override" after the matching closing parenthesis
-@@ -4964,7 +4964,7 @@ def IsInitializerList(clean_lines, linenum):
- True if current line appears to be inside constructor initializer
- list, False otherwise.
- """
-- for i in xrange(linenum, 1, -1):
-+ for i in range(linenum, 1, -1):
- line = clean_lines.elided[i]
- if i == linenum:
- remove_function_body = Match(r'^(.*)\{\s*$', line)
-@@ -5060,7 +5060,7 @@ def CheckForNonConstReference(filename, clean_lines, linenum,
- # Found the matching < on an earlier line, collect all
- # pieces up to current line.
- line = ''
-- for i in xrange(startline, linenum + 1):
-+ for i in range(startline, linenum + 1):
- line += clean_lines.elided[i].strip()
-
- # Check for non-const references in function parameters. A single '&' may
-@@ -5084,7 +5084,7 @@ def CheckForNonConstReference(filename, clean_lines, linenum,
- # appear inside the second set of parentheses on the current line as
- # opposed to the first set.
- if linenum > 0:
-- for i in xrange(linenum - 1, max(0, linenum - 10), -1):
-+ for i in range(linenum - 1, max(0, linenum - 10), -1):
- previous_line = clean_lines.elided[i]
- if not Search(r'[),]\s*$', previous_line):
- break
-@@ -5115,7 +5115,7 @@ def CheckForNonConstReference(filename, clean_lines, linenum,
- # Don't see a whitelisted function on this line. Actually we
- # didn't see any function name on this line, so this is likely a
- # multi-line parameter list. Try a bit harder to catch this case.
-- for i in xrange(2):
-+ for i in range(2):
- if (linenum > i and
- Search(whitelisted_functions, clean_lines.elided[linenum - i - 1])):
- return
-@@ -5277,7 +5277,7 @@ def CheckCStyleCast(filename, clean_lines, linenum, cast_type, pattern, error):
- # Try expanding current context to see if we one level of
- # parentheses inside a macro.
- if linenum > 0:
-- for i in xrange(linenum - 1, max(0, linenum - 5), -1):
-+ for i in range(linenum - 1, max(0, linenum - 5), -1):
- context = clean_lines.elided[i] + context
- if Match(r'.*\b[_A-Z][_A-Z0-9]*\s*\((?:\([^()]*\)|[^()])*$', context):
- return False
-@@ -5534,7 +5534,7 @@ def CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error,
- required = {} # A map of header name to linenumber and the template entity.
- # Example of required: { '<functional>': (1219, 'less<>') }
-
-- for linenum in xrange(clean_lines.NumLines()):
-+ for linenum in range(clean_lines.NumLines()):
- line = clean_lines.elided[linenum]
- if not line or line[0] == '#':
- continue
-@@ -5583,7 +5583,7 @@ def CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error,
-
- # include_dict is modified during iteration, so we iterate over a copy of
- # the keys. 
-- header_keys = include_dict.keys() -+ header_keys = list(include_dict.keys()) - for header in header_keys: - (same_module, common_path) = FilesBelongToSameModule(abs_filename, header) - fullpath = common_path + header -@@ -5678,7 +5678,7 @@ def CheckRedundantVirtual(filename, clean_lines, linenum, error): - end_col = -1 - end_line = -1 - start_col = len(virtual.group(1)) -- for start_line in xrange(linenum, min(linenum + 3, clean_lines.NumLines())): -+ for start_line in range(linenum, min(linenum + 3, clean_lines.NumLines())): - line = clean_lines.elided[start_line][start_col:] - parameter_list = Match(r'^([^(]*)\(', line) - if parameter_list: -@@ -5693,7 +5693,7 @@ def CheckRedundantVirtual(filename, clean_lines, linenum, error): - - # Look for "override" or "final" after the parameter list - # (possibly on the next few lines). -- for i in xrange(end_line, min(end_line + 3, clean_lines.NumLines())): -+ for i in range(end_line, min(end_line + 3, clean_lines.NumLines())): - line = clean_lines.elided[i][end_col:] - match = Search(r'\b(override|final)\b', line) - if match: -@@ -5920,7 +5920,7 @@ def ProcessFileData(filename, file_extension, lines, error, - - RemoveMultiLineComments(filename, lines, error) - clean_lines = CleansedLines(lines) -- for line in xrange(clean_lines.NumLines()): -+ for line in range(clean_lines.NumLines()): - ProcessLine(filename, file_extension, clean_lines, line, - include_state, function_state, nesting_state, error, - extra_check_functions) -diff --git a/buildscripts/errorcodes.py b/buildscripts/errorcodes.py -index f9162917db..60cdb2b50f 100755 ---- a/buildscripts/errorcodes.py -+++ b/buildscripts/errorcodes.py -@@ -7,12 +7,15 @@ Optionally replaces zero codes in source code with new distinct values. - - from __future__ import absolute_import - from __future__ import print_function -+from __future__ import unicode_literals - -+import io - import bisect - import os.path - import sys - from collections import defaultdict, namedtuple - from optparse import OptionParser -+from functools import reduce - - # Get relative imports to work when the package is not installed on the PYTHONPATH. 
- if __name__ == "__main__" and __package__ is None: -@@ -56,7 +59,7 @@ def parse_source_files(callback): - if list_files: - print('scanning file: ' + source_file) - -- with open(source_file) as fh: -+ with open(source_file, encoding="utf-8") as fh: - text = fh.read() - - if not any([zz in text for zz in quick]): -diff --git a/buildscripts/eslint.py b/buildscripts/eslint.py -index bb36531164..eb20dda9dd 100755 ---- a/buildscripts/eslint.py -+++ b/buildscripts/eslint.py -@@ -20,7 +20,7 @@ import sys - import tarfile - import tempfile - import threading --import urllib -+import urllib.request, urllib.parse, urllib.error - from distutils import spawn # pylint: disable=no-name-in-module - from optparse import OptionParser - -@@ -84,7 +84,7 @@ def get_eslint_from_cache(dest_file, platform, arch): - - # Download the file - print("Downloading ESLint %s from %s, saving to %s" % (ESLINT_VERSION, url, temp_tar_file)) -- urllib.urlretrieve(url, temp_tar_file) -+ urllib.request.urlretrieve(url, temp_tar_file) - - eslint_distfile = ESLINT_SOURCE_TAR_BASE.substitute(platform=platform, arch=arch) - extract_eslint(temp_tar_file, eslint_distfile) -diff --git a/buildscripts/idl/idl/binder.py b/buildscripts/idl/idl/binder.py -index bf8c188151..eb98466d90 100644 ---- a/buildscripts/idl/idl/binder.py -+++ b/buildscripts/idl/idl/binder.py -@@ -727,7 +727,7 @@ def _validate_enum_int(ctxt, idl_enum): - min_value = min(int_values_set) - max_value = max(int_values_set) - -- valid_int = {x for x in xrange(min_value, max_value + 1)} -+ valid_int = {x for x in range(min_value, max_value + 1)} - - if valid_int != int_values_set: - ctxt.add_enum_non_continuous_range_error(idl_enum, idl_enum.name) -diff --git a/buildscripts/idl/idl/bson.py b/buildscripts/idl/idl/bson.py -index 21fb8961f5..10df6ed4c4 100644 ---- a/buildscripts/idl/idl/bson.py -+++ b/buildscripts/idl/idl/bson.py -@@ -87,7 +87,7 @@ def cpp_bson_type_name(name): - def list_valid_types(): - # type: () -> List[unicode] - """Return a list of supported bson types.""" -- return [a for a in _BSON_TYPE_INFORMATION.iterkeys()] -+ return [a for a in _BSON_TYPE_INFORMATION.keys()] - - - def is_valid_bindata_subtype(name): -diff --git a/buildscripts/idl/idl/cpp_types.py b/buildscripts/idl/idl/cpp_types.py -index d275872ca5..81033a251c 100644 ---- a/buildscripts/idl/idl/cpp_types.py -+++ b/buildscripts/idl/idl/cpp_types.py -@@ -28,6 +28,7 @@ from . import writer - - _STD_ARRAY_UINT8_16 = 'std::array' - -+ABC = ABCMeta(str('ABC'), (object,), {'__slots__': ()}) - - def is_primitive_scalar_type(cpp_type): - # type: (unicode) -> bool -@@ -75,11 +76,9 @@ def _qualify_array_type(cpp_type): - return "std::vector<%s>" % (cpp_type) - - --class CppTypeBase(object): -+class CppTypeBase(ABC): - """Base type for C++ Type information.""" - -- __metaclass__ = ABCMeta -- - def __init__(self, field): - # type: (ast.Field) -> None - """Construct a CppTypeBase.""" -@@ -521,11 +520,9 @@ def get_cpp_type(field): - return cpp_type_info - - --class BsonCppTypeBase(object): -+class BsonCppTypeBase(ABC): - """Base type for custom C++ support for BSON Types information.""" - -- __metaclass__ = ABCMeta -- - def __init__(self, field): - # type: (ast.Field) -> None - """Construct a BsonCppTypeBase.""" -diff --git a/buildscripts/idl/idl/enum_types.py b/buildscripts/idl/idl/enum_types.py -index 9435136451..727990bef6 100644 ---- a/buildscripts/idl/idl/enum_types.py -+++ b/buildscripts/idl/idl/enum_types.py -@@ -29,11 +29,11 @@ from . import common - from . import syntax - from . 
import writer - -+ABC = ABCMeta(str('ABC'), (object,), {'__slots__': ()}) - --class EnumTypeInfoBase(object): -- """Base type for enumeration type information.""" - -- __metaclass__ = ABCMeta -+class EnumTypeInfoBase(ABC): -+ """Base type for enumeration type information.""" - - def __init__(self, idl_enum): - # type: (Union[syntax.Enum,ast.Enum]) -> None -@@ -115,8 +115,6 @@ class EnumTypeInfoBase(object): - class _EnumTypeInt(EnumTypeInfoBase): - """Type information for integer enumerations.""" - -- __metaclass__ = ABCMeta -- - def get_cpp_type_name(self): - # type: () -> unicode - return common.title_case(self._enum.name) -@@ -190,8 +188,6 @@ def _get_constant_enum_name(idl_enum, enum_value): - class _EnumTypeString(EnumTypeInfoBase): - """Type information for string enumerations.""" - -- __metaclass__ = ABCMeta -- - def get_cpp_type_name(self): - # type: () -> unicode - return common.template_args("${enum_name}Enum", enum_name=common.title_case( -diff --git a/buildscripts/idl/idl/generator.py b/buildscripts/idl/idl/generator.py -index 82c22b45b8..4e099e9ffc 100644 ---- a/buildscripts/idl/idl/generator.py -+++ b/buildscripts/idl/idl/generator.py -@@ -33,6 +33,7 @@ from . import enum_types - from . import struct_types - from . import writer - -+ABC = ABCMeta(str('ABC'), (object,), {'__slots__': ()}) - - def _get_field_member_name(field): - # type: (ast.Field) -> unicode -@@ -122,11 +123,9 @@ def _get_all_fields(struct): - return sorted([field for field in all_fields], key=lambda f: f.cpp_name) - - --class _FieldUsageCheckerBase(object): -+class _FieldUsageCheckerBase(ABC): - """Check for duplicate fields, and required fields as needed.""" - -- __metaclass__ = ABCMeta -- - def __init__(self, indented_writer): - # type: (writer.IndentedTextWriter) -> None - """Create a field usage checker.""" -@@ -1588,8 +1587,8 @@ def _generate_header(spec, file_name): - str_value = generate_header_str(spec) - - # Generate structs -- with io.open(file_name, mode='wb') as file_handle: -- file_handle.write(str_value.encode()) -+ with io.open(file_name, mode='w') as file_handle: -+ file_handle.write(str_value) - - - def generate_source_str(spec, target_arch, header_file_name): -@@ -1611,8 +1610,8 @@ def _generate_source(spec, target_arch, file_name, header_file_name): - str_value = generate_source_str(spec, target_arch, header_file_name) - - # Generate structs -- with io.open(file_name, mode='wb') as file_handle: -- file_handle.write(str_value.encode()) -+ with io.open(file_name, mode='w') as file_handle: -+ file_handle.write(str_value) - - - def generate_code(spec, target_arch, output_base_dir, header_file_name, source_file_name): -diff --git a/buildscripts/idl/idl/parser.py b/buildscripts/idl/idl/parser.py -index 052f9a2178..4cf09295c4 100644 ---- a/buildscripts/idl/idl/parser.py -+++ b/buildscripts/idl/idl/parser.py -@@ -31,6 +31,7 @@ from . import cpp_types - from . import errors - from . 
import syntax - -+ABC = ABCMeta(str('ABC'), (object,), {'__slots__': ()}) - - class _RuleDesc(object): - """ -@@ -548,11 +549,9 @@ def _parse(stream, error_file_name): - return syntax.IDLParsedSpec(spec, None) - - --class ImportResolverBase(object): -+class ImportResolverBase(ABC): - """Base class for resolving imported files.""" - -- __metaclass__ = ABCMeta -- - def __init__(self): - # type: () -> None - """Construct a ImportResolver.""" -diff --git a/buildscripts/idl/idl/struct_types.py b/buildscripts/idl/idl/struct_types.py -index 8e055fe8c4..b57542bb44 100644 ---- a/buildscripts/idl/idl/struct_types.py -+++ b/buildscripts/idl/idl/struct_types.py -@@ -24,6 +24,7 @@ from . import common - from . import cpp_types - from . import writer - -+ABC = ABCMeta(str('ABC'), (object,), {'__slots__': ()}) - - class ArgumentInfo(object): - """Class that encapsulates information about an argument to a method.""" -@@ -114,11 +115,9 @@ class MethodInfo(object): - args=args) - - --class StructTypeInfoBase(object): -+class StructTypeInfoBase(ABC): - """Base class for struct and command code generation.""" - -- __metaclass__ = ABCMeta -- - @abstractmethod - def get_constructor_method(self): - # type: () -> MethodInfo -diff --git a/buildscripts/idl/idl/syntax.py b/buildscripts/idl/idl/syntax.py -index 049114b5d9..76643e68a1 100644 ---- a/buildscripts/idl/idl/syntax.py -+++ b/buildscripts/idl/idl/syntax.py -@@ -82,7 +82,7 @@ def _item_and_type(dic): - # type: (Dict[Any, List[Any]]) -> Iterator[Tuple[Any, Any]] - """Return an Iterator of (key, value) pairs from a dictionary.""" - return itertools.chain.from_iterable( -- (_zip_scalar(value, key) for (key, value) in dic.viewitems())) -+ (_zip_scalar(value, key) for (key, value) in dic.items())) - - - class SymbolTable(object): -diff --git a/buildscripts/idl/tests/test_binder.py b/buildscripts/idl/tests/test_binder.py -index d8b6fc1630..1dd5afde4d 100644 ---- a/buildscripts/idl/tests/test_binder.py -+++ b/buildscripts/idl/tests/test_binder.py -@@ -74,7 +74,7 @@ class TestBinder(testcase.IDLTestcase): - cpp_includes: - - 'bar' - - 'foo'""")) -- self.assertEquals(spec.globals.cpp_namespace, "something") -+ self.assertEqual(spec.globals.cpp_namespace, "something") - self.assertListEqual(spec.globals.cpp_includes, ['bar', 'foo']) - - def test_type_positive(self): -diff --git a/buildscripts/linter/base.py b/buildscripts/linter/base.py -index f22f59e4f0..09931a673b 100644 ---- a/buildscripts/linter/base.py -+++ b/buildscripts/linter/base.py -@@ -5,12 +5,11 @@ from __future__ import print_function - from abc import ABCMeta, abstractmethod - from typing import Dict, List, Optional - -+ABC = ABCMeta(str('ABC'), (object,), {'__slots__': ()}) - --class LinterBase(object): -+class LinterBase(ABC): - """Base Class for all linters.""" - -- __metaclass__ = ABCMeta -- - def __init__(self, cmd_name, required_version, cmd_location=None): - # type: (str, str, Optional[str]) -> None - """ -diff --git a/buildscripts/linter/git.py b/buildscripts/linter/git.py -index b4a6898604..d803e1b584 100644 ---- a/buildscripts/linter/git.py -+++ b/buildscripts/linter/git.py -@@ -175,7 +175,7 @@ def get_files_to_check_from_patch(patches, filter_function): - - lines = [] # type: List[str] - for patch in patches: -- with open(patch, "rb") as infile: -+ with open(patch, "r") as infile: - lines += infile.readlines() - - candidates = [check.match(line).group(1) for line in lines if check.match(line)] -diff --git a/buildscripts/linter/parallel.py b/buildscripts/linter/parallel.py -index 
0648bfb16e..361da0c559 100644 ---- a/buildscripts/linter/parallel.py -+++ b/buildscripts/linter/parallel.py -@@ -2,7 +2,12 @@ - from __future__ import absolute_import - from __future__ import print_function - --import Queue -+try: -+ import queue -+except ImportError: -+ #Python 2 -+ import Queue as queue -+ - import threading - import time - from multiprocessing import cpu_count -@@ -17,7 +22,7 @@ def parallel_process(items, func): - except NotImplementedError: - cpus = 1 - -- task_queue = Queue.Queue() # type: Queue.Queue -+ task_queue = queue.Queue() # type: queue.Queue - - # Use a list so that worker function will capture this variable - pp_event = threading.Event() -@@ -30,7 +35,7 @@ def parallel_process(items, func): - while not pp_event.is_set(): - try: - item = task_queue.get_nowait() -- except Queue.Empty: -+ except queue.Empty: - # if the queue is empty, exit the worker thread - pp_event.set() - return -diff --git a/buildscripts/resmokeconfig/loggers/__init__.py b/buildscripts/resmokeconfig/loggers/__init__.py -index 5342639c56..7444783cb6 100644 ---- a/buildscripts/resmokeconfig/loggers/__init__.py -+++ b/buildscripts/resmokeconfig/loggers/__init__.py -@@ -16,7 +16,7 @@ def _get_named_loggers(): - named_loggers = {} - - try: -- (root, _dirs, files) = os.walk(dirname).next() -+ (root, _dirs, files) = next(os.walk(dirname)) - for filename in files: - (short_name, ext) = os.path.splitext(filename) - if ext in (".yml", ".yaml"): -diff --git a/buildscripts/resmokeconfig/suites/__init__.py b/buildscripts/resmokeconfig/suites/__init__.py -index 87d378616b..4321f53658 100644 ---- a/buildscripts/resmokeconfig/suites/__init__.py -+++ b/buildscripts/resmokeconfig/suites/__init__.py -@@ -16,7 +16,7 @@ def _get_named_suites(): - named_suites = {} - - try: -- (root, _dirs, files) = os.walk(dirname).next() -+ (root, _dirs, files) = next(os.walk(dirname)) - for filename in files: - (short_name, ext) = os.path.splitext(filename) - if ext in (".yml", ".yaml"): -diff --git a/buildscripts/resmokelib/config.py b/buildscripts/resmokelib/config.py -index 66753c389d..4fe50a8176 100644 ---- a/buildscripts/resmokelib/config.py -+++ b/buildscripts/resmokelib/config.py -@@ -62,7 +62,7 @@ DEFAULTS = { - "repeat": 1, - "report_failure_status": "fail", - "report_file": None, -- "seed": long(time.time() * 256), # Taken from random.py code in Python 2.7. -+ "seed": int(time.time() * 256), # Taken from random.py code in Python 2.7. - "service_executor": None, - "shell_conn_string": None, - "shell_port": None, -diff --git a/buildscripts/resmokelib/core/process.py b/buildscripts/resmokelib/core/process.py -index 84c067d8e3..956c4f9e42 100644 ---- a/buildscripts/resmokelib/core/process.py -+++ b/buildscripts/resmokelib/core/process.py -@@ -182,8 +182,8 @@ class Process(object): - finally: - win32api.CloseHandle(mongo_signal_handle) - -- print "Failed to cleanly exit the program, calling TerminateProcess() on PID: " +\ -- str(self._process.pid) -+ print("Failed to cleanly exit the program, calling TerminateProcess() on PID: " +\ -+ str(self._process.pid)) - - # Adapted from implementation of Popen.terminate() in subprocess.py of Python 2.7 - # because earlier versions do not catch exceptions. 
-diff --git a/buildscripts/resmokelib/logging/buildlogger.py b/buildscripts/resmokelib/logging/buildlogger.py -index 2e48101d51..7d44980ad8 100644 ---- a/buildscripts/resmokelib/logging/buildlogger.py -+++ b/buildscripts/resmokelib/logging/buildlogger.py -@@ -261,7 +261,7 @@ class BuildloggerServer(object): - """Initialize BuildloggerServer.""" - tmp_globals = {} - self.config = {} -- execfile(_BUILDLOGGER_CONFIG, tmp_globals, self.config) -+ exec(compile(open(_BUILDLOGGER_CONFIG).read(), _BUILDLOGGER_CONFIG, 'exec'), tmp_globals, self.config) - - # Rename "slavename" to "username" if present. - if "slavename" in self.config and "username" not in self.config: -diff --git a/buildscripts/resmokelib/selector.py b/buildscripts/resmokelib/selector.py -index d878710f1d..5bf1a0881d 100644 ---- a/buildscripts/resmokelib/selector.py -+++ b/buildscripts/resmokelib/selector.py -@@ -71,7 +71,7 @@ class TestFileExplorer(object): - A list of paths as a list(str). - """ - tests = [] -- with open(root_file_path, "rb") as filep: -+ with open(root_file_path, "r") as filep: - for test_path in filep: - test_path = test_path.strip() - tests.append(test_path) -@@ -310,7 +310,7 @@ def make_expression(conf): - elif isinstance(conf, dict): - if len(conf) != 1: - raise ValueError("Tag matching expressions should only contain one key") -- key = conf.keys()[0] -+ key = next(iter(conf.keys())) - value = conf[key] - if key == "$allOf": - return _AllOfExpression(_make_expression_list(value)) -diff --git a/buildscripts/resmokelib/testing/executor.py b/buildscripts/resmokelib/testing/executor.py -index 79ccb17786..fbd0a71919 100644 ---- a/buildscripts/resmokelib/testing/executor.py -+++ b/buildscripts/resmokelib/testing/executor.py -@@ -62,7 +62,7 @@ class TestSuiteExecutor(object): # pylint: disable=too-many-instance-attributes - jobs_to_start = self.num_tests - - # Must be done after getting buildlogger configuration. -- self._jobs = [self._make_job(job_num) for job_num in xrange(jobs_to_start)] -+ self._jobs = [self._make_job(job_num) for job_num in range(jobs_to_start)] - - def run(self): - """Execute the test suite. -@@ -275,7 +275,7 @@ class TestSuiteExecutor(object): # pylint: disable=too-many-instance-attributes - queue.put(test_case) - - # Add sentinel value for each job to indicate when there are no more items to process. -- for _ in xrange(len(self._jobs)): -+ for _ in range(len(self._jobs)): - queue.put(None) - - return queue -diff --git a/buildscripts/resmokelib/testing/fixtures/interface.py b/buildscripts/resmokelib/testing/fixtures/interface.py -index 9b4e69c112..3927ed85af 100644 ---- a/buildscripts/resmokelib/testing/fixtures/interface.py -+++ b/buildscripts/resmokelib/testing/fixtures/interface.py -@@ -3,6 +3,7 @@ - from __future__ import absolute_import - - import os.path -+import six - import time - - import pymongo -@@ -25,10 +26,10 @@ def make_fixture(class_name, *args, **kwargs): - return _FIXTURES[class_name](*args, **kwargs) - - --class Fixture(object): -- """Base class for all fixtures.""" -- -- __metaclass__ = registry.make_registry_metaclass(_FIXTURES) # type: ignore -+class Fixture(six.with_metaclass(registry.make_registry_metaclass(_FIXTURES), object)): -+ """ -+ Base class for all fixtures. -+ """ - - # We explicitly set the 'REGISTERED_NAME' attribute so that PyLint realizes that the attribute - # is defined for all subclasses of Fixture. 
-diff --git a/buildscripts/resmokelib/testing/fixtures/replicaset.py b/buildscripts/resmokelib/testing/fixtures/replicaset.py -index 2cf58d9fc9..166ecc13fc 100644 ---- a/buildscripts/resmokelib/testing/fixtures/replicaset.py -+++ b/buildscripts/resmokelib/testing/fixtures/replicaset.py -@@ -77,11 +77,11 @@ class ReplicaSetFixture(interface.ReplFixture): # pylint: disable=too-many-inst - self.replset_name = self.mongod_options.get("replSet", "rs") - - if not self.nodes: -- for i in xrange(self.num_nodes): -+ for i in range(self.num_nodes): - node = self._new_mongod(i, self.replset_name) - self.nodes.append(node) - -- for i in xrange(self.num_nodes): -+ for i in range(self.num_nodes): - if self.linear_chain and i > 0: - self.nodes[i].mongod_options["set_parameters"][ - "failpoint.forceSyncSourceCandidate"] = { -diff --git a/buildscripts/resmokelib/testing/fixtures/shardedcluster.py b/buildscripts/resmokelib/testing/fixtures/shardedcluster.py -index 70c1eaa432..b8345c38b4 100644 ---- a/buildscripts/resmokelib/testing/fixtures/shardedcluster.py -+++ b/buildscripts/resmokelib/testing/fixtures/shardedcluster.py -@@ -66,7 +66,7 @@ class ShardedClusterFixture(interface.Fixture): # pylint: disable=too-many-inst - self.configsvr.setup() - - if not self.shards: -- for i in xrange(self.num_shards): -+ for i in range(self.num_shards): - if self.num_rs_nodes_per_shard is None: - shard = self._new_standalone_shard(i) - elif isinstance(self.num_rs_nodes_per_shard, int): -diff --git a/buildscripts/resmokelib/testing/hooks/interface.py b/buildscripts/resmokelib/testing/hooks/interface.py -index 0c2f65077b..88baeeea86 100644 ---- a/buildscripts/resmokelib/testing/hooks/interface.py -+++ b/buildscripts/resmokelib/testing/hooks/interface.py -@@ -9,6 +9,8 @@ from ... import errors - from ...logging import loggers - from ...utils import registry - -+import six -+ - _HOOKS = {} # type: ignore - - -@@ -21,11 +23,8 @@ def make_hook(class_name, *args, **kwargs): - return _HOOKS[class_name](*args, **kwargs) - - --class Hook(object): -+class Hook(six.with_metaclass(registry.make_registry_metaclass(_HOOKS), object)): - """Common interface all Hooks will inherit from.""" -- -- __metaclass__ = registry.make_registry_metaclass(_HOOKS) # type: ignore -- - REGISTERED_NAME = registry.LEAVE_UNREGISTERED - - def __init__(self, hook_logger, fixture, description): -diff --git a/buildscripts/resmokelib/testing/suite.py b/buildscripts/resmokelib/testing/suite.py -index 1a57b6c771..c216ef8118 100644 ---- a/buildscripts/resmokelib/testing/suite.py -+++ b/buildscripts/resmokelib/testing/suite.py -@@ -234,7 +234,7 @@ class Suite(object): # pylint: disable=too-many-instance-attributes - sb.append("Executed %d times in %0.2f seconds:" % (num_iterations, total_time_taken)) - - combined_summary = _summary.Summary(0, 0.0, 0, 0, 0, 0) -- for iteration in xrange(num_iterations): -+ for iteration in range(num_iterations): - # Summarize each execution as a bulleted list of results. 
- bulleter_sb = [] - summary = self._summarize_report(reports[iteration], start_times[iteration], -diff --git a/buildscripts/resmokelib/testing/summary.py b/buildscripts/resmokelib/testing/summary.py -index dc92e0b5b3..5b49fbfd04 100644 ---- a/buildscripts/resmokelib/testing/summary.py -+++ b/buildscripts/resmokelib/testing/summary.py -@@ -12,6 +12,6 @@ Summary = collections.namedtuple( - def combine(summary1, summary2): - """Return a summary representing the sum of 'summary1' and 'summary2'.""" - args = [] -- for i in xrange(len(Summary._fields)): -+ for i in range(len(Summary._fields)): - args.append(summary1[i] + summary2[i]) - return Summary._make(args) -diff --git a/buildscripts/resmokelib/testing/testcases/interface.py b/buildscripts/resmokelib/testing/testcases/interface.py -index 183e69f9d3..4642547c53 100644 ---- a/buildscripts/resmokelib/testing/testcases/interface.py -+++ b/buildscripts/resmokelib/testing/testcases/interface.py -@@ -7,6 +7,7 @@ from __future__ import absolute_import - - import os - import os.path -+import six - import unittest - - from ... import logging -@@ -22,11 +23,8 @@ def make_test_case(test_kind, *args, **kwargs): - return _TEST_CASES[test_kind](*args, **kwargs) - - --class TestCase(unittest.TestCase): -+class TestCase(six.with_metaclass(registry.make_registry_metaclass(_TEST_CASES), unittest.TestCase)): - """A test case to execute.""" -- -- __metaclass__ = registry.make_registry_metaclass(_TEST_CASES) # type: ignore -- - REGISTERED_NAME = registry.LEAVE_UNREGISTERED - - def __init__(self, logger, test_kind, test_name): -@@ -36,10 +34,10 @@ class TestCase(unittest.TestCase): - if not isinstance(logger, logging.Logger): - raise TypeError("logger must be a Logger instance") - -- if not isinstance(test_kind, basestring): -+ if not isinstance(test_kind, str): - raise TypeError("test_kind must be a string") - -- if not isinstance(test_name, basestring): -+ if not isinstance(test_name, str): - raise TypeError("test_name must be a string") - - # When the TestCase is created by the TestSuiteExecutor (through a call to make_test_case()) -diff --git a/buildscripts/resmokelib/testing/testcases/jstest.py b/buildscripts/resmokelib/testing/testcases/jstest.py -index 3cb4ee0d50..15469ba50e 100644 ---- a/buildscripts/resmokelib/testing/testcases/jstest.py -+++ b/buildscripts/resmokelib/testing/testcases/jstest.py -@@ -199,7 +199,7 @@ class JSTestCase(interface.ProcessTestCase): - test_cases = [] - try: - # If there are multiple clients, make a new thread for each client. -- for thread_id in xrange(self.num_clients): -+ for thread_id in range(self.num_clients): - logger = self.logger.new_test_thread_logger(self.test_kind, str(thread_id)) - test_case = self._create_test_case_for_thread(logger, thread_id) - test_cases.append(test_case) -diff --git a/buildscripts/resmokelib/utils/__init__.py b/buildscripts/resmokelib/utils/__init__.py -index 6b6a76d1f4..6bef14b5a0 100644 ---- a/buildscripts/resmokelib/utils/__init__.py -+++ b/buildscripts/resmokelib/utils/__init__.py -@@ -48,10 +48,10 @@ def rmtree(path, **kwargs): - See https://github.com/pypa/setuptools/issues/706. 
- """ - if is_windows(): -- if not isinstance(path, unicode): -- path = unicode(path, "utf-8") -+ if not isinstance(path, str): -+ path = str(path, "utf-8") - else: -- if isinstance(path, unicode): -+ if isinstance(path, str): - path = path.encode("utf-8") - shutil.rmtree(path, **kwargs) - -@@ -72,12 +72,12 @@ def remove_if_exists(path): - - def is_string_list(lst): - """Return true if 'lst' is a list of strings, and false otherwise.""" -- return isinstance(lst, list) and all(isinstance(x, basestring) for x in lst) -+ return isinstance(lst, list) and all(isinstance(x, str) for x in lst) - - - def is_string_set(value): - """Return true if 'value' is a set of strings, and false otherwise.""" -- return isinstance(value, set) and all(isinstance(x, basestring) for x in value) -+ return isinstance(value, set) and all(isinstance(x, str) for x in value) - - - def is_js_file(filename): -diff --git a/buildscripts/resmokelib/utils/archival.py b/buildscripts/resmokelib/utils/archival.py -index 8ccb3127f4..c8eecdcb70 100644 ---- a/buildscripts/resmokelib/utils/archival.py -+++ b/buildscripts/resmokelib/utils/archival.py -@@ -1,8 +1,13 @@ - """Archival utility.""" - --from __future__ import absolute_import - --import Queue -+ -+try: -+ import queue -+except ImportError: -+ #Python 2 -+ import Queue as queue -+ - import collections - import json - import math -@@ -45,7 +50,7 @@ def file_list_size(files): - def directory_size(directory): - """Return size (in bytes) of files in 'directory' tree.""" - dir_bytes = 0 -- for root_dir, _, files in os.walk(unicode(directory)): -+ for root_dir, _, files in os.walk(str(directory)): - for name in files: - full_name = os.path.join(root_dir, name) - try: -@@ -103,7 +108,7 @@ class Archival(object): # pylint: disable=too-many-instance-attributes - self._lock = threading.Lock() - - # Start the worker thread to update the 'archival_json_file'. -- self._archive_file_queue = Queue.Queue() -+ self._archive_file_queue = queue.Queue() - self._archive_file_worker = threading.Thread(target=self._update_archive_file_wkr, - args=(self._archive_file_queue, - logger), name="archive_file_worker") -@@ -115,7 +120,7 @@ class Archival(object): # pylint: disable=too-many-instance-attributes - self.s3_client = s3_client - - # Start the worker thread which uploads the archive. 
-- self._upload_queue = Queue.Queue() -+ self._upload_queue = queue.Queue() - self._upload_worker = threading.Thread(target=self._upload_to_s3_wkr, - args=(self._upload_queue, self._archive_file_queue, - logger, self.s3_client), name="upload_worker") -diff --git a/buildscripts/resmokelib/utils/globstar.py b/buildscripts/resmokelib/utils/globstar.py -index 1e016875f9..d57cb41f41 100644 ---- a/buildscripts/resmokelib/utils/globstar.py -+++ b/buildscripts/resmokelib/utils/globstar.py -@@ -134,7 +134,7 @@ def _list_dir(pathname): - """ - - try: -- (_root, dirs, files) = os.walk(pathname).next() -+ (_root, dirs, files) = next(os.walk(pathname)) - return (dirs, files) - except StopIteration: - return None # 'pathname' directory does not exist -diff --git a/buildscripts/resmokelib/utils/jscomment.py b/buildscripts/resmokelib/utils/jscomment.py -index 67758197c5..f4c4a4d6c5 100644 ---- a/buildscripts/resmokelib/utils/jscomment.py -+++ b/buildscripts/resmokelib/utils/jscomment.py -@@ -36,7 +36,7 @@ def get_tags(pathname): - # TODO: it might be worth supporting the block (indented) style of YAML lists in - # addition to the flow (bracketed) style - tags = yaml.safe_load(_strip_jscomments(match.group(1))) -- if not isinstance(tags, list) and all(isinstance(tag, basestring) for tag in tags): -+ if not isinstance(tags, list) and all(isinstance(tag, str) for tag in tags): - raise TypeError("Expected a list of string tags, but got '%s'" % (tags)) - return tags - except yaml.YAMLError as err: -diff --git a/buildscripts/resmokelib/utils/queue.py b/buildscripts/resmokelib/utils/queue.py -index c77692138b..57a635f45e 100644 ---- a/buildscripts/resmokelib/utils/queue.py -+++ b/buildscripts/resmokelib/utils/queue.py -@@ -8,7 +8,12 @@ See https://bugs.python.org/issue1167930 for more details. - - from __future__ import absolute_import - --import Queue as _Queue -+try: -+ import queue as _Queue -+except ImportError: -+ #Python 2 -+ import Queue as _Queue -+ - import time - - # Exception that is raised when get_nowait() is called on an empty Queue. -diff --git a/buildscripts/utils.py b/buildscripts/utils.py -index 5073b26ad8..0ac19aaba1 100644 ---- a/buildscripts/utils.py -+++ b/buildscripts/utils.py -@@ -139,8 +139,8 @@ def find_python(min_version=(2, 5)): - # In case the version of Python is somehow missing sys.version_info or sys.executable. 
- pass - -- version = re.compile(r"[Pp]ython ([\d\.]+)", re.MULTILINE) -- binaries = ("python27", "python2.7", "python26", "python2.6", "python25", "python2.5", "python") -+ version = re.compile(r'[Pp]ython ([\d\.]+)', re.MULTILINE) -+ binaries = ('python3', 'python27', 'python2.7', 'python26', 'python2.6', 'python25', 'python2.5', 'python') - for binary in binaries: - try: - out, err = subprocess.Popen([binary, "-V"], stdout=subprocess.PIPE, -@@ -166,7 +166,7 @@ def replace_with_repr(unicode_error): - # repr() of the offending bytes into the decoded string - # at the position they occurred - offender = unicode_error.object[unicode_error.start:unicode_error.end] -- return (unicode(repr(offender).strip("'").strip('"')), unicode_error.end) -+ return (str(repr(offender).strip("'").strip('"')), unicode_error.end) - - - codecs.register_error("repr", replace_with_repr) -diff --git a/site_scons/libdeps.py b/site_scons/libdeps.py -index f002c4f067..3447e5fef3 100644 ---- a/site_scons/libdeps.py -+++ b/site_scons/libdeps.py -@@ -122,7 +122,7 @@ def __get_libdeps(node): - marked.add(n.target_node) - tsorted.append(n.target_node) - -- except DependencyCycleError, e: -+ except DependencyCycleError as e: - if len(e.cycle_nodes) == 1 or e.cycle_nodes[0] != e.cycle_nodes[-1]: - e.cycle_nodes.insert(0, n.target_node) - raise -@@ -150,7 +150,7 @@ def __get_syslibdeps(node): - for lib in __get_libdeps(node): - for syslib in node.get_env().Flatten(lib.get_env().get(syslibdeps_env_var, [])): - if syslib: -- if type(syslib) in (str, unicode) and syslib.startswith(missing_syslibdep): -+ if type(syslib) in (str, str) and syslib.startswith(missing_syslibdep): - print("Target '%s' depends on the availability of a " - "system provided library for '%s', " - "but no suitable library was found during configuration." % -@@ -209,7 +209,7 @@ def get_syslibdeps(source, target, env, for_signature): - # they're believed to represent library short names, that should be prefixed with -l - # or the compiler-specific equivalent. I.e., 'm' becomes '-lm', but 'File("m.a") is passed - # through whole cloth. -- if type(d) in (str, unicode): -+ if type(d) in (str, str): - result.append('%s%s%s' % (lib_link_prefix, d, lib_link_suffix)) - else: - result.append(d) -diff --git a/site_scons/mongo/__init__.py b/site_scons/mongo/__init__.py -index 510bd7bcc2..f77478092b 100644 ---- a/site_scons/mongo/__init__.py -+++ b/site_scons/mongo/__init__.py -@@ -5,4 +5,4 @@ - def print_build_failures(): - from SCons.Script import GetBuildFailures - for bf in GetBuildFailures(): -- print "%s failed: %s" % (bf.node, bf.errstr) -+ print("%s failed: %s" % (bf.node, bf.errstr)) -diff --git a/site_scons/mongo/generators.py b/site_scons/mongo/generators.py -index c07e86a4d1..5958e6923b 100644 ---- a/site_scons/mongo/generators.py -+++ b/site_scons/mongo/generators.py -@@ -1,6 +1,6 @@ - # -*- mode: python; -*- - --import md5 -+import hashlib - - # Default and alternative generator definitions go here. 
-
-@@ -44,7 +44,7 @@ def default_variant_dir_generator(target, source, env, for_signature):
-
- # Hash the named options and their values, and take the first 8 characters of the hash as
- # the variant name
-- hasher = md5.md5()
-+ hasher = hashlib.md5()
- for option in variant_options:
-- hasher.update(option)
-- hasher.update(str(env.GetOption(option)))
-+ hasher.update(option.encode('utf-8'))
-+ hasher.update(str(env.GetOption(option)).encode('utf-8'))
-diff --git a/site_scons/site_tools/dagger/__init__.py b/site_scons/site_tools/dagger/__init__.py
-index f05228cfe4..f10b4027e1 100644
---- a/site_scons/site_tools/dagger/__init__.py
-+++ b/site_scons/site_tools/dagger/__init__.py
-@@ -5,7 +5,7 @@ import logging
-
- import SCons
-
--import dagger
-+from . import dagger
-
- def generate(env, **kwargs):
- """The entry point for our tool. However, the builder for
-diff --git a/site_scons/site_tools/dagger/dagger.py b/site_scons/site_tools/dagger/dagger.py
-index bace834783..8c55937ce8 100644
---- a/site_scons/site_tools/dagger/dagger.py
-+++ b/site_scons/site_tools/dagger/dagger.py
-@@ -40,8 +40,8 @@ import sys
-
- import SCons
-
--import graph
--import graph_consts
-+from . import graph
-+from . import graph_consts
-
-
- LIB_DB = [] # Stores every SCons library nodes
-@@ -240,7 +240,7 @@ def write_obj_db(target, source, env):
- for obj in OBJ_DB:
- __generate_file_rels(obj, g)
-
-- for exe in EXE_DB.keys():
-+ for exe in list(EXE_DB.keys()):
- __generate_exe_rels(exe, g)
-
- # target is given as a list of target SCons nodes - this builder is only responsible for
-diff --git a/site_scons/site_tools/dagger/graph.py b/site_scons/site_tools/dagger/graph.py
-index 5ebe6f4506..379d5245e6 100644
---- a/site_scons/site_tools/dagger/graph.py
-+++ b/site_scons/site_tools/dagger/graph.py
-@@ -4,11 +4,13 @@ import abc
- import json
- import copy
-
--import graph_consts
-+from . import graph_consts
-
- if sys.version_info >= (3, 0):
- basestring = str
-
-+ABC = abc.ABCMeta('ABC', (object,), {'__slots__': ()})
-+
- class Graph(object):
- """Graph class for storing the build dependency graph. The graph stores the
- directed edges as a nested dict of { RelationshipType: {From_Node: Set of
-@@ -141,7 +143,7 @@ class Graph(object):
- node_dict["id"] = id
- node_dict["node"] = {}
-
-- for property, value in vars(node).iteritems():
-+ for property, value in vars(node).items():
- if isinstance(value, set):
- node_dict["node"][property] = list(value)
- else:
-@@ -170,10 +172,9 @@ class Graph(object):
- sum(len(x) for x in self._edges.values()), hash(self))
-
-
---class NodeInterface(object):
-+class NodeInterface(ABC):
- """Abstract base class for all Node Objects
- All nodes must have an id and name
- """
-- __metaclass__ = abc.ABCMeta
-
- @abc.abstractproperty
- def id(self):
-@@ -190,7 +191,7 @@ class NodeLib(NodeInterface):
- def __init__(self, id, name, input=None):
- if isinstance(input, dict):
- should_fail = False
-- for k, v in input.iteritems():
-+ for k, v in input.items():
- try:
- if isinstance(v, list):
- setattr(self, k, set(v))
-@@ -310,7 +311,7 @@ class NodeSymbol(NodeInterface):
- if isinstance(input, dict):
- should_fail = False
-
-- for k, v in input.iteritems():
-+ for k, v in input.items():
- try:
- if isinstance(v, list):
- setattr(self, k, set(v))
-@@ -435,7 +436,7 @@ class NodeFile(NodeInterface):
- def __init__(self, id, name, input=None):
- if isinstance(input, dict):
- should_fail = False
-- for k, v in input.iteritems():
-+ for k, v in input.items():
- try:
- if isinstance(v, list):
- setattr(self, k, set(v))
-@@ -551,7 +552,7 @@ class NodeExe(NodeInterface):
- def __init__(self, id, name, input=None):
- if isinstance(input, dict):
- should_fail = False
-- for k, v in input.iteritems():
-+ for k, v in input.items():
- try:
- if isinstance(v, list):
- setattr(self, k, set(v))
-diff --git a/site_scons/site_tools/dagger/graph_consts.py b/site_scons/site_tools/dagger/graph_consts.py
-index 81fe86d75c..a922a4f3f6 100644
---- a/site_scons/site_tools/dagger/graph_consts.py
-+++ b/site_scons/site_tools/dagger/graph_consts.py
-@@ -17,8 +17,8 @@ NODE_SYM = 2
- NODE_FILE = 3
- NODE_EXE = 4
-
--RELATIONSHIP_TYPES = range(1, 9)
--NODE_TYPES = range(1, 5)
-+RELATIONSHIP_TYPES = list(range(1, 9))
-+NODE_TYPES = list(range(1, 5))
-
-
- """Error/query codes"""
-diff --git a/site_scons/site_tools/dagger/graph_test.py b/site_scons/site_tools/dagger/graph_test.py
-index bc84f5868c..6c0168cf97 100644
---- a/site_scons/site_tools/dagger/graph_test.py
-+++ b/site_scons/site_tools/dagger/graph_test.py
-@@ -5,8 +5,8 @@ from JSON
-
- import json
- import unittest
---import graph
---import graph_consts
--+from . import graph
--+from . import graph_consts
-
-
- def generate_graph():
-@@ -122,15 +122,15 @@ class TestGraphMethods(unittest.TestCase, CustomAssertions):
- node = graph.NodeLib("test_node", "test_node")
- self.g._nodes = {"test_node": node}
-
-- self.assertEquals(self.g.get_node("test_node"), node)
-+ self.assertEqual(self.g.get_node("test_node"), node)
-
-- self.assertEquals(self.g.get_node("missing_node"), None)
-+ self.assertEqual(self.g.get_node("missing_node"), None)
-
- def test_add_node(self):
- node = graph.NodeLib("test_node", "test_node")
- self.g.add_node(node)
-
-- self.assertEquals(self.g.get_node("test_node"), node)
-+ self.assertEqual(self.g.get_node("test_node"), node)
-
- self.assertRaises(ValueError, self.g.add_node, node)
-
-@@ -153,16 +153,16 @@ class TestGraphMethods(unittest.TestCase, CustomAssertions):
- self.g.add_edge(graph_consts.LIB_FIL, self.from_node_lib.id,
- self.to_node_file.id)
-
-- self.assertEquals(self.g.edges[graph_consts.LIB_LIB][
-+ self.assertEqual(self.g.edges[graph_consts.LIB_LIB][
- self.from_node_lib.id], set([self.to_node_lib.id]))
-
-- self.assertEquals(self.g.edges[graph_consts.LIB_SYM][
-+ self.assertEqual(self.g.edges[graph_consts.LIB_SYM][
- self.from_node_lib.id], set([self.to_node_sym.id]))
-
-- self.assertEquals(self.g.edges[graph_consts.LIB_FIL][
-+ self.assertEqual(self.g.edges[graph_consts.LIB_FIL][
- self.from_node_lib.id], set([self.to_node_file.id]))
-
-- self.assertEquals(self.to_node_lib.dependent_libs,
-+ self.assertEqual(self.to_node_lib.dependent_libs,
- set([self.from_node_lib.id]))
-
- def test_add_edge_files(self):
-@@ -173,14 +173,14 @@ class TestGraphMethods(unittest.TestCase, CustomAssertions):
- self.g.add_edge(graph_consts.FIL_LIB, self.from_node_file.id,
- self.to_node_lib.id)
-
-- self.assertEquals(self.g.edges[graph_consts.FIL_FIL][
-+ self.assertEqual(self.g.edges[graph_consts.FIL_FIL][
- self.from_node_file.id], set([self.to_node_file.id]))
-- self.assertEquals(self.g.edges[graph_consts.FIL_SYM][
-+ self.assertEqual(self.g.edges[graph_consts.FIL_SYM][
- self.from_node_file.id], set([self.to_node_sym.id]))
-- self.assertEquals(self.g.edges[graph_consts.FIL_LIB][
-+ self.assertEqual(self.g.edges[graph_consts.FIL_LIB][
- self.from_node_file.id], set([self.to_node_lib.id]))
-
-- self.assertEquals(self.to_node_file.dependent_files,
-+ self.assertEqual(self.to_node_file.dependent_files,
- set([self.from_node_file.id]))
-
- def test_export_to_json(self):
-@@ -188,7 +188,7 @@ class TestGraphMethods(unittest.TestCase, CustomAssertions):
- generated_graph.export_to_json("export_test.json")
- generated = open("export_test.json", "r")
- correct = open("test_graph.json", "r")
-- self.assertEquals(json.load(generated), json.load(correct))
-+ self.assertEqual(json.load(generated), json.load(correct))
- generated.close()
- correct.close()
-
-@@ -205,7 +205,7 @@ class TestGraphMethods(unittest.TestCase, CustomAssertions):
- self.assertNodeEquals(
- graph_fromJSON.get_node(id), correct_graph.get_node(id))
-
-- self.assertEquals(graph_fromJSON.edges, correct_graph.edges)
-+ self.assertEqual(graph_fromJSON.edges, correct_graph.edges)
-
-
- if __name__ == '__main__':
-diff --git a/site_scons/site_tools/distsrc.py b/site_scons/site_tools/distsrc.py
-index 861f5d9e2e..d2dff0b612 100644
---- a/site_scons/site_tools/distsrc.py
-+++ b/site_scons/site_tools/distsrc.py
-@@ -20,7 +20,7 @@ import shutil
- import tarfile
- import time
- import zipfile
---import StringIO
--+import io
-
- from distutils.spawn import find_executable
-
-@@ -82,7 +82,7 @@ class DistSrcTarArchive(DistSrcArchive):
-
- def append_file_contents(self, filename, file_contents,
- mtime=time.time(),
-- mode=0644,
-+ mode=0o644,
- uname="root",
- gname="root"):
- file_metadata = tarfile.TarInfo(name=filename)
-@@ -91,7 +91,7 @@ class DistSrcTarArchive(DistSrcArchive):
- file_metadata.uname = uname
- file_metadata.gname = gname
- file_metadata.size = len(file_contents)
-- file_buf = StringIO.StringIO(file_contents)
-+ file_buf = io.BytesIO(file_contents.encode('utf-8'))
- if self.archive_mode == 'r':
- self.archive_file.close()
- self.archive_file = tarfile.open(
-@@ -119,7 +119,7 @@ class DistSrcZipArchive(DistSrcArchive):
- name=key,
- size=item_data.file_size,
- mtime=time.mktime(fixed_time),
-- mode=0775 if is_dir else 0664,
-+ mode=0o775 if is_dir else 0o664,
- type=tarfile.DIRTYPE if is_dir else tarfile.REGTYPE,
- uid=0,
- gid=0,
-@@ -129,7 +129,7 @@ class DistSrcZipArchive(DistSrcArchive):
-
- def append_file_contents(self, filename, file_contents,
- mtime=time.time(),
-- mode=0644,
-+ mode=0o644,
- uname="root",
- gname="root"):
- self.archive_file.writestr(filename, file_contents)
-@@ -139,7 +139,7 @@ class DistSrcZipArchive(DistSrcArchive):
-
- def build_error_action(msg):
- def error_stub(target=None, source=None, env=None):
-- print msg
-+ print(msg)
- env.Exit(1)
- return [ error_stub ]
-
-@@ -162,7 +162,7 @@ def distsrc_action_generator(source, target, env, for_signature):
-
- target_ext = str(target[0])[-3:]
- if not target_ext in [ 'zip', 'tar' ]:
-- print "Invalid file format for distsrc. Must be tar or zip file"
-+ print("Invalid file format for distsrc. Must be tar or zip file")
- env.Exit(1)
-
- git_cmd = "\"%s\" archive --format %s --output %s --prefix ${MONGO_DIST_SRC_PREFIX} HEAD" % (
-diff --git a/site_scons/site_tools/icecream.py b/site_scons/site_tools/icecream.py
-index 9838b63349..fdf0c26030 100644
---- a/site_scons/site_tools/icecream.py
-+++ b/site_scons/site_tools/icecream.py
-@@ -99,7 +99,7 @@ def generate(env):
- suffixes = _CSuffixes + _CXXSuffixes
- for object_builder in SCons.Tool.createObjBuilders(env):
- emitterdict = object_builder.builder.emitter
-- for suffix in emitterdict.iterkeys():
-+ for suffix in emitterdict.keys():
- if not suffix in suffixes:
- continue
- base = emitterdict[suffix]
-diff --git a/site_scons/site_tools/idl_tool.py b/site_scons/site_tools/idl_tool.py
-index c0455c2110..519583b6ca 100755
---- a/site_scons/site_tools/idl_tool.py
-+++ b/site_scons/site_tools/idl_tool.py
-@@ -47,7 +47,7 @@ def idl_scanner(node, env, path):
-
- deps_list = deps_str.splitlines()
-
-- nodes_deps_list = [ env.File(d) for d in deps_list]
-+ nodes_deps_list = [ env.File(d.decode("utf-8")) for d in deps_list]
- nodes_deps_list.extend(env.Glob('#buildscripts/idl/*.py'))
- nodes_deps_list.extend(env.Glob('#buildscripts/idl/idl/*.py'))
-
-diff --git a/site_scons/site_tools/jstoh.py b/site_scons/site_tools/jstoh.py
-index 26eb6cbbf2..9c71e0c061 100644
---- a/site_scons/site_tools/jstoh.py
-+++ b/site_scons/site_tools/jstoh.py
-@@ -1,3 +1,5 @@
-+from __future__ import unicode_literals
-+
- import os
- import sys
-
-@@ -39,7 +41,7 @@ def jsToHeader(target, source):
-
- text = '\n'.join(h)
-
-- with open(outFile, 'wb') as out:
-+ with open(outFile, 'w') as out:
- try:
- out.write(text)
- finally:
-@@ -48,7 +50,7 @@ def jsToHeader(target, source):
-
- if __name__ == "__main__":
- if len(sys.argv) < 3:
-- print "Must specify [target] [source] "
-+ print("Must specify [target] [source] ")
- sys.exit(1)
-
- jsToHeader(sys.argv[1], sys.argv[2:])
-diff --git a/site_scons/site_tools/mongo_benchmark.py b/site_scons/site_tools/mongo_benchmark.py
-index b2a1750e3d..47a190dfdd 100644
---- a/site_scons/site_tools/mongo_benchmark.py
-+++ b/site_scons/site_tools/mongo_benchmark.py
-@@ -14,7 +14,7 @@ def benchmark_list_builder_action(env, target, source):
-- ofile = open(str(target[0]), 'wb')
-+ ofile = open(str(target[0]), 'w')
- try:
- for s in _benchmarks:
-- print '\t' + str(s)
-+ print('\t' + str(s))
- ofile.write('%s\n' % s)
- finally:
- ofile.close()
-diff --git a/site_scons/site_tools/mongo_integrationtest.py b/site_scons/site_tools/mongo_integrationtest.py
-index 0ced90c949..aeda674991 100644
---- a/site_scons/site_tools/mongo_integrationtest.py
-+++ b/site_scons/site_tools/mongo_integrationtest.py
-@@ -12,10 +12,10 @@ def register_integration_test(env, test):
- env.Alias('$INTEGRATION_TEST_ALIAS', installed_test)
-
- def integration_test_list_builder_action(env, target, source):
-- ofile = open(str(target[0]), 'wb')
-+ ofile = open(str(target[0]), 'w')
- try:
- for s in _integration_tests:
-- print '\t' + str(s)
-+ print('\t' + str(s))
- ofile.write('%s\n' % s)
- finally:
- ofile.close()
-diff --git a/site_scons/site_tools/mongo_unittest.py b/site_scons/site_tools/mongo_unittest.py
-index 2ad0f51bfd..1ca644c611 100644
---- a/site_scons/site_tools/mongo_unittest.py
-+++ b/site_scons/site_tools/mongo_unittest.py
-@@ -11,10 +11,10 @@ def register_unit_test(env, test):
- env.Alias('$UNITTEST_ALIAS', test)
-
- def unit_test_list_builder_action(env, target, source):
-- ofile = open(str(target[0]), 'wb')
-+ ofile = open(str(target[0]), 'w')
- try:
- for s in _unittests:
-- print '\t' + str(s)
-+ print('\t' + str(s))
- ofile.write('%s\n' % s)
- finally:
- ofile.close()
-diff --git a/site_scons/site_tools/split_dwarf.py b/site_scons/site_tools/split_dwarf.py
-index 95130c9e9a..c02d78619f 100644
---- a/site_scons/site_tools/split_dwarf.py
-+++ b/site_scons/site_tools/split_dwarf.py
-@@ -52,7 +52,7 @@ def generate(env):
-
- for object_builder in SCons.Tool.createObjBuilders(env):
- emitterdict = object_builder.builder.emitter
-- for suffix in emitterdict.iterkeys():
-+ for suffix in emitterdict.keys():
- if not suffix in suffixes:
- continue
- base = emitterdict[suffix]
-diff --git a/site_scons/site_tools/thin_archive.py b/site_scons/site_tools/thin_archive.py
-index 511c0ef6e5..0d8a83b83a 100644
---- a/site_scons/site_tools/thin_archive.py
-+++ b/site_scons/site_tools/thin_archive.py
-@@ -41,7 +41,7 @@ def exists(env):
- for line in pipe.stdout:
- if isgnu:
- continue # consume all data
-- isgnu = re.search(r'^GNU ar', line)
-+ isgnu = re.search(b'^GNU ar', line)
-
- return bool(isgnu)
-
-diff --git a/site_scons/site_tools/xcode.py b/site_scons/site_tools/xcode.py
-index 9ec68c3547..5ddebb2e00 100644
---- a/site_scons/site_tools/xcode.py
-+++ b/site_scons/site_tools/xcode.py
-@@ -9,4 +9,4 @@ def generate(env):
-
- if 'DEVELOPER_DIR' in os.environ:
- env['ENV']['DEVELOPER_DIR'] = os.environ['DEVELOPER_DIR']
-- print "NOTE: Xcode detected; propagating DEVELOPER_DIR from shell environment to subcommands"
-+ print("NOTE: Xcode detected; propagating DEVELOPER_DIR from shell environment to subcommands")
-diff --git a/src/mongo/SConscript b/src/mongo/SConscript
-index c8f925b7c8..e18a3829f1 100644
---- a/src/mongo/SConscript
-+++ b/src/mongo/SConscript
-@@ -154,7 +154,7 @@ js_engine_ver = get_option("js-engine") if get_option("server-js") == "on" else
-
- # On windows, we need to escape the backslashes in the command-line
- # so that windows paths look okay.
--cmd_line = " ".join(sys.argv).encode('string-escape') -+cmd_line = " ".join(sys.argv).encode('unicode_escape') - if env.TargetOSIs('windows'): - cmd_line = cmd_line.replace('\\', r'\\') - -@@ -644,7 +644,7 @@ env.Append(MODULE_BANNERS = [distsrc.File('README'), - - # If no module has introduced a file named LICENSE-Enterprise.txt then this - # is a Community build, so inject the AGPL and the Community license --if sum(itertools.imap(lambda x: x.name == "LICENSE-Enterprise.txt", env['MODULE_BANNERS'])) == 0: -+if sum(map(lambda x: x.name == "LICENSE-Enterprise.txt", env['MODULE_BANNERS'])) == 0: - env.Append(MODULE_BANNERS = [distsrc.File('GNU-AGPL-3.0'), - distsrc.File('LICENSE-Community.txt')]) - -@@ -664,7 +664,7 @@ module_banner_transforms = ["--transform %s=$SERVER_DIST_BASENAME" % d for d in - # Allow modules to map original file name directories to subdirectories - # within the archive (e.g. { "src/mongo/db/modules/enterprise/docs": "snmp"}) - archive_addition_transforms = [] --for full_dir, archive_dir in env["ARCHIVE_ADDITION_DIR_MAP"].items(): -+for full_dir, archive_dir in list(env["ARCHIVE_ADDITION_DIR_MAP"].items()): - archive_addition_transforms.append("--transform \"%s=$SERVER_DIST_BASENAME/%s\"" % - (full_dir, archive_dir)) - -diff --git a/src/mongo/base/generate_error_codes.py b/src/mongo/base/generate_error_codes.py -index 577108c7ec..e9a1dfa552 100644 ---- a/src/mongo/base/generate_error_codes.py -+++ b/src/mongo/base/generate_error_codes.py -@@ -26,6 +26,8 @@ - # delete this exception statement from all source files in the program, - # then also delete it in the license file. - -+from __future__ import unicode_literals -+ - """Generate error_codes.{h,cpp} from error_codes.err. - - Format of error_codes.err: -@@ -98,7 +100,7 @@ def main(argv): - categories=error_classes, - ) - -- with open(output, 'wb') as outfile: -+ with open(output, 'w') as outfile: - outfile.write(text) - - def die(message=None): -diff --git a/src/mongo/db/auth/generate_action_types.py b/src/mongo/db/auth/generate_action_types.py -index b712b29666..39252ed293 100755 ---- a/src/mongo/db/auth/generate_action_types.py -+++ b/src/mongo/db/auth/generate_action_types.py -@@ -227,7 +227,7 @@ def hasDuplicateActionTypes(actionTypes): - prevActionType = sortedActionTypes[0] - for actionType in sortedActionTypes[1:]: - if actionType == prevActionType: -- print 'Duplicate actionType %s\n' % actionType -+ print('Duplicate actionType %s\n' % actionType) - didFail = True - prevActionType = actionType - -@@ -240,7 +240,7 @@ def parseActionTypesFromFile(actionTypesFilename): - - if __name__ == "__main__": - if len(sys.argv) != 4: -- print "Usage: generate_action_types.py
" -+ print("Usage: generate_action_types.py
") - sys.exit(-1) - - actionTypes = parseActionTypesFromFile(sys.argv[1]) -diff --git a/src/mongo/db/fts/generate_stop_words.py b/src/mongo/db/fts/generate_stop_words.py -index 31603eb92e..ae4ad6ccbd 100644 ---- a/src/mongo/db/fts/generate_stop_words.py -+++ b/src/mongo/db/fts/generate_stop_words.py -@@ -1,7 +1,7 @@ - import sys - - def generate( header, source, language_files ): -- out = open( header, "wb" ) -+ out = open( header, "w" ) - out.write( """ - #pragma once - #include -@@ -18,7 +18,7 @@ namespace fts { - - - -- out = open( source, "wb" ) -+ out = open( source, "w" ) - out.write( '#include "%s"' % header.rpartition( "/" )[2].rpartition( "\\" )[2] ) - out.write( """ - namespace mongo { -@@ -34,7 +34,7 @@ namespace fts { - out.write( ' // %s\n' % l_file ) - out.write( ' {\n' ) - out.write( ' const char* const words[] = {\n' ) -- for word in open( l_file, "rb" ): -+ for word in open( l_file, "r" ): - out.write( ' "%s",\n' % word.strip() ) - out.write( ' };\n' ) - out.write( ' const size_t wordcnt = sizeof(words) / sizeof(words[0]);\n' ) -diff --git a/src/mongo/db/fts/unicode/gen_diacritic_map.py b/src/mongo/db/fts/unicode/gen_diacritic_map.py -index 08cfa95cda..7c623aff60 100644 ---- a/src/mongo/db/fts/unicode/gen_diacritic_map.py -+++ b/src/mongo/db/fts/unicode/gen_diacritic_map.py -@@ -45,7 +45,7 @@ def add_diacritic_mapping(codepoint): - # c : recomposed unicode character with diacritics removed - a = chr(codepoint) - d = normalize('NFD', a) -- r = u'' -+ r = '' - - for i in range(len(d)): - if ord(d[i]) not in diacritics: -diff --git a/src/mongo/util/generate_icu_init_cpp.py b/src/mongo/util/generate_icu_init_cpp.py -index 8ae084aeec..7c576f6ffe 100755 ---- a/src/mongo/util/generate_icu_init_cpp.py -+++ b/src/mongo/util/generate_icu_init_cpp.py -@@ -26,6 +26,9 @@ - # delete this exception statement from all source files in the program, - # then also delete it in the license file. - -+from __future__ import unicode_literals -+ -+import array - import optparse - import os - import sys -@@ -110,8 +113,8 @@ MONGO_INITIALIZER(LoadICUData)(InitializerContext* context) { - ''' - decimal_encoded_data = '' - with open(data_file_path, 'rb') as data_file: -- decimal_encoded_data = ','.join([str(ord(byte)) for byte in data_file.read()]) -- with open(cpp_file_path, 'wb') as cpp_file: -+ decimal_encoded_data = ','.join([str(byte) for byte in array.array("B", data_file.read()).tolist()]) -+ with open(cpp_file_path, 'w') as cpp_file: - cpp_file.write(source_template % dict(decimal_encoded_data=decimal_encoded_data)) - - if __name__ == '__main__': diff --git a/r4.0.1.tar.gz b/r4.5.0.tar.gz similarity index 78% rename from r4.0.1.tar.gz rename to r4.5.0.tar.gz index 91694bfab71afd756680cc5f06031762c26c86f0..f8d372748d4713dd0b82313604a1c869d36963f2 100644 Binary files a/r4.0.1.tar.gz and b/r4.5.0.tar.gz differ