Blob Blame History Raw
diff --git a/SConstruct b/SConstruct
index f949f65..78eab69 100644
--- a/SConstruct
+++ b/SConstruct
@@ -383,7 +383,7 @@ win_version_min_choices = {
 }
 
 add_option('win-version-min',
-    choices=win_version_min_choices.keys(),
+    choices=list(win_version_min_choices.keys()),
     default=None,
     help='minimum Windows version to support',
     type='choice',
@@ -492,7 +492,7 @@ except ValueError as e:
 def variable_shlex_converter(val):
     # If the argument is something other than a string, propogate
     # it literally.
-    if not isinstance(val, basestring):
+    if not isinstance(val, str):
         return val
     parse_mode = get_option('variable-parse-mode')
     if parse_mode == 'auto':
@@ -820,7 +820,7 @@ SConsignFile(str(sconsDataDir.File('sconsign')))
 def printLocalInfo():
     import sys, SCons
     print( "scons version: " + SCons.__version__ )
-    print( "python version: " + " ".join( [ `i` for i in sys.version_info ] ) )
+    print( "python version: " + " ".join( [ str(i) for i in sys.version_info ] ) )
 
 printLocalInfo()
 
@@ -1930,7 +1930,7 @@ def doConfigure(myenv):
         # to make them real errors.
         cloned.Append(CCFLAGS=['-Werror'])
         conf = Configure(cloned, help=False, custom_tests = {
-                'CheckFlag' : lambda(ctx) : CheckFlagTest(ctx, tool, extension, flag)
+                'CheckFlag' : lambda ctx : CheckFlagTest(ctx, tool, extension, flag)
         })
         available = conf.CheckFlag()
         conf.Finish()
@@ -2402,7 +2402,7 @@ def doConfigure(myenv):
             "undefined" : myenv.File("#etc/ubsan.blacklist"),
         }
 
-        blackfiles = set([v for (k, v) in blackfiles_map.iteritems() if k in sanitizer_list])
+        blackfiles = set([v for (k, v) in blackfiles_map.items() if k in sanitizer_list])
         blacklist_options=["-fsanitize-blacklist=%s" % blackfile
                            for blackfile in blackfiles
                            if os.stat(blackfile.path).st_size != 0]
diff --git a/buildscripts/aggregate_tracefiles.py b/buildscripts/aggregate_tracefiles.py
index 8f1db78..0b4bfd2 100644
--- a/buildscripts/aggregate_tracefiles.py
+++ b/buildscripts/aggregate_tracefiles.py
@@ -16,7 +16,7 @@ def aggregate(inputs, output):
 
     args += ['-o', output]
 
-    print ' '.join(args)
+    print(' '.join(args))
     
     return subprocess.call(args)  
 
diff --git a/buildscripts/buildlogger.py b/buildscripts/buildlogger.py
index 163c10a..fd2c095 100644
--- a/buildscripts/buildlogger.py
+++ b/buildscripts/buildlogger.py
@@ -45,8 +45,8 @@ import subprocess
 import sys
 import time
 import traceback
-import urllib2
-import utils
+import urllib.request, urllib.error, urllib.parse
+from . import utils
 
 # suppress deprecation warnings that happen when
 # we import the 'buildbot.tac' file below
@@ -82,7 +82,7 @@ for path in possible_paths:
     if os.path.isfile(credentials_path):
         credentials = {}
         try:
-            execfile(credentials_path, credentials, credentials)
+            exec(compile(open(credentials_path).read(), credentials_path, 'exec'), credentials, credentials)
             username = credentials.get('slavename', credentials.get('username'))
             password = credentials.get('passwd', credentials.get('password'))
             break
@@ -94,14 +94,14 @@ URL_ROOT = os.environ.get('BUILDLOGGER_URL', 'http://buildlogs.mongodb.org/')
 TIMEOUT_SECONDS = 10
 socket.setdefaulttimeout(TIMEOUT_SECONDS)
 
-auth_handler = urllib2.HTTPBasicAuthHandler()
+auth_handler = urllib.request.HTTPBasicAuthHandler()
 auth_handler.add_password(
     realm='buildlogs',
     uri=URL_ROOT,
     user=username,
     passwd=password)
 
-url_opener = urllib2.build_opener(auth_handler, urllib2.HTTPErrorProcessor())
+url_opener = urllib.request.build_opener(auth_handler, urllib2.HTTPErrorProcessor())
 
 def url(endpoint):
     if not endpoint.endswith('/'):
@@ -115,10 +115,10 @@ def post(endpoint, data, headers=None):
     headers = headers or {}
     headers.update({'Content-Type': 'application/json; charset=utf-8'})
 
-    req = urllib2.Request(url=url(endpoint), data=data, headers=headers)
+    req = urllib.request.Request(url=url(endpoint), data=data, headers=headers)
     try:
         response = url_opener.open(req)
-    except urllib2.URLError:
+    except urllib.error.URLError:
         import traceback
         traceback.print_exc(file=sys.stderr)
         sys.stderr.flush()
@@ -145,7 +145,7 @@ def traceback_to_stderr(func):
     def wrapper(*args, **kwargs):
         try:
             return func(*args, **kwargs)
-        except urllib2.HTTPError, err:
+        except urllib.error.HTTPError as err:
             sys.stderr.write('error: HTTP code %d\n----\n' % err.code)
             if hasattr(err, 'hdrs'):
                 for k, v in err.hdrs.items():
diff --git a/buildscripts/burn_in_tests.py b/buildscripts/burn_in_tests.py
index a78c905..2255e97 100644
--- a/buildscripts/burn_in_tests.py
+++ b/buildscripts/burn_in_tests.py
@@ -16,7 +16,7 @@ import re
 import requests
 import shlex
 import sys
-import urlparse
+import urllib.parse
 import yaml
 
 
@@ -114,7 +114,7 @@ def find_last_activated_task(revisions, variant, branch_name):
     evg_cfg = read_evg_config()
     if evg_cfg is not None and "api_server_host" in evg_cfg:
         api_server = "{url.scheme}://{url.netloc}".format(
-            url=urlparse.urlparse(evg_cfg["api_server_host"]))
+            url=urllib.parse.urlparse(evg_cfg["api_server_host"]))
     else:
         api_server = API_SERVER_DEFAULT
 
@@ -161,7 +161,7 @@ def find_changed_tests(branch_name, base_commit, max_revisions, buildvariant, ch
             # commit among 'revs_to_check' that's been activated in Evergreen. We handle this by
             # only considering tests changed in the current commit.
             last_activated = "HEAD"
-        print "Comparing current branch against", last_activated
+        print("Comparing current branch against", last_activated)
         revisions = callo(["git", "rev-list", base_commit + "..." + last_activated]).splitlines()
         base_commit = last_activated
     else:
@@ -169,10 +169,10 @@ def find_changed_tests(branch_name, base_commit, max_revisions, buildvariant, ch
 
     revision_count = len(revisions)
     if revision_count > max_revisions:
-        print "There are too many revisions included (%d)." % revision_count, \
+        print("There are too many revisions included (%d)." % revision_count, \
               "This is likely because your base branch is not " + branch_name + ".", \
               "You can allow us to review more than 25 revisions by using", \
-              "the --maxRevisions option."
+              "the --maxRevisions option.")
         return changed_tests
 
     changed_files = callo(["git", "diff", "--name-only", base_commit]).splitlines()
@@ -279,7 +279,7 @@ def create_task_list(evergreen_conf, buildvariant, suites, exclude_tasks):
 
     evg_buildvariant = evergreen_conf.get_variant(buildvariant)
     if not evg_buildvariant:
-        print "Buildvariant", buildvariant, "not found in", evergreen_conf.path
+        print("Buildvariant", buildvariant, "not found in", evergreen_conf.path)
         sys.exit(1)
 
     # Find all the buildvariant task's resmoke_args.
@@ -366,9 +366,9 @@ def main():
         evergreen_conf = evergreen.EvergreenProjectConfig(values.evergreen_file)
 
         if values.buildvariant is None:
-            print "Option buildVariant must be specified to find changed tests.\n", \
+            print("Option buildVariant must be specified to find changed tests.\n", \
                   "Select from the following: \n" \
-                  "\t", "\n\t".join(sorted(evergreen_conf.variant_names))
+                  "\t", "\n\t".join(sorted(evergreen_conf.variant_names)))
             sys.exit(1)
 
         changed_tests = find_changed_tests(values.branch,
@@ -380,7 +380,7 @@ def main():
         changed_tests = filter_tests(changed_tests, exclude_tests)
         # If there are no changed tests, exit cleanly.
         if not changed_tests:
-            print "No new or modified tests found."
+            print("No new or modified tests found.")
             _write_report_file({}, values.test_list_outfile)
             sys.exit(0)
         suites = resmokelib.parser.get_suites(values, changed_tests)
@@ -403,7 +403,7 @@ def main():
             try:
                 subprocess.check_call(resmoke_cmd, shell=False)
             except subprocess.CalledProcessError as err:
-                print "Resmoke returned an error with task:", task
+                print("Resmoke returned an error with task:", task)
                 _save_report_data(test_results, values.report_file, task)
                 _write_report_file(test_results, values.report_file)
                 sys.exit(err.returncode)
diff --git a/buildscripts/ciconfig/evergreen.py b/buildscripts/ciconfig/evergreen.py
index 9a4f7a9..9cb6c3f 100644
--- a/buildscripts/ciconfig/evergreen.py
+++ b/buildscripts/ciconfig/evergreen.py
@@ -30,7 +30,7 @@ class EvergreenProjectConfig(object):
     @property
     def task_names(self):
         """The list of task names."""
-        return self._tasks_by_name.keys()
+        return list(self._tasks_by_name.keys())
 
     def get_task(self, task_name):
         """Return the task with the given name as a Task instance."""
@@ -52,7 +52,7 @@ class EvergreenProjectConfig(object):
     @property
     def variant_names(self):
         """The list of build variant names."""
-        return self._variants_by_name.keys()
+        return list(self._variants_by_name.keys())
 
     def get_variant(self, variant_name):
         """Return the variant with the given name as a Variant instance."""
diff --git a/buildscripts/ciconfig/tags.py b/buildscripts/ciconfig/tags.py
index 418d0e3..bfdb25c 100644
--- a/buildscripts/ciconfig/tags.py
+++ b/buildscripts/ciconfig/tags.py
@@ -12,7 +12,7 @@ import yaml
 
 # Setup to preserve order in yaml.dump, see https://stackoverflow.com/a/8661021
 def _represent_dict_order(self, data):
-    return self.represent_mapping("tag:yaml.org,2002:map", data.items())
+    return self.represent_mapping("tag:yaml.org,2002:map", list(data.items()))
 
 yaml.add_representer(collections.OrderedDict, _represent_dict_order)
 # End setup
@@ -55,11 +55,11 @@ class TagsConfig(object):
 
     def get_test_kinds(self):
         """List the test kinds."""
-        return self._conf.keys()
+        return list(self._conf.keys())
 
     def get_test_patterns(self, test_kind):
         """List the test patterns under 'test_kind'."""
-        return getdefault(self._conf, test_kind, {}).keys()
+        return list(getdefault(self._conf, test_kind, {}).keys())
 
     def get_tags(self, test_kind, test_pattern):
         """List the tags under 'test_kind' and 'test_pattern'."""
diff --git a/buildscripts/clang_format.py b/buildscripts/clang_format.py
index cf9884d..c0f4140 100755
--- a/buildscripts/clang_format.py
+++ b/buildscripts/clang_format.py
@@ -20,7 +20,7 @@ import sys
 import tarfile
 import tempfile
 import threading
-import urllib2
+import urllib.request, urllib.error, urllib.parse
 from distutils import spawn
 from optparse import OptionParser
 from multiprocessing import cpu_count
@@ -96,11 +96,11 @@ def get_clang_format_from_cache_and_extract(url, tarball_ext):
     num_tries = 5
     for attempt in range(num_tries):
         try:
-            resp = urllib2.urlopen(url)
+            resp = urllib.request.urlopen(url)
             with open(temp_tar_file, 'wb') as f:
               f.write(resp.read())
             break
-        except urllib2.URLError:
+        except urllib.error.URLError:
             if attempt == num_tries - 1:
                 raise
             continue
@@ -436,7 +436,7 @@ def reformat_branch(clang_format, commit_prior_to_reformat, commit_after_reforma
 
         # Check if anything needed reformatting, and if so amend the commit
         if not repo.is_working_tree_dirty():
-            print ("Commit %s needed no reformatting" % commit_hash)
+            print("Commit %s needed no reformatting" % commit_hash)
         else:
             repo.commit(["--all", "--amend", "--no-edit"])
 
diff --git a/buildscripts/cleanbb.py b/buildscripts/cleanbb.py
index b599dc8..485109c 100644
--- a/buildscripts/cleanbb.py
+++ b/buildscripts/cleanbb.py
@@ -79,7 +79,7 @@ def tryToRemove(path):
 
 def cleanup( root , nokill ):
     if nokill:
-        print "nokill requested, not killing anybody"
+        print("nokill requested, not killing anybody")
     else:
         if killprocs( root=root ) > 0:
             time.sleep(3)
diff --git a/buildscripts/cpplint.py b/buildscripts/cpplint.py
index c6aa51b..ed9cdb3 100755
--- a/buildscripts/cpplint.py
+++ b/buildscripts/cpplint.py
@@ -835,7 +835,7 @@ class _CppLintState(object):
 
   def PrintErrorCounts(self):
     """Print a summary of errors by category, and the total."""
-    for category, count in self.errors_by_category.iteritems():
+    for category, count in self.errors_by_category.items():
       sys.stderr.write('Category \'%s\' errors found: %d\n' %
                        (category, count))
     sys.stderr.write('Total errors found: %d\n' % self.error_count)
@@ -1388,7 +1388,7 @@ def FindEndOfExpressionInLine(line, startpos, stack):
     On finding an unclosed expression: (-1, None)
     Otherwise: (-1, new stack at end of this line)
   """
-  for i in xrange(startpos, len(line)):
+  for i in range(startpos, len(line)):
     char = line[i]
     if char in '([{':
       # Found start of parenthesized expression, push to expression stack
@@ -1687,7 +1687,7 @@ def CheckForCopyright(filename, lines, error):
 
   # We'll say it should occur by line 10. Don't forget there's a
   # dummy line at the front.
-  for line in xrange(1, min(len(lines), 11)):
+  for line in range(1, min(len(lines), 11)):
     if re.search(r'Copyright', lines[line], re.I): break
   else:                       # means no copyright line was found
     error(filename, 0, 'legal/copyright', 5,
@@ -1838,7 +1838,7 @@ def CheckForBadCharacters(filename, lines, error):
     error: The function to call with any errors found.
   """
   for linenum, line in enumerate(lines):
-    if u'\ufffd' in line:
+    if '\ufffd' in line:
       error(filename, linenum, 'readability/utf8', 5,
             'Line contains invalid UTF-8 (or Unicode replacement character).')
     if '\0' in line:
@@ -2884,7 +2884,7 @@ def CheckForFunctionLengths(filename, clean_lines, linenum,
 
   if starting_func:
     body_found = False
-    for start_linenum in xrange(linenum, clean_lines.NumLines()):
+    for start_linenum in range(linenum, clean_lines.NumLines()):
       start_line = lines[start_linenum]
       joined_line += ' ' + start_line.lstrip()
       if Search(r'(;|})', start_line):  # Declarations and trivial functions
@@ -3361,7 +3361,7 @@ def CheckBracesSpacing(filename, clean_lines, linenum, error):
     trailing_text = ''
     if endpos > -1:
       trailing_text = endline[endpos:]
-    for offset in xrange(endlinenum + 1,
+    for offset in range(endlinenum + 1,
                          min(endlinenum + 3, clean_lines.NumLines() - 1)):
       trailing_text += clean_lines.elided[offset]
     if not Match(r'^[\s}]*[{.;,)<>\]:]', trailing_text):
@@ -3530,7 +3530,7 @@ def IsRValueType(clean_lines, nesting_state, linenum, column):
 
     # Look for the previous 'for(' in the previous lines.
     before_text = match_symbol.group(1)
-    for i in xrange(start - 1, max(start - 6, 0), -1):
+    for i in range(start - 1, max(start - 6, 0), -1):
       before_text = clean_lines.elided[i] + before_text
     if Search(r'for\s*\([^{};]*$', before_text):
       # This is the condition inside a for-loop
@@ -3657,12 +3657,12 @@ def IsRValueAllowed(clean_lines, linenum):
     True if line is within the region where RValue references are allowed.
   """
   # Allow region marked by PUSH/POP macros
-  for i in xrange(linenum, 0, -1):
+  for i in range(linenum, 0, -1):
     line = clean_lines.elided[i]
     if Match(r'GOOGLE_ALLOW_RVALUE_REFERENCES_(?:PUSH|POP)', line):
       if not line.endswith('PUSH'):
         return False
-      for j in xrange(linenum, clean_lines.NumLines(), 1):
+      for j in range(linenum, clean_lines.NumLines(), 1):
         line = clean_lines.elided[j]
         if Match(r'GOOGLE_ALLOW_RVALUE_REFERENCES_(?:PUSH|POP)', line):
           return line.endswith('POP')
@@ -4142,7 +4142,7 @@ def CheckCheck(filename, clean_lines, linenum, error):
     expression = lines[linenum][start_pos + 1:end_pos - 1]
   else:
     expression = lines[linenum][start_pos + 1:]
-    for i in xrange(linenum + 1, end_line):
+    for i in range(linenum + 1, end_line):
       expression += lines[i]
     expression += last_line[0:end_pos - 1]
 
@@ -4270,7 +4270,7 @@ def GetLineWidth(line):
     The width of the line in column positions, accounting for Unicode
     combining characters and wide characters.
   """
-  if isinstance(line, unicode):
+  if isinstance(line, str):
     width = 0
     for uc in unicodedata.normalize('NFC', line):
       if unicodedata.east_asian_width(uc) in ('W', 'F'):
@@ -4623,7 +4623,7 @@ def _GetTextInside(text, start_pattern):
 
   # Give opening punctuations to get the matching close-punctuations.
   matching_punctuation = {'(': ')', '{': '}', '[': ']'}
-  closing_punctuation = set(matching_punctuation.itervalues())
+  closing_punctuation = set(matching_punctuation.values())
 
   # Find the position to start extracting text.
   match = re.search(start_pattern, text, re.M)
@@ -4949,7 +4949,7 @@ def IsDerivedFunction(clean_lines, linenum):
     virt-specifier.
   """
   # Scan back a few lines for start of current function
-  for i in xrange(linenum, max(-1, linenum - 10), -1):
+  for i in range(linenum, max(-1, linenum - 10), -1):
     match = Match(r'^([^()]*\w+)\(', clean_lines.elided[i])
     if match:
       # Look for "override" after the matching closing parenthesis
@@ -4970,7 +4970,7 @@ def IsInitializerList(clean_lines, linenum):
     True if current line appears to be inside constructor initializer
     list, False otherwise.
   """
-  for i in xrange(linenum, 1, -1):
+  for i in range(linenum, 1, -1):
     line = clean_lines.elided[i]
     if i == linenum:
       remove_function_body = Match(r'^(.*)\{\s*$', line)
@@ -5066,7 +5066,7 @@ def CheckForNonConstReference(filename, clean_lines, linenum,
           # Found the matching < on an earlier line, collect all
           # pieces up to current line.
           line = ''
-          for i in xrange(startline, linenum + 1):
+          for i in range(startline, linenum + 1):
             line += clean_lines.elided[i].strip()
 
   # Check for non-const references in function parameters.  A single '&' may
@@ -5090,7 +5090,7 @@ def CheckForNonConstReference(filename, clean_lines, linenum,
   # appear inside the second set of parentheses on the current line as
   # opposed to the first set.
   if linenum > 0:
-    for i in xrange(linenum - 1, max(0, linenum - 10), -1):
+    for i in range(linenum - 1, max(0, linenum - 10), -1):
       previous_line = clean_lines.elided[i]
       if not Search(r'[),]\s*$', previous_line):
         break
@@ -5121,7 +5121,7 @@ def CheckForNonConstReference(filename, clean_lines, linenum,
     # Don't see a whitelisted function on this line.  Actually we
     # didn't see any function name on this line, so this is likely a
     # multi-line parameter list.  Try a bit harder to catch this case.
-    for i in xrange(2):
+    for i in range(2):
       if (linenum > i and
           Search(whitelisted_functions, clean_lines.elided[linenum - i - 1])):
         return
@@ -5283,7 +5283,7 @@ def CheckCStyleCast(filename, clean_lines, linenum, cast_type, pattern, error):
   # Try expanding current context to see if we one level of
   # parentheses inside a macro.
   if linenum > 0:
-    for i in xrange(linenum - 1, max(0, linenum - 5), -1):
+    for i in range(linenum - 1, max(0, linenum - 5), -1):
       context = clean_lines.elided[i] + context
   if Match(r'.*\b[_A-Z][_A-Z0-9]*\s*\((?:\([^()]*\)|[^()])*$', context):
     return False
@@ -5540,7 +5540,7 @@ def CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error,
   required = {}  # A map of header name to linenumber and the template entity.
                  # Example of required: { '<functional>': (1219, 'less<>') }
 
-  for linenum in xrange(clean_lines.NumLines()):
+  for linenum in range(clean_lines.NumLines()):
     line = clean_lines.elided[linenum]
     if not line or line[0] == '#':
       continue
@@ -5589,7 +5589,7 @@ def CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error,
 
   # include_dict is modified during iteration, so we iterate over a copy of
   # the keys.
-  header_keys = include_dict.keys()
+  header_keys = list(include_dict.keys())
   for header in header_keys:
     (same_module, common_path) = FilesBelongToSameModule(abs_filename, header)
     fullpath = common_path + header
@@ -5684,7 +5684,7 @@ def CheckRedundantVirtual(filename, clean_lines, linenum, error):
   end_col = -1
   end_line = -1
   start_col = len(virtual.group(1))
-  for start_line in xrange(linenum, min(linenum + 3, clean_lines.NumLines())):
+  for start_line in range(linenum, min(linenum + 3, clean_lines.NumLines())):
     line = clean_lines.elided[start_line][start_col:]
     parameter_list = Match(r'^([^(]*)\(', line)
     if parameter_list:
@@ -5699,7 +5699,7 @@ def CheckRedundantVirtual(filename, clean_lines, linenum, error):
 
   # Look for "override" or "final" after the parameter list
   # (possibly on the next few lines).
-  for i in xrange(end_line, min(end_line + 3, clean_lines.NumLines())):
+  for i in range(end_line, min(end_line + 3, clean_lines.NumLines())):
     line = clean_lines.elided[i][end_col:]
     match = Search(r'\b(override|final)\b', line)
     if match:
@@ -5926,7 +5926,7 @@ def ProcessFileData(filename, file_extension, lines, error,
 
   RemoveMultiLineComments(filename, lines, error)
   clean_lines = CleansedLines(lines)
-  for line in xrange(clean_lines.NumLines()):
+  for line in range(clean_lines.NumLines()):
     ProcessLine(filename, file_extension, clean_lines, line,
                 include_state, function_state, nesting_state, error,
                 extra_check_functions)
diff --git a/buildscripts/errorcodes.py b/buildscripts/errorcodes.py
index cc46789..7351e6a 100755
--- a/buildscripts/errorcodes.py
+++ b/buildscripts/errorcodes.py
@@ -5,13 +5,16 @@
 Parses .cpp files for assertions and verifies assertion codes are distinct.
 Optionally replaces zero codes in source code with new distinct values.
 """
+from __future__ import unicode_literals
+import io
 
 import bisect
 import os
 import sys
-import utils
+from . import utils
 from collections import defaultdict, namedtuple
 from optparse import OptionParser
+from functools import reduce
 
 try:
     import regex as re
@@ -66,9 +69,9 @@ def parseSourceFiles( callback ):
 
     for sourceFile in utils.getAllSourceFiles(prefix='src/mongo/'):
         if list_files:
-            print 'scanning file: ' + sourceFile
+            print('scanning file: ' + sourceFile)
 
-        with open(sourceFile) as f:
+        with io.open(sourceFile, encoding="utf-8") as f:
             text = f.read()
 
             if not any([zz in text for zz in quick]):
@@ -159,7 +162,7 @@ def readErrorCodes():
 
     parseSourceFiles( checkDups )
 
-    if seen.has_key("0"):
+    if "0" in seen:
         code = "0"
         bad = seen[code]
         errors.append( bad )
@@ -167,7 +170,7 @@ def readErrorCodes():
         print( "ZERO_CODE:" )
         print( "  %s:%d:%d:%s" % (bad.sourceFile, line, col, bad.lines) )
 
-    for code, locations in dups.items():
+    for code, locations in list(dups.items()):
         print( "DUPLICATE IDS: %s" % code )
         for loc in locations:
             line, col = getLineAndColumnForPosition(loc)
@@ -189,19 +192,19 @@ def replaceBadCodes( errors, nextCode ):
 
     for loc in skip_errors:
         line, col = getLineAndColumnForPosition(loc)
-        print ("SKIPPING NONZERO code=%s: %s:%d:%d"
+        print("SKIPPING NONZERO code=%s: %s:%d:%d"
                 % (loc.code, loc.sourceFile, line, col))
 
     # Dedupe, sort, and reverse so we don't have to update offsets as we go.
     for assertLoc in reversed(sorted(set(zero_errors))):
         (sourceFile, byteOffset, lines, code) = assertLoc
         lineNum, _ = getLineAndColumnForPosition(assertLoc)
-        print "UPDATING_FILE: %s:%s" % (sourceFile, lineNum)
+        print("UPDATING_FILE: %s:%s" % (sourceFile, lineNum))
 
         ln = lineNum - 1
 
         with open(sourceFile, 'r+') as f:
-            print "LINE_%d_BEFORE:%s" % (lineNum, f.readlines()[ln].rstrip())
+            print("LINE_%d_BEFORE:%s" % (lineNum, f.readlines()[ln].rstrip()))
 
             f.seek(0)
             text = f.read()
@@ -212,7 +215,7 @@ def replaceBadCodes( errors, nextCode ):
             f.write(text[byteOffset+1:])
             f.seek(0)
 
-            print "LINE_%d_AFTER :%s" % (lineNum, f.readlines()[ln].rstrip())
+            print("LINE_%d_AFTER :%s" % (lineNum, f.readlines()[ln].rstrip()))
         nextCode += 1
 
 
@@ -281,7 +284,7 @@ def main():
     elif options.replace:
         replaceBadCodes(errors, next)
     else:
-        print ERROR_HELP
+        print(ERROR_HELP)
         sys.exit(1)
 
 
diff --git a/buildscripts/eslint.py b/buildscripts/eslint.py
index c1ab04f..d5c6aef 100755
--- a/buildscripts/eslint.py
+++ b/buildscripts/eslint.py
@@ -18,7 +18,7 @@ import sys
 import tarfile
 import tempfile
 import threading
-import urllib
+import urllib.request, urllib.parse, urllib.error
 from distutils import spawn
 from optparse import OptionParser
 
@@ -81,7 +81,7 @@ def get_eslint_from_cache(dest_file, platform, arch):
     # Download the file
     print("Downloading ESLint %s from %s, saving to %s" % (ESLINT_VERSION,
                                                            url, temp_tar_file))
-    urllib.urlretrieve(url, temp_tar_file)
+    urllib.request.urlretrieve(url, temp_tar_file)
 
     eslint_distfile = ESLINT_SOURCE_TAR_BASE.substitute(platform=platform, arch=arch)
     extract_eslint(temp_tar_file, eslint_distfile)
diff --git a/buildscripts/gdb/mongo_lock.py b/buildscripts/gdb/mongo_lock.py
index 98dc66d..9538c29 100644
--- a/buildscripts/gdb/mongo_lock.py
+++ b/buildscripts/gdb/mongo_lock.py
@@ -231,8 +231,8 @@ def find_mutex_holder(graph, thread_dict, show):
                                                             mutex_waiter_lwpid))
     if graph:
         graph.add_edge(Thread(mutex_waiter_id, mutex_waiter_lwpid),
-                       Lock(long(mutex_value), "Mutex"))
-        graph.add_edge(Lock(long(mutex_value), "Mutex"), Thread(mutex_holder_id, mutex_holder))
+                       Lock(int(mutex_value), "Mutex"))
+        graph.add_edge(Lock(int(mutex_value), "Mutex"), Thread(mutex_holder_id, mutex_holder))
 
 
 def find_lock_manager_holders(graph, thread_dict, show):
@@ -262,8 +262,8 @@ def find_lock_manager_holders(graph, thread_dict, show):
                 lock_head, lock_request["mode"], lock_thread_id, lock_thread_lwpid) +
                 " waited on by thread 0x{:x} (LWP {})".format(thread_dict[lwpid], lwpid))
         if graph:
-            graph.add_edge(Thread(thread_dict[lwpid], lwpid), Lock(long(lock_head), "MongoDB lock"))
-            graph.add_edge(Lock(long(lock_head), "MongoDB lock"),
+            graph.add_edge(Thread(thread_dict[lwpid], lwpid), Lock(int(lock_head), "MongoDB lock"))
+            graph.add_edge(Lock(int(lock_head), "MongoDB lock"),
                            Thread(lock_thread_id, lock_thread_lwpid))
         lock_request_ptr = lock_request["next"]
 
diff --git a/buildscripts/hang_analyzer.py b/buildscripts/hang_analyzer.py
index d554e67..0cca026 100755
--- a/buildscripts/hang_analyzer.py
+++ b/buildscripts/hang_analyzer.py
@@ -12,7 +12,7 @@ A prototype hang analyzer for Evergreen integration to help investigate test tim
 Supports Linux, MacOS X, Solaris, and Windows.
 """
 
-import StringIO
+import io
 import csv
 import glob
 import itertools
@@ -177,7 +177,7 @@ class WindowsProcessList(object):
 
         ret = callo([ps, "/FO", "CSV"], logger)
 
-        b = StringIO.StringIO(ret)
+        b = io.StringIO(ret)
         csvReader = csv.reader(b)
 
         p = [[int(row[1]), row[0]] for row in csvReader if row[1] != "PID"]
@@ -270,7 +270,7 @@ class DarwinProcessList(object):
 
         ret = callo([ps, "-axco", "pid,comm"], logger)
 
-        b = StringIO.StringIO(ret)
+        b = io.StringIO(ret)
         csvReader = csv.reader(b, delimiter=' ', quoting=csv.QUOTE_NONE, skipinitialspace=True)
 
         p = [[int(row[0]), row[1]] for row in csvReader if row[0] != "PID"]
@@ -411,7 +411,7 @@ class LinuxProcessList(object):
 
         ret = callo([ps, "-eo", "pid,args"], logger)
 
-        b = StringIO.StringIO(ret)
+        b = io.StringIO(ret)
         csvReader = csv.reader(b, delimiter=' ', quoting=csv.QUOTE_NONE, skipinitialspace=True)
 
         p = [[int(row[0]), os.path.split(row[1])[1]] for row in csvReader if row[0] != "PID"]
@@ -433,7 +433,7 @@ class SolarisProcessList(object):
 
         ret = callo([ps, "-eo", "pid,args"], logger)
 
-        b = StringIO.StringIO(ret)
+        b = io.StringIO(ret)
         csvReader = csv.reader(b, delimiter=' ', quoting=csv.QUOTE_NONE, skipinitialspace=True)
 
         p = [[int(row[0]), os.path.split(row[1])[1]] for row in csvReader if row[0] != "PID"]
@@ -545,7 +545,7 @@ def signal_process(logger, pid, signalnum):
 
         logger.info("Waiting for process to report")
         time.sleep(5)
-    except OSError, e:
+    except OSError as e:
         logger.error("Hit OS error trying to signal process: %s" % str(e))
 
     except AttributeError:
diff --git a/buildscripts/idl/idl/binder.py b/buildscripts/idl/idl/binder.py
index 354acca..9612e39 100644
--- a/buildscripts/idl/idl/binder.py
+++ b/buildscripts/idl/idl/binder.py
@@ -608,7 +608,7 @@ def _validate_enum_int(ctxt, idl_enum):
     min_value = min(int_values_set)
     max_value = max(int_values_set)
 
-    valid_int = {x for x in xrange(min_value, max_value + 1)}
+    valid_int = {x for x in range(min_value, max_value + 1)}
 
     if valid_int != int_values_set:
         ctxt.add_enum_non_continuous_range_error(idl_enum, idl_enum.name)
diff --git a/buildscripts/idl/idl/bson.py b/buildscripts/idl/idl/bson.py
index 214b67a..b84421d 100644
--- a/buildscripts/idl/idl/bson.py
+++ b/buildscripts/idl/idl/bson.py
@@ -141,7 +141,7 @@ def cpp_bson_type_name(name):
 def list_valid_types():
     # type: () -> List[unicode]
     """Return a list of supported bson types."""
-    return [a for a in _BSON_TYPE_INFORMATION.iterkeys()]
+    return [a for a in _BSON_TYPE_INFORMATION.keys()]
 
 
 def is_valid_bindata_subtype(name):
diff --git a/buildscripts/idl/idl/cpp_types.py b/buildscripts/idl/idl/cpp_types.py
index aafcf87..e989664 100644
--- a/buildscripts/idl/idl/cpp_types.py
+++ b/buildscripts/idl/idl/cpp_types.py
@@ -28,6 +28,7 @@ from . import writer
 
 _STD_ARRAY_UINT8_16 = 'std::array<std::uint8_t,16>'
 
+ABC = ABCMeta(str('ABC'), (object,), {'__slots__': ()})
 
 def is_primitive_scalar_type(cpp_type):
     # type: (unicode) -> bool
@@ -75,11 +76,9 @@ def _qualify_array_type(cpp_type):
     return "std::vector<%s>" % (cpp_type)
 
 
-class CppTypeBase(object):
+class CppTypeBase(ABC):
     """Base type for C++ Type information."""
 
-    __metaclass__ = ABCMeta
-
     def __init__(self, field):
         # type: (ast.Field) -> None
         """Construct a CppTypeBase."""
@@ -537,11 +536,9 @@ def get_cpp_type(field):
     return cpp_type_info
 
 
-class BsonCppTypeBase(object):
+class BsonCppTypeBase(ABC):
     """Base type for custom C++ support for BSON Types information."""
 
-    __metaclass__ = ABCMeta
-
     def __init__(self, field):
         # type: (ast.Field) -> None
         """Construct a BsonCppTypeBase."""
diff --git a/buildscripts/idl/idl/enum_types.py b/buildscripts/idl/idl/enum_types.py
index 3caed6f..f17c926 100644
--- a/buildscripts/idl/idl/enum_types.py
+++ b/buildscripts/idl/idl/enum_types.py
@@ -29,11 +29,11 @@ from . import common
 from . import syntax
 from . import writer
 
+ABC = ABCMeta(str('ABC'), (object,), {'__slots__': ()})
 
-class EnumTypeInfoBase(object):
-    """Base type for enumeration type information."""
 
-    __metaclass__ = ABCMeta
+class EnumTypeInfoBase(ABC):
+    """Base type for enumeration type information."""
 
     def __init__(self, idl_enum):
         # type: (Union[syntax.Enum,ast.Enum]) -> None
@@ -108,8 +108,6 @@ class EnumTypeInfoBase(object):
 class _EnumTypeInt(EnumTypeInfoBase):
     """Type information for integer enumerations."""
 
-    __metaclass__ = ABCMeta
-
     def __init__(self, idl_enum):
         # type: (Union[syntax.Enum,ast.Enum]) -> None
         super(_EnumTypeInt, self).__init__(idl_enum)
@@ -189,8 +187,6 @@ def _get_constant_enum_name(idl_enum, enum_value):
 class _EnumTypeString(EnumTypeInfoBase):
     """Type information for string enumerations."""
 
-    __metaclass__ = ABCMeta
-
     def __init__(self, idl_enum):
         # type: (Union[syntax.Enum,ast.Enum]) -> None
         super(_EnumTypeString, self).__init__(idl_enum)
diff --git a/buildscripts/idl/idl/generator.py b/buildscripts/idl/idl/generator.py
index 9f591ec..8f97abf 100644
--- a/buildscripts/idl/idl/generator.py
+++ b/buildscripts/idl/idl/generator.py
@@ -33,6 +33,7 @@ from . import enum_types
 from . import struct_types
 from . import writer
 
+ABC = ABCMeta(str('ABC'), (object,), {'__slots__': ()})
 
 def _get_field_member_name(field):
     # type: (ast.Field) -> unicode
@@ -105,11 +106,9 @@ def _get_bson_type_check(bson_element, ctxt_name, field):
         return '%s.checkAndAssertTypes(%s, %s)' % (ctxt_name, bson_element, type_list)
 
 
-class _FieldUsageCheckerBase(object):
+class _FieldUsageCheckerBase(ABC):
     """Check for duplicate fields, and required fields as needed."""
 
-    __metaclass__ = ABCMeta
-
     def __init__(self, indented_writer):
         # type: (writer.IndentedTextWriter) -> None
         """Create a field usage checker."""
@@ -1470,8 +1469,8 @@ def _generate_header(spec, file_name):
     header.generate(spec)
 
     # Generate structs
-    with io.open(file_name, mode='wb') as file_handle:
-        file_handle.write(stream.getvalue().encode())
+    with io.open(file_name, mode='w') as file_handle:
+        file_handle.write(stream.getvalue())
 
 
 def _generate_source(spec, file_name, header_file_name):
@@ -1485,8 +1484,8 @@ def _generate_source(spec, file_name, header_file_name):
     source.generate(spec, header_file_name)
 
     # Generate structs
-    with io.open(file_name, mode='wb') as file_handle:
-        file_handle.write(stream.getvalue().encode())
+    with io.open(file_name, mode='w') as file_handle:
+        file_handle.write(stream.getvalue())
 
 
 def generate_code(spec, output_base_dir, header_file_name, source_file_name):
diff --git a/buildscripts/idl/idl/parser.py b/buildscripts/idl/idl/parser.py
index fd0af9b..ac36a3a 100644
--- a/buildscripts/idl/idl/parser.py
+++ b/buildscripts/idl/idl/parser.py
@@ -30,6 +30,7 @@ from . import common
 from . import errors
 from . import syntax
 
+ABC = ABCMeta(str('ABC'), (object,), {'__slots__': ()})
 
 class _RuleDesc(object):
     """
@@ -486,11 +487,9 @@ def _parse(stream, error_file_name):
         return syntax.IDLParsedSpec(spec, None)
 
 
-class ImportResolverBase(object):
+class ImportResolverBase(ABC):
     """Base class for resolving imported files."""
 
-    __metaclass__ = ABCMeta
-
     def __init__(self):
         # type: () -> None
         """Construct a ImportResolver."""
diff --git a/buildscripts/idl/idl/struct_types.py b/buildscripts/idl/idl/struct_types.py
index 9e2a950..cd46e7c 100644
--- a/buildscripts/idl/idl/struct_types.py
+++ b/buildscripts/idl/idl/struct_types.py
@@ -23,6 +23,7 @@ from . import ast
 from . import common
 from . import writer
 
+ABC = ABCMeta(str('ABC'), (object,), {'__slots__': ()})
 
 class ArgumentInfo(object):
     """Class that encapsulates information about an argument to a method."""
@@ -125,11 +126,9 @@ class MethodInfo(object):
             "${method_name}(${args});", method_name=self.method_name, args=args)
 
 
-class StructTypeInfoBase(object):
+class StructTypeInfoBase(ABC):
     """Base class for struct and command code generation."""
 
-    __metaclass__ = ABCMeta
-
     @abstractmethod
     def get_constructor_method(self):
         # type: () -> MethodInfo
diff --git a/buildscripts/idl/idl/syntax.py b/buildscripts/idl/idl/syntax.py
index 056d2e9..ff9a395 100644
--- a/buildscripts/idl/idl/syntax.py
+++ b/buildscripts/idl/idl/syntax.py
@@ -82,7 +82,7 @@ def _item_and_type(dic):
     # type: (Dict[Any, List[Any]]) -> Iterator[Tuple[Any, Any]]
     """Return an Iterator of (key, value) pairs from a dictionary."""
     return itertools.chain.from_iterable((_zip_scalar(value, key)
-                                          for (key, value) in dic.viewitems()))
+                                          for (key, value) in dic.items()))
 
 
 class SymbolTable(object):
diff --git a/buildscripts/idl/tests/test_binder.py b/buildscripts/idl/tests/test_binder.py
index 5502b69..b0f4ba4 100644
--- a/buildscripts/idl/tests/test_binder.py
+++ b/buildscripts/idl/tests/test_binder.py
@@ -72,7 +72,7 @@ class TestBinder(testcase.IDLTestcase):
             cpp_includes:
                 - 'bar'
                 - 'foo'"""))
-        self.assertEquals(spec.globals.cpp_namespace, "something")
+        self.assertEqual(spec.globals.cpp_namespace, "something")
         self.assertListEqual(spec.globals.cpp_includes, ['bar', 'foo'])
 
     def test_type_positive(self):
diff --git a/buildscripts/lint.py b/buildscripts/lint.py
index d4061a9..b1ca5b6 100644
--- a/buildscripts/lint.py
+++ b/buildscripts/lint.py
@@ -2,8 +2,8 @@
 import sys
 import codecs
 
-import cpplint
-import utils
+from . import cpplint
+from . import utils
 
 class CheckForConfigH:
     def __init__(self):
diff --git a/buildscripts/linter/base.py b/buildscripts/linter/base.py
index ae78d52..7988876 100644
--- a/buildscripts/linter/base.py
+++ b/buildscripts/linter/base.py
@@ -5,12 +5,11 @@ from __future__ import print_function
 from abc import ABCMeta, abstractmethod
 from typing import Dict, List, Optional
 
+ABC = ABCMeta(str('ABC'), (object,), {'__slots__': ()})
 
-class LinterBase(object):
+class LinterBase(ABC):
     """Base Class for all linters."""
 
-    __metaclass__ = ABCMeta
-
     def __init__(self, cmd_name, required_version):
         # type: (str, str) -> None
         """
diff --git a/buildscripts/linter/git.py b/buildscripts/linter/git.py
index edde6d0..4680e2f 100644
--- a/buildscripts/linter/git.py
+++ b/buildscripts/linter/git.py
@@ -175,7 +175,7 @@ def get_files_to_check_from_patch(patches, filter_function):
 
     lines = []  # type: List[str]
     for patch in patches:
-        with open(patch, "rb") as infile:
+        with open(patch, "r") as infile:
             lines += infile.readlines()
 
     candidates = [check.match(line).group(1) for line in lines if check.match(line)]
diff --git a/buildscripts/linter/parallel.py b/buildscripts/linter/parallel.py
index 0648bfb..361da0c 100644
--- a/buildscripts/linter/parallel.py
+++ b/buildscripts/linter/parallel.py
@@ -2,7 +2,12 @@
 from __future__ import absolute_import
 from __future__ import print_function
 
-import Queue
+try:
+    import queue
+except ImportError:
+    # Python 2
+    import Queue as queue
+
 import threading
 import time
 from multiprocessing import cpu_count
@@ -17,7 +22,7 @@ def parallel_process(items, func):
     except NotImplementedError:
         cpus = 1
 
-    task_queue = Queue.Queue()  # type: Queue.Queue
+    task_queue = queue.Queue()  # type: queue.Queue
 
     # Use a list so that worker function will capture this variable
     pp_event = threading.Event()
@@ -30,7 +35,7 @@ def parallel_process(items, func):
         while not pp_event.is_set():
             try:
                 item = task_queue.get_nowait()
-            except Queue.Empty:
+            except queue.Empty:
                 # if the queue is empty, exit the worker thread
                 pp_event.set()
                 return
diff --git a/buildscripts/make_archive.py b/buildscripts/make_archive.py
index 2671fa5..91e2bf0 100755
--- a/buildscripts/make_archive.py
+++ b/buildscripts/make_archive.py
@@ -95,14 +95,14 @@ def make_tar_archive(opts):
         enclosing_file_directory = os.path.dirname(temp_file_location)
         if not os.path.exists(enclosing_file_directory):
             os.makedirs(enclosing_file_directory)
-        print "copying %s => %s" % (input_filename, temp_file_location)
+        print("copying %s => %s" % (input_filename, temp_file_location))
         if os.path.isdir(input_filename):
             shutil.copytree(input_filename, temp_file_location)
         else:
             shutil.copy2(input_filename, temp_file_location)
         tar_command.append(preferred_filename)
 
-    print " ".join(tar_command)
+    print(" ".join(tar_command))
     # execute the full tar command
     run_directory = os.path.join(os.getcwd(), enclosing_archive_directory)
     proc = Popen(tar_command, stdout=PIPE, stderr=STDOUT, bufsize=0, cwd=run_directory)
@@ -165,7 +165,7 @@ def parse_options(args):
         opts.transformations = [
             xform.replace(os.path.altsep or os.path.sep, os.path.sep).split('=', 1)
             for xform in opts.transformations]
-    except Exception, e:
+    except Exception as e:
         parser.error(e)
 
     return opts
diff --git a/buildscripts/make_vcxproj.py b/buildscripts/make_vcxproj.py
index e4c2d7a..693352f 100644
--- a/buildscripts/make_vcxproj.py
+++ b/buildscripts/make_vcxproj.py
@@ -241,12 +241,12 @@ class ProjFileGenerator(object):
 
 def main():
     if len(sys.argv) != 2:
-        print r"Usage: python buildscripts\make_vcxproj.py FILE_NAME"
+        print(r"Usage: python buildscripts\make_vcxproj.py FILE_NAME")
         return
 
     with ProjFileGenerator(sys.argv[1]) as projfile:
-        with open("compile_commands.json", "rb") as sjh:
-            contents = sjh.read().decode('utf-8')
+        with open("compile_commands.json", "r") as sjh:
+            contents = sjh.read()
             commands = json.loads(contents)
 
         for command in commands:
diff --git a/buildscripts/mongosymb.py b/buildscripts/mongosymb.py
index 4da0535..b0aba4d 100755
--- a/buildscripts/mongosymb.py
+++ b/buildscripts/mongosymb.py
@@ -36,7 +36,7 @@ def symbolize_frames(trace_doc, dbg_path_resolver, symbolizer_path=None, dsym_hi
         """Makes a map from binary load address to description of library from the somap, which is
         a list of dictionaries describing individual loaded libraries.
         """
-        return { so_entry["b"] : so_entry for so_entry in somap_list if so_entry.has_key("b") }
+        return { so_entry["b"] : so_entry for so_entry in somap_list if "b" in so_entry }
 
     base_addr_map = make_base_addr_map(trace_doc["processInfo"]["somap"])
 
@@ -50,7 +50,7 @@ def symbolize_frames(trace_doc, dbg_path_resolver, symbolizer_path=None, dsym_hi
             addr_base = frame["b"]
         else:
             addr_base = soinfo.get("vmaddr", "0")
-        addr = long(addr_base, 16) + long(frame["o"], 16)
+        addr = int(addr_base, 16) + int(frame["o"], 16)
         # addr currently points to the return address which is the one *after* the call. x86 is
         # variable length so going backwards is difficult. However llvm-symbolizer seems to do the
         # right thing if we just subtract 1 byte here. This has the downside of also adjusting the
diff --git a/buildscripts/msitrim.py b/buildscripts/msitrim.py
index 45ca8d4..b5781ed 100644
--- a/buildscripts/msitrim.py
+++ b/buildscripts/msitrim.py
@@ -29,7 +29,7 @@ def exec_update(query, column, value):
     view.Close()
 
 
-print "Trimming MSI"
+print("Trimming MSI")
 
 db = msilib.OpenDatabase(args.file.name, msilib.MSIDBOPEN_DIRECT)
 
diff --git a/buildscripts/packager-enterprise.py b/buildscripts/packager-enterprise.py
index 8629be1..202a676 100755
--- a/buildscripts/packager-enterprise.py
+++ b/buildscripts/packager-enterprise.py
@@ -30,7 +30,7 @@ import argparse
 import errno
 import getopt
 from glob import glob
-import packager
+from . import packager
 import os
 import re
 import shutil
@@ -39,7 +39,7 @@ import subprocess
 import sys
 import tempfile
 import time
-import urlparse
+import urllib.parse
 
 # The MongoDB names for the architectures we support.
 ARCH_CHOICES=["x86_64", "ppc64le", "s390x", "arm64"]
@@ -153,7 +153,7 @@ def main(argv):
     if prefix is None:
       prefix=tempfile.mkdtemp()
 
-    print "Working in directory %s" % prefix
+    print("Working in directory %s" % prefix)
 
     os.chdir(prefix)
     try:
@@ -210,7 +210,7 @@ def unpack_binaries_into(build_os, arch, spec, where):
             os.rename("%s/%s" % (release_dir, releasefile), releasefile)
         os.rmdir(release_dir)
     except Exception:
-        exc=sys.exc_value
+        exc=sys.exc_info()[1]
         os.chdir(rootdir)
         raise exc
     os.chdir(rootdir)
@@ -226,7 +226,7 @@ def make_package(distro, build_os, arch, spec, srcdir):
     # directory, so the debian directory is needed in all cases (and
     # innocuous in the debianoids' sdirs).
     for pkgdir in ["debian", "rpm"]:
-        print "Copying packaging files from %s to %s" % ("%s/%s" % (srcdir, pkgdir), sdir)
+        print("Copying packaging files from %s to %s" % ("%s/%s" % (srcdir, pkgdir), sdir))
         # FIXME: sh-dash-cee is bad. See if tarfile can do this.
         packager.sysassert(["sh", "-c", "(cd \"%s\" && git archive %s %s/ ) | (cd \"%s\" && tar xvf -)" % (srcdir, spec.metadata_gitspec(), pkgdir, sdir)])
     # Splat the binaries and snmp files under sdir.  The "build" stages of the
@@ -304,7 +304,7 @@ def move_repos_into_place(src, dst):
             os.mkdir(dname)
             break
         except OSError:
-            exc=sys.exc_value
+            exc=sys.exc_info()[1]
             if exc.errno == errno.EEXIST:
                 pass
             else:
@@ -324,7 +324,7 @@ def move_repos_into_place(src, dst):
             os.symlink(dname, tmpnam)
             break
         except OSError: # as exc: # Python >2.5
-            exc=sys.exc_value
+            exc=sys.exc_info()[1]
             if exc.errno == errno.EEXIST:
                 pass
             else:
@@ -342,7 +342,7 @@ def move_repos_into_place(src, dst):
                os.symlink(os.readlink(dst), oldnam)
                break
            except OSError: # as exc: # Python >2.5
-               exc=sys.exc_value
+               exc=sys.exc_info()[1]
                if exc.errno == errno.EEXIST:
                    pass
                else:
diff --git a/buildscripts/packager.py b/buildscripts/packager.py
index c821da9..19d34b3 100755
--- a/buildscripts/packager.py
+++ b/buildscripts/packager.py
@@ -343,7 +343,7 @@ def main(argv):
     prefix = args.prefix
     if prefix is None:
       prefix = tempfile.mkdtemp()
-    print "Working in directory %s" % prefix
+    print("Working in directory %s" % prefix)
 
     os.chdir(prefix)
     try:
@@ -382,14 +382,14 @@ def crossproduct(*seqs):
 
 def sysassert(argv):
     """Run argv and assert that it exited with status 0."""
-    print "In %s, running %s" % (os.getcwd(), " ".join(argv))
+    print("In %s, running %s" % (os.getcwd(), " ".join(argv)))
     sys.stdout.flush()
     sys.stderr.flush()
     assert(subprocess.Popen(argv).wait()==0)
 
 def backtick(argv):
     """Run argv and return its output string."""
-    print "In %s, running %s" % (os.getcwd(), " ".join(argv))
+    print("In %s, running %s" % (os.getcwd(), " ".join(argv)))
     sys.stdout.flush()
     sys.stderr.flush()
     return subprocess.Popen(argv, stdout=subprocess.PIPE).communicate()[0]
@@ -421,11 +421,11 @@ def unpack_binaries_into(build_os, arch, spec, where):
         sysassert(["tar", "xvzf", rootdir+"/"+tarfile(build_os, arch, spec)])
         release_dir = glob('mongodb-linux-*')[0]
         for releasefile in "bin", "GNU-AGPL-3.0", "README", "THIRD-PARTY-NOTICES", "MPL-2":
-            print "moving file: %s/%s" % (release_dir, releasefile)
+            print("moving file: %s/%s" % (release_dir, releasefile))
             os.rename("%s/%s" % (release_dir, releasefile), releasefile)
         os.rmdir(release_dir)
     except Exception:
-        exc=sys.exc_value
+        exc=sys.exc_info()[1]
         os.chdir(rootdir)
         raise exc
     os.chdir(rootdir)
@@ -441,7 +441,7 @@ def make_package(distro, build_os, arch, spec, srcdir):
     # directory, so the debian directory is needed in all cases (and
     # innocuous in the debianoids' sdirs).
     for pkgdir in ["debian", "rpm"]:
-        print "Copying packaging files from %s to %s" % ("%s/%s" % (srcdir, pkgdir), sdir)
+        print("Copying packaging files from %s to %s" % ("%s/%s" % (srcdir, pkgdir), sdir))
         # FIXME: sh-dash-cee is bad. See if tarfile can do this.
         sysassert(["sh", "-c", "(cd \"%s\" && git archive %s %s/ ) | (cd \"%s\" && tar xvf -)" % (srcdir, spec.metadata_gitspec(), pkgdir, sdir)])
     # Splat the binaries under sdir.  The "build" stages of the
@@ -574,7 +574,7 @@ def move_repos_into_place(src, dst):
             os.mkdir(dname)
             break
         except OSError:
-            exc=sys.exc_value
+            exc=sys.exc_info()[1]
             if exc.errno == errno.EEXIST:
                 pass
             else:
@@ -594,7 +594,7 @@ def move_repos_into_place(src, dst):
             os.symlink(dname, tmpnam)
             break
         except OSError: # as exc: # Python >2.5
-            exc=sys.exc_value
+            exc=sys.exc_info()[1]
             if exc.errno == errno.EEXIST:
                 pass
             else:
@@ -612,7 +612,7 @@ def move_repos_into_place(src, dst):
                os.symlink(os.readlink(dst), oldnam)
                break
            except OSError: # as exc: # Python >2.5
-               exc=sys.exc_value
+               exc=sys.exc_info()[1]
                if exc.errno == errno.EEXIST:
                    pass
                else:
@@ -765,7 +765,7 @@ def ensure_dir(filename):
     try:
         os.makedirs(dirpart)
     except OSError: # as exc: # Python >2.5
-        exc=sys.exc_value
+        exc=sys.exc_info()[1]
         if exc.errno == errno.EEXIST:
             pass
         else:
diff --git a/buildscripts/resmokeconfig/loggers/__init__.py b/buildscripts/resmokeconfig/loggers/__init__.py
index 6511d49..454f675 100644
--- a/buildscripts/resmokeconfig/loggers/__init__.py
+++ b/buildscripts/resmokeconfig/loggers/__init__.py
@@ -21,7 +21,7 @@ def _get_named_loggers():
     named_loggers = {}
 
     try:
-        (root, _dirs, files) = os.walk(dirname).next()
+        (root, _dirs, files) = next(os.walk(dirname))
         for filename in files:
             (short_name, ext) = os.path.splitext(filename)
             if ext in (".yml", ".yaml"):
diff --git a/buildscripts/resmokeconfig/suites/__init__.py b/buildscripts/resmokeconfig/suites/__init__.py
index e075dd2..2ca2187 100644
--- a/buildscripts/resmokeconfig/suites/__init__.py
+++ b/buildscripts/resmokeconfig/suites/__init__.py
@@ -21,7 +21,7 @@ def _get_named_suites():
     named_suites = {}
 
     try:
-        (root, _dirs, files) = os.walk(dirname).next()
+        (root, _dirs, files) = next(os.walk(dirname))
         for filename in files:
             (short_name, ext) = os.path.splitext(filename)
             if ext in (".yml", ".yaml"):
diff --git a/buildscripts/resmokelib/config.py b/buildscripts/resmokelib/config.py
index 1dcd7d7..8a15651 100644
--- a/buildscripts/resmokelib/config.py
+++ b/buildscripts/resmokelib/config.py
@@ -58,7 +58,7 @@ DEFAULTS = {
     "repeat": 1,
     "reportFailureStatus": "fail",
     "reportFile": None,
-    "seed": long(time.time() * 256),  # Taken from random.py code in Python 2.7.
+    "seed": int(time.time() * 256),  # Taken from random.py code in Python 2.7.
     "serviceExecutor": None,
     "shellReadMode": None,
     "shellWriteMode": None,
diff --git a/buildscripts/resmokelib/core/process.py b/buildscripts/resmokelib/core/process.py
index 03fb849..e70f90a 100644
--- a/buildscripts/resmokelib/core/process.py
+++ b/buildscripts/resmokelib/core/process.py
@@ -196,8 +196,8 @@ class Process(object):
                 finally:
                     win32api.CloseHandle(mongo_signal_handle)
 
-                print "Failed to cleanly exit the program, calling TerminateProcess() on PID: " +\
-                    str(self._process.pid)
+                print("Failed to cleanly exit the program, calling TerminateProcess() on PID: " +\
+                    str(self._process.pid))
 
             # Adapted from implementation of Popen.terminate() in subprocess.py of Python 2.7
             # because earlier versions do not catch exceptions.
diff --git a/buildscripts/resmokelib/logging/buildlogger.py b/buildscripts/resmokelib/logging/buildlogger.py
index a577d64..d5405bd 100644
--- a/buildscripts/resmokelib/logging/buildlogger.py
+++ b/buildscripts/resmokelib/logging/buildlogger.py
@@ -205,7 +205,7 @@ class BuildloggerServer(object):
     def __init__(self):
         tmp_globals = {}
         self.config = {}
-        execfile(_BUILDLOGGER_CONFIG, tmp_globals, self.config)
+        exec(compile(open(_BUILDLOGGER_CONFIG).read(), _BUILDLOGGER_CONFIG, 'exec'), tmp_globals, self.config)
 
         # Rename "slavename" to "username" if present.
         if "slavename" in self.config and "username" not in self.config:
diff --git a/buildscripts/resmokelib/selector.py b/buildscripts/resmokelib/selector.py
index 3fff181..4395c3b 100644
--- a/buildscripts/resmokelib/selector.py
+++ b/buildscripts/resmokelib/selector.py
@@ -66,7 +66,7 @@ class TestFileExplorer(object):
             A list of paths as a list(str).
         """
         tests = []
-        with open(root_file_path, "rb") as filep:
+        with open(root_file_path, "r") as filep:
             for test_path in filep:
                 test_path = test_path.strip()
                 tests.append(test_path)
@@ -294,7 +294,7 @@ def make_expression(conf):
     elif isinstance(conf, dict):
         if len(conf) != 1:
             raise ValueError("Tag matching expressions should only contain one key")
-        key = conf.keys()[0]
+        key = next(iter(conf.keys()))
         value = conf[key]
         if key == "$allOf":
             return _AllOfExpression(_make_expression_list(value))
diff --git a/buildscripts/resmokelib/testing/executor.py b/buildscripts/resmokelib/testing/executor.py
index cc66556..db8d385 100644
--- a/buildscripts/resmokelib/testing/executor.py
+++ b/buildscripts/resmokelib/testing/executor.py
@@ -64,7 +64,7 @@ class TestSuiteExecutor(object):
             jobs_to_start = num_tests
 
         # Must be done after getting buildlogger configuration.
-        self._jobs = [self._make_job(job_num) for job_num in xrange(jobs_to_start)]
+        self._jobs = [self._make_job(job_num) for job_num in range(jobs_to_start)]
 
     def run(self):
         """
@@ -290,7 +290,7 @@ class TestSuiteExecutor(object):
             queue.put(test_case)
 
         # Add sentinel value for each job to indicate when there are no more items to process.
-        for _ in xrange(len(self._jobs)):
+        for _ in range(len(self._jobs)):
             queue.put(None)
 
         return queue
diff --git a/buildscripts/resmokelib/testing/fixtures/interface.py b/buildscripts/resmokelib/testing/fixtures/interface.py
index 6dffa24..dfeeb13 100644
--- a/buildscripts/resmokelib/testing/fixtures/interface.py
+++ b/buildscripts/resmokelib/testing/fixtures/interface.py
@@ -4,6 +4,7 @@ Interface of the different fixtures for executing JSTests against.
 
 from __future__ import absolute_import
 
+import six
 import time
 
 import pymongo
@@ -27,13 +28,11 @@ def make_fixture(class_name, *args, **kwargs):
     return _FIXTURES[class_name](*args, **kwargs)
 
 
-class Fixture(object):
+class Fixture(six.with_metaclass(registry.make_registry_metaclass(_FIXTURES), object)):
     """
     Base class for all fixtures.
     """
 
-    __metaclass__ = registry.make_registry_metaclass(_FIXTURES)
-
     # We explicitly set the 'REGISTERED_NAME' attribute so that PyLint realizes that the attribute
     # is defined for all subclasses of Fixture.
     REGISTERED_NAME = "Fixture"
diff --git a/buildscripts/resmokelib/testing/fixtures/replicaset.py b/buildscripts/resmokelib/testing/fixtures/replicaset.py
index 025ce25..678ef45 100644
--- a/buildscripts/resmokelib/testing/fixtures/replicaset.py
+++ b/buildscripts/resmokelib/testing/fixtures/replicaset.py
@@ -75,7 +75,7 @@ class ReplicaSetFixture(interface.ReplFixture):
         self.replset_name = self.mongod_options.get("replSet", "rs")
 
         if not self.nodes:
-            for i in xrange(self.num_nodes):
+            for i in range(self.num_nodes):
                 node = self._new_mongod(i, self.replset_name)
                 self.nodes.append(node)
 
diff --git a/buildscripts/resmokelib/testing/fixtures/shardedcluster.py b/buildscripts/resmokelib/testing/fixtures/shardedcluster.py
index 4f90d16..eb96b8b 100644
--- a/buildscripts/resmokelib/testing/fixtures/shardedcluster.py
+++ b/buildscripts/resmokelib/testing/fixtures/shardedcluster.py
@@ -85,7 +85,7 @@ class ShardedClusterFixture(interface.Fixture):
             self.configsvr.setup()
 
         if not self.shards:
-            for i in xrange(self.num_shards):
+            for i in range(self.num_shards):
                 if self.num_rs_nodes_per_shard is None:
                     shard = self._new_standalone_shard(i)
                 elif isinstance(self.num_rs_nodes_per_shard, int):
diff --git a/buildscripts/resmokelib/testing/hooks/interface.py b/buildscripts/resmokelib/testing/hooks/interface.py
index 6ca4ae7..a225a1b 100644
--- a/buildscripts/resmokelib/testing/hooks/interface.py
+++ b/buildscripts/resmokelib/testing/hooks/interface.py
@@ -7,6 +7,7 @@ from __future__ import absolute_import
 from ... import logging
 from ...utils import registry
 
+import six
 
 _HOOKS = {}
 
@@ -22,13 +23,11 @@ def make_custom_behavior(class_name, *args, **kwargs):
     return _HOOKS[class_name](*args, **kwargs)
 
 
-class CustomBehavior(object):
+class CustomBehavior(six.with_metaclass(registry.make_registry_metaclass(_HOOKS), object)):
     """
     The common interface all CustomBehaviors will inherit from.
     """
 
-    __metaclass__ = registry.make_registry_metaclass(_HOOKS)
-
     REGISTERED_NAME = registry.LEAVE_UNREGISTERED
 
     @staticmethod
diff --git a/buildscripts/resmokelib/testing/suite.py b/buildscripts/resmokelib/testing/suite.py
index 132a2d7..07262d1 100644
--- a/buildscripts/resmokelib/testing/suite.py
+++ b/buildscripts/resmokelib/testing/suite.py
@@ -262,7 +262,7 @@ class Suite(object):
         sb.append("Executed %d times in %0.2f seconds:" % (num_iterations, total_time_taken))
 
         combined_summary = _summary.Summary(0, 0.0, 0, 0, 0, 0)
-        for iteration in xrange(num_iterations):
+        for iteration in range(num_iterations):
             # Summarize each execution as a bulleted list of results.
             bulleter_sb = []
             summary = self._summarize_report(
diff --git a/buildscripts/resmokelib/testing/summary.py b/buildscripts/resmokelib/testing/summary.py
index bb44472..54da218 100644
--- a/buildscripts/resmokelib/testing/summary.py
+++ b/buildscripts/resmokelib/testing/summary.py
@@ -17,6 +17,6 @@ def combine(summary1, summary2):
     Returns a summary representing the sum of 'summary1' and 'summary2'.
     """
     args = []
-    for i in xrange(len(Summary._fields)):
+    for i in range(len(Summary._fields)):
         args.append(summary1[i] + summary2[i])
     return Summary._make(args)
diff --git a/buildscripts/resmokelib/testing/testcases/interface.py b/buildscripts/resmokelib/testing/testcases/interface.py
index be7f14a..f736bd5 100644
--- a/buildscripts/resmokelib/testing/testcases/interface.py
+++ b/buildscripts/resmokelib/testing/testcases/interface.py
@@ -7,6 +7,7 @@ from __future__ import absolute_import
 
 import os
 import os.path
+import six
 import unittest
 
 from ... import config
@@ -27,13 +28,11 @@ def make_test_case(test_kind, *args, **kwargs):
     return _TEST_CASES[test_kind](*args, **kwargs)
 
 
-class TestCase(unittest.TestCase):
+class TestCase(six.with_metaclass(registry.make_registry_metaclass(_TEST_CASES), unittest.TestCase)):
     """
     A test case to execute.
     """
 
-    __metaclass__ = registry.make_registry_metaclass(_TEST_CASES)
-
     REGISTERED_NAME = registry.LEAVE_UNREGISTERED
 
     def __init__(self, logger, test_kind, test_name):
@@ -46,10 +45,10 @@ class TestCase(unittest.TestCase):
         if not isinstance(logger, logging.Logger):
             raise TypeError("logger must be a Logger instance")
 
-        if not isinstance(test_kind, basestring):
+        if not isinstance(test_kind, str):
             raise TypeError("test_kind must be a string")
 
-        if not isinstance(test_name, basestring):
+        if not isinstance(test_name, str):
             raise TypeError("test_name must be a string")
 
         # When the TestCase is created by the TestSuiteExecutor (through a call to make_test_case())
diff --git a/buildscripts/resmokelib/testing/testcases/jstest.py b/buildscripts/resmokelib/testing/testcases/jstest.py
index adb2828..528a974 100644
--- a/buildscripts/resmokelib/testing/testcases/jstest.py
+++ b/buildscripts/resmokelib/testing/testcases/jstest.py
@@ -235,7 +235,7 @@ class JSTestCase(interface.TestCase):
         test_cases = []
         try:
             # If there are multiple clients, make a new thread for each client.
-            for thread_id in xrange(self.num_clients):
+            for thread_id in range(self.num_clients):
                 logger = self.logger.new_test_thread_logger(self.test_kind, str(thread_id))
                 test_case = self._create_test_case_for_thread(logger, thread_id)
                 test_cases.append(test_case)
diff --git a/buildscripts/resmokelib/utils/__init__.py b/buildscripts/resmokelib/utils/__init__.py
index fa782f3..3ce73c9 100644
--- a/buildscripts/resmokelib/utils/__init__.py
+++ b/buildscripts/resmokelib/utils/__init__.py
@@ -43,14 +43,14 @@ def is_string_list(lst):
     """
     Returns true if 'lst' is a list of strings, and false otherwise.
     """
-    return isinstance(lst, list) and all(isinstance(x, basestring) for x in lst)
+    return isinstance(lst, list) and all(isinstance(x, str) for x in lst)
 
 
 def is_string_set(value):
     """
     Returns true if 'value' is a set of strings, and false otherwise.
     """
-    return isinstance(value, set) and all(isinstance(x, basestring) for x in value)
+    return isinstance(value, set) and all(isinstance(x, str) for x in value)
 
 
 def is_js_file(filename):
diff --git a/buildscripts/resmokelib/utils/globstar.py b/buildscripts/resmokelib/utils/globstar.py
index 644ebfe..52100d7 100644
--- a/buildscripts/resmokelib/utils/globstar.py
+++ b/buildscripts/resmokelib/utils/globstar.py
@@ -145,7 +145,7 @@ def _list_dir(pathname):
     """
 
     try:
-        (_root, dirs, files) = os.walk(pathname).next()
+        (_root, dirs, files) = next(os.walk(pathname))
         return (dirs, files)
     except StopIteration:
         return None  # 'pathname' directory does not exist
diff --git a/buildscripts/resmokelib/utils/jscomment.py b/buildscripts/resmokelib/utils/jscomment.py
index 18da788..a393c43 100644
--- a/buildscripts/resmokelib/utils/jscomment.py
+++ b/buildscripts/resmokelib/utils/jscomment.py
@@ -39,7 +39,7 @@ def get_tags(pathname):
                 # TODO: it might be worth supporting the block (indented) style of YAML lists in
                 #       addition to the flow (bracketed) style
                 tags = yaml.safe_load(_strip_jscomments(match.group(1)))
-                if not isinstance(tags, list) and all(isinstance(tag, basestring) for tag in tags):
+                if not isinstance(tags, list) or not all(isinstance(tag, str) for tag in tags):
                     raise TypeError("Expected a list of string tags, but got '%s'" % (tags))
                 return tags
             except yaml.YAMLError as err:
diff --git a/buildscripts/resmokelib/utils/queue.py b/buildscripts/resmokelib/utils/queue.py
index 80da5e2..41d23d5 100644
--- a/buildscripts/resmokelib/utils/queue.py
+++ b/buildscripts/resmokelib/utils/queue.py
@@ -9,15 +9,20 @@ See https://bugs.python.org/issue1167930 for more details.
 
 from __future__ import absolute_import
 
-import Queue
+try:
+    import queue
+except ImportError:
+    # Python 2 fallback
+    import Queue as queue
+
 import time
 
 
 # Exception that is raised when get_nowait() is called on an empty Queue.
-Empty = Queue.Empty
+Empty = queue.Empty
 
 
-class Queue(Queue.Queue):
+class Queue(queue.Queue):
     """
     A multi-producer, multi-consumer queue.
     """
diff --git a/buildscripts/setup_multiversion_mongodb.py b/buildscripts/setup_multiversion_mongodb.py
index f3cb804..86bea8f 100755
--- a/buildscripts/setup_multiversion_mongodb.py
+++ b/buildscripts/setup_multiversion_mongodb.py
@@ -189,7 +189,7 @@ class MultiVersionDownloader(object):
 
         urls = []
         requested_version_parts = get_version_parts(version)
-        for link_version, link_url in self.links.iteritems():
+        for link_version, link_url in self.links.items():
             link_version_parts = get_version_parts(link_version)
             if link_version_parts[:len(requested_version_parts)] == requested_version_parts:
                 # The 'link_version' is a candidate for the requested 'version' if
@@ -220,11 +220,11 @@ class MultiVersionDownloader(object):
             else:
                 print("Falling back to generic architecture.")
 
-        urls.sort(key=lambda (version, _): get_version_parts(version, for_sorting=True))
+        urls.sort(key=lambda version__: get_version_parts(version__[0], for_sorting=True))
         full_version = urls[-1][0]
         url = urls[-1][1]
         extract_dir = url.split("/")[-1][:-4]
-        file_suffix = os.path.splitext(urlparse.urlparse(url).path)[1]
+        file_suffix = os.path.splitext(urllib.parse.urlparse(url).path)[1]
 
         # Only download if we don't already have the directory.
         # Note, we cannot detect if 'latest' has already been downloaded, as the name
diff --git a/buildscripts/smoke.py b/buildscripts/smoke.py
index 4fdb01d..eda604e 100755
--- a/buildscripts/smoke.py
+++ b/buildscripts/smoke.py
@@ -34,7 +34,7 @@
 #   jobs on the same host at once.  So something's gotta change.
 
 from datetime import datetime
-from itertools import izip
+
 import glob
 import logging
 from optparse import OptionParser
@@ -55,11 +55,11 @@ from pymongo import MongoClient
 from pymongo.errors import OperationFailure
 from pymongo import ReadPreference
 
-import cleanbb
-import utils
+import cleanbb
+import utils
 
 try:
-    import cPickle as pickle
+    import pickle
 except ImportError:
     import pickle
 
@@ -133,17 +133,17 @@ class NullMongod(object):
 
 
 def dump_stacks(signal, frame):
-    print "======================================"
-    print "DUMPING STACKS due to SIGUSR1 signal"
-    print "======================================"
+    print("======================================")
+    print("DUMPING STACKS due to SIGUSR1 signal")
+    print("======================================")
     threads = threading.enumerate();
 
-    print "Total Threads: " + str(len(threads))
+    print("Total Threads: " + str(len(threads)))
 
     for id, stack in sys._current_frames().items():
-        print "Thread %d" % (id)
-        print "".join(traceback.format_stack(stack))
-    print "======================================"
+        print("Thread %d" % (id))
+        print("".join(traceback.format_stack(stack)))
+    print("======================================")
 
 
 def buildlogger(cmd, is_global=False):
@@ -196,8 +196,8 @@ class mongod(NullMongod):
         try:
             self.check_mongo_port(int(port))
             return True
-        except Exception,e:
-            print >> sys.stderr, e
+        except Exception as e:
+            print(e, file=sys.stderr)
             return False
         
     def did_mongod_start(self, port=mongod_port, timeout=300):
@@ -207,14 +207,14 @@ class mongod(NullMongod):
             if is_up:
                 return True
             timeout = timeout - 1
-        print >> sys.stderr, "timeout starting mongod"
+        print("timeout starting mongod", file=sys.stderr)
         return False
 
     def start(self):
         global mongod_port
         global mongod
         if self.proc:
-            print >> sys.stderr, "probable bug: self.proc already set in start()"
+            print("probable bug: self.proc already set in start()", file=sys.stderr)
             return
         self.ensure_test_dirs()
         dir_name = smoke_db_prefix + "/data/db/sconsTests/"
@@ -270,7 +270,7 @@ class mongod(NullMongod):
                      '--sslAllowConnectionsWithoutCertificates']
         if self.kwargs.get('rlp_path'):
             argv += ['--basisTechRootDirectory', self.kwargs.get('rlp_path')]
-        print "running " + " ".join(argv)
+        print("running " + " ".join(argv))
         self.proc = self._start(buildlogger(argv, is_global=True))
 
         # If the mongod process is spawned under buildlogger.py, then the first line of output
@@ -352,7 +352,7 @@ class mongod(NullMongod):
 
     def stop(self):
         if not self.proc:
-            print >> sys.stderr, "probable bug: self.proc unset in stop()"
+            print("probable bug: self.proc unset in stop()", file=sys.stderr)
             return
         try:
             if os.sys.platform == "win32" and self.job_object is not None:
@@ -407,9 +407,9 @@ class mongod(NullMongod):
                 self.proc.terminate()
             else:
                 os.kill(self.proc.pid, 15)
-        except Exception, e:
-            print >> sys.stderr, "error shutting down mongod"
-            print >> sys.stderr, e
+        except Exception as e:
+            print("error shutting down mongod", file=sys.stderr)
+            print(e, file=sys.stderr)
         self.proc.wait()
 
         if self._stdout_pipe is not None:
@@ -424,12 +424,12 @@ class mongod(NullMongod):
         # anyway.
         retcode = self.proc.returncode
         if os.sys.platform != "win32" and retcode != 0:
-            raise(Exception('mongod process exited with non-zero code %d' % retcode))
+            raise Exception('mongod process exited with non-zero code %d' % retcode)
 
     def wait_for_repl(self):
-        print "Awaiting replicated (w:2, wtimeout:5min) insert (port:" + str(self.port) + ")"
+        print("Awaiting replicated (w:2, wtimeout:5min) insert (port:" + str(self.port) + ")")
         MongoClient(port=self.port).testing.smokeWait.insert({}, w=2, wtimeout=5*60*1000)
-        print "Replicated write completed -- done wait_for_repl"
+        print("Replicated write completed -- done wait_for_repl")
 
 class Bug(Exception):
     def __str__(self):
@@ -457,7 +457,7 @@ class TestServerFailure(TestFailure):
 def check_db_hashes(master, slave):
     # Need to pause a bit so a slave might catch up...
     if not slave.slave:
-        raise(Bug("slave instance doesn't have slave attribute set"))
+        raise Bug("slave instance doesn't have slave attribute set")
 
     master.wait_for_repl()
 
@@ -469,7 +469,7 @@ def check_db_hashes(master, slave):
 
     global lost_in_slave, lost_in_master, screwy_in_slave, replicated_collections
 
-    replicated_collections += master.dict.keys()
+    replicated_collections += list(master.dict.keys())
 
     for coll in replicated_collections:
         if coll not in slave.dict and coll not in lost_in_slave:
@@ -489,13 +489,13 @@ def check_db_hashes(master, slave):
                 sDocs = list(sTestDB[coll].find().sort("_id", 1))
                 mDiffDocs = list()
                 sDiffDocs = list()
-                for left, right in izip(mDocs, sDocs):
+                for left, right in zip(mDocs, sDocs):
                     if left != right:
                         mDiffDocs.append(left)
                         sDiffDocs.append(right)
 
                 stats["docs"] = {'master': mDiffDocs, 'slave': sDiffDocs }
-            except Exception, e:
+            except Exception as e:
                 stats["error-docs"] = e;
 
             screwy_in_slave[coll] = stats
@@ -506,7 +506,7 @@ def check_db_hashes(master, slave):
                 mOplog = mTestDB.connection.local[oplog];
                 oplog_entries = list(mOplog.find({"$or": [{"ns":mTestDB[coll].full_name}, \
                                                           {"op":"c"}]}).sort("$natural", 1))
-                print "oplog for %s" % mTestDB[coll].full_name
+                print("oplog for %s" % mTestDB[coll].full_name)
                 for doc in oplog_entries:
                     pprint.pprint(doc, width=200)
 
@@ -745,7 +745,7 @@ def runTest(test, result):
 
     is_mongod_still_up = test_mongod.is_mongod_up(mongod_port)
     if start_mongod and not is_mongod_still_up:
-        print "mongod is not running after test"
+        print("mongod is not running after test")
         result["mongod_running_at_end"] = is_mongod_still_up;
         raise TestServerFailure(path)
 
@@ -754,7 +754,7 @@ def runTest(test, result):
     if r != 0:
         raise TestExitFailure(path, r)
 
-    print ""
+    print("")
 
 def run_tests(tests):
     # FIXME: some suites of tests start their own mongod, so don't
@@ -825,7 +825,7 @@ def run_tests(tests):
                 result = primary.admin.command("ismaster");
                 ismaster = result["ismaster"]
                 if not ismaster:
-                    print "waiting for primary to be available ..."
+                    print("waiting for primary to be available ...")
                     time.sleep(.2)
             
             secondaryUp = False
@@ -835,7 +835,7 @@ def run_tests(tests):
                 result = sConn.admin.command("ismaster");
                 secondaryUp = result["secondary"]
                 if not secondaryUp:
-                    print "waiting for secondary to be available ..."
+                    print("waiting for secondary to be available ...")
                     time.sleep(.2)
 
         if small_oplog or small_oplog_rs:
@@ -857,7 +857,7 @@ def run_tests(tests):
                 if skipTest(test_path):
                     test_result["status"] = "skip"
 
-                    print "skipping " + test_path
+                    print("skipping " + test_path)
                 else:
                     fails.append(test)
                     runTest(test, test_result)
@@ -897,20 +897,20 @@ def run_tests(tests):
                                         use_ssl=use_ssl)
                         master.start()
 
-            except TestFailure, f:
+            except TestFailure as f:
                 test_result["end"] = time.time()
                 test_result["elapsed"] = test_result["end"] - test_result["start"]
                 test_result["error"] = str(f)
                 test_result["status"] = "fail"
                 test_report["results"].append( test_result )
                 try:
-                    print f
+                    print(f)
                     # Record the failing test and re-raise.
                     losers[f.path] = f.status
                     raise f
-                except TestServerFailure, f:
+                except TestServerFailure as f:
                     return 2
-                except TestFailure, f:
+                except TestFailure as f:
                     if not continue_on_failure:
                         return 1
         if isinstance(slave, mongod):
@@ -925,51 +925,51 @@ def run_tests(tests):
 def check_and_report_replication_dbhashes():
     def missing(lst, src, dst):
         if lst:
-            print """The following collections were present in the %s but not the %s
-at the end of testing:""" % (src, dst)
+            print("""The following collections were present in the %s but not the %s
+at the end of testing:""" % (src, dst))
             for db in lst:
-                print db
+                print(db)
 
     missing(lost_in_slave, "master", "slave")
     missing(lost_in_master, "slave", "master")
     if screwy_in_slave:
-        print """The following collections have different hashes in the master and slave:"""
+        print("""The following collections have different hashes in the master and slave:""")
         for coll in screwy_in_slave.keys():
             stats = screwy_in_slave[coll]
             # Counts are "approx" because they are collected after the dbhash runs and may not
             # reflect the states of the collections that were hashed. If the hashes differ, one
             # possibility is that a test exited with writes still in-flight.
-            print "collection: %s\t (master/slave) hashes: %s/%s counts (approx): %i/%i" % (coll, stats['hashes']['master'], stats['hashes']['slave'], stats['counts']['master'], stats['counts']['slave'])
+            print("collection: %s\t (master/slave) hashes: %s/%s counts (approx): %i/%i" % (coll, stats['hashes']['master'], stats['hashes']['slave'], stats['counts']['master'], stats['counts']['slave']))
             if "docs" in stats:
                 if (("master" in stats["docs"] and len(stats["docs"]["master"]) == 0) and
                     ("slave" in stats["docs"] and len(stats["docs"]["slave"]) == 0)):
-                    print "All docs matched!"
+                    print("All docs matched!")
                 else:
-                    print "Different Docs"
-                    print "Master docs:"
+                    print("Different Docs")
+                    print("Master docs:")
                     pprint.pprint(stats["docs"]["master"], indent=2)
-                    print "Slave docs:"
+                    print("Slave docs:")
                     pprint.pprint(stats["docs"]["slave"], indent=2)
             if "error-docs" in stats:
-                print "Error getting docs to diff:"
+                print("Error getting docs to diff:")
                 pprint.pprint(stats["error-docs"])
         return True
 
     if (small_oplog or small_oplog_rs) and not (lost_in_master or lost_in_slave or screwy_in_slave):
-        print "replication ok for %d collections" % (len(replicated_collections))
+        print("replication ok for %d collections" % (len(replicated_collections)))
 
     return False
 
 
 def report():
-    print "%d tests succeeded" % len(winners)
+    print("%d tests succeeded" % len(winners))
     num_missed = len(tests) - (len(winners) + len(losers.keys()))
     if num_missed:
-        print "%d tests didn't get run" % num_missed
+        print("%d tests didn't get run" % num_missed)
     if losers:
-        print "The following tests failed (with exit code):"
+        print("The following tests failed (with exit code):")
         for loser in losers:
-            print "%s\t%d" % (loser, losers[loser])
+            print("%s\t%d" % (loser, losers[loser]))
 
     test_result = { "start": time.time() }
     if check_and_report_replication_dbhashes():
@@ -981,7 +981,7 @@ def report():
         test_report["results"].append( test_result )
 
     if report_file:
-        f = open( report_file, "wb" )
+        f = open( report_file, "w" )
         f.write( json.dumps( test_report ) )
         f.close()
 
@@ -1244,7 +1244,7 @@ def run_old_fails():
         return # This counts as passing so we will run all tests
 
     if ('version' not in state or state['version'] != file_version()):
-        print "warning: old version of failfile.smoke detected. skipping recent fails"
+        print("warning: old version of failfile.smoke detected. skipping recent fails")
         clear_failfile()
         return
 
@@ -1308,7 +1308,7 @@ def main():
     try:
         signal.signal(signal.SIGUSR1, dump_stacks)
     except AttributeError:
-        print "Cannot catch signals on Windows"
+        print("Cannot catch signals on Windows")
 
     parser = OptionParser(usage="usage: smoke.py [OPTIONS] ARGS*")
     parser.add_option('--mode', dest='mode', default='suite',
@@ -1450,19 +1450,19 @@ def main():
 
     if options.ignore_files != None :
         ignore_patt = re.compile( options.ignore_files )
-        print "Ignoring files with pattern: ", ignore_patt
+        print("Ignoring files with pattern: ", ignore_patt)
 
         def ignore_test( test ):
             if ignore_patt.search( test[0] ) != None:
-                print "Ignoring test ", test[0]
+                print("Ignoring test ", test[0])
                 return False
             else:
                 return True
 
-        tests = filter( ignore_test, tests )
+        tests = list(filter( ignore_test, tests ))
 
     if not tests:
-        print "warning: no tests specified"
+        print("warning: no tests specified")
         return
 
     if options.with_cleanbb:
@@ -1480,7 +1480,7 @@ def main():
         test_report["failures"] = len(losers.keys())
         test_report["mongod_running_at_end"] = mongod().is_mongod_up(mongod_port)
         if report_file:
-            f = open( report_file, "wb" )
+            f = open( report_file, "w" )
             f.write( json.dumps( test_report, indent=4, separators=(',', ': ')) )
             f.close()
 
diff --git a/buildscripts/test_failures.py b/buildscripts/test_failures.py
index 63f138a..a4db503 100755
--- a/buildscripts/test_failures.py
+++ b/buildscripts/test_failures.py
@@ -20,7 +20,7 @@ import time
 import warnings
 
 try:
-    from urlparse import urlparse
+    from urllib.parse import urlparse
 except ImportError:
     from urllib.parse import urlparse
 
@@ -31,7 +31,7 @@ import yaml
 LOGGER = logging.getLogger(__name__)
 
 if sys.version_info[0] == 2:
-    _STRING_TYPES = (basestring,)
+    _STRING_TYPES = (str,)
 else:
     _STRING_TYPES = (str,)
 
diff --git a/buildscripts/tests/resmokelib/test_selector.py b/buildscripts/tests/resmokelib/test_selector.py
index ae8b0a9..66eeb40 100644
--- a/buildscripts/tests/resmokelib/test_selector.py
+++ b/buildscripts/tests/resmokelib/test_selector.py
@@ -178,12 +178,12 @@ class TestTestList(unittest.TestCase):
 
     def test_roots_with_unmatching_glob(self):
         glob_roots = ["unknown/subdir1/*.js"]
-        with self.assertRaisesRegexp(ValueError, "Pattern does not match any files: unknown/subdir1/\*.js"):
+        with self.assertRaisesRegex(ValueError, r"Pattern does not match any files: unknown/subdir1/\*.js"):
             selector._TestList(self.test_file_explorer, glob_roots)
 
     def test_roots_unknown_file(self):
         roots = ["dir/subdir1/unknown"]
-        with self.assertRaisesRegexp(ValueError, "Unrecognized test file: dir/subdir1/unknown"):
+        with self.assertRaisesRegex(ValueError, "Unrecognized test file: dir/subdir1/unknown"):
             selector._TestList(self.test_file_explorer, roots, tests_are_files=True)
 
     def test_include_files(self):
@@ -208,7 +208,7 @@ class TestTestList(unittest.TestCase):
     def test_exclude_files_no_match(self):
         roots = ["dir/subdir1/*.js", "dir/subdir2/test21.*"]
         test_list = selector._TestList(self.test_file_explorer, roots)
-        with self.assertRaisesRegexp(ValueError, "Unrecognized test file: .*$"):
+        with self.assertRaisesRegex(ValueError, "Unrecognized test file: .*$"):
             test_list.exclude_files(["dir/subdir2/test26.js"])
 
     def test_exclude_files_glob(self):
@@ -420,7 +420,7 @@ class TestFilterTests(unittest.TestCase):
 
     def test_jstest_unknown_file(self):
         config = {"roots": ["dir/subdir1/*.js", "dir/subdir1/unknown"]}
-        with self.assertRaisesRegexp(ValueError, "Unrecognized test file: dir/subdir1/unknown"):
+        with self.assertRaisesRegex(ValueError, "Unrecognized test file: dir/subdir1/unknown"):
             selector.filter_tests("js_test", config, self.test_file_explorer)
 
     def test_json_schema_exclude_files(self):
diff --git a/buildscripts/tests/test_aws_ec2.py b/buildscripts/tests/test_aws_ec2.py
index 40f4333..2de7d1f 100755
--- a/buildscripts/tests/test_aws_ec2.py
+++ b/buildscripts/tests/test_aws_ec2.py
@@ -169,7 +169,7 @@ class AwsEc2ControlStatus(AwsEc2TestCase):
 
         code, ret = self.aws_ec2.control_instance(mode="status", image_id="bad_id")
         self.assertNotEqual(0, code, ret)
-        self.assertRegexpMatches(ret, "Invalid", ret)
+        self.assertRegex(ret, "Invalid", ret)
 
 
 class AwsEc2ControlStart(AwsEc2TestCase):
diff --git a/buildscripts/update_test_lifecycle.py b/buildscripts/update_test_lifecycle.py
index 52bf204..90ab279 100755
--- a/buildscripts/update_test_lifecycle.py
+++ b/buildscripts/update_test_lifecycle.py
@@ -39,7 +39,7 @@ from buildscripts.ciconfig import tags as ci_tags
 LOGGER = logging.getLogger(__name__)
 
 if sys.version_info[0] == 2:
-    _NUMBER_TYPES = (int, long, float)
+    _NUMBER_TYPES = (int, float)
 else:
     _NUMBER_TYPES = (int, float)
 
diff --git a/buildscripts/utils.py b/buildscripts/utils.py
index 69a7892..93c697a 100644
--- a/buildscripts/utils.py
+++ b/buildscripts/utils.py
@@ -122,7 +122,7 @@ def getprocesslist():
     raw = ""
     try:
         raw = execsys( "/bin/ps axww" )[0]
-    except Exception,e:
+    except Exception as e:
         print( "can't get processlist: " + str( e ) )
 
     r = re.compile( "[\r\n]+" )
@@ -145,7 +145,7 @@ def choosePathExist( choices , default=None):
     return default
 
 def filterExists(paths):
-    return filter(os.path.exists, paths)
+    return list(filter(os.path.exists, paths))
 
 def ensureDir( name ):
     d = os.path.dirname( name )
@@ -176,7 +176,7 @@ def didMongodStart( port=27017 , timeout=20 ):
         try:
             checkMongoPort( port )
             return True
-        except Exception,e:
+        except Exception as e:
             print( e )
             timeout = timeout - 1
     return False
@@ -213,7 +213,7 @@ def find_python(min_version=(2, 5)):
         pass
 
     version = re.compile(r'[Pp]ython ([\d\.]+)', re.MULTILINE)
-    binaries = ('python27', 'python2.7', 'python26', 'python2.6', 'python25', 'python2.5', 'python')
+    binaries = ('python3', 'python27', 'python2.7', 'python26', 'python2.6', 'python25', 'python2.5', 'python')
     for binary in binaries:
         try:
             out, err = subprocess.Popen([binary, '-V'], stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
@@ -251,7 +251,7 @@ def run_smoke_command(*args):
 # at the position they occurred
 def replace_with_repr(unicode_error):
     offender = unicode_error.object[unicode_error.start:unicode_error.end]
-    return (unicode(repr(offender).strip("'").strip('"')), unicode_error.end)
+    return (str(repr(offender).strip("'").strip('"')), unicode_error.end)
 
 codecs.register_error('repr', replace_with_repr)
 
diff --git a/site_scons/libdeps.py b/site_scons/libdeps.py
index 632ed29..1641c3b 100644
--- a/site_scons/libdeps.py
+++ b/site_scons/libdeps.py
@@ -122,7 +122,7 @@ def __get_libdeps(node):
                 marked.add(n.target_node)
                 tsorted.append(n.target_node)
 
-            except DependencyCycleError, e:
+            except DependencyCycleError as e:
                 if len(e.cycle_nodes) == 1 or e.cycle_nodes[0] != e.cycle_nodes[-1]:
                     e.cycle_nodes.insert(0, n.target_node)
                 raise
@@ -150,7 +150,7 @@ def __get_syslibdeps(node):
         for lib in __get_libdeps(node):
             for syslib in node.get_env().Flatten(lib.get_env().get(syslibdeps_env_var, [])):
                 if syslib:
-                    if type(syslib) in (str, unicode) and syslib.startswith(missing_syslibdep):
+                    if type(syslib) is str and syslib.startswith(missing_syslibdep):
                         print("Target '%s' depends on the availability of a "
                               "system provided library for '%s', "
                               "but no suitable library was found during configuration." %
@@ -209,7 +209,7 @@ def get_syslibdeps(source, target, env, for_signature):
         # they're believed to represent library short names, that should be prefixed with -l
         # or the compiler-specific equivalent.  I.e., 'm' becomes '-lm', but 'File("m.a") is passed
         # through whole cloth.
-        if type(d) in (str, unicode):
+        if type(d) is str:
             result.append('%s%s%s' % (lib_link_prefix, d, lib_link_suffix))
         else:
             result.append(d)
diff --git a/site_scons/mongo/__init__.py b/site_scons/mongo/__init__.py
index 510bd7b..f774780 100644
--- a/site_scons/mongo/__init__.py
+++ b/site_scons/mongo/__init__.py
@@ -5,4 +5,4 @@
 def print_build_failures():
     from SCons.Script import GetBuildFailures
     for bf in GetBuildFailures():
-        print "%s failed: %s" % (bf.node, bf.errstr)
+        print("%s failed: %s" % (bf.node, bf.errstr))
diff --git a/site_scons/mongo/generators.py b/site_scons/mongo/generators.py
index c07e86a..5958e69 100644
--- a/site_scons/mongo/generators.py
+++ b/site_scons/mongo/generators.py
@@ -1,6 +1,6 @@
 # -*- mode: python; -*-
 
-import md5
+import hashlib
 
 # Default and alternative generator definitions go here.
 
@@ -44,7 +44,7 @@ def default_variant_dir_generator(target, source, env, for_signature):
 
     # Hash the named options and their values, and take the first 8 characters of the hash as
     # the variant name
-    hasher = md5.md5()
+    hasher = hashlib.md5()
     for option in variant_options:
         hasher.update(option)
         hasher.update(str(env.GetOption(option)))
diff --git a/site_scons/site_tools/dagger/__init__.py b/site_scons/site_tools/dagger/__init__.py
index f05228c..f10b402 100644
--- a/site_scons/site_tools/dagger/__init__.py
+++ b/site_scons/site_tools/dagger/__init__.py
@@ -5,7 +5,7 @@ import logging
 
 import SCons
 
-import dagger
+from . import dagger
 
 def generate(env, **kwargs):
     """The entry point for our tool. However, the builder for
diff --git a/site_scons/site_tools/dagger/dagger.py b/site_scons/site_tools/dagger/dagger.py
index 1eeefe1..03e7603 100644
--- a/site_scons/site_tools/dagger/dagger.py
+++ b/site_scons/site_tools/dagger/dagger.py
@@ -40,8 +40,8 @@ import sys
 
 import SCons
 
-import graph
-import graph_consts
+from . import graph
+from . import graph_consts
 
 
 LIB_DB = [] # Stores every SCons library nodes
@@ -269,7 +269,7 @@ def write_obj_db(target, source, env):
     for obj in OBJ_DB:
         __generate_file_rels(obj, g)
 
-    for exe in EXE_DB.keys():
+    for exe in list(EXE_DB.keys()):
         __generate_exe_rels(exe, g)
 
     # target is given as a list of target SCons nodes - this builder is only responsible for
diff --git a/site_scons/site_tools/dagger/graph.py b/site_scons/site_tools/dagger/graph.py
index 5ebe6f4..379d524 100644
--- a/site_scons/site_tools/dagger/graph.py
+++ b/site_scons/site_tools/dagger/graph.py
@@ -4,11 +4,13 @@ import abc
 import json
 import copy
 
-import graph_consts
+from . import graph_consts
 
 if sys.version_info >= (3, 0):
     basestring = str
 
+ABC = abc.ABCMeta('ABC', (object,), {'__slots__': ()})
+
 class Graph(object):
     """Graph class for storing the build dependency graph. The graph stores the
     directed edges as a nested dict of { RelationshipType: {From_Node: Set of
@@ -141,7 +143,7 @@ class Graph(object):
             node_dict["id"] = id
             node_dict["node"] = {}
 
-            for property, value in vars(node).iteritems():
+            for property, value in vars(node).items():
                 if isinstance(value, set):
                     node_dict["node"][property] = list(value)
                 else:
@@ -170,10 +172,9 @@ class Graph(object):
                 sum(len(x) for x in self._edges.values()), hash(self))
 
 
-class NodeInterface(object):
+class NodeInterface(ABC):
     """Abstract base class for all Node Objects - All nodes must have an id and name
     """
-    __metaclass__ = abc.ABCMeta
 
     @abc.abstractproperty
     def id(self):
@@ -190,7 +191,7 @@ class NodeLib(NodeInterface):
     def __init__(self, id, name, input=None):
         if isinstance(input, dict):
             should_fail = False
-            for k, v in input.iteritems():
+            for k, v in input.items():
                 try:
                     if isinstance(v, list):
                         setattr(self, k, set(v))
@@ -310,7 +311,7 @@ class NodeSymbol(NodeInterface):
         if isinstance(input, dict):
             should_fail = False
 
-            for k, v in input.iteritems():
+            for k, v in input.items():
                 try:
                     if isinstance(v, list):
                         setattr(self, k, set(v))
@@ -435,7 +436,7 @@ class NodeFile(NodeInterface):
     def __init__(self, id, name, input=None):
         if isinstance(input, dict):
             should_fail = False
-            for k, v in input.iteritems():
+            for k, v in input.items():
                 try:
                     if isinstance(v, list):
                         setattr(self, k, set(v))
@@ -551,7 +552,7 @@ class NodeExe(NodeInterface):
     def __init__(self, id, name, input=None):
         if isinstance(input, dict):
             should_fail = False
-            for k, v in input.iteritems():
+            for k, v in input.items():
                 try:
                     if isinstance(v, list):
                         setattr(self, k, set(v))
diff --git a/site_scons/site_tools/dagger/graph_consts.py b/site_scons/site_tools/dagger/graph_consts.py
index 81fe86d..a922a4f 100644
--- a/site_scons/site_tools/dagger/graph_consts.py
+++ b/site_scons/site_tools/dagger/graph_consts.py
@@ -17,8 +17,8 @@ NODE_SYM = 2
 NODE_FILE = 3
 NODE_EXE = 4
 
-RELATIONSHIP_TYPES = range(1, 9)
-NODE_TYPES = range(1, 5)
+RELATIONSHIP_TYPES = list(range(1, 9))
+NODE_TYPES = list(range(1, 5))
 
 
 """Error/query codes"""
diff --git a/site_scons/site_tools/dagger/graph_test.py b/site_scons/site_tools/dagger/graph_test.py
index bc84f58..6c0168c 100644
--- a/site_scons/site_tools/dagger/graph_test.py
+++ b/site_scons/site_tools/dagger/graph_test.py
@@ -5,8 +5,8 @@ from JSON
 
 import json
 import unittest
-import graph
-import graph_consts
+from . import graph
+from . import graph_consts
 
 
 def generate_graph():
@@ -122,15 +122,15 @@ class TestGraphMethods(unittest.TestCase, CustomAssertions):
         node = graph.NodeLib("test_node", "test_node")
         self.g._nodes = {"test_node": node}
 
-        self.assertEquals(self.g.get_node("test_node"), node)
+        self.assertEqual(self.g.get_node("test_node"), node)
 
-        self.assertEquals(self.g.get_node("missing_node"), None)
+        self.assertEqual(self.g.get_node("missing_node"), None)
 
     def test_add_node(self):
         node = graph.NodeLib("test_node", "test_node")
         self.g.add_node(node)
 
-        self.assertEquals(self.g.get_node("test_node"), node)
+        self.assertEqual(self.g.get_node("test_node"), node)
 
         self.assertRaises(ValueError, self.g.add_node, node)
 
@@ -153,16 +153,16 @@ class TestGraphMethods(unittest.TestCase, CustomAssertions):
         self.g.add_edge(graph_consts.LIB_FIL, self.from_node_lib.id,
                         self.to_node_file.id)
 
-        self.assertEquals(self.g.edges[graph_consts.LIB_LIB][
+        self.assertEqual(self.g.edges[graph_consts.LIB_LIB][
             self.from_node_lib.id], set([self.to_node_lib.id]))
 
-        self.assertEquals(self.g.edges[graph_consts.LIB_SYM][
+        self.assertEqual(self.g.edges[graph_consts.LIB_SYM][
             self.from_node_lib.id], set([self.to_node_sym.id]))
 
-        self.assertEquals(self.g.edges[graph_consts.LIB_FIL][
+        self.assertEqual(self.g.edges[graph_consts.LIB_FIL][
             self.from_node_lib.id], set([self.to_node_file.id]))
 
-        self.assertEquals(self.to_node_lib.dependent_libs,
+        self.assertEqual(self.to_node_lib.dependent_libs,
                           set([self.from_node_lib.id]))
 
     def test_add_edge_files(self):
@@ -173,14 +173,14 @@ class TestGraphMethods(unittest.TestCase, CustomAssertions):
         self.g.add_edge(graph_consts.FIL_LIB, self.from_node_file.id,
                         self.to_node_lib.id)
 
-        self.assertEquals(self.g.edges[graph_consts.FIL_FIL][
+        self.assertEqual(self.g.edges[graph_consts.FIL_FIL][
             self.from_node_file.id], set([self.to_node_file.id]))
-        self.assertEquals(self.g.edges[graph_consts.FIL_SYM][
+        self.assertEqual(self.g.edges[graph_consts.FIL_SYM][
             self.from_node_file.id], set([self.to_node_sym.id]))
-        self.assertEquals(self.g.edges[graph_consts.FIL_LIB][
+        self.assertEqual(self.g.edges[graph_consts.FIL_LIB][
             self.from_node_file.id], set([self.to_node_lib.id]))
 
-        self.assertEquals(self.to_node_file.dependent_files,
+        self.assertEqual(self.to_node_file.dependent_files,
                           set([self.from_node_file.id]))
 
     def test_export_to_json(self):
@@ -188,7 +188,7 @@ class TestGraphMethods(unittest.TestCase, CustomAssertions):
         generated_graph.export_to_json("export_test.json")
         generated = open("export_test.json", "r")
         correct = open("test_graph.json", "r")
-        self.assertEquals(json.load(generated), json.load(correct))
+        self.assertEqual(json.load(generated), json.load(correct))
         generated.close()
         correct.close()
 
@@ -205,7 +205,7 @@ class TestGraphMethods(unittest.TestCase, CustomAssertions):
             self.assertNodeEquals(
                 graph_fromJSON.get_node(id), correct_graph.get_node(id))
 
-        self.assertEquals(graph_fromJSON.edges, correct_graph.edges)
+        self.assertEqual(graph_fromJSON.edges, correct_graph.edges)
 
 
 if __name__ == '__main__':
diff --git a/site_scons/site_tools/distsrc.py b/site_scons/site_tools/distsrc.py
index 861f5d9..d2dff0b 100644
--- a/site_scons/site_tools/distsrc.py
+++ b/site_scons/site_tools/distsrc.py
@@ -20,7 +20,7 @@ import shutil
 import tarfile
 import time
 import zipfile
-import StringIO
+import io
 
 from distutils.spawn import find_executable
 
@@ -82,7 +82,7 @@ class DistSrcTarArchive(DistSrcArchive):
 
     def append_file_contents(self, filename, file_contents,
             mtime=time.time(),
-            mode=0644,
+            mode=0o644,
             uname="root",
             gname="root"):
         file_metadata = tarfile.TarInfo(name=filename)
@@ -91,7 +91,7 @@ class DistSrcTarArchive(DistSrcArchive):
         file_metadata.uname = uname
         file_metadata.gname = gname
         file_metadata.size = len(file_contents)
-        file_buf = StringIO.StringIO(file_contents)
+        file_buf = io.BytesIO(file_contents.encode("utf-8"))
         if self.archive_mode == 'r':
             self.archive_file.close()
             self.archive_file = tarfile.open(
@@ -119,7 +119,7 @@ class DistSrcZipArchive(DistSrcArchive):
             name=key,
             size=item_data.file_size,
             mtime=time.mktime(fixed_time),
-            mode=0775 if is_dir else 0664,
+            mode=0o775 if is_dir else 0o664,
             type=tarfile.DIRTYPE if is_dir else tarfile.REGTYPE,
             uid=0,
             gid=0,
@@ -129,7 +129,7 @@ class DistSrcZipArchive(DistSrcArchive):
 
     def append_file_contents(self, filename, file_contents,
             mtime=time.time(),
-            mode=0644,
+            mode=0o644,
             uname="root",
             gname="root"):
         self.archive_file.writestr(filename, file_contents)
@@ -139,7 +139,7 @@ class DistSrcZipArchive(DistSrcArchive):
 
 def build_error_action(msg):
     def error_stub(target=None, source=None, env=None):
-        print msg
+        print(msg)
         env.Exit(1)
     return [ error_stub ]
 
@@ -162,7 +162,7 @@ def distsrc_action_generator(source, target, env, for_signature):
 
     target_ext = str(target[0])[-3:]
     if not target_ext in [ 'zip', 'tar' ]:
-        print "Invalid file format for distsrc. Must be tar or zip file"
+        print("Invalid file format for distsrc. Must be tar or zip file")
         env.Exit(1)
 
     git_cmd = "\"%s\" archive --format %s --output %s --prefix ${MONGO_DIST_SRC_PREFIX} HEAD" % (
diff --git a/site_scons/site_tools/icecream.py b/site_scons/site_tools/icecream.py
index 9838b63..fdf0c26 100644
--- a/site_scons/site_tools/icecream.py
+++ b/site_scons/site_tools/icecream.py
@@ -99,7 +99,7 @@ def generate(env):
     suffixes = _CSuffixes + _CXXSuffixes
     for object_builder in SCons.Tool.createObjBuilders(env):
         emitterdict = object_builder.builder.emitter
-        for suffix in emitterdict.iterkeys():
+        for suffix in emitterdict.keys():
             if not suffix in suffixes:
                 continue
             base = emitterdict[suffix]
diff --git a/site_scons/site_tools/idl_tool.py b/site_scons/site_tools/idl_tool.py
index 78bedfa..628f345 100755
--- a/site_scons/site_tools/idl_tool.py
+++ b/site_scons/site_tools/idl_tool.py
@@ -47,7 +47,7 @@ def idl_scanner(node, env, path):
 
     deps_list = deps_str.splitlines()
 
-    nodes_deps_list = [ env.File(d) for d in deps_list]
+    nodes_deps_list = [ env.File(d.decode("utf-8")) for d in deps_list]
     nodes_deps_list.extend(env.Glob('#buildscripts/idl/*.py'))
     nodes_deps_list.extend(env.Glob('#buildscripts/idl/idl/*.py'))
 
diff --git a/site_scons/site_tools/jstoh.py b/site_scons/site_tools/jstoh.py
index dc90b32..567958a 100644
--- a/site_scons/site_tools/jstoh.py
+++ b/site_scons/site_tools/jstoh.py
@@ -1,3 +1,5 @@
+from __future__ import unicode_literals
+
 import os
 import sys
 
@@ -39,8 +41,8 @@ def jsToHeader(target, source):
 
     text = '\n'.join(h)
 
-    print "writing: %s" % outFile
-    with open(outFile, 'wb') as out:
+    print("writing: %s" % outFile)
+    with open(outFile, 'w') as out:
         try:
             out.write(text)
         finally:
@@ -49,7 +51,7 @@ def jsToHeader(target, source):
 
 if __name__ == "__main__":
     if len(sys.argv) < 3:
-        print "Must specify [target] [source] "
+        print("Must specify [target] [source] ")
         sys.exit(1)
 
     jsToHeader(sys.argv[1], sys.argv[2:])
diff --git a/site_scons/site_tools/mongo_integrationtest.py b/site_scons/site_tools/mongo_integrationtest.py
index ff9a5f4..fccbbeb 100644
--- a/site_scons/site_tools/mongo_integrationtest.py
+++ b/site_scons/site_tools/mongo_integrationtest.py
@@ -12,10 +12,10 @@ def register_integration_test(env, test):
     env.Alias('$INTEGRATION_TEST_ALIAS', installed_test)
 
 def integration_test_list_builder_action(env, target, source):
-    ofile = open(str(target[0]), 'wb')
+    ofile = open(str(target[0]), 'w')
     try:
         for s in _integration_tests:
-            print '\t' + str(s)
+            print('\t' + str(s))
             ofile.write('%s\n' % s)
     finally:
         ofile.close()
diff --git a/site_scons/site_tools/mongo_unittest.py b/site_scons/site_tools/mongo_unittest.py
index ec99ab2..a4185a6 100644
--- a/site_scons/site_tools/mongo_unittest.py
+++ b/site_scons/site_tools/mongo_unittest.py
@@ -11,10 +11,10 @@ def register_unit_test(env, test):
     env.Alias('$UNITTEST_ALIAS', test)
 
 def unit_test_list_builder_action(env, target, source):
-    ofile = open(str(target[0]), 'wb')
+    ofile = open(str(target[0]), 'w')
     try:
         for s in _unittests:
-            print '\t' + str(s)
+            print('\t' + str(s))
             ofile.write('%s\n' % s)
     finally:
         ofile.close()
diff --git a/site_scons/site_tools/split_dwarf.py b/site_scons/site_tools/split_dwarf.py
index 95130c9..c02d786 100644
--- a/site_scons/site_tools/split_dwarf.py
+++ b/site_scons/site_tools/split_dwarf.py
@@ -52,7 +52,7 @@ def generate(env):
 
     for object_builder in SCons.Tool.createObjBuilders(env):
         emitterdict = object_builder.builder.emitter
-        for suffix in emitterdict.iterkeys():
+        for suffix in emitterdict.keys():
             if not suffix in suffixes:
                 continue
             base = emitterdict[suffix]
diff --git a/site_scons/site_tools/thin_archive.py b/site_scons/site_tools/thin_archive.py
index 511c0ef..0d8a83b 100644
--- a/site_scons/site_tools/thin_archive.py
+++ b/site_scons/site_tools/thin_archive.py
@@ -41,7 +41,7 @@ def exists(env):
     for line in pipe.stdout:
         if isgnu:
             continue  # consume all data
-        isgnu = re.search(r'^GNU ar', line)
+        isgnu = re.search(b'^GNU ar', line)
 
     return bool(isgnu)
 
diff --git a/site_scons/site_tools/xcode.py b/site_scons/site_tools/xcode.py
index 9ec68c3..5ddebb2 100644
--- a/site_scons/site_tools/xcode.py
+++ b/site_scons/site_tools/xcode.py
@@ -9,4 +9,4 @@ def generate(env):
 
     if 'DEVELOPER_DIR' in os.environ:
         env['ENV']['DEVELOPER_DIR'] = os.environ['DEVELOPER_DIR']
-        print "NOTE: Xcode detected; propagating DEVELOPER_DIR from shell environment to subcommands"
+        print("NOTE: Xcode detected; propagating DEVELOPER_DIR from shell environment to subcommands")
diff --git a/src/mongo/SConscript b/src/mongo/SConscript
index 426e57c..cc09ab4 100644
--- a/src/mongo/SConscript
+++ b/src/mongo/SConscript
@@ -156,7 +156,7 @@ js_engine_ver = get_option("js-engine") if get_option("server-js") == "on" else
 
 # On windows, we need to escape the backslashes in the command-line
 # so that windows paths look okay.
-cmd_line = " ".join(sys.argv).encode('string-escape')
+cmd_line = " ".join(sys.argv).encode('unicode_escape').decode()
 if env.TargetOSIs('windows'):
     cmd_line = cmd_line.replace('\\', r'\\')
 
@@ -602,7 +602,7 @@ env.Append(MODULE_BANNERS = [distsrc.File('README'),
                              distsrc.File('MPL-2')])
 
 # If no module has introduced a file named LICENSE.txt, then inject the AGPL.
-if sum(itertools.imap(lambda x: x.name == "LICENSE.txt", env['MODULE_BANNERS'])) == 0:
+if sum(map(lambda x: x.name == "LICENSE.txt", env['MODULE_BANNERS'])) == 0:
     env.Append(MODULE_BANNERS = [distsrc.File('GNU-AGPL-3.0')])
 
 # All module banners get staged to the top level of the tarfile, so we
@@ -621,7 +621,7 @@ module_banner_transforms = ["--transform %s=$SERVER_DIST_BASENAME" % d for d in
 # Allow modules to map original file name directories to subdirectories
 # within the archive (e.g. { "src/mongo/db/modules/enterprise/docs": "snmp"})
 archive_addition_transforms = []
-for full_dir, archive_dir in env["ARCHIVE_ADDITION_DIR_MAP"].items():
+for full_dir, archive_dir in list(env["ARCHIVE_ADDITION_DIR_MAP"].items()):
   archive_addition_transforms.append("--transform \"%s=$SERVER_DIST_BASENAME/%s\"" %
                                      (full_dir, archive_dir))
 
diff --git a/src/mongo/base/generate_error_codes.py b/src/mongo/base/generate_error_codes.py
index 420ee96..b704767 100644
--- a/src/mongo/base/generate_error_codes.py
+++ b/src/mongo/base/generate_error_codes.py
@@ -26,6 +26,8 @@
 #    delete this exception statement from all source files in the program,
 #    then also delete it in the license file.
 
+from __future__ import unicode_literals
+
 """Generate error_codes.{h,cpp} from error_codes.err.
 
 Format of error_codes.err:
@@ -93,7 +95,7 @@ def main(argv):
                 categories=error_classes,
                 )
 
-        with open(output, 'wb') as outfile:
+        with open(output, 'w') as outfile:
             outfile.write(text)
 
 def die(message=None):
diff --git a/src/mongo/db/auth/generate_action_types.py b/src/mongo/db/auth/generate_action_types.py
index b712b29..39252ed 100755
--- a/src/mongo/db/auth/generate_action_types.py
+++ b/src/mongo/db/auth/generate_action_types.py
@@ -227,7 +227,7 @@ def hasDuplicateActionTypes(actionTypes):
     prevActionType = sortedActionTypes[0]
     for actionType in sortedActionTypes[1:]:
         if actionType == prevActionType:
-            print 'Duplicate actionType %s\n' % actionType
+            print('Duplicate actionType %s\n' % actionType)
             didFail = True
         prevActionType = actionType
 
@@ -240,7 +240,7 @@ def parseActionTypesFromFile(actionTypesFilename):
 
 if __name__ == "__main__":
     if len(sys.argv) != 4:
-        print "Usage: generate_action_types.py <path to action_types.txt> <header file path> <source file path>"
+        print("Usage: generate_action_types.py <path to action_types.txt> <header file path> <source file path>")
         sys.exit(-1)
 
     actionTypes = parseActionTypesFromFile(sys.argv[1])
diff --git a/src/mongo/db/fts/generate_stop_words.py b/src/mongo/db/fts/generate_stop_words.py
index e0dc801..e0aad76 100644
--- a/src/mongo/db/fts/generate_stop_words.py
+++ b/src/mongo/db/fts/generate_stop_words.py
@@ -7,7 +7,7 @@ def generate( header, source, language_files ):
     for x in language_files:
         print( "\t%s" % x )
 
-    out = open( header, "wb" )
+    out = open( header, "w" )
     out.write( """
 #pragma once
 #include <set>
@@ -24,7 +24,7 @@ namespace fts {
 
 
 
-    out = open( source, "wb" )
+    out = open( source, "w" )
     out.write( '#include "%s"' % header.rpartition( "/" )[2].rpartition( "\\" )[2] )
     out.write( """
 namespace mongo {
@@ -40,7 +40,7 @@ namespace fts {
         out.write( '  // %s\n' % l_file )
         out.write( '  {\n' )
         out.write( '   const char* const words[] = {\n' )
-        for word in open( l_file, "rb" ):
+        for word in open( l_file, "r" ):
             out.write( '       "%s",\n' % word.strip() )
         out.write( '   };\n' )
         out.write( '   const size_t wordcnt = sizeof(words) / sizeof(words[0]);\n' )
diff --git a/src/mongo/db/fts/unicode/gen_diacritic_map.py b/src/mongo/db/fts/unicode/gen_diacritic_map.py
index 08cfa95..7c623af 100644
--- a/src/mongo/db/fts/unicode/gen_diacritic_map.py
+++ b/src/mongo/db/fts/unicode/gen_diacritic_map.py
@@ -45,7 +45,7 @@ def add_diacritic_mapping(codepoint):
     # c : recomposed unicode character with diacritics removed
     a = chr(codepoint)
     d = normalize('NFD', a)
-    r = u''
+    r = ''
 
     for i in range(len(d)):
         if ord(d[i]) not in diacritics:
diff --git a/src/mongo/db/query/collation/generate_icu_init_cpp.py b/src/mongo/db/query/collation/generate_icu_init_cpp.py
index 8ae084a..7c576f6 100755
--- a/src/mongo/db/query/collation/generate_icu_init_cpp.py
+++ b/src/mongo/db/query/collation/generate_icu_init_cpp.py
@@ -26,6 +26,9 @@
 #    delete this exception statement from all source files in the program,
 #    then also delete it in the license file.
 
+from __future__ import unicode_literals
+
+import array
 import optparse
 import os
 import sys
@@ -110,8 +113,8 @@ MONGO_INITIALIZER(LoadICUData)(InitializerContext* context) {
 '''
     decimal_encoded_data = ''
     with open(data_file_path, 'rb') as data_file:
-        decimal_encoded_data = ','.join([str(ord(byte)) for byte in data_file.read()])
-    with open(cpp_file_path, 'wb') as cpp_file:
+        decimal_encoded_data = ','.join([str(byte) for byte in array.array("B", data_file.read()).tolist()])
+    with open(cpp_file_path, 'w') as cpp_file:
         cpp_file.write(source_template % dict(decimal_encoded_data=decimal_encoded_data))
 
 if __name__ == '__main__':
diff --git a/src/third_party/mozjs-45/extract/js/src/builtin/embedjs.py b/src/third_party/mozjs-45/extract/js/src/builtin/embedjs.py
index ece905d..03cc961 100644
--- a/src/third_party/mozjs-45/extract/js/src/builtin/embedjs.py
+++ b/src/third_party/mozjs-45/extract/js/src/builtin/embedjs.py
@@ -36,7 +36,7 @@
 #
 # It uses the C preprocessor to process its inputs.
 
-from __future__ import with_statement
+from __future__ import with_statement, unicode_literals
 import re, sys, os, subprocess
 import shlex
 import which
@@ -107,7 +107,7 @@ def preprocess(cxx, preprocessorOption, source, args = []):
   tmpOut = 'self-hosting-preprocessed.pp';
   outputArg = shlex.split(preprocessorOption + tmpOut)
 
-  with open(tmpIn, 'wb') as input:
+  with open(tmpIn, 'w') as input:
     input.write(source)
   print(' '.join(cxx + outputArg + args + [tmpIn]))
   result = subprocess.Popen(cxx + outputArg + args + [tmpIn]).wait()