diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index a1e4994e614a40fba6fc7dcf8a54863819f93e91..3324a1a29ee9cd0d2e5f4f40ad0eef5bb0acd475 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -29,13 +29,12 @@ variables:
       - .venv/
     when: 'always'
   before_script:
-    - virtualenv .venv
+    - virtualenv -p C:\Python39\python.exe .venv
     - .\.venv\Scripts\activate.ps1
-    - pip install --upgrade autobuild -i https://pkg.alchemyviewer.org/repository/autobuild/simple --extra-index-url https://pypi.org/simple
+    - pip3 install --upgrade autobuild -i https://ci-job-token:${env:CI_JOB_TOKEN}@git.alchemyviewer.org/api/v4/projects/54/packages/pypi/simple --extra-index-url https://pypi.org/simple
   script:
-    - |
-        autobuild configure -c ReleaseOS -- -DUSE_FMODSTUDIO=ON -DUSE_NVAPI=ON -DUSE_LTO=ON -DDISABLE_FATAL_WARNINGS=ON -DREVISION_FROM_VCS=FALSE
-        autobuild build -c ReleaseOS --no-configure
+    - autobuild configure -c ReleaseOS -- -DUSE_FMODSTUDIO=ON -DUSE_NVAPI=ON -DUSE_LTO=ON -DDISABLE_FATAL_WARNINGS=ON -DREVISION_FROM_VCS=FALSE
+    - autobuild build -c ReleaseOS --no-configure
   artifacts:
     name: "$env:CI_COMMIT_REF_NAME-$env:CI_COMMIT_SHORT_SHA"
     expire_in: 1 week
@@ -50,6 +49,7 @@ variables:
   stage: build
   tags:
     - mac
+    - m1
   cache:
     key:
       files:
@@ -61,13 +61,12 @@ variables:
       - .venv
     when: 'always'
   before_script:
-    - virtualenv .venv -p python2
+    - virtualenv .venv -p python3
     - source .venv/bin/activate
-    - pip install --upgrade autobuild dmgbuild -i https://pkg.alchemyviewer.org/repository/autobuild/simple --extra-index-url https://pypi.org/simple
+    - pip3 install --upgrade autobuild dmgbuild -i https://ci-job-token:${CI_JOB_TOKEN}@git.alchemyviewer.org/api/v4/projects/54/packages/pypi/simple --extra-index-url https://pypi.org/simple
   script:
-    - |
-        autobuild configure -c ReleaseOS -- -DUSE_FMODSTUDIO=ON -DUSE_NVAPI=ON -DUSE_LTO=ON -DDISABLE_FATAL_WARNINGS=ON -DREVISION_FROM_VCS=FALSE
-        autobuild build -c ReleaseOS --no-configure
+    - autobuild configure -c ReleaseOS -- -DUSE_FMODSTUDIO=ON -DUSE_NVAPI=ON -DUSE_LTO=ON -DDISABLE_FATAL_WARNINGS=ON -DREVISION_FROM_VCS=FALSE
+    - autobuild build -c ReleaseOS --no-configure
   artifacts:
     name: "$env:CI_COMMIT_REF_NAME-$env:CI_COMMIT_SHORT_SHA"
     expire_in: 1 week
@@ -92,19 +91,18 @@ variables:
       - .venv
     when: 'always'
   before_script:
-    - virtualenv .venv -p python2
+    - virtualenv .venv -p python3
     - source .venv/bin/activate
-    - pip install --upgrade autobuild -i https://pkg.alchemyviewer.org/repository/autobuild/simple --extra-index-url https://pypi.org/simple
+    - pip3 install --upgrade autobuild -i https://ci-job-token:${CI_JOB_TOKEN}@git.alchemyviewer.org/api/v4/projects/54/packages/pypi/simple --extra-index-url https://pypi.org/simple
   script:
-    - |
-        autobuild configure -c ReleaseOS -- -DUSE_FMODSTUDIO=ON -DUSE_NVAPI=ON -DUSE_LTO=ON -DDISABLE_FATAL_WARNINGS=ON -DREVISION_FROM_VCS=FALSE
-        autobuild build -c ReleaseOS --no-configure
+    - autobuild configure -c ReleaseOS -- -DUSE_FMODSTUDIO=ON -DUSE_NVAPI=ON -DUSE_LTO=ON -DDISABLE_FATAL_WARNINGS=ON -DREVISION_FROM_VCS=FALSE
+    - autobuild build -c ReleaseOS --no-configure
   artifacts:
     name: "$env:CI_COMMIT_REF_NAME-$env:CI_COMMIT_SHORT_SHA"
     expire_in: 1 week
     paths:
       - build-linux-*/build_data.json
-      - build-linux-*/newview/Alchemy_*.tar.bz2
+      - build-linux-*/newview/Alchemy_*.tar.xz
 
 .win32_build:
   extends: .win_build
@@ -263,36 +261,38 @@ build:release:windows64:
 .upload_template:
   stage: upload
   tags:
-    - autobuild
-    - windows
+    - powershell
+    - m1
+    - mac
   allow_failure: false
+  variables:
+    GIT_STRATEGY: none
   script:
     - |
-        $BuildData = Get-Content .\build-vc-64\newview\Release\build_data.json | ConvertFrom-Json
-        $BuildChannelVersion = $BuildData."Channel" + ' ' + $BuildData."Version"
-        $UploadDestViewerDir = $BuildChannelVersion.ToLower().Replace(" ", "/")
-        $UploadDestURL = "https://pkg.alchemyviewer.org/repository/viewer/${UploadDestViewerDir}"
+        $BuildData = Get-Content ./build-vc-64/newview/Release/build_data.json | ConvertFrom-Json
+        $PkgName = $BuildData.Channel.Replace(" ", "_")
+        $PkgVer = $BuildData.Version
+        $UploadDestURL = "${env:CI_API_V4_URL}/projects/${env:CI_PROJECT_ID}/packages/generic/${PkgName}/${PkgVer}"
 
         $UploadParams = @{ UseBasicParsing = $true;
          Method = "PUT";
          Headers = @{
-           ContentType = "application/x-executable";
-           Authorization = "Basic $([System.Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes("$env:AUTOBUILD_HTTP_USER`:$env:AUTOBUILD_HTTP_PASS")))"; };
+           "JOB-TOKEN" = "${env:CI_JOB_TOKEN}"; };
          Verbose = $true; };
 
-        Push-Location .\build-linux-64\newview\
-          $FileNameLnx64 = Get-ChildItem -Path . -Name -Include Alchemy_*.tar.bz2
-          Invoke-WebRequest @UploadParams -InFile .\$FileNameLnx64 -Uri "${UploadDestURL}/${FileNameLnx64}"
+        Push-Location ./build-linux-64/newview/
+          $FileNameLnx64 = Get-ChildItem -Path . -Name -Include Alchemy_*.tar.xz
+          Invoke-WebRequest @UploadParams -InFile ./$FileNameLnx64 -Uri "${UploadDestURL}/${FileNameLnx64}"
         Pop-Location
 
-        Push-Location .\build-darwin-64\newview\
+        Push-Location ./build-darwin-64/newview/
           $FileNameMac64 = Get-ChildItem -Path . -Name -Include Alchemy_*.dmg
-          Invoke-WebRequest @UploadParams -InFile .\$FileNameMac64 -Uri "${UploadDestURL}/${FileNameMac64}"
+          Invoke-WebRequest @UploadParams -InFile ./$FileNameMac64 -Uri "${UploadDestURL}/${FileNameMac64}"
         Pop-Location
 
-        Push-Location .\build-vc-64\newview\Release\
+        Push-Location ./build-vc-64/newview/Release/
           $FileNameWin64 = Get-ChildItem -Path . -Name -Include Alchemy_*_Setup.exe
-          Invoke-WebRequest @UploadParams -InFile .\$FileNameWin64 -Uri "${UploadDestURL}/${FileNameWin64}"
+          Invoke-WebRequest @UploadParams -InFile ./$FileNameWin64 -Uri "${UploadDestURL}/${FileNameWin64}"
 
           If ($env:VIEWER_USE_CRASHPAD -eq 'TRUE') 
           {
@@ -337,4 +337,4 @@ upload:release:
     - if: '$CI_COMMIT_TAG =~ /.*-release/'
       when: manual
   environment:
-    name: release
\ No newline at end of file
+    name: release
diff --git a/indra/cmake/BuildPackagesInfo.cmake b/indra/cmake/BuildPackagesInfo.cmake
index 6f5f6fd93554b6580f348b2374535d69f4d959c0..6a37f97a00f8edbc05d8231b36b100fb08bb5fc5 100644
--- a/indra/cmake/BuildPackagesInfo.cmake
+++ b/indra/cmake/BuildPackagesInfo.cmake
@@ -13,8 +13,8 @@ add_custom_command(OUTPUT packages-info.txt
   MAIN_DEPENDENCY ${CMAKE_SOURCE_DIR}/../autobuild.xml
   DEPENDS ${CMAKE_SOURCE_DIR}/../scripts/packages-formatter.py
           ${CMAKE_SOURCE_DIR}/../autobuild.xml
-  COMMAND ${Python2_EXECUTABLE}
+  COMMAND ${Python3_EXECUTABLE}
           ${CMAKE_SOURCE_DIR}/cmake/run_build_test.py -DAUTOBUILD_ADDRSIZE=${ADDRESS_SIZE} -DAUTOBUILD=${AUTOBUILD_EXECUTABLE}
-          ${Python2_EXECUTABLE}
+          ${Python3_EXECUTABLE}
           ${CMAKE_SOURCE_DIR}/../scripts/packages-formatter.py "${VIEWER_CHANNEL}" "${VIEWER_SHORT_VERSION}.${VIEWER_VERSION_REVISION}" > packages-info.txt
   )
diff --git a/indra/cmake/LLTestCommand.cmake b/indra/cmake/LLTestCommand.cmake
index 984f7bb22ad5ed3c9af537595cf1aaff39b5c0d1..d46714f4fe7e54c7a3679875e96daa7ad753fdc7 100644
--- a/indra/cmake/LLTestCommand.cmake
+++ b/indra/cmake/LLTestCommand.cmake
@@ -4,14 +4,14 @@ MACRO(LL_TEST_COMMAND OUTVAR LD_LIBRARY_PATH)
   # cannot return a value. And yet, variables you set inside a FUNCTION are
   # local. Try a MACRO instead.
   SET(value
-    ${Python2_EXECUTABLE}
+    ${Python3_EXECUTABLE}
     "${CMAKE_SOURCE_DIR}/cmake/run_build_test.py")
   FOREACH(dir ${LD_LIBRARY_PATH})
     LIST(APPEND value "-l${dir}")
   ENDFOREACH(dir)
-  # Enough different tests want to be able to find CMake's Python2_EXECUTABLE
+  # Enough different tests want to be able to find CMake's Python3_EXECUTABLE
   # that we should just pop it into the environment for everybody.
-  LIST(APPEND value "-DPYTHON=${Python2_EXECUTABLE}")
+  LIST(APPEND value "-DPYTHON=${Python3_EXECUTABLE}")
   LIST(APPEND value ${ARGN})
   SET(${OUTVAR} ${value})
 ##IF(LL_TEST_VERBOSE)
diff --git a/indra/cmake/Python.cmake b/indra/cmake/Python.cmake
index e9bf28fc83218ae17be2e82e4bf20887d6a80d17..0164b263950266d7cbc4a29e305e8d4094c36ff6 100644
--- a/indra/cmake/Python.cmake
+++ b/indra/cmake/Python.cmake
@@ -4,35 +4,46 @@ if (WINDOWS)
   # On Windows, explicitly avoid Cygwin Python.
 
   if (DEFINED ENV{VIRTUAL_ENV})
-    find_program(Python2_EXECUTABLE
-      NAMES python.exe
+    find_program(Python3_EXECUTABLE
+      NAMES python3.exe python.exe
       PATHS
       "$ENV{VIRTUAL_ENV}\\scripts"
       NO_DEFAULT_PATH
       )
   else()
-    find_program(Python2_EXECUTABLE
-      NAMES python25.exe python23.exe python.exe
+    find_program(Python3_EXECUTABLE
+      NAMES python3.exe python.exe
       NO_DEFAULT_PATH # added so that cmake does not find cygwin python
       PATHS
-      [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\2.7\\InstallPath]
-      [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\2.6\\InstallPath]
-      [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\2.5\\InstallPath]
-      [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\2.4\\InstallPath]
-      [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\2.3\\InstallPath]
-      [HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\2.7\\InstallPath]
-      [HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\2.6\\InstallPath]
-      [HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\2.5\\InstallPath]
-      [HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\2.4\\InstallPath]
-      [HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\2.3\\InstallPath]
+      [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\3.9\\InstallPath]
+      [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\3.8\\InstallPath]
+      [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\3.7\\InstallPath]
+      [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\3.6\\InstallPath]
+      [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\3.5\\InstallPath]
+      [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\3.4\\InstallPath]
+      [HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\3.9\\InstallPath]
+      [HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\3.8\\InstallPath]
+      [HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\3.7\\InstallPath]
+      [HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\3.6\\InstallPath]
+      [HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\3.5\\InstallPath]
+      [HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\3.4\\InstallPath]
       )
   endif()
 
-  if (NOT Python2_EXECUTABLE)
+  if (NOT Python3_EXECUTABLE)
     message(FATAL_ERROR "No Python interpreter found")
-  endif (NOT Python2_EXECUTABLE)
+  endif (NOT Python3_EXECUTABLE)
 
-  mark_as_advanced(Python2_EXECUTABLE)
+  mark_as_advanced(Python3_EXECUTABLE)
 else (WINDOWS)
-  find_package(Python2 REQUIRED COMPONENTS Interpreter)
+  if (DEFINED ENV{VIRTUAL_ENV})
+    find_program(Python3_EXECUTABLE
+      NAMES python3 python
+      PATHS
+      "$ENV{VIRTUAL_ENV}/bin"
+      NO_DEFAULT_PATH
+      )
+  else()
+    find_package(Python3 REQUIRED COMPONENTS Interpreter)
+  endif()
 endif (WINDOWS)
diff --git a/indra/cmake/TemplateCheck.cmake b/indra/cmake/TemplateCheck.cmake
index 4dc3e89c3115c3d9855f052b12fdb588b5db5e59..8cb4fc581d6550aaf8d493316e6b237c7bce97c6 100644
--- a/indra/cmake/TemplateCheck.cmake
+++ b/indra/cmake/TemplateCheck.cmake
@@ -6,7 +6,7 @@ macro (check_message_template _target)
   add_custom_command(
       TARGET ${_target}
       PRE_LINK
-      COMMAND ${Python2_EXECUTABLE}
+      COMMAND ${Python3_EXECUTABLE}
       ARGS ${SCRIPTS_DIR}/template_verifier.py
            --mode=development --cache_master --master_url=${TEMPLATE_VERIFIER_MASTER_URL} ${TEMPLATE_VERIFIER_OPTIONS}
       COMMENT "Verifying message template - See http://wiki.secondlife.com/wiki/Template_verifier.py"
diff --git a/indra/cmake/run_build_test.py b/indra/cmake/run_build_test.py
index af2d8a0153c79930aa152109bc5771c57f4e2554..1e92868ae76fdfbad51d94e418d784b865ced85c 100755
--- a/indra/cmake/run_build_test.py
+++ b/indra/cmake/run_build_test.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 """\
 @file   run_build_test.py
 @author Nat Goodspeed
@@ -17,7 +17,7 @@
 
 Example:
 
-python run_build_test.py -DFOO=bar myprog somearg otherarg
+python3 run_build_test.py -DFOO=bar myprog somearg otherarg
 
 sets environment variable FOO=bar, then runs:
 myprog somearg otherarg
@@ -47,7 +47,7 @@
 import os
 import sys
 import errno
-import HTMLParser
+import html.parser
 import re
 import signal
 import subprocess
@@ -111,10 +111,10 @@ def main(command, arguments=[], libpath=[], vars={}):
     # Now handle arbitrary environment variables. The tricky part is ensuring
     # that all the keys and values we try to pass are actually strings.
     if vars:
-        for key, value in vars.items():
+        for key, value in list(vars.items()):
             # As noted a few lines above, facilitate copy-paste rerunning.
             log.info("%s='%s' \\" % (key, value))
-    os.environ.update(dict([(str(key), str(value)) for key, value in vars.iteritems()]))
+    os.environ.update(dict([(str(key), str(value)) for key, value in vars.items()]))
     # Run the child process.
     command_list = [command]
     command_list.extend(arguments)
@@ -177,7 +177,7 @@ def translate_rc(rc):
         try:
             table = get_windows_table()
             symbol, desc = table[hexrc]
-        except Exception, err:
+        except Exception as err:
             log.error("(%s -- carrying on)" % err)
             log.error("terminated with rc %s (%s)" % (rc, hexrc))
         else:
@@ -194,7 +194,7 @@ def translate_rc(rc):
             strc = str(rc)
         return "terminated by signal %s" % strc
 
-class TableParser(HTMLParser.HTMLParser):
+class TableParser(html.parser.HTMLParser):
     """
     This HTMLParser subclass is designed to parse the table we know exists
     in windows-rcs.html, hopefully without building in too much knowledge of
@@ -204,9 +204,7 @@ class TableParser(HTMLParser.HTMLParser):
     whitespace = re.compile(r'\s*$')
 
     def __init__(self):
-        # Because Python 2.x's HTMLParser is an old-style class, we must use
-        # old-style syntax to forward the __init__() call -- not super().
-        HTMLParser.HTMLParser.__init__(self)
+        super().__init__()
         # this will collect all the data, eventually
         self.table = []
         # Stack whose top (last item) indicates where to append current
diff --git a/indra/copy_win_scripts/start-client.py b/indra/copy_win_scripts/start-client.py
index 6c4c15ad39f0a416c02ad67242fe96dd0254b626..f3579a1e71c6af3ffc245a4aed717dc8b65dd983 100755
--- a/indra/copy_win_scripts/start-client.py
+++ b/indra/copy_win_scripts/start-client.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 """\
 @file   start-client.py
 
@@ -28,12 +28,12 @@
 import llstart
 
 def usage():
-    print """start-client.py
+    print("""start-client.py
     
     --grid <grid>
     --farm <grid>
     --region <starting region name>
-    """
+    """)
 
 def start_client(grid, slurl, build_config, my_args):
     login_url = "https://login.%s.lindenlab.com/cgi-bin/login.cgi" % (grid)
@@ -42,7 +42,7 @@ def start_client(grid, slurl, build_config, my_args):
                     "--loginuri" : login_url }
     viewer_args.update(my_args)
     # *sigh*  We must put --url at the end of the argument list.
-    if viewer_args.has_key("--url"):
+    if "--url" in viewer_args:
         slurl = viewer_args["--url"]
         del(viewer_args["--url"])
     viewer_args = llstart.get_args_from_dict(viewer_args)
@@ -54,7 +54,7 @@ def start_client(grid, slurl, build_config, my_args):
     # but the exe is at indra/build-<xxx>/newview/<target>
     build_path = os.path.dirname(os.getcwd());    
     f = open("start-client.log", "w")
-    print >>f, "Viewer startup arguments:"
+    print("Viewer startup arguments:", file=f)
     llstart.start("viewer", "../../newview", 
         "%s/newview/%s/alchemy-bin.exe" % (build_path, build_config),
         viewer_args, f)
@@ -63,7 +63,7 @@ def start_client(grid, slurl, build_config, my_args):
 if __name__ == "__main__":
     grid = llstart.get_config("grid")
     
-    if grid == None:
+    if grid is None:
         grid = "aditi"
         
     build_config = llstart.get_config("build_config")
@@ -81,7 +81,7 @@ def start_client(grid, slurl, build_config, my_args):
             sys.exit(0)
             
     slurl = llstart.get_config("slurl")            
-    if slurl == None:
+    if slurl is None:
         if region is None:
             region = llstart.get_user_name()
         slurl = "//%s/128/128/" % (region)
diff --git a/indra/deps/CMakeLists.txt b/indra/deps/CMakeLists.txt
index 9f794c072f50f87093fcc7600c4be8626e708bae..d0bb1689514a029faff859ae021a69b5fcc5a796 100644
--- a/indra/deps/CMakeLists.txt
+++ b/indra/deps/CMakeLists.txt
@@ -51,8 +51,8 @@ FetchContent_Declare(
   )
 FetchContent_Declare(
   absl
-  GIT_REPOSITORY https://git.alchemyviewer.org/alchemy/mirrors/abseil-cpp.git
-  GIT_TAG        9a7e447c511dae7276ab65fde4d04f6ed52b39c9
+  GIT_REPOSITORY https://git.alchemyviewer.org/alchemy/thirdparty/abseil-cpp.git
+  GIT_TAG        a198d62533e8de0c33a6311f38a3335f7930b7c7
   )
 FetchContent_Declare(
   readerwriterqueue
diff --git a/indra/fix-incredibuild.py b/indra/fix-incredibuild.py
deleted file mode 100755
index 6b13a7b466f59cfed4b13705f525474fe653f092..0000000000000000000000000000000000000000
--- a/indra/fix-incredibuild.py
+++ /dev/null
@@ -1,61 +0,0 @@
-#!/usr/bin/env python2
-## 
-## $LicenseInfo:firstyear=2011&license=viewerlgpl$
-## Second Life Viewer Source Code
-## Copyright (C) 2011, Linden Research, Inc.
-## 
-## This library is free software; you can redistribute it and/or
-## modify it under the terms of the GNU Lesser General Public
-## License as published by the Free Software Foundation;
-## version 2.1 of the License only.
-## 
-## This library is distributed in the hope that it will be useful,
-## but WITHOUT ANY WARRANTY; without even the implied warranty of
-## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-## Lesser General Public License for more details.
-## 
-## You should have received a copy of the GNU Lesser General Public
-## License along with this library; if not, write to the Free Software
-## Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
-## 
-## Linden Research, Inc., 945 Battery Street, San Francisco, CA  94111  USA
-## $/LicenseInfo$
-
-import sys
-import os
-import glob
-
-def delete_file_types(path, filetypes):
-    if os.path.exists(path):
-        print 'Cleaning: ' + path
-        orig_dir = os.getcwd();
-        os.chdir(path)
-        filelist = []
-        for type in filetypes:
-            filelist.extend(glob.glob(type))
-        for file in filelist:
-            os.remove(file)
-        os.chdir(orig_dir)
-
-def main():
-    build_types = ['*.exp','*.exe','*.pdb','*.idb',
-                 '*.ilk','*.lib','*.obj','*.ib_pdb_index']
-    pch_types = ['*.pch']
-    delete_file_types("build-vc80/newview/Release", build_types)
-    delete_file_types("build-vc80/newview/alchemy-bin.dir/Release/", 
-                      pch_types)
-    delete_file_types("build-vc80/newview/RelWithDebInfo", build_types)
-    delete_file_types("build-vc80/newview/alchemy-bin.dir/RelWithDebInfo/", 
-                      pch_types)
-    delete_file_types("build-vc80/newview/Debug", build_types)
-    delete_file_types("build-vc80/newview/alchemy-bin.dir/Debug/", 
-                      pch_types)
-
-
-    delete_file_types("build-vc80/test/RelWithDebInfo", build_types)
-    delete_file_types("build-vc80/test/test.dir/RelWithDebInfo/", 
-                      pch_types)
-
-
-if __name__ == "__main__":
-    main()
diff --git a/indra/lib/python/indra/ipc/llmessage.py b/indra/lib/python/indra/ipc/llmessage.py
index 91fb36b72c62b4979c9ca1cde2f3f4ab00fc2833..663e2d9c63a532ec9a5c2a7d7b7ad71e7201120f 100755
--- a/indra/lib/python/indra/ipc/llmessage.py
+++ b/indra/lib/python/indra/ipc/llmessage.py
@@ -26,8 +26,8 @@
 $/LicenseInfo$
 """
 
-from compatibility import Incompatible, Older, Newer, Same
-from tokenstream import TokenStream
+from .compatibility import Incompatible, Older, Newer, Same
+from .tokenstream import TokenStream
 
 ###
 ### Message Template
@@ -42,8 +42,8 @@ def addMessage(self, m):
     
     def compatibleWithBase(self, base):
         messagenames = (
-              frozenset(self.messages.keys())
-            | frozenset(base.messages.keys())
+              frozenset(list(self.messages.keys()))
+            | frozenset(list(base.messages.keys()))
             )
             
         compatibility = Same()
@@ -142,7 +142,7 @@ def compatibleWithBase(self, base):
         baselen = len(base.blocks)
         samelen = min(selflen, baselen)
             
-        for i in xrange(0, samelen):
+        for i in range(0, samelen):
             selfblock = self.blocks[i]
             baseblock = base.blocks[i]
             
@@ -196,7 +196,7 @@ def compatibleWithBase(self, base):
         selflen = len(self.variables)
         baselen = len(base.variables)
         
-        for i in xrange(0, min(selflen, baselen)):
+        for i in range(0, min(selflen, baselen)):
             selfvar = self.variables[i]
             basevar = base.variables[i]
             
diff --git a/indra/lib/python/indra/ipc/tokenstream.py b/indra/lib/python/indra/ipc/tokenstream.py
index b96f26d3ffdc033d20677ee3e5ff98cabe8dff12..ab97e94846514c1015539696639e087ceecf46c3 100755
--- a/indra/lib/python/indra/ipc/tokenstream.py
+++ b/indra/lib/python/indra/ipc/tokenstream.py
@@ -60,7 +60,7 @@ def __str__(self):
         return "line %d: %s @ ... %s" % (
             self.line, self.reason, self._contextString())
 
-    def __nonzero__(self):
+    def __bool__(self):
         return False
 
 
diff --git a/indra/lib/python/indra/util/llmanifest.py b/indra/lib/python/indra/util/llmanifest.py
index 013ba4798ce668a0c65263d1ef73f66bf0e036a8..512689b5261a1c2fada1e1c69726db1a9dec15ab 100755
--- a/indra/lib/python/indra/util/llmanifest.py
+++ b/indra/lib/python/indra/util/llmanifest.py
@@ -1,3 +1,4 @@
+#!/usr/bin/env python3
 """\
 @file llmanifest.py
 @author Ryan Williams
@@ -28,7 +29,8 @@
 """
 
 from collections import namedtuple, defaultdict
-import commands
+from io import open
+import subprocess
 import errno
 import filecmp
 import fnmatch
@@ -70,9 +72,9 @@ def proper_windows_path(path, current_platform = sys.platform):
     path = path.strip()
     drive_letter = None
     rel = None
-    match = re.match("/cygdrive/([a-z])/(.*)", path)
+    match = re.match(r"/cygdrive/([a-z])/(.*)", path)
     if not match:
-        match = re.match('([a-zA-Z]):\\\(.*)', path)
+        match = re.match(r'([a-zA-Z]):\\(.*)', path)
     if not match:
         return None         # not an absolute path
     drive_letter = match.group(1)
@@ -83,8 +85,7 @@ def proper_windows_path(path, current_platform = sys.platform):
         return drive_letter.upper() + ':\\' + rel.replace('/', '\\')
 
 def get_default_platform(dummy):
-    return {'linux2':'linux',
-            'linux1':'linux',
+    return {'linux':'linux',
             'cygwin':'windows',
             'win32':'windows',
             'darwin':'darwin'
@@ -162,20 +163,20 @@ def get_default_platform(dummy):
 
 def usage(arguments, srctree=""):
     nd = {'name':sys.argv[0]}
-    print """Usage:
+    print("""Usage:
     %(name)s [options] [destdir]
     Options:
-    """ % nd
+    """ % nd)
     for arg in arguments:
         default = arg['default']
         if hasattr(default, '__call__'):
             default = "(computed value) \"" + str(default(srctree)) + '"'
         elif default is not None:
             default = '"' + default + '"'
-        print "\t--%s        Default: %s\n\t%s\n" % (
+        print("\t--%s        Default: %s\n\t%s\n" % (
             arg['name'],
             default,
-            arg['description'] % nd)
+            arg['description'] % nd))
 
 def main(extra=[]):
 ##  print ' '.join((("'%s'" % item) if ' ' in item else item)
@@ -200,10 +201,10 @@ def main(extra=[]):
     for k in 'artwork build dest source'.split():
         args[k] = os.path.normpath(args[k])
 
-    print "Source tree:", args['source']
-    print "Artwork tree:", args['artwork']
-    print "Build tree:", args['build']
-    print "Destination tree:", args['dest']
+    print("Source tree:", args['source'])
+    print("Artwork tree:", args['artwork'])
+    print("Build tree:", args['build'])
+    print("Destination tree:", args['dest'])
 
     # early out for help
     if 'help' in args:
@@ -223,10 +224,10 @@ def main(extra=[]):
     # fix up version
     if isinstance(args.get('versionfile'), str):
         try: # read in the version string
-            vf = open(args['versionfile'], 'r')
-            args['version'] = vf.read().strip().split('.')
+            with open(args['versionfile'], 'r', encoding='utf-8') as vf:
+                args['version'] = vf.read().strip().split('.')
         except:
-            print "Unable to read versionfile '%s'" % args['versionfile']
+            print("Unable to read versionfile '%s'" % args['versionfile'])
             raise
 
     # unspecified, default, and agni are default
@@ -238,7 +239,7 @@ def main(extra=[]):
 
     # debugging
     for opt in args:
-        print "Option:", opt, "=", args[opt]
+        print("Option:", opt, "=", args[opt])
 
     # pass in sourceid as an argument now instead of an environment variable
     args['sourceid'] = os.environ.get("sourceid", "")
@@ -246,18 +247,18 @@ def main(extra=[]):
     # Build base package.
     touch = args.get('touch')
     if touch:
-        print '================ Creating base package'
+        print('================ Creating base package')
     else:
-        print '================ Starting base copy'
+        print('================ Starting base copy')
     wm = LLManifest.for_platform(args['platform'], args.get('arch'))(args)
     wm.do(*args['actions'])
     # Store package file for later if making touched file.
     base_package_file = ""
     if touch:
-        print '================ Created base package ', wm.package_file
+        print('================ Created base package ', wm.package_file)
         base_package_file = "" + wm.package_file
     else:
-        print '================ Finished base copy'
+        print('================ Finished base copy')
 
     # handle multiple packages if set
     # ''.split() produces empty list
@@ -284,39 +285,38 @@ def main(extra=[]):
             args['sourceid']       = os.environ.get(package_id + "_sourceid")
             args['dest'] = base_dest_template.format(package_id)
             if touch:
-                print '================ Creating additional package for "', package_id, '" in ', args['dest']
+                print('================ Creating additional package for "', package_id, '" in ', args['dest'])
             else:
-                print '================ Starting additional copy for "', package_id, '" in ', args['dest']
+                print('================ Starting additional copy for "', package_id, '" in ', args['dest'])
             try:
                 wm = LLManifest.for_platform(args['platform'], args.get('arch'))(args)
                 wm.do(*args['actions'])
             except Exception as err:
                 sys.exit(str(err))
             if touch:
-                print '================ Created additional package ', wm.package_file, ' for ', package_id
+                print('================ Created additional package ', wm.package_file, ' for ', package_id)
                 with open(base_touch_template.format(package_id), 'w') as fp:
                     fp.write('set package_file=%s\n' % wm.package_file)
             else:
-                print '================ Finished additional copy "', package_id, '" in ', args['dest']
+                print('================ Finished additional copy "', package_id, '" in ', args['dest'])
     # Write out the package file in this format, so that it can easily be called
     # and used in a .bat file - yeah, it sucks, but this is the simplest...
     if touch:
         with open(touch, 'w') as fp:
             fp.write('set package_file=%s\n' % base_package_file)
-        print 'touched', touch
+        print('touched', touch)
     return 0
 
 class LLManifestRegistry(type):
     def __init__(cls, name, bases, dct):
         super(LLManifestRegistry, cls).__init__(name, bases, dct)
-        match = re.match("(\w+)Manifest", name)
+        match = re.match(r"(\w+)Manifest", name)
         if match:
            cls.manifests[match.group(1).lower()] = cls
 
 MissingFile = namedtuple("MissingFile", ("pattern", "tried"))
 
-class LLManifest(object):
-    __metaclass__ = LLManifestRegistry
+class LLManifest(object, metaclass=LLManifestRegistry):
     manifests = {}
     def for_platform(self, platform, arch = None):
         if arch:
@@ -408,8 +408,8 @@ def prefix(self, src='', build='', dst='', src_dst=None):
     def display_stacks(self):
         width = 1 + max(len(stack) for stack in self.PrefixManager.stacks)
         for stack in self.PrefixManager.stacks:
-            print "{} {}".format((stack + ':').ljust(width),
-                                 os.path.join(*getattr(self, stack)))
+            print("{} {}".format((stack + ':').ljust(width),
+                                 os.path.join(*getattr(self, stack))))
 
     class PrefixManager(object):
         # stack attributes we manage in this LLManifest (sub)class
@@ -426,7 +426,7 @@ def __init__(self, manifest):
             self.prevlen = { stack: len(getattr(self.manifest, stack)) - 1
                              for stack in self.stacks }
 
-        def __nonzero__(self):
+        def __bool__(self):
             # If the caller wrote:
             # if self.prefix(...):
             # then a value of this class had better evaluate as 'True'.
@@ -471,7 +471,7 @@ def end_prefix(self, descr=None):
         build = self.build_prefix.pop()
         dst = self.dst_prefix.pop()
         if descr and not(src == descr or build == descr or dst == descr):
-            raise ValueError, "End prefix '" + descr + "' didn't match '" +src+ "' or '" +dst + "'"
+            raise ValueError("End prefix '" + descr + "' didn't match '" +src+ "' or '" +dst + "'")
 
     def get_src_prefix(self):
         """ Returns the current source prefix."""
@@ -538,7 +538,7 @@ def run_command(self, command):
         Runs an external command.  
         Raises ManifestError exception if the command returns a nonzero status.
         """
-        print "Running command:", command
+        print("Running command:", command)
         sys.stdout.flush()
         try:
             subprocess.check_call(command)
@@ -551,18 +551,15 @@ def created_path(self, path):
           a) verify that you really have created it
           b) schedule it for cleanup"""
         if not os.path.exists(path):
-            raise ManifestError, "Should be something at path " + path
+            raise ManifestError("Should be something at path " + path)
         self.created_paths.append(path)
 
     def put_in_file(self, contents, dst, src=None):
         # write contents as dst
         dst_path = self.dst_path_of(dst)
         self.cmakedirs(os.path.dirname(dst_path))
-        f = open(dst_path, "wb")
-        try:
+        with open(dst_path, 'wb') as f:
             f.write(contents)
-        finally:
-            f.close()
 
         # Why would we create a file in the destination tree if not to include
         # it in the installer? The default src=None (plus the fact that the
@@ -572,16 +569,16 @@ def put_in_file(self, contents, dst, src=None):
         return dst_path
 
     def replace_in(self, src, dst=None, searchdict={}):
-        if dst == None:
+        if dst is None:
             dst = src
         # read src
-        f = open(self.src_path_of(src), "rbU")
-        contents = f.read()
-        f.close()
+        with open(self.src_path_of(src), 'r', encoding='utf-8') as f:
+            contents = f.read()
+
         # apply dict replacements
-        for old, new in searchdict.iteritems():
+        for old, new in searchdict.items():
             contents = contents.replace(old, new)
-        self.put_in_file(contents, dst)
+        self.put_in_file(contents.encode("utf-8"), dst)
         self.created_paths.append(dst)
 
     def copy_action(self, src, dst):
@@ -591,7 +588,7 @@ def copy_action(self, src, dst):
             self.created_paths.append(dst)
             self.ccopymumble(src, dst)
         else:
-            print "Doesn't exist:", src
+            print("Doesn't exist:", src)
 
     def package_action(self, src, dst):
         pass
@@ -609,8 +606,8 @@ def finish(self):
         # file error until all were resolved. This way permits the developer
         # to resolve them all at once.
         if self.missing:
-            print '*' * 72
-            print "Missing files:"
+            print('*' * 72)
+            print("Missing files:")
             # Instead of just dumping each missing file and all the places we
             # looked for it, group by common sets of places we looked. Use a
             # set to store the 'tried' directories, to avoid mismatches due to
@@ -622,12 +619,12 @@ def finish(self):
             # Now dump all the patterns sought in each group of 'tried'
             # directories.
             for tried, patterns in organize.items():
-                print "  Could not find in:"
+                print("  Could not find in:")
                 for dir in sorted(tried):
-                    print "    %s" % dir
+                    print("    %s" % dir)
                 for pattern in sorted(patterns):
-                    print "      %s" % pattern
-            print '*' * 72
+                    print("      %s" % pattern)
+            print('*' * 72)
             raise MissingError('%s patterns could not be found' % len(self.missing))
 
     def copy_finish(self):
@@ -640,18 +637,17 @@ def unpacked_finish(self):
         unpacked_file_name = "unpacked_%(plat)s_%(vers)s.tar" % {
             'plat':self.args['platform'],
             'vers':'_'.join(self.args['version'])}
-        print "Creating unpacked file:", unpacked_file_name
+        print("Creating unpacked file:", unpacked_file_name)
         # could add a gz here but that doubles the time it takes to do this step
-        tf = tarfile.open(self.src_path_of(unpacked_file_name), 'w:')
-        # add the entire installation package, at the very top level
-        tf.add(self.get_dst_prefix(), "")
-        tf.close()
+        with tarfile.open(self.src_path_of(unpacked_file_name), 'w:') as tf:
+            # add the entire installation package, at the very top level
+            tf.add(self.get_dst_prefix(), "")
 
     def cleanup_finish(self):
         """ Delete paths that were specified to have been created by this script"""
         for c in self.created_paths:
             # *TODO is this gonna be useful?
-            print "Cleaning up " + c
+            print("Cleaning up " + c)
 
     def process_either(self, src, dst):
         # If it's a real directory, recurse through it --
@@ -700,7 +696,7 @@ def includes(self, src, dst):
     def remove(self, *paths):
         for path in paths:
             if os.path.exists(path):
-                print "Removing path", path
+                print("Removing path", path)
                 if os.path.isdir(path):
                     shutil.rmtree(path)
                 else:
@@ -762,7 +758,7 @@ def ccopytree(self, src, dst):
             except (IOError, os.error) as why:
                 errors.append((srcname, dstname, why))
         if errors:
-            raise ManifestError, errors
+            raise ManifestError(errors)
 
 
     def cmakedirs(self, path):
@@ -789,22 +785,21 @@ def contents_of_tar(self, src_tar, dst_dir):
         relative to the source prefix) into the directory
         specified relative to the destination directory."""
         self.check_file_exists(src_tar)
-        tf = tarfile.open(self.src_path_of(src_tar), 'r')
-        for member in tf.getmembers():
-            tf.extract(member, self.ensure_dst_dir(dst_dir))
-            # TODO get actions working on these dudes, perhaps we should extract to a temporary directory and then process_directory on it?
-            self.file_list.append([src_tar,
-                           self.dst_path_of(os.path.join(dst_dir,member.name))])
-        tf.close()
+        with tarfile.open(self.src_path_of(src_tar), 'r') as tf:
+            for member in tf.getmembers():
+                tf.extract(member, self.ensure_dst_dir(dst_dir))
+                # TODO get actions working on these dudes, perhaps we should extract to a temporary directory and then process_directory on it?
+                self.file_list.append([src_tar,
+                               self.dst_path_of(os.path.join(dst_dir,member.name))])
 
 
     def wildcard_regex(self, src_glob, dst_glob):
         src_re = re.escape(src_glob)
-        src_re = src_re.replace('\*', '([-a-zA-Z0-9._ ]*)')
+        src_re = src_re.replace(r'\*', r'([-a-zA-Z0-9._ ]*)')
         dst_temp = dst_glob
         i = 1
         while dst_temp.count("*") > 0:
-            dst_temp = dst_temp.replace('*', '\g<' + str(i) + '>', 1)
+            dst_temp = dst_temp.replace(r'*', r'\g<' + str(i) + '>', 1)
             i = i+1
         return re.compile(src_re), dst_temp
 
@@ -841,9 +836,9 @@ def path2basename(self, path, file):
 
     def path(self, src, dst=None, err_if_missing=True):
         sys.stdout.flush()
-        if src == None:
+        if src is None:
             raise ManifestError("No source file, dst is " + dst)
-        if dst == None:
+        if dst is None:
             dst = src
         dst = os.path.join(self.get_dst_prefix(), dst)
         sys.stdout.write("Processing %s => %s ... " % (src, self._relative_dst_path(dst)))
@@ -881,7 +876,7 @@ def try_path(src):
             # assigned! Even if it was, though, we can be sure it is 0.
             return 0
 
-        print "%d files" % count
+        print("%d files" % count)
 
         # Let caller check whether we processed as many files as expected. In
         # particular, let caller notice 0.
@@ -895,7 +890,7 @@ def path_optional(self, src, dst=None):
         or a list containing dst (present). Concatenate these
         return values to get a list of all libs that are present.
         """
-        if dst == None:
+        if dst is None:
             dst = src
 
         # This was simple before we started needing to pass
@@ -910,10 +905,10 @@ def path_optional(self, src, dst=None):
             added = [os.path.relpath(d, self.get_dst_prefix())
                      for s, d in self.file_list[oldlen:]]
         except (ManifestError, MissingError) as err:
-            print >> sys.stderr, "Warning: "+err.msg
+            print("Warning: %s" % err.msg, file=sys.stderr)
             added = []
         if not added:
-            print "Skipping %s" % dst
+            print("Skipping %s" % dst)
         return added
 
     def do(self, *actions):
diff --git a/indra/lib/python/indra/util/test_win32_manifest.py b/indra/lib/python/indra/util/test_win32_manifest.py
deleted file mode 100755
index 9863b97778a68f086e435e6884edbf51ca2752e1..0000000000000000000000000000000000000000
--- a/indra/lib/python/indra/util/test_win32_manifest.py
+++ /dev/null
@@ -1,146 +0,0 @@
-#!/usr/bin/env python2
-"""\
-@file test_win32_manifest.py
-@brief Test an assembly binding version and uniqueness in a windows dll or exe.  
-
-$LicenseInfo:firstyear=2009&license=viewerlgpl$
-Second Life Viewer Source Code
-Copyright (C) 2009-2011, Linden Research, Inc.
-
-This library is free software; you can redistribute it and/or
-modify it under the terms of the GNU Lesser General Public
-License as published by the Free Software Foundation;
-version 2.1 of the License only.
-
-This library is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-Lesser General Public License for more details.
-
-You should have received a copy of the GNU Lesser General Public
-License along with this library; if not, write to the Free Software
-Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
-
-Linden Research, Inc., 945 Battery Street, San Francisco, CA  94111  USA
-$/LicenseInfo$
-"""
-import sys, os
-import tempfile
-from xml.dom.minidom import parse
-
-class AssemblyTestException(Exception):
-    pass
-
-class NoManifestException(AssemblyTestException):
-    pass
-
-class MultipleBindingsException(AssemblyTestException):
-    pass
-
-class UnexpectedVersionException(AssemblyTestException):
-    pass
-
-class NoMatchingAssemblyException(AssemblyTestException):
-    pass
-
-def get_HKLM_registry_value(key_str, value_str):
-    import _winreg
-    reg = _winreg.ConnectRegistry(None, _winreg.HKEY_LOCAL_MACHINE)
-    key = _winreg.OpenKey(reg, key_str)
-    value = _winreg.QueryValueEx(key, value_str)[0]
-    #print 'Found: %s' % value
-    return value
-        
-def find_vc_dir():
-    supported_versions = (r'8.0', r'9.0')
-    supported_products = (r'VisualStudio', r'VCExpress')
-    value_str = (r'ProductDir')
-    
-    for product in supported_products:
-        for version in supported_versions:
-            key_str = (r'SOFTWARE\Microsoft\%s\%s\Setup\VC' %
-                      (product, version))
-            try:
-                return get_HKLM_registry_value(key_str, value_str)
-            except WindowsError, err:
-                x64_key_str = (r'SOFTWARE\Wow6432Node\Microsoft\VisualStudio\%s\Setup\VS' %
-                        version)
-                try:
-                    return get_HKLM_registry_value(x64_key_str, value_str)
-                except:
-                    print >> sys.stderr, "Didn't find MS %s version %s " % (product,version)
-        
-    raise
-
-def find_mt_path():
-    vc_dir = find_vc_dir()
-    mt_path = '\"%sbin\\mt.exe\"' % vc_dir
-    return mt_path
-    
-def test_assembly_binding(src_filename, assembly_name, assembly_ver):
-    print "checking %s dependency %s..." % (src_filename, assembly_name)
-
-    (tmp_file_fd, tmp_file_name) = tempfile.mkstemp(suffix='.xml')
-    tmp_file = os.fdopen(tmp_file_fd)
-    tmp_file.close()
-
-    mt_path = find_mt_path()
-    resource_id = ""
-    if os.path.splitext(src_filename)[1].lower() == ".dll":
-       resource_id = ";#2"
-    system_call = '%s -nologo -inputresource:%s%s -out:%s > NUL' % (mt_path, src_filename, resource_id, tmp_file_name)
-    print "Executing: %s" % system_call
-    mt_result = os.system(system_call)
-    if mt_result == 31:
-        print "No manifest found in %s" % src_filename
-        raise NoManifestException()
-
-    manifest_dom = parse(tmp_file_name)
-    nodes = manifest_dom.getElementsByTagName('assemblyIdentity')
-
-    versions = list()
-    for node in nodes:
-        if node.getAttribute('name') == assembly_name:
-            versions.append(node.getAttribute('version'))
-
-    if len(versions) == 0:
-        print "No matching assemblies found in %s" % src_filename
-        raise NoMatchingAssemblyException()
-        
-    elif len(versions) > 1:
-        print "Multiple bindings to %s found:" % assembly_name
-        print versions
-        print 
-        raise MultipleBindingsException(versions)
-
-    elif versions[0] != assembly_ver:
-        print "Unexpected version found for %s:" % assembly_name
-        print "Wanted %s, found %s" % (assembly_ver, versions[0])
-        print
-        raise UnexpectedVersionException(assembly_ver, versions[0])
-            
-    os.remove(tmp_file_name)
-    
-    print "SUCCESS: %s OK!" % src_filename
-    print
-  
-if __name__ == '__main__':
-
-    print
-    print "Running test_win32_manifest.py..."
-    
-    usage = 'test_win32_manfest <srcFileName> <assemblyName> <assemblyVersion>'
-
-    try:
-        src_filename = sys.argv[1]
-        assembly_name = sys.argv[2]
-        assembly_ver = sys.argv[3]
-    except:
-        print "Usage:"
-        print usage
-        print
-        raise
-    
-    test_assembly_binding(src_filename, assembly_name, assembly_ver)
-
-    
diff --git a/indra/llcommon/tests/llsdserialize_test.cpp b/indra/llcommon/tests/llsdserialize_test.cpp
index c699ccd903ebab6ca141e12a6f73826c8a22cbf6..3c42e519440eb224834de9d56483f8a5ffa2bfbd 100644
--- a/indra/llcommon/tests/llsdserialize_test.cpp
+++ b/indra/llcommon/tests/llsdserialize_test.cpp
@@ -1795,7 +1795,7 @@ namespace tut
         set_test_name("verify NamedTempFile");
         python("platform",
                "import sys\n"
-               "print 'Running on', sys.platform\n");
+               "print('Running on %s' % sys.platform)\n");
     }
 
     // helper for test<3>
@@ -1825,14 +1825,14 @@ namespace tut
         const char pydata[] =
             "def verify(iterable):\n"
             "    it = iter(iterable)\n"
-            "    assert it.next() == 17\n"
-            "    assert abs(it.next() - 3.14) < 0.01\n"
-            "    assert it.next() == '''\\\n"
+            "    assert next(it) == 17\n"
+            "    assert abs(next(it) - 3.14) < 0.01\n"
+            "    assert next(it) == '''\\\n"
             "This string\n"
             "has several\n"
             "lines.'''\n"
             "    try:\n"
-            "        it.next()\n"
+            "        next(it)\n"
             "    except StopIteration:\n"
             "        pass\n"
             "    else:\n"
@@ -1852,7 +1852,7 @@ namespace tut
                import_llsd <<
                "def parse_each(iterable):\n"
                "    for item in iterable:\n"
-               "        yield llsd.parse(item)\n" <<
+               "        yield llsd.parse(item.encode(\"utf-8\"))\n" <<
                pydata <<
                // Don't forget raw-string syntax for Windows pathnames.
                "verify(parse_each(open(r'" << file.getName() << "')))\n");
@@ -1884,7 +1884,7 @@ namespace tut
                // N.B. Using 'print' implicitly adds newlines.
                "with open(r'" << file.getName() << "', 'w') as f:\n"
                "    for item in DATA:\n"
-               "        print >>f, llsd.format_notation(item)\n");
+               "        print(llsd.format_notation(item).decode(\"utf-8\"), file=f)\n");
 
         std::ifstream inf(file.getName().c_str());
         LLSD item;
diff --git a/indra/llcorehttp/CMakeLists.txt b/indra/llcorehttp/CMakeLists.txt
index b987a82f28f7b3fd4d17b02ddd74e7adfa44bbd7..c7014ba498b5cf32980cb0cff652f730ed2bf405 100644
--- a/indra/llcorehttp/CMakeLists.txt
+++ b/indra/llcorehttp/CMakeLists.txt
@@ -167,7 +167,7 @@ if (LL_TESTS AND LLCOREHTTP_TESTS)
                           "${llcorehttp_TEST_SOURCE_FILES}"
                           "${test_libs}"
                           "-Dhttp_proxy"
-                          ${Python2_EXECUTABLE}
+                          ${Python3_EXECUTABLE}
                           "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_llcorehttp_peer.py"
                           )
  
diff --git a/indra/llcorehttp/_httpoprequest.cpp b/indra/llcorehttp/_httpoprequest.cpp
index 5540bb4a8f15690a91b335f64eb24bd3591133c8..94bfca64a3a7487e490c908bdd6a2ef6e635dc49 100644
--- a/indra/llcorehttp/_httpoprequest.cpp
+++ b/indra/llcorehttp/_httpoprequest.cpp
@@ -647,10 +647,15 @@ HttpStatus HttpOpRequest::prepareRequest(HttpService * service)
 		break;
 	}
 
+	if (!mReqHeaders || !mReqHeaders->find("Connection"))
+	{
+        mCurlHeaders = curl_slist_append(mCurlHeaders, "Connection: keep-alive");
+	}
 
-    // *TODO: Should this be 'Keep-Alive' ?
-    mCurlHeaders = curl_slist_append(mCurlHeaders, "Connection: keep-alive");
-    mCurlHeaders = curl_slist_append(mCurlHeaders, "Keep-alive: 300");
+	if (!mReqHeaders || !mReqHeaders->find("Keep-Alive"))
+	{
+        mCurlHeaders = curl_slist_append(mCurlHeaders, "Keep-Alive: 300");
+	}
 
 	// Tracing
 	if (mTracing >= HTTP_TRACE_CURL_HEADERS)
diff --git a/indra/llcorehttp/httpheaders.cpp b/indra/llcorehttp/httpheaders.cpp
index de35eeca0893777f29c2e3eeca8787b92cd69216..1c0381ccb6e9d9858110023d4d0f02835f9e2b26 100644
--- a/indra/llcorehttp/httpheaders.cpp
+++ b/indra/llcorehttp/httpheaders.cpp
@@ -42,12 +42,30 @@ HttpHeaders::clear()
 
 void HttpHeaders::append(const std::string & name, const std::string & value)
 {
+    for (reverse_iterator iter(rbegin()), iend(rend()); iend != iter; ++iter)
+    {
+        if ((*iter).first == name)
+        {
+            iter->second = value;
+            return;
+        }
+    }
+
 	mHeaders.push_back(value_type(name, value));
 }
 
 
 void HttpHeaders::append(const char * name, const char * value)
 {
+    for (reverse_iterator iter(rbegin()), iend(rend()); iend != iter; ++iter)
+    {
+        if ((*iter).first == name)
+        {
+            iter->second = value;
+            return;
+        }
+    }
+
 	mHeaders.push_back(value_type(name, value));
 }
 
diff --git a/indra/llcorehttp/tests/test_httprequest.hpp b/indra/llcorehttp/tests/test_httprequest.hpp
index 11f6b71f2f2195641bfe87a15e4949111371e327..9364161da1bd0bf9c48be35df229f47f774705bc 100644
--- a/indra/llcorehttp/tests/test_httprequest.hpp
+++ b/indra/llcorehttp/tests/test_httprequest.hpp
@@ -2170,8 +2170,8 @@ void HttpRequestTestObjectType::test<19>()
 
 		// headers
 		headers = HttpHeaders::ptr_t(new HttpHeaders);
-		headers->append("Keep-Alive", "120");
-		headers->append("Accept-encoding", "deflate");
+        headers->append("Keep-Alive", "120");
+		headers->append("Accept-Encoding", "deflate");
 		headers->append("Accept", "text/plain");
 
 		// Issue a GET with modified headers
@@ -2352,10 +2352,10 @@ void HttpRequestTestObjectType::test<20>()
 
 		// headers
 		headers = HttpHeaders::ptr_t(new HttpHeaders());
-		headers->append("keep-Alive", "120");
+        headers->append("Keep-Alive", "120");
 		headers->append("Accept", "text/html");
-		headers->append("content-type", "application/llsd+xml");
-		headers->append("cache-control", "no-store");
+		headers->append("Content-Type", "application/llsd+xml");
+		headers->append("Cache-Control", "no-store");
 		
 		// And a buffer array
 		const char * msg("<xml><llsd><string>It was the best of times, it was the worst of times.</string></llsd></xml>");
@@ -2556,9 +2556,9 @@ void HttpRequestTestObjectType::test<21>()
 
 		// headers
 		headers = HttpHeaders::ptr_t(new HttpHeaders);
-		headers->append("content-type", "text/plain");
-		headers->append("content-type", "text/html");
-		headers->append("content-type", "application/llsd+xml");
+		headers->append("Content-Type", "text/plain");
+		headers->append("Content-Type", "text/html");
+		headers->append("Content-Type", "application/llsd+xml");
 		
 		// And a buffer array
 		const char * msg("<xml><llsd><string>It was the best of times, it was the worst of times.</string></llsd></xml>");
diff --git a/indra/llcorehttp/tests/test_llcorehttp_peer.py b/indra/llcorehttp/tests/test_llcorehttp_peer.py
index 6e5ca38e89475eb415ee4c6500d81b147fc1a64f..6993a55eea2f4dc9a1de9212d7e51cebd96e2410 100755
--- a/indra/llcorehttp/tests/test_llcorehttp_peer.py
+++ b/indra/llcorehttp/tests/test_llcorehttp_peer.py
@@ -1,6 +1,6 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 """\
-@file   test_llsdmessage_peer.py
+@file   test_llcorehttp_peer.py
 @author Nat Goodspeed
 @date   2008-10-09
 @brief  This script asynchronously runs the executable (with args) specified on
@@ -34,11 +34,8 @@
 import time
 import select
 import getopt
-try:
-    from cStringIO import StringIO
-except ImportError:
-    from StringIO import StringIO
-from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
+from io import StringIO
+from http.server import HTTPServer, BaseHTTPRequestHandler
 
 from llbase.fastest_elementtree import parse as xml_parse
 from llbase import llsd
@@ -101,9 +98,9 @@ def read(self):
         while size_remaining:
             chunk_size = min(size_remaining, max_chunk_size)
             chunk = self.rfile.read(chunk_size)
-            L.append(chunk)
+            L.append(chunk.decode('utf-8'))
             size_remaining -= len(chunk)
-        return ''.join(L)
+        return (''.join(L)).encode('utf-8')
         # end of swiped read() logic
 
     def read_xml(self):
@@ -127,26 +124,26 @@ def do_GET(self, withdata=True):
         try:
             self.answer(dict(reply="success", status=200,
                              reason="Your GET operation worked"))
-        except self.ignore_exceptions, e:
-            print >> sys.stderr, "Exception during GET (ignoring): %s" % str(e)
+        except self.ignore_exceptions as e:
+            print("Exception during GET (ignoring): %s" % str(e), file=sys.stderr)
 
     def do_POST(self):
         # Read the provided POST data.
         # self.answer(self.read())
         try:
             self.answer(dict(reply="success", status=200,
-                             reason=self.read()))
-        except self.ignore_exceptions, e:
-            print >> sys.stderr, "Exception during POST (ignoring): %s" % str(e)
+                             reason=self.read().decode('utf-8')))
+        except self.ignore_exceptions as e:
+            print("Exception during POST (ignoring): %s" % str(e), file=sys.stderr)
 
     def do_PUT(self):
         # Read the provided PUT data.
         # self.answer(self.read())
         try:
             self.answer(dict(reply="success", status=200,
-                             reason=self.read()))
-        except self.ignore_exceptions, e:
-            print >> sys.stderr, "Exception during PUT (ignoring): %s" % str(e)
+                             reason=self.read().decode('utf-8')))
+        except self.ignore_exceptions as e:
+            print("Exception during PUT (ignoring): %s" % str(e), file=sys.stderr)
 
     def answer(self, data, withdata=True):
         debug("%s.answer(%s): self.path = %r", self.__class__.__name__, data, self.path)
@@ -186,7 +183,7 @@ def answer(self, data, withdata=True):
             self.send_header("Content-type", "text/plain")
             self.end_headers()
             if body:
-                self.wfile.write(body)
+                self.wfile.write(body.encode('utf-8'))
         elif "/bug2295/" in self.path:
             # Test for https://jira.secondlife.com/browse/BUG-2295
             #
@@ -221,7 +218,7 @@ def answer(self, data, withdata=True):
             self.send_header("Content-type", "text/plain")
             self.end_headers()
             if body:
-                self.wfile.write(body)
+                self.wfile.write(body.encode('utf-8'))
         elif "fail" not in self.path:
             data = data.copy()          # we're going to modify
             # Ensure there's a "reply" key in data, even if there wasn't before
@@ -233,7 +230,7 @@ def answer(self, data, withdata=True):
                 self.reflect_headers()
             self.send_header("Content-type", "application/llsd+xml")
             self.send_header("Content-Length", str(len(response)))
-            self.send_header("X-LL-Special", "Mememememe");
+            self.send_header("X-LL-Special", "Mememememe")
             self.end_headers()
             if withdata:
                 self.wfile.write(response)
@@ -255,9 +252,9 @@ def answer(self, data, withdata=True):
             self.end_headers()
 
     def reflect_headers(self):
-        for name in self.headers.keys():
-            # print "Header:  %s: %s" % (name, self.headers[name])
-            self.send_header("X-Reflect-" + name, self.headers[name])
+        for name in list(self.headers.keys()):
+            # print("Header:  %s: %s" % (name, self.headers[name]))
+            self.send_header("X-Reflect-" + name, str(self.headers[name]))
 
     if not VERBOSE:
         # When VERBOSE is set, skip both these overrides because they exist to
@@ -283,10 +280,10 @@ class Server(HTTPServer):
     # default behavior which *shouldn't* cause the program to return
     # a failure status.
     def handle_error(self, request, client_address):
-        print '-'*40
-        print 'Ignoring exception during processing of request from',
-        print client_address
-        print '-'*40
+        print('-'*40)
+        print('Ignoring exception during processing of request from', end=' ')
+        print(client_address)
+        print('-'*40)
 
 if __name__ == "__main__":
     do_valgrind = False
@@ -307,7 +304,7 @@ def handle_error(self, request, client_address):
         # "Then there's Windows"
         # Instantiate a Server(TestHTTPRequestHandler) on the first free port
         # in the specified port range.
-        httpd, port = freeport(xrange(8000, 8020), make_server)
+        httpd, port = freeport(range(8000, 8020), make_server)
 
     # Pass the selected port number to the subject test program via the
     # environment. We don't want to impose requirements on the test program's
diff --git a/indra/llimage/llimageworker.cpp b/indra/llimage/llimageworker.cpp
index 140555a1acfaccf211d2495ca60908d10a775d57..84b5186ac28ecf7dda42372982ef0e6823157db1 100644
--- a/indra/llimage/llimageworker.cpp
+++ b/indra/llimage/llimageworker.cpp
@@ -106,7 +106,9 @@ LLImageDecodeThread::LLImageDecodeThread(bool threaded, U32 pool_size)
 		}
 	}
     else if (pool_size == 1)  // Disable if only 1
+	{
         pool_size = 0;
+	}
 
 	sImageThreads = pool_size;
 	
diff --git a/indra/llmessage/tests/test_llsdmessage_peer.py b/indra/llmessage/tests/test_llsdmessage_peer.py
deleted file mode 100755
index 511fd31fc86e6a0c6dccdd4a5023e48b42907316..0000000000000000000000000000000000000000
--- a/indra/llmessage/tests/test_llsdmessage_peer.py
+++ /dev/null
@@ -1,176 +0,0 @@
-#!/usr/bin/env python2
-"""\
-@file   test_llsdmessage_peer.py
-@author Nat Goodspeed
-@date   2008-10-09
-@brief  This script asynchronously runs the executable (with args) specified on
-        the command line, returning its result code. While that executable is
-        running, we provide dummy local services for use by C++ tests.
-
-$LicenseInfo:firstyear=2008&license=viewerlgpl$
-Second Life Viewer Source Code
-Copyright (C) 2010, Linden Research, Inc.
-
-This library is free software; you can redistribute it and/or
-modify it under the terms of the GNU Lesser General Public
-License as published by the Free Software Foundation;
-version 2.1 of the License only.
-
-This library is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-Lesser General Public License for more details.
-
-You should have received a copy of the GNU Lesser General Public
-License along with this library; if not, write to the Free Software
-Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
-
-Linden Research, Inc., 945 Battery Street, San Francisco, CA  94111  USA
-$/LicenseInfo$
-"""
-
-import os
-import sys
-from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
-
-from llbase.fastest_elementtree import parse as xml_parse
-from llbase import llsd
-from testrunner import freeport, run, debug, VERBOSE
-import time
-
-_storage=None
-
-class TestHTTPRequestHandler(BaseHTTPRequestHandler):
-    """This subclass of BaseHTTPRequestHandler is to receive and echo
-    LLSD-flavored messages sent by the C++ LLHTTPClient.
-    """
-    def read(self):
-        # The following logic is adapted from the library module
-        # SimpleXMLRPCServer.py.
-        # Get arguments by reading body of request.
-        # We read this in chunks to avoid straining
-        # socket.read(); around the 10 or 15Mb mark, some platforms
-        # begin to have problems (bug #792570).
-        try:
-            size_remaining = int(self.headers["content-length"])
-        except (KeyError, ValueError):
-            return ""
-        max_chunk_size = 10*1024*1024
-        L = []
-        while size_remaining:
-            chunk_size = min(size_remaining, max_chunk_size)
-            chunk = self.rfile.read(chunk_size)
-            L.append(chunk)
-            size_remaining -= len(chunk)
-        return ''.join(L)
-        # end of swiped read() logic
-
-    def read_xml(self):
-        # This approach reads the entire POST data into memory first
-        return llsd.parse(self.read())
-##         # This approach attempts to stream in the LLSD XML from self.rfile,
-##         # assuming that the underlying XML parser reads its input file
-##         # incrementally. Unfortunately I haven't been able to make it work.
-##         tree = xml_parse(self.rfile)
-##         debug("Finished raw parse")
-##         debug("parsed XML tree %s", tree)
-##         debug("parsed root node %s", tree.getroot())
-##         debug("root node tag %s", tree.getroot().tag)
-##         return llsd.to_python(tree.getroot())
-
-    def do_HEAD(self):
-        self.do_GET(withdata=False)
-
-    def do_GET(self, withdata=True):
-        # Of course, don't attempt to read data.
-        data = dict(reply="success", body="avatar", random=17)
-        self.answer(data, withdata=withdata)
-
-    def do_POST(self):
-        # Read the provided POST data.
-        self.answer(self.read_xml())
-
-    def do_PUT(self):
-        # Read the provided PUT data.
-        self.answer(self.read_xml())
-
-    def answer(self, data, withdata=True):
-        global _storage
-        debug("%s.answer(%s): self.path = %r", self.__class__.__name__, data, self.path)
-        if "fail" in self.path or "test/error" in self.path: # fail requested
-            status = data.get("status", 500)
-            # self.responses maps an int status to a (short, long) pair of
-            # strings. We want the longer string. That's why we pass a string
-            # pair to get(): the [1] will select the second string, whether it
-            # came from self.responses or from our default pair.
-            reason = data.get("reason",
-                               self.responses.get(status,
-                                                  ("fail requested",
-                                                   "Your request specified failure status %s "
-                                                   "without providing a reason" % status))[1])
-            debug("fail requested: %s: %r", status, reason)
-            self.send_error(status, reason)
-        else:
-            if "web/echo" in self.path:
-                pass
-            elif "test/timeout" in self.path:
-                time.sleep(5.0)
-                return
-            elif "test/storage" in self.path:
-                if "GET" == self.command:
-                    data = _storage
-                else:
-                    _storage = data
-                    data = "ok"
-            else:
-                data = data.copy()          # we're going to modify
-                # Ensure there's a "reply" key in data, even if there wasn't before
-                data["reply"] = data.get("reply", llsd.LLSD("success"))
-            response = llsd.format_xml(data)
-            debug("success: %s", response)
-            self.send_response(200)
-            self.send_header("Content-type", "application/llsd+xml")
-            self.send_header("Content-Length", str(len(response)))
-            self.end_headers()
-            if withdata:
-                self.wfile.write(response)
-
-    if not VERBOSE:
-        # When VERBOSE is set, skip both these overrides because they exist to
-        # suppress output.
-
-        def log_request(self, code, size=None):
-            # For present purposes, we don't want the request splattered onto
-            # stderr, as it would upset devs watching the test run
-            pass
-
-        def log_error(self, format, *args):
-            # Suppress error output as well
-            pass
-
-class Server(HTTPServer):
-    # This pernicious flag is on by default in HTTPServer. But proper
-    # operation of freeport() absolutely depends on it being off.
-    allow_reuse_address = False
-
-if __name__ == "__main__":
-    # function to make a server with specified port
-    make_server = lambda port: Server(('127.0.0.1', port), TestHTTPRequestHandler)
-
-    if not sys.platform.startswith("win"):
-        # Instantiate a Server(TestHTTPRequestHandler) on a port chosen by the
-        # runtime.
-        httpd = make_server(0)
-    else:
-        # "Then there's Windows"
-        # Instantiate a Server(TestHTTPRequestHandler) on the first free port
-        # in the specified port range.
-        httpd, port = freeport(xrange(8000, 8020), make_server)
-
-    # Pass the selected port number to the subject test program via the
-    # environment. We don't want to impose requirements on the test program's
-    # command-line parsing -- and anyway, for C++ integration tests, that's
-    # performed in TUT code rather than our own.
-    os.environ["PORT"] = str(httpd.server_port)
-    debug("$PORT = %s", httpd.server_port)
-    sys.exit(run(server_inst=httpd, *sys.argv[1:]))
diff --git a/indra/llmessage/tests/testrunner.py b/indra/llmessage/tests/testrunner.py
index baabe05f1b5cd3d1d803dc12d2ee14c11756e4aa..47c09ca245305183c4382e1b2427d177950f3e8f 100755
--- a/indra/llmessage/tests/testrunner.py
+++ b/indra/llmessage/tests/testrunner.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 """\
 @file   testrunner.py
 @author Nat Goodspeed
@@ -41,7 +41,7 @@
 
 if VERBOSE:
     def debug(fmt, *args):
-        print fmt % args
+        print(fmt % args)
         sys.stdout.flush()
 else:
     debug = lambda *args: None
@@ -99,14 +99,14 @@ class Server(HTTPServer):
         # error because we can't return meaningful values. We have no 'port',
         # therefore no 'expr(port)'.
         portiter = iter(portlist)
-        port = portiter.next()
+        port = next(portiter)
 
         while True:
             try:
                 # If this value of port works, return as promised.
                 value = expr(port)
 
-            except socket.error, err:
+            except socket.error as err:
                 # Anything other than 'Address already in use', propagate
                 if err.args[0] != errno.EADDRINUSE:
                     raise
@@ -117,9 +117,9 @@ class Server(HTTPServer):
                 type, value, tb = sys.exc_info()
                 try:
                     try:
-                        port = portiter.next()
+                        port = next(portiter)
                     except StopIteration:
-                        raise type, value, tb
+                        raise value.with_traceback(tb)
                 finally:
                     # Clean up local traceback, see docs for sys.exc_info()
                     del tb
@@ -138,7 +138,7 @@ class Server(HTTPServer):
             # If we've actually arrived at this point, portiter.next() delivered a
             # new port value. Loop back to pass that to expr(port).
 
-    except Exception, err:
+    except Exception as err:
         debug("*** freeport() raising %s: %s", err.__class__.__name__, err)
         raise
 
@@ -227,13 +227,13 @@ def test_freeport():
     def exc(exception_class, *args):
         try:
             yield
-        except exception_class, err:
+        except exception_class as err:
             for i, expected_arg in enumerate(args):
                 assert expected_arg == err.args[i], \
                        "Raised %s, but args[%s] is %r instead of %r" % \
                        (err.__class__.__name__, i, err.args[i], expected_arg)
-            print "Caught expected exception %s(%s)" % \
-                  (err.__class__.__name__, ', '.join(repr(arg) for arg in err.args))
+            print("Caught expected exception %s(%s)" % \
+                  (err.__class__.__name__, ', '.join(repr(arg) for arg in err.args)))
         else:
             assert False, "Failed to raise " + exception_class.__class__.__name__
 
@@ -270,18 +270,18 @@ class SomeError(Exception): pass
     # This is the magic exception that should prompt us to retry
     inuse = socket.error(errno.EADDRINUSE, 'Address already in use')
     # Get the iterator to our ports list so we can check later if we've used all
-    ports = iter(xrange(5))
+    ports = iter(range(5))
     with exc(socket.error, errno.EADDRINUSE):
         freeport(ports, lambda port: raiser(inuse))
     # did we entirely exhaust 'ports'?
     with exc(StopIteration):
-        ports.next()
+        next(ports)
 
-    ports = iter(xrange(2))
+    ports = iter(range(2))
     # Any exception but EADDRINUSE should quit immediately
     with exc(SomeError):
         freeport(ports, lambda port: raiser(SomeError()))
-    assert_equals(ports.next(), 1)
+    assert_equals(next(ports), 1)
 
     # ----------- freeport() with platform-dependent socket stuff ------------
     # This is what we should've had unit tests to begin with (see CHOP-661).
@@ -290,14 +290,14 @@ def newbind(port):
         sock.bind(('127.0.0.1', port))
         return sock
 
-    bound0, port0 = freeport(xrange(7777, 7780), newbind)
+    bound0, port0 = freeport(range(7777, 7780), newbind)
     assert_equals(port0, 7777)
-    bound1, port1 = freeport(xrange(7777, 7780), newbind)
+    bound1, port1 = freeport(range(7777, 7780), newbind)
     assert_equals(port1, 7778)
-    bound2, port2 = freeport(xrange(7777, 7780), newbind)
+    bound2, port2 = freeport(range(7777, 7780), newbind)
     assert_equals(port2, 7779)
     with exc(socket.error, errno.EADDRINUSE):
-        bound3, port3 = freeport(xrange(7777, 7780), newbind)
+        bound3, port3 = freeport(range(7777, 7780), newbind)
 
 if __name__ == "__main__":
     test_freeport()
diff --git a/indra/newview/CMakeLists.txt b/indra/newview/CMakeLists.txt
index 91bf252a30006ef4db4c3adfa6dac597b457dfc3..96bf840e2dff8a04515c7eb6a53890d580531a88 100644
--- a/indra/newview/CMakeLists.txt
+++ b/indra/newview/CMakeLists.txt
@@ -1966,7 +1966,7 @@ if (WINDOWS)
 
     add_custom_command(
       OUTPUT  ${VIEWER_BUILD_DEST_DIR}/copy_touched.bat
-      COMMAND ${Python2_EXECUTABLE}
+      COMMAND ${Python3_EXECUTABLE}
       ARGS
         ${CMAKE_CURRENT_SOURCE_DIR}/viewer_manifest.py
         --actions=copy
@@ -2014,7 +2014,7 @@ if (WINDOWS)
     if (PACKAGE)
       add_custom_command(
         OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_CFG_INTDIR}/event_host.tar.bz2
-        COMMAND ${Python2_EXECUTABLE}
+        COMMAND ${Python3_EXECUTABLE}
         ARGS
           ${CMAKE_CURRENT_SOURCE_DIR}/event_host_manifest.py
           ${CMAKE_CURRENT_SOURCE_DIR}/..
@@ -2028,7 +2028,7 @@ if (WINDOWS)
 
       add_custom_command(
         OUTPUT ${VIEWER_BUILD_DEST_DIR}/touched.bat
-        COMMAND ${Python2_EXECUTABLE}
+        COMMAND ${Python3_EXECUTABLE}
         ARGS
           ${CMAKE_CURRENT_SOURCE_DIR}/viewer_manifest.py
           --arch=${ARCH}
@@ -2178,7 +2178,7 @@ if (LINUX)
 
   add_custom_command(
       OUTPUT ${product}.tar.bz2
-      COMMAND ${Python2_EXECUTABLE}
+      COMMAND ${Python3_EXECUTABLE}
       ARGS
         ${CMAKE_CURRENT_SOURCE_DIR}/viewer_manifest.py
         --arch=${ARCH}
@@ -2206,7 +2206,7 @@ if (LINUX)
 
   add_custom_command(
     OUTPUT  ${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_CFG_INTDIR}/.${product}.copy_touched
-    COMMAND ${Python2_EXECUTABLE}
+    COMMAND ${Python3_EXECUTABLE}
     ARGS
       ${CMAKE_CURRENT_SOURCE_DIR}/viewer_manifest.py
       --actions=copy
@@ -2299,7 +2299,7 @@ if (DARWIN)
 
   add_custom_command(
     TARGET ${VIEWER_BINARY_NAME} POST_BUILD
-    COMMAND ${Python2_EXECUTABLE}
+    COMMAND ${Python3_EXECUTABLE}
     ARGS
       ${CMAKE_CURRENT_SOURCE_DIR}/viewer_manifest.py
       --actions=copy
@@ -2344,7 +2344,7 @@ if (DARWIN)
 
       add_custom_command(
         TARGET llpackage POST_BUILD
-        COMMAND ${Python2_EXECUTABLE}
+        COMMAND ${Python3_EXECUTABLE}
         ARGS
           ${CMAKE_CURRENT_SOURCE_DIR}/viewer_manifest.py
           --arch=${ARCH}
@@ -2410,7 +2410,7 @@ if (PACKAGE AND (RELEASE_CRASH_REPORTING OR NON_RELEASE_CRASH_REPORTING) AND VIE
         set(LLBUILD_CONFIG ${CMAKE_CFG_INTDIR})
     endif(CMAKE_CFG_INTDIR STREQUAL ".")
     add_custom_command(OUTPUT "${VIEWER_SYMBOL_FILE}"
-      COMMAND "${Python2_EXECUTABLE}"
+      COMMAND "${Python3_EXECUTABLE}"
       ARGS
         "${CMAKE_CURRENT_SOURCE_DIR}/generate_breakpad_symbols.py"
         "${LLBUILD_CONFIG}"
diff --git a/indra/newview/build_win32_appConfig.py b/indra/newview/build_win32_appConfig.py
deleted file mode 100755
index 9fdceee1be91149947d95bf8227e1632fcd8af27..0000000000000000000000000000000000000000
--- a/indra/newview/build_win32_appConfig.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# @file build_win32_appConfig.py
-# @brief Create the windows app.config file to redirect crt linkage.
-#
-# $LicenseInfo:firstyear=2009&license=viewerlgpl$
-# Second Life Viewer Source Code
-# Copyright (C) 2010, Linden Research, Inc.
-# 
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation;
-# version 2.1 of the License only.
-# 
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-# 
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
-# 
-# Linden Research, Inc., 945 Battery Street, San Francisco, CA  94111  USA
-# $/LicenseInfo$
-
-import sys, os, re
-from xml.dom.minidom import parse
-
-def munge_binding_redirect_version(src_manifest_name, src_config_name, dst_config_name):
-    manifest_dom = parse(src_manifest_name)
-    node = manifest_dom.getElementsByTagName('assemblyIdentity')[0]
-    manifest_assm_ver = node.getAttribute('version')
-    
-    config_dom = parse(src_config_name)
-    node = config_dom.getElementsByTagName('bindingRedirect')[0]
-    node.setAttribute('newVersion', manifest_assm_ver)
-    src_old_ver = re.match('([^-]*-).*', node.getAttribute('oldVersion')).group(1)
-    node.setAttribute('oldVersion', src_old_ver + manifest_assm_ver)
-    comment = config_dom.createComment("This file is automatically generated by the build. see indra/newview/build_win32_appConfig.py")
-    config_dom.insertBefore(comment, config_dom.childNodes[0])
-
-    print "Writing: " + dst_config_name
-    f = open(dst_config_name, 'w')
-    config_dom.writexml(f)
-    f.close()
-    
-    
-
-def main():
-    config = sys.argv[1]
-    src_dir = sys.argv[2]
-    dst_dir = sys.argv[3]
-    dst_name = sys.argv[4]
-    
-    if config.lower() == 'debug':
-        src_manifest_name = dst_dir + '/Microsoft.VC80.DebugCRT.manifest'
-        src_config_name = src_dir + '/SecondLifeDebug.exe.config'
-    else:
-        src_manifest_name = dst_dir + '/Microsoft.VC80.CRT.manifest'
-        src_config_name = src_dir + '/SecondLife.exe.config'
-
-    dst_config_name = dst_dir + '/' + dst_name
-        
-    munge_binding_redirect_version(src_manifest_name, src_config_name, dst_config_name)
-    
-    return 0
-
-if __name__ == "__main__":
-    main()
diff --git a/indra/newview/generate_breakpad_symbols.py b/indra/newview/generate_breakpad_symbols.py
index 2fc5abe8bacf6fc65450d688d872a81cf6cc3575..4a2ff0a2caa76eeb9f17c636982ec76701a52768 100755
--- a/indra/newview/generate_breakpad_symbols.py
+++ b/indra/newview/generate_breakpad_symbols.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 """\
 @file generate_breakpad_symbols.py
 @author Brad Kittenbrink <brad@lindenlab.com>
@@ -37,13 +37,13 @@
 import shlex
 import subprocess
 import tarfile
-import StringIO
+import io
 import pprint
 
 DEBUG=False
 
 def usage():
-    print >>sys.stderr, "usage: %s search_dirs viewer_exes libs_suffix dump_syms_tool viewer_symbol_file" % sys.argv[0]
+    print("usage: %s search_dirs viewer_exes libs_suffix dump_syms_tool viewer_symbol_file" % sys.argv[0], file=sys.stderr)
 
 class MissingModuleError(Exception):
     def __init__(self, modules):
@@ -51,10 +51,10 @@ def __init__(self, modules):
         self.modules = modules
 
 def main(configuration, search_dirs, viewer_exes, libs_suffix, dump_syms_tool, viewer_symbol_file):
-    print "generate_breakpad_symbols run with args: %s" % str((configuration, search_dirs, viewer_exes, libs_suffix, dump_syms_tool, viewer_symbol_file))
+    print("generate_breakpad_symbols run with args: %s" % str((configuration, search_dirs, viewer_exes, libs_suffix, dump_syms_tool, viewer_symbol_file)))
 
-    if not re.match("release", configuration, re.IGNORECASE):
-        print "skipping breakpad symbol generation for non-release build."
+    if not re.match(r"release", configuration, re.IGNORECASE):
+        print("skipping breakpad symbol generation for non-release build.")
         return 0
 
     # split up list of viewer_exes
@@ -75,12 +75,12 @@ def list_files():
         for search_dir in search_dirs:
             for (dirname, subdirs, filenames) in os.walk(search_dir):
                 if DEBUG:
-                    print "scanning '%s' for modules..." % dirname
-                for f in itertools.ifilter(matches, filenames):
+                    print("scanning '%s' for modules..." % dirname)
+                for f in filter(matches, filenames):
                     yield os.path.join(dirname, f)
 
     def dump_module(m):
-        print "dumping module '%s' with '%s'..." % (m, dump_syms_tool)
+        print("dumping module '%s' with '%s'..." % (m, dump_syms_tool))
         dsym_full_path = m
         child = subprocess.Popen([dump_syms_tool, dsym_full_path] , stdout=subprocess.PIPE)
         out, err = child.communicate()
@@ -91,27 +91,27 @@ def dump_module(m):
         
     for m in list_files():
         if DEBUG:
-            print "examining module '%s' ... " % m,
+            print("examining module '%s' ... " % m, end=' ')
         filename=os.path.basename(m)
         if -1 != m.find("DWARF"):
             # Just use this module; it has the symbols we want.
             modules[filename] = m
             if DEBUG:
-                print "found dSYM entry"
+                print("found dSYM entry")
         elif filename not in modules:
             # Only use this if we don't already have a (possibly better) entry.
             modules[filename] = m
             if DEBUG:
-                print "found new entry"
+                print("found new entry")
         elif DEBUG:
-            print "ignoring entry"
+            print("ignoring entry")
 
 
-    print "Found these following modules:"
+    print("Found these following modules:")
     pprint.pprint( modules )
 
     out = tarfile.open(viewer_symbol_file, 'w:bz2')
-    for (filename,status,symbols,err) in itertools.imap(dump_module, modules.values()):
+    for (filename,status,symbols,err) in map(dump_module, list(modules.values())):
         if status == 0:
             module_line = symbols[:symbols.index('\n')]
             module_line = module_line.split()
@@ -121,20 +121,20 @@ def dump_module(m):
                 mod_name = module[:module.rindex('.pdb')]
             else:
                 mod_name = module
-            symbolfile = StringIO.StringIO(symbols)
+            symbolfile = io.BytesIO(symbols)
             info = tarfile.TarInfo("%(module)s/%(hash_id)s/%(mod_name)s.sym" % dict(module=module, hash_id=hash_id, mod_name=mod_name))
             info.size = symbolfile.len
             out.addfile(info, symbolfile)
         else:
-            print >>sys.stderr, "warning: failed to dump symbols for '%s': %s" % (filename, err)
+            print("warning: failed to dump symbols for '%s': %s" % (filename, err), file=sys.stderr)
 
     out.close()
 
     missing_modules = [m for (m,_) in
-        itertools.ifilter(lambda (k,v): not v, found_required.iteritems())
+        filter(lambda k_v: not k_v[1], iter(found_required.items()))
     ]
     if missing_modules:
-        print >> sys.stderr, "failed to generate %s" % viewer_symbol_file
+        print("failed to generate %s" % viewer_symbol_file, file=sys.stderr)
         os.remove(viewer_symbol_file)
         raise MissingModuleError(missing_modules)
 
@@ -148,13 +148,13 @@ def match_module_basename(m):
                    == os.path.splitext(os.path.basename(m))[0].lower()
         # there must be at least one .sym file in tarfile_members that matches
         # each required module (ignoring file extensions)
-        if not any(itertools.imap(match_module_basename, tarfile_members)):
-            print >> sys.stderr, "failed to find required %s in generated %s" \
-                    % (required_module, viewer_symbol_file)
+        if not any(map(match_module_basename, tarfile_members)):
+            print("failed to find required %s in generated %s" \
+                    % (required_module, viewer_symbol_file), file=sys.stderr)
             os.remove(viewer_symbol_file)
             raise MissingModuleError([required_module])
 
-    print "successfully generated %s including required modules '%s'" % (viewer_symbol_file, viewer_exes)
+    print("successfully generated %s including required modules '%s'" % (viewer_symbol_file, viewer_exes))
 
     return 0
 
diff --git a/indra/newview/lldrawpoolalpha.cpp b/indra/newview/lldrawpoolalpha.cpp
index f4e0d210c5c64f96839f570ef70de38a9ff04602..3ecc43c4c3ed15e4585d968616befae18c112661 100644
--- a/indra/newview/lldrawpoolalpha.cpp
+++ b/indra/newview/lldrawpoolalpha.cpp
@@ -650,9 +650,9 @@ void LLDrawPoolAlpha::renderAlpha(U32 mask, S32 pass)
                 U32 have_mask = params.mVertexBuffer->getTypeMask() & mask;
 				if (have_mask != mask)
 				{ //FIXME!
-					LL_WARNS_ONCE() << "Missing required components, expected mask: " << mask
-									<< " present: " << have_mask
-									<< ". Skipping render batch." << LL_ENDL;
+					//LL_WARNS_ONCE() << "Missing required components, expected mask: " << mask
+					//				<< " present: " << have_mask
+					//				<< ". Skipping render batch." << LL_ENDL;
 					continue;
 				}
 
diff --git a/indra/newview/llmarketplacefunctions.cpp b/indra/newview/llmarketplacefunctions.cpp
index 60ca70ea74351d7aa00504e4c01d7f1ae50b33c8..e8b74e6ef7e8c423a11f171f5728d08e24aad44b 100644
--- a/indra/newview/llmarketplacefunctions.cpp
+++ b/indra/newview/llmarketplacefunctions.cpp
@@ -214,7 +214,7 @@ namespace LLMarketplaceImport
         httpOpts->setFollowRedirects(true);
 
         httpHeaders->append(HTTP_OUT_HEADER_ACCEPT, "*/*");
-        httpHeaders->append(HTTP_OUT_HEADER_CONNECTION, "Keep-Alive");
+        httpHeaders->append(HTTP_OUT_HEADER_CONNECTION, "keep-alive");
         httpHeaders->append(HTTP_OUT_HEADER_COOKIE, sMarketplaceCookie);
         httpHeaders->append(HTTP_OUT_HEADER_CONTENT_TYPE, HTTP_CONTENT_XML);
         httpHeaders->append(HTTP_OUT_HEADER_USER_AGENT, LLViewerMedia::getInstance()->getCurrentUserAgent());
@@ -854,8 +854,8 @@ void LLMarketplaceData::getSLMListingsCoro(LLUUID folderId)
     LLCore::HttpRequest::ptr_t httpRequest(new LLCore::HttpRequest);
     LLCore::HttpHeaders::ptr_t httpHeaders(new LLCore::HttpHeaders);
 
-    httpHeaders->append("Accept", "application/json");
-    httpHeaders->append("Content-Type", "application/json");
+    httpHeaders->append(HTTP_OUT_HEADER_ACCEPT, "application/json");
+    httpHeaders->append(HTTP_OUT_HEADER_CONTENT_TYPE, "application/json");
 
     std::string url = getSLMConnectURL("/listings");
 
@@ -916,8 +916,8 @@ void LLMarketplaceData::getSingleListingCoro(S32 listingId, LLUUID folderId)
     LLCore::HttpRequest::ptr_t httpRequest(new LLCore::HttpRequest);
     LLCore::HttpHeaders::ptr_t httpHeaders(new LLCore::HttpHeaders);
 
-    httpHeaders->append("Accept", "application/json");
-    httpHeaders->append("Content-Type", "application/json");
+    httpHeaders->append(HTTP_OUT_HEADER_ACCEPT, "application/json");
+    httpHeaders->append(HTTP_OUT_HEADER_CONTENT_TYPE, "application/json");
 
     std::string url = getSLMConnectURL("/listing/") + llformat("%d", listingId);
 
@@ -984,8 +984,8 @@ void LLMarketplaceData::createSLMListingCoro(LLUUID folderId, LLUUID versionId,
     LLCore::HttpRequest::ptr_t httpRequest(new LLCore::HttpRequest);
     LLCore::HttpHeaders::ptr_t httpHeaders(new LLCore::HttpHeaders);
 
-    httpHeaders->append("Accept", "application/json");
-    httpHeaders->append("Content-Type", "application/json");
+    httpHeaders->append(HTTP_OUT_HEADER_ACCEPT, "application/json");
+    httpHeaders->append(HTTP_OUT_HEADER_CONTENT_TYPE, "application/json");
 
     LLViewerInventoryCategory* category = gInventory.getCategory(folderId);
     LLSD invInfo;
@@ -1049,8 +1049,8 @@ void LLMarketplaceData::updateSLMListingCoro(LLUUID folderId, S32 listingId, LLU
     LLCore::HttpRequest::ptr_t httpRequest(new LLCore::HttpRequest);
     LLCore::HttpHeaders::ptr_t httpHeaders(new LLCore::HttpHeaders);
 
-    httpHeaders->append("Accept", "application/json");
-    httpHeaders->append("Content-Type", "application/json");
+    httpHeaders->append(HTTP_OUT_HEADER_ACCEPT, "application/json");
+    httpHeaders->append(HTTP_OUT_HEADER_CONTENT_TYPE, "application/json");
     
     LLSD invInfo;
     invInfo["listing_folder_id"] = folderId;
@@ -1128,8 +1128,8 @@ void LLMarketplaceData::associateSLMListingCoro(LLUUID folderId, S32 listingId,
     LLCore::HttpRequest::ptr_t httpRequest(new LLCore::HttpRequest);
     LLCore::HttpHeaders::ptr_t httpHeaders(new LLCore::HttpHeaders);
 
-    httpHeaders->append("Accept", "application/json");
-    httpHeaders->append("Content-Type", "application/json");
+    httpHeaders->append(HTTP_OUT_HEADER_ACCEPT, "application/json");
+    httpHeaders->append(HTTP_OUT_HEADER_CONTENT_TYPE, "application/json");
 
     LLSD invInfo;
     invInfo["listing_folder_id"] = folderId;
@@ -1206,8 +1206,8 @@ void LLMarketplaceData::deleteSLMListingCoro(S32 listingId)
     LLCore::HttpRequest::ptr_t httpRequest(new LLCore::HttpRequest);
     LLCore::HttpHeaders::ptr_t httpHeaders(new LLCore::HttpHeaders);
 
-    httpHeaders->append("Accept", "application/json");
-    httpHeaders->append("Content-Type", "application/json");
+    httpHeaders->append(HTTP_OUT_HEADER_ACCEPT, "application/json");
+    httpHeaders->append(HTTP_OUT_HEADER_CONTENT_TYPE, "application/json");
 
     std::string url = getSLMConnectURL("/listing/") + llformat("%d", listingId);
     LLUUID folderId = getListingFolder(listingId);
diff --git a/indra/newview/llvovolume.cpp b/indra/newview/llvovolume.cpp
index 788f2ccdb525ac7e89051bf258dd3eb661a1e957..8002d7edc091b3f8b357864cd9fa71fd01434a4b 100644
--- a/indra/newview/llvovolume.cpp
+++ b/indra/newview/llvovolume.cpp
@@ -5197,13 +5197,13 @@ static LLTrace::BlockTimerStatHandle FTM_REGISTER_FACE("Register Face");
 void LLVolumeGeometryManager::registerFace(LLSpatialGroup* group, LLFace* facep, U32 type)
 {
 	LL_RECORD_BLOCK_TIME(FTM_REGISTER_FACE);
-	if (   type == LLRenderPass::PASS_ALPHA 
-		&& facep->getTextureEntry()->getMaterialParams().notNull() 
-		&& !facep->getVertexBuffer()->hasDataType(LLVertexBuffer::TYPE_TANGENT)
-		&& LLViewerShaderMgr::instance()->getShaderLevel(LLViewerShaderMgr::SHADER_OBJECT) > 1)
-	{
-		LL_WARNS_ONCE("RenderMaterials") << "Oh no! No binormals for this alpha blended face!" << LL_ENDL;
-	}
+	//if (   type == LLRenderPass::PASS_ALPHA 
+	//	&& facep->getTextureEntry()->getMaterialParams().notNull() 
+	//	&& !facep->getVertexBuffer()->hasDataType(LLVertexBuffer::TYPE_TANGENT)
+	//	&& LLViewerShaderMgr::instance()->getShaderLevel(LLViewerShaderMgr::SHADER_OBJECT) > 1)
+	//{
+	//	LL_WARNS_ONCE("RenderMaterials") << "Oh no! No binormals for this alpha blended face!" << LL_ENDL;
+	//}
 
 //	bool selected = facep->getViewerObject()->isSelected();
 //
diff --git a/indra/newview/tests/test_llxmlrpc_peer.py b/indra/newview/tests/test_llxmlrpc_peer.py
index f644afa1a2a9d3dce82a191e68cf8595c50ce437..365848b8193e24e1f2a72c04bda3d2dc84c25977 100755
--- a/indra/newview/tests/test_llxmlrpc_peer.py
+++ b/indra/newview/tests/test_llxmlrpc_peer.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 """\
 @file   test_llxmlrpc_peer.py
 @author Nat Goodspeed
@@ -31,7 +31,7 @@
 
 import os
 import sys
-from SimpleXMLRPCServer import SimpleXMLRPCServer
+from xmlrpc.server import SimpleXMLRPCServer
 
 mydir = os.path.dirname(__file__)       # expected to be .../indra/newview/tests/
 sys.path.insert(0, os.path.join(mydir, os.pardir, os.pardir, "llmessage", "tests"))
@@ -85,7 +85,7 @@ def log_error(self, format, *args):
         # "Then there's Windows"
         # Instantiate a TestServer on the first free port in the specified
         # port range.
-        xmlrpcd, port = freeport(xrange(8000, 8020), make_server)
+        xmlrpcd, port = freeport(range(8000, 8020), make_server)
 
     # Pass the selected port number to the subject test program via the
     # environment. We don't want to impose requirements on the test program's
diff --git a/indra/newview/viewer_manifest.py b/indra/newview/viewer_manifest.py
index 4089e37b5ddbd8c2727a7381ee92b1fe544f9741..1ec03d76105bbbb1ce24cf7d09521dd8e8063fda 100755
--- a/indra/newview/viewer_manifest.py
+++ b/indra/newview/viewer_manifest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 """\
 @file viewer_manifest.py
 @author Ryan Williams
@@ -26,6 +26,8 @@
 Linden Research, Inc., 945 Battery Street, San Francisco, CA  94111  USA
 $/LicenseInfo$
 """
+
+from io import open
 import errno
 import json
 import os
@@ -114,17 +116,17 @@ def construct(self):
                 if sourceid:
                     settings_install['sourceid'] = settings_template['sourceid'].copy()
                     settings_install['sourceid']['Value'] = sourceid
-                    print "Set sourceid in settings_install.xml to '%s'" % sourceid
+                    print("Set sourceid in settings_install.xml to '%s'" % sourceid)
 
                 if self.args.get('channel_suffix'):
                     settings_install['CmdLineChannel'] = settings_template['CmdLineChannel'].copy()
                     settings_install['CmdLineChannel']['Value'] = self.channel_with_pkg_suffix()
-                    print "Set CmdLineChannel in settings_install.xml to '%s'" % self.channel_with_pkg_suffix()
+                    print("Set CmdLineChannel in settings_install.xml to '%s'" % self.channel_with_pkg_suffix())
 
                 if self.args.get('grid'):
                     settings_install['CmdLineGridChoice'] = settings_template['CmdLineGridChoice'].copy()
                     settings_install['CmdLineGridChoice']['Value'] = self.grid()
-                    print "Set CmdLineGridChoice in settings_install.xml to '%s'" % self.grid()
+                    print("Set CmdLineGridChoice in settings_install.xml to '%s'" % self.grid())
 
                 # put_in_file(src=) need not be an actual pathname; it
                 # only needs to be non-empty
@@ -147,30 +149,30 @@ def construct(self):
 
             # skins
             with self.prefix(src_dst="skins"):
-                    # include the entire textures directory recursively
-                    with self.prefix(src_dst="*/textures"):
-                            self.path("*/*.jpg")
-                            self.path("*/*.png")
-                            self.path("*.tga")
-                            self.path("*.j2c")
-                            self.path("*.png")
-                            self.path("textures.xml")
-                    self.path("*/xui/*/*.xml")
-                    self.path("*/xui/*/widgets/*.xml")
-                    self.path("*/*.xml")
-                    self.path("*/*.json")
-
-                    # Local HTML files (e.g. loading screen)
-                    # The claim is that we never use local html files any
-                    # longer. But rather than commenting out this block, let's
-                    # rename every html subdirectory as html.old. That way, if
-                    # we're wrong, a user actually does have the relevant
-                    # files; s/he just needs to rename every html.old
-                    # directory back to html to recover them.
-                    with self.prefix(src="*/html", dst="*/html.old"):
-                            self.path("*.png")
-                            self.path("*/*/*.html")
-                            self.path("*/*/*.gif")
+                # include the entire textures directory recursively
+                with self.prefix(src_dst="*/textures"):
+                    self.path("*/*.jpg")
+                    self.path("*/*.png")
+                    self.path("*.tga")
+                    self.path("*.j2c")
+                    self.path("*.png")
+                    self.path("textures.xml")
+                self.path("*/xui/*/*.xml")
+                self.path("*/xui/*/widgets/*.xml")
+                self.path("*/*.xml")
+                self.path("*/*.json")
+
+                # Local HTML files (e.g. loading screen)
+                # The claim is that we never use local html files any
+                # longer. But rather than commenting out this block, let's
+                # rename every html subdirectory as html.old. That way, if
+                # we're wrong, a user actually does have the relevant
+                # files; s/he just needs to rename every html.old
+                # directory back to html to recover them.
+                with self.prefix(src="*/html", dst="*/html.old"):
+                    self.path("*.png")
+                    self.path("*/*/*.html")
+                    self.path("*/*/*.gif")
 
 
             #build_data.json.  Standard with exception handling is fine.  If we can't open a new file for writing, we have worse problems
@@ -193,7 +195,7 @@ def construct(self):
             #we likely no longer need the test, since we will throw an exception above, but belt and suspenders and we get the
             #return code for free.
             if not self.path2basename(os.pardir, "build_data.json"):
-                print "No build_data.json file"
+                print("No build_data.json file")
 
     def finish_build_data_dict(self, build_data_dict):
         return build_data_dict
@@ -270,26 +272,26 @@ def icon_path(self):
 
     def extract_names(self,src):
         try:
-            contrib_file = open(src,'r')
+            contrib_file = open(src, 'r', encoding='utf-8')
         except IOError:
-            print "Failed to open '%s'" % src
+            print("Failed to open '%s'" % src)
             raise
         lines = contrib_file.readlines()
         contrib_file.close()
 
         # All lines up to and including the first blank line are the file header; skip them
         lines.reverse() # so that pop will pull from first to last line
-        while not re.match("\s*$", lines.pop()) :
+        while not re.match(r"\s*$", lines.pop()) :
             pass # do nothing
 
         # A line that starts with a non-whitespace character is a name; all others describe contributions, so collect the names
         names = []
         for line in lines :
-            if re.match("\S", line) :
+            if re.match(r"\S", line) :
                 names.append(line.rstrip())
         # It's not fair to always put the same people at the head of the list
         random.shuffle(names)
-        return ', '.join(names)
+        return ', '.join(names).encode("utf-8")
 
     def relsymlinkf(self, src, dst=None, catch=True):
         """
@@ -311,7 +313,7 @@ def symlinkf(self, src, dst=None, catch=True):
         """
         Like ln -sf, but uses os.symlink() instead of running ln. This creates
         a symlink at 'dst' that points to 'src' -- see:
-        https://docs.python.org/2/library/os.html#os.symlink
+        https://docs.python.org/3/library/os.html#os.symlink
 
         If you omit 'dst', this creates a symlink with basename(src) at
         get_dst_prefix() -- in other words: put a symlink to this pathname
@@ -373,11 +375,11 @@ def _symlinkf(self, src, dst, catch):
                         os.remove(dst)
                         os.symlink(src, dst)
                 elif os.path.isdir(dst):
-                    print "Requested symlink (%s) exists but is a directory; replacing" % dst
+                    print("Requested symlink (%s) exists but is a directory; replacing" % dst)
                     shutil.rmtree(dst)
                     os.symlink(src, dst)
                 elif os.path.exists(dst):
-                    print "Requested symlink (%s) exists but is a file; replacing" % dst
+                    print("Requested symlink (%s) exists but is a file; replacing" % dst)
                     os.remove(dst)
                     os.symlink(src, dst)
                 else:
@@ -385,8 +387,8 @@ def _symlinkf(self, src, dst, catch):
                     raise
         except Exception as err:
             # report
-            print "Can't symlink %r -> %r: %s: %s" % \
-                  (dst, src, err.__class__.__name__, err)
+            print("Can't symlink %r -> %r: %s: %s" % \
+                  (dst, src, err.__class__.__name__, err))
             # if caller asked us not to catch, re-raise this exception
             if not catch:
                 raise
@@ -656,8 +658,7 @@ def wpath(path):
         result = ""
         dest_files = [pair[1] for pair in self.file_list if pair[0] and os.path.isfile(pair[1])]
         # sort deepest hierarchy first
-        dest_files.sort(lambda a,b: cmp(a.count(os.path.sep),b.count(os.path.sep)) or cmp(a,b))
-        dest_files.reverse()
+        dest_files.sort(key=lambda path: (path.count(os.path.sep), path), reverse=True)
         out_path = None
         for pkg_file in dest_files:
             rel_file = os.path.normpath(pkg_file.replace(self.get_dst_prefix()+os.path.sep,''))
@@ -680,8 +681,7 @@ def wpath(path):
             for d in deleted_file_dirs:
                 deleted_dirs.extend(path_ancestors(d))
             # sort deepest hierarchy first
-            deleted_dirs.sort(lambda a,b: cmp(a.count(os.path.sep),b.count(os.path.sep)) or cmp(a,b))
-            deleted_dirs.reverse()
+            deleted_dirs.sort(key=lambda path: (path.count(os.path.sep), path), reverse=True)
             prev = None
             for d in deleted_dirs:
                 if d != prev:   # skip duplicates
@@ -767,19 +767,19 @@ def package_finish(self):
         installer_created=False
         nsis_attempts=3
         nsis_retry_wait=15
-        for attempt in xrange(nsis_attempts):
+        for attempt in range(nsis_attempts):
             try:
                 self.run_command([NSIS_path, '/V2', self.dst_path_of(tempfile)])
             except ManifestError as err:
                 if attempt+1 < nsis_attempts:
-                    print >> sys.stderr, "nsis failed, waiting %d seconds before retrying" % nsis_retry_wait
+                    print("nsis failed, waiting %d seconds before retrying" % nsis_retry_wait, file=sys.stderr)
                     time.sleep(nsis_retry_wait)
                     nsis_retry_wait*=2
             else:
                 # NSIS worked! Done!
                 break
         else:
-            print >> sys.stderr, "Maximum nsis attempts exceeded; giving up"
+            print("Maximum nsis attempts exceeded; giving up", file=sys.stderr)
             raise
 
         self.sign(installer_file)
@@ -791,10 +791,10 @@ def sign(self, exe):
         python  = os.environ.get('PYTHON', sys.executable)
         if os.path.exists(sign_py):
             dst_path = self.dst_path_of(exe)
-            print "about to run signing of: ", dst_path
+            print("about to run signing of: ", dst_path)
             self.run_command([python, sign_py, dst_path])
         else:
-            print "Skipping code signing of %s %s: %s not found" % (self.dst_path_of(exe), exe, sign_py)
+            print("Skipping code signing of %s %s: %s not found" % (self.dst_path_of(exe), exe, sign_py))
 
     def escape_slashes(self, path):
         return path.replace('\\', '\\\\\\\\')
@@ -1040,7 +1040,7 @@ def package_finish(self):
         appname = os.path.basename(application)
 
         vol_icon = self.src_path_of(os.path.join(self.icon_path(), 'alchemy.icns'))
-        print "DEBUG: icon_path '%s'" % vol_icon
+        print("DEBUG: icon_path '%s'" % vol_icon)
 
         dmgoptions = {
             'format': 'ULFO',
@@ -1160,7 +1160,7 @@ def construct(self):
 
         # Get the icons based on the channel type
         icon_path = self.icon_path()
-        print "DEBUG: icon_path '%s'" % icon_path
+        print("DEBUG: icon_path '%s'" % icon_path)
         with self.prefix(src=icon_path) :
             self.path("alchemy_256.png","alchemy_icon.png")
             with self.prefix(dst="res-sdl") :
@@ -1216,7 +1216,7 @@ def package_finish(self):
             self.run_command(['find', self.get_dst_prefix(),
                               '-type', 'f', '-perm', old,
                               '-exec', 'chmod', new, '{}', ';'])
-        self.package_file = installer_name + '.tar.bz2'
+        self.package_file = installer_name + '.tar.xz'
 
         # temporarily move directory tree so that it has the right
         # name in the tarfile
@@ -1229,17 +1229,17 @@ def package_finish(self):
                 # --numeric-owner hides the username of the builder for
                 # security etc.
                 self.run_command(['tar', '-C', self.get_build_prefix(),
-                                  '--numeric-owner', '-cjf',
-                                 tempname + '.tar.bz2', installer_name])
+                                  '--numeric-owner', '-cJf',
+                                 tempname + '.tar.xz', installer_name])
             else:
-                print "Skipping %s.tar.bz2 for non-Release build (%s)" % \
-                      (installer_name, self.args['buildtype'])
+                print("Skipping %s.tar.xz for non-Release build (%s)" % \
+                      (installer_name, self.args['buildtype']))
         finally:
             self.run_command(["mv", tempname, realname])
 
     def strip_binaries(self):
         if self.args['buildtype'].lower() == 'release' and self.is_packaging_viewer():
-            print "* Going strip-crazy on the packaged binaries, since this is a RELEASE build"
+            print("* Going strip-crazy on the packaged binaries, since this is a RELEASE build")
             # makes some small assumptions about our packaged dir structure
             self.run_command(
                 ["find"] +
diff --git a/indra/test/test_llmanifest.py b/indra/test/test_llmanifest.py
index b2b2b72c3bb435642178179db3287e2a839ae2c0..c746d59ff22766a3cda5bb3fbe8e6dd2fb13d157 100755
--- a/indra/test/test_llmanifest.py
+++ b/indra/test/test_llmanifest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 """
 @file test_llmanifest.py
 @author Ryan Williams
@@ -124,10 +124,10 @@ def testpathof(self):
 
     def testcmakedirs(self):
         self.m.cmakedirs("test_dir_DELETE/nested/dir")
-        self.assert_(os.path.exists("test_dir_DELETE/nested/dir"))
-        self.assert_(os.path.isdir("test_dir_DELETE"))
-        self.assert_(os.path.isdir("test_dir_DELETE/nested"))
-        self.assert_(os.path.isdir("test_dir_DELETE/nested/dir"))
+        self.assertTrue(os.path.exists("test_dir_DELETE/nested/dir"))
+        self.assertTrue(os.path.isdir("test_dir_DELETE"))
+        self.assertTrue(os.path.isdir("test_dir_DELETE/nested"))
+        self.assertTrue(os.path.isdir("test_dir_DELETE/nested/dir"))
         os.removedirs("test_dir_DELETE/nested/dir")
 
 if __name__ == '__main__':
diff --git a/scripts/code_tools/modified_strings.py b/scripts/code_tools/modified_strings.py
index 6a763b6ec54e81ca31e8a12e1bf23932c6646fc5..e7a9d239dc7c5da4f93143598f6cbcf76ffc8932 100644
--- a/scripts/code_tools/modified_strings.py
+++ b/scripts/code_tools/modified_strings.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 """\
 
 This script scans the SL codebase for translation-related strings.
@@ -25,7 +25,7 @@
 $/LicenseInfo$
 """
 
-from __future__ import print_function
+
 
 import xml.etree.ElementTree as ET
 import argparse
@@ -75,10 +75,10 @@
 ]
 
 def codify_for_print(val):
-    if isinstance(val, unicode):
+    if isinstance(val, str):
         return val.encode("utf-8")
     else:
-        return unicode(val, 'utf-8').encode("utf-8")
+        return str(val, 'utf-8').encode("utf-8")
 
 # Returns a dict of { name => xml_node }
 def read_xml_elements(blob):
@@ -186,7 +186,7 @@ def make_translation_table(mod_tree, base_tree, lang, args):
         transl_dict = read_xml_elements(transl_blob)
 
         rows = 0
-        for name in mod_dict.keys():
+        for name in list(mod_dict.keys()):
             if not name in base_dict or mod_dict[name].text != base_dict[name].text or (args.missing and not name in transl_dict):
                 elt = mod_dict[name]
                 val = elt.text
@@ -307,7 +307,7 @@ def save_translation_file(per_lang_data, aux_data, outfile):
         print("Added", num_translations, "rows for language", lang)
 
     # Reference info, not for translation
-    for aux, data in aux_data.items():
+    for aux, data in list(aux_data.items()):
         df = pd.DataFrame(data, columns = ["Key", "Value"]) 
         df.to_excel(writer, index=False, sheet_name=aux)
         worksheet = writer.sheets[aux]
diff --git a/scripts/content_tools/anim_tool.py b/scripts/content_tools/anim_tool.py
index 3aef8cd5ab40e175052b6d71bc6675087a062a27..e7b86a88fa07f76f17a532c0e80b11dea0f50b52 100644
--- a/scripts/content_tools/anim_tool.py
+++ b/scripts/content_tools/anim_tool.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python3
 """\
 @file   anim_tool.py
 @author Brad Payne, Nat Goodspeed
@@ -39,7 +39,7 @@
 import math
 import os
 import random
-from cStringIO import StringIO
+from io import StringIO
 import struct
 import sys
 from xml.etree import ElementTree
@@ -179,7 +179,7 @@ def unpack(duration, fup):
         return this
 
     def dump(self, f):
-        print >>f, "    rot_key: t %.3f" % self.time,"st",self.time_short,"rot",",".join("%.3f" % f for f in self.rotation)
+        print("    rot_key: t %.3f" % self.time,"st",self.time_short,"rot",",".join("%.3f" % f for f in self.rotation), file=f)
 
     def pack(self, fp):
         fp.pack("<H",self.time_short)
@@ -215,7 +215,7 @@ def unpack(duration, fup):
         return this
 
     def dump(self, f):
-        print >>f, "    pos_key: t %.3f" % self.time,"pos ",",".join("%.3f" % f for f in self.position)
+        print("    pos_key: t %.3f" % self.time,"pos ",",".join("%.3f" % f for f in self.position), file=f)
         
     def pack(self, fp):
         fp.pack("<H",self.time_short)
@@ -247,18 +247,18 @@ def pack(self, fp):
                 self.ease_out_start, self.ease_out_stop)
 
     def dump(self, f):
-        print >>f, "  constraint:"
-        print >>f, "    chain_length",self.chain_length
-        print >>f, "    constraint_type",self.constraint_type
-        print >>f, "    source_volume",self.source_volume
-        print >>f, "    source_offset",self.source_offset
-        print >>f, "    target_volume",self.target_volume
-        print >>f, "    target_offset",self.target_offset
-        print >>f, "    target_dir",self.target_dir
-        print >>f, "    ease_in_start",self.ease_in_start
-        print >>f, "    ease_in_stop",self.ease_in_stop
-        print >>f, "    ease_out_start",self.ease_out_start
-        print >>f, "    ease_out_stop",self.ease_out_stop
+        print("  constraint:", file=f)
+        print("    chain_length",self.chain_length, file=f)
+        print("    constraint_type",self.constraint_type, file=f)
+        print("    source_volume",self.source_volume, file=f)
+        print("    source_offset",self.source_offset, file=f)
+        print("    target_volume",self.target_volume, file=f)
+        print("    target_offset",self.target_offset, file=f)
+        print("    target_dir",self.target_dir, file=f)
+        print("    ease_in_start",self.ease_in_start, file=f)
+        print("    ease_in_stop",self.ease_in_stop, file=f)
+        print("    ease_out_start",self.ease_out_start, file=f)
+        print("    ease_out_stop",self.ease_out_stop, file=f)
         
 class Constraints(object):
     @staticmethod
@@ -266,7 +266,7 @@ def unpack(duration, fup):
         this = Constraints()
         (num_constraints, ) = fup.unpack("<i")
         this.constraints = [Constraint.unpack(duration, fup)
-                            for i in xrange(num_constraints)]
+                            for i in range(num_constraints)]
         return this
 
     def pack(self, fp):
@@ -275,7 +275,7 @@ def pack(self, fp):
             c.pack(fp)
 
     def dump(self, f):
-        print >>f, "constraints:",len(self.constraints)
+        print("constraints:",len(self.constraints), file=f)
         for c in self.constraints:
             c.dump(f)
 
@@ -296,7 +296,7 @@ def unpack(duration, fup):
         this = PositionCurve()
         (num_pos_keys, ) = fup.unpack("<i")
         this.keys = [PosKey.unpack(duration, fup)
-                     for k in xrange(num_pos_keys)]
+                     for k in range(num_pos_keys)]
         return this
 
     def pack(self, fp):
@@ -305,8 +305,8 @@ def pack(self, fp):
             k.pack(fp)
 
     def dump(self, f):
-        print >>f, "  position_curve:"
-        print >>f, "    num_pos_keys", len(self.keys)
+        print("  position_curve:", file=f)
+        print("    num_pos_keys", len(self.keys), file=f)
         for k in self.keys:
             k.dump(f)
 
@@ -327,7 +327,7 @@ def unpack(duration, fup):
         this = RotationCurve()
         (num_rot_keys, ) = fup.unpack("<i")
         this.keys = [RotKey.unpack(duration, fup)
-                     for k in xrange(num_rot_keys)]
+                     for k in range(num_rot_keys)]
         return this
 
     def pack(self, fp):
@@ -336,8 +336,8 @@ def pack(self, fp):
             k.pack(fp)
 
     def dump(self, f):
-        print >>f, "  rotation_curve:"
-        print >>f, "    num_rot_keys", len(self.keys)
+        print("  rotation_curve:", file=f)
+        print("    num_rot_keys", len(self.keys), file=f)
         for k in self.keys:
             k.dump(f)
             
@@ -364,9 +364,9 @@ def pack(self, fp):
         self.position_curve.pack(fp)
 
     def dump(self, f):
-        print >>f, "joint:"
-        print >>f, "  joint_name:",self.joint_name
-        print >>f, "  joint_priority:",self.joint_priority
+        print("joint:", file=f)
+        print("  joint_name:",self.joint_name, file=f)
+        print("  joint_priority:",self.joint_priority, file=f)
         self.rotation_curve.dump(f)
         self.position_curve.dump(f)
 
@@ -440,10 +440,10 @@ def unpack(self,fup):
             fup.unpack("@ffiffII")
         
         self.joints = [JointInfo.unpack(self.duration, fup)
-                       for j in xrange(num_joints)]
+                       for j in range(num_joints)]
         if self.verbose:
             for joint_info in self.joints:
-                print "unpacked joint",joint_info.joint_name
+                print("unpacked joint",joint_info.joint_name)
         self.constraints = Constraints.unpack(self.duration, fup)
         self.buffer = fup.buffer
         
@@ -461,17 +461,17 @@ def dump(self, filename="-"):
             f = sys.stdout
         else:
             f = open(filename,"w")
-        print >>f, "versions: ", self.version, self.sub_version
-        print >>f, "base_priority: ", self.base_priority
-        print >>f, "duration: ", self.duration
-        print >>f, "emote_name: ", self.emote_name
-        print >>f, "loop_in_point: ", self.loop_in_point
-        print >>f, "loop_out_point: ", self.loop_out_point
-        print >>f, "loop: ", self.loop
-        print >>f, "ease_in_duration: ", self.ease_in_duration
-        print >>f, "ease_out_duration: ", self.ease_out_duration
-        print >>f, "hand_pose", self.hand_pose
-        print >>f, "num_joints", len(self.joints)
+        print("versions: ", self.version, self.sub_version, file=f)
+        print("base_priority: ", self.base_priority, file=f)
+        print("duration: ", self.duration, file=f)
+        print("emote_name: ", self.emote_name, file=f)
+        print("loop_in_point: ", self.loop_in_point, file=f)
+        print("loop_out_point: ", self.loop_out_point, file=f)
+        print("loop: ", self.loop, file=f)
+        print("ease_in_duration: ", self.ease_in_duration, file=f)
+        print("ease_out_duration: ", self.ease_out_duration, file=f)
+        print("hand_pose", self.hand_pose, file=f)
+        print("num_joints", len(self.joints), file=f)
         for j in self.joints:
             j.dump(f)
         self.constraints.dump(f)
@@ -482,7 +482,7 @@ def write(self, filename):
         fp.write(filename)
 
     def write_src_data(self, filename):
-        print "write file",filename
+        print("write file",filename)
         with open(filename,"wb") as f:
             f.write(self.buffer)
 
@@ -501,11 +501,11 @@ def delete_joint(self, name):
         j = self.find_joint(name)
         if j:
             if self.verbose:
-                print "removing joint", name
+                print("removing joint", name)
             self.joints.remove(j)
         else:
             if self.verbose:
-                print "joint not found to remove", name
+                print("joint not found to remove", name)
 
     def summary(self):
         nj = len(self.joints)
@@ -513,13 +513,13 @@ def summary(self):
         nstatic = len([j for j in self.joints
                        if j.rotation_curve.is_static()
                        and j.position_curve.is_static()])
-        print "summary: %d joints, non-zero priority %d, static %d" % (nj, nz, nstatic)
+        print("summary: %d joints, non-zero priority %d, static %d" % (nj, nz, nstatic))
 
     def add_pos(self, joint_names, positions):
         js = [joint for joint in self.joints if joint.joint_name in joint_names]
         for j in js:
             if self.verbose:
-                print "adding positions",j.joint_name,positions
+                print("adding positions",j.joint_name,positions)
             j.joint_priority = 4
             j.position_curve.keys = [PosKey(self.duration * i / (len(positions) - 1),
                                             self.duration,
@@ -529,7 +529,7 @@ def add_pos(self, joint_names, positions):
     def add_rot(self, joint_names, rotations):
         js = [joint for joint in self.joints if joint.joint_name in joint_names]
         for j in js:
-            print "adding rotations",j.joint_name
+            print("adding rotations",j.joint_name)
             j.joint_priority = 4
             j.rotation_curve.keys = [RotKey(self.duration * i / (len(rotations) - 1),
                                             self.duration,
@@ -539,8 +539,8 @@ def add_rot(self, joint_names, rotations):
 def twistify(anim, joint_names, rot1, rot2):
     js = [joint for joint in anim.joints if joint.joint_name in joint_names]
     for j in js:
-        print "twisting",j.joint_name
-        print len(j.rotation_curve.keys)
+        print("twisting",j.joint_name)
+        print(len(j.rotation_curve.keys))
         j.joint_priority = 4
         # Set the joint(s) to rot1 at time 0, rot2 at the full duration.
         j.rotation_curve.keys = [
@@ -563,7 +563,7 @@ def get_joint_by_name(tree,name):
     if len(matches)==1:
         return matches[0]
     elif len(matches)>1:
-        print "multiple matches for name",name
+        print("multiple matches for name",name)
         return None
     else:
         return None
@@ -577,7 +577,7 @@ def get_elt_pos(elt):
         return (0.0, 0.0, 0.0)
 
 def resolve_joints(names, skel_tree, lad_tree, no_hud=False):
-    print "resolve joints, no_hud is",no_hud
+    print("resolve joints, no_hud is",no_hud)
     if skel_tree and lad_tree:
         all_elts = [elt for elt in skel_tree.getroot().iter()]
         all_elts.extend([elt for elt in lad_tree.getroot().iter()])
@@ -641,12 +641,12 @@ def main(*argv):
     parser.add_argument("outfilename", nargs="?", help="name of a .anim file to output")
     args = parser.parse_args(argv)
 
-    print "anim_tool.py: " + " ".join(argv)
-    print "dump is", args.dump
-    print "infilename",args.infilename,"outfilename",args.outfilename
-    print "rot",args.rot
-    print "pos",args.pos
-    print "joints",args.joints
+    print("anim_tool.py: " + " ".join(argv))
+    print("dump is", args.dump)
+    print("infilename",args.infilename,"outfilename",args.outfilename)
+    print("rot",args.rot)
+    print("pos",args.pos)
+    print("joints",args.joints)
 
     anim = Anim(args.infilename, args.verbose)
     skel_tree = None
@@ -663,7 +663,7 @@ def main(*argv):
     if args.joints:
         joints = resolve_joints(args.joints, skel_tree, lad_tree, args.no_hud)
         if args.verbose:
-            print "joints resolved to",joints
+            print("joints resolved to",joints)
         for name in joints:
             anim.add_joint(name,0)
     if args.delete_joints:
@@ -677,8 +677,8 @@ def main(*argv):
         # pick a random sequence of positions for each joint specified
         for joint in joints:
             # generate a list of rand_pos triples
-            pos_array = [tuple(random.uniform(-1,1) for i in xrange(3))
-                         for j in xrange(args.rand_pos)]
+            pos_array = [tuple(random.uniform(-1,1) for i in range(3))
+                         for j in range(args.rand_pos)]
             # close the loop by cycling back to the first entry
             pos_array.append(pos_array[0])
             anim.add_pos([joint], pos_array)
@@ -688,26 +688,26 @@ def main(*argv):
             if elt is not None:
                 anim.add_pos([joint], 2*[get_elt_pos(elt)])
             else:
-                print "no elt or no pos data for",joint
+                print("no elt or no pos data for",joint)
     if args.set_version:
         anim.version, anim.sub_version = args.set_version
     if args.base_priority is not None:
-        print "set base priority",args.base_priority
+        print("set base priority",args.base_priority)
         anim.base_priority = args.base_priority
     # --joint_priority sets priority for ALL joints, not just the explicitly-
     # specified ones
     if args.joint_priority is not None:
-        print "set joint priority",args.joint_priority
+        print("set joint priority",args.joint_priority)
         for joint in anim.joints:
             joint.joint_priority = args.joint_priority
     if args.duration is not None:
-        print "set duration",args.duration
+        print("set duration",args.duration)
         anim.duration = args.duration
     if args.loop_in is not None:
-        print "set loop_in",args.loop_in
+        print("set loop_in",args.loop_in)
         anim.loop_in_point = args.loop_in
     if args.loop_out is not None:
-        print "set loop_out",args.loop_out
+        print("set loop_out",args.loop_out)
         anim.loop_out_point = args.loop_out
     if args.dump:
         anim.dump("-")
diff --git a/scripts/content_tools/arche_tool.py b/scripts/content_tools/arche_tool.py
index f99d7be39ad7be65dfb47bce1223bcfee26e425f..677af62d2f2f1805841cb91c64ebfec6536dcfcc 100644
--- a/scripts/content_tools/arche_tool.py
+++ b/scripts/content_tools/arche_tool.py
@@ -1,4 +1,4 @@
-#!runpy.sh
+#!/usr/bin/env python3
 
 """\
 
@@ -42,23 +42,23 @@ def node_key(e):
 def compare_matched_nodes(key,items,summary):
     tags = list(set([e.tag for e in items]))
     if len(tags) != 1:
-        print "different tag types for key",key
+        print("different tag types for key",key)
         summary.setdefault("tag_mismatch",0)
         summary["tag_mismatch"] += 1
         return
-    all_attrib = list(set(chain.from_iterable([e.attrib.keys() for e in items])))
+    all_attrib = list(set(chain.from_iterable([list(e.attrib.keys()) for e in items])))
     #print key,"all_attrib",all_attrib
     for attr in all_attrib:
         vals = [e.get(attr) for e in items]
         #print "key",key,"attr",attr,"vals",vals
         if len(set(vals)) != 1:
-            print key,"- attr",attr,"multiple values",vals
+            print(key,"- attr",attr,"multiple values",vals)
             summary.setdefault("attr",{})
             summary["attr"].setdefault(attr,0)
             summary["attr"][attr] += 1
 
 def compare_trees(file_trees):
-    print "compare_trees"
+    print("compare_trees")
     summary = {}
     all_keys = list(set([node_key(e) for tree in file_trees for e in tree.getroot().iter() if node_key(e)]))
     #print "keys",all_keys
@@ -70,14 +70,14 @@ def compare_trees(file_trees):
         items = []
         for nodes in tree_nodes:
             if not key in nodes:
-                print "file",i,"missing item for key",key
+                print("file",i,"missing item for key",key)
                 summary.setdefault("missing",0)
                 summary["missing"] += 1
             else:
                 items.append(nodes[key])
         compare_matched_nodes(key,items,summary)
-    print "Summary:"
-    print summary
+    print("Summary:")
+    print(summary)
                 
 def dump_appearance_params(tree):
     vals = []
@@ -88,7 +88,7 @@ def dump_appearance_params(tree):
                 vals.append("{" + e.get("id") + "," +e.get("u8") + "}")
                 #print e.get("id"), e.get("name"), e.get("group"), e.get("u8")
     if len(vals)==253:
-        print ", ".join(vals)
+        print(", ".join(vals))
         
     
 if __name__ == "__main__":
@@ -101,9 +101,9 @@ def dump_appearance_params(tree):
     args = parser.parse_args()
 
 
-    print "files",args.files
+    print("files",args.files)
     file_trees = [etree.parse(filename) for filename in args.files]
-    print args
+    print(args)
     if args.compare:
         compare_trees(file_trees)
     if args.appearance_params:
diff --git a/scripts/content_tools/dae_tool.py b/scripts/content_tools/dae_tool.py
index 823f69cb854d1123cf1cb4a17c755e47ed78e3f6..2454fafa467be6ee44af5535cbd7bdf6d1b90bad 100644
--- a/scripts/content_tools/dae_tool.py
+++ b/scripts/content_tools/dae_tool.py
@@ -1,4 +1,4 @@
-#!runpy.sh
+#!/usr/bin/env python3
 
 """\
 
@@ -35,14 +35,14 @@
 from lxml import etree
 
 def mesh_summary(mesh):
-    print "scenes",mesh.scenes
+    print("scenes",mesh.scenes)
     for scene in mesh.scenes:
-        print "scene",scene
+        print("scene",scene)
         for node in scene.nodes:
-            print "node",node
+            print("node",node)
 
 def mesh_lock_offsets(tree, joints):
-    print "mesh_lock_offsets",tree,joints
+    print("mesh_lock_offsets",tree,joints)
     for joint_node in tree.iter():
         if "node" not in joint_node.tag:
             continue
@@ -57,11 +57,11 @@ def mesh_lock_offsets(tree, joints):
                         floats[7] += 0.0001
                         floats[11] += 0.0001
                         matrix_node.text = " ".join([str(f) for f in floats])
-                        print joint_node.get("name"),matrix_node.tag,"text",matrix_node.text,len(floats),floats
+                        print(joint_node.get("name"),matrix_node.tag,"text",matrix_node.text,len(floats),floats)
         
 
 def mesh_random_offsets(tree, joints):
-    print "mesh_random_offsets",tree,joints
+    print("mesh_random_offsets",tree,joints)
     for joint_node in tree.iter():
         if "node" not in joint_node.tag:
             continue
@@ -73,13 +73,13 @@ def mesh_random_offsets(tree, joints):
             for matrix_node in list(joint_node):
                 if "matrix" in matrix_node.tag:
                     floats = [float(x) for x in matrix_node.text.split()]
-                    print "randomizing",floats
+                    print("randomizing",floats)
                     if len(floats) == 16:
                         floats[3] += random.uniform(-1.0,1.0)
                         floats[7] += random.uniform(-1.0,1.0)
                         floats[11] += random.uniform(-1.0,1.0)
                         matrix_node.text = " ".join([str(f) for f in floats])
-                        print joint_node.get("name"),matrix_node.tag,"text",matrix_node.text,len(floats),floats
+                        print(joint_node.get("name"),matrix_node.tag,"text",matrix_node.text,len(floats),floats)
         
 
 if __name__ == "__main__":
@@ -96,24 +96,24 @@ def mesh_random_offsets(tree, joints):
     tree = None
 
     if args.infilename:
-        print "reading",args.infilename
+        print("reading",args.infilename)
         mesh = Collada(args.infilename)
         tree = etree.parse(args.infilename)
 
     if args.summary:
-        print "summarizing",args.infilename
+        print("summarizing",args.infilename)
         mesh_summary(mesh)
         
     if args.lock_offsets:
-        print "locking offsets for",args.lock_offsets
+        print("locking offsets for",args.lock_offsets)
         mesh_lock_offsets(tree, args.lock_offsets)
 
     if args.random_offsets:
-        print "adding random offsets for",args.random_offsets
+        print("adding random offsets for",args.random_offsets)
         mesh_random_offsets(tree, args.random_offsets)
 
     if args.outfilename:
-        print "writing",args.outfilename
+        print("writing",args.outfilename)
         f = open(args.outfilename,"w")
-        print >>f, etree.tostring(tree, pretty_print=True) #need update to get: , short_empty_elements=True)
+        print(etree.tostring(tree, pretty_print=True), file=f) #need update to get: , short_empty_elements=True)
     
diff --git a/scripts/content_tools/skel_tool.py b/scripts/content_tools/skel_tool.py
index 26f63326f1d67822872c0405e746e22ef2df6691..449ecd6a6ce01ede5688b5b1d10e6d303150efb7 100644
--- a/scripts/content_tools/skel_tool.py
+++ b/scripts/content_tools/skel_tool.py
@@ -1,4 +1,4 @@
-#!runpy.sh
+#!/usr/bin/env python3
 
 """\
 
@@ -32,14 +32,14 @@
  
 def get_joint_names(tree):
     joints = [element.get('name') for element in tree.getroot().iter() if element.tag in ['bone','collision_volume']]
-    print "joints:",joints
+    print("joints:",joints)
     return joints
 
 def get_aliases(tree):
     aliases = {}
     alroot = tree.getroot()
     for element in alroot.iter():
-        for key in element.keys():
+        for key in list(element.keys()):
             if key == 'aliases':
                 name = element.get('name')
                 val = element.get('aliases')
@@ -58,19 +58,19 @@ def float_tuple(str, n=3):
         if len(result)==n:
             return result
         else:
-            print "tuple length wrong:", str,"gave",result,"wanted len",n,"got len",len(result)
+            print("tuple length wrong:", str,"gave",result,"wanted len",n,"got len",len(result))
             raise Exception()
     except:
-        print "convert failed for:",str
+        print("convert failed for:",str)
         raise
 
 def check_symmetry(name, field, vec1, vec2):
     if vec1[0] != vec2[0]:
-        print name,field,"x match fail"
+        print(name,field,"x match fail")
     if vec1[1] != -vec2[1]:
-        print name,field,"y mirror image fail"
+        print(name,field,"y mirror image fail")
     if vec1[2] != vec2[2]:
-        print name,field,"z match fail"
+        print(name,field,"z match fail")
 
 def enforce_symmetry(tree, element, field, fix=False):
     name = element.get("name")
@@ -92,7 +92,7 @@ def get_element_by_name(tree,name):
     if len(matches)==1:
         return matches[0]
     elif len(matches)>1:
-        print "multiple matches for name",name
+        print("multiple matches for name",name)
         return None
     else:
         return None
@@ -100,7 +100,7 @@ def get_element_by_name(tree,name):
 def list_skel_tree(tree):
     for element in tree.getroot().iter():
         if element.tag == "bone":
-            print element.get("name"),"-",element.get("support")
+            print(element.get("name"),"-",element.get("support"))
     
 def validate_child_order(tree, ogtree, fix=False):
     unfixable = 0
@@ -116,12 +116,12 @@ def validate_child_order(tree, ogtree, fix=False):
         if og_element is not None:
             for echild,ochild in zip(list(element),list(og_element)):
                 if echild.get("name") != ochild.get("name"):
-                    print "Child ordering error, parent",element.get("name"),echild.get("name"),"vs",ochild.get("name")
+                    print("Child ordering error, parent",element.get("name"),echild.get("name"),"vs",ochild.get("name"))
                     if fix:
                         tofix.add(element.get("name"))
     children = {}
     for name in tofix:
-        print "FIX",name
+        print("FIX",name)
         element = get_element_by_name(tree,name)
         og_element = get_element_by_name(ogtree,name)
         children = []
@@ -130,20 +130,20 @@ def validate_child_order(tree, ogtree, fix=False):
             elt = get_element_by_name(tree,og_elt.get("name"))
             if elt is not None:
                 children.append(elt)
-                print "b:",elt.get("name")
+                print("b:",elt.get("name"))
             else:
-                print "b missing:",og_elt.get("name")
+                print("b missing:",og_elt.get("name"))
         # then add children that are not present in the original joints
         for elt in list(element):
             og_elt = get_element_by_name(ogtree,elt.get("name"))
             if og_elt is None:
                 children.append(elt)
-                print "e:",elt.get("name")
+                print("e:",elt.get("name"))
         # if we've done this right, we have a rearranged list of the same length
         if len(children)!=len(element):
-            print "children",[e.get("name") for e in children]
-            print "element",[e.get("name") for e in element]
-            print "children changes for",name,", cannot reconcile"
+            print("children",[e.get("name") for e in children])
+            print("element",[e.get("name") for e in element])
+            print("children changes for",name,", cannot reconcile")
         else:
             element[:] = children
 
@@ -163,7 +163,7 @@ def validate_child_order(tree, ogtree, fix=False):
 # - digits of precision should be consistent (again, except for old joints)
 # - new bones should have pos, pivot the same
 def validate_skel_tree(tree, ogtree, reftree, fix=False):
-    print "validate_skel_tree"
+    print("validate_skel_tree")
     (num_bones,num_cvs) = (0,0)
     unfixable = 0
     defaults = {"connected": "false", 
@@ -175,7 +175,7 @@ def validate_skel_tree(tree, ogtree, reftree, fix=False):
         # Preserve values from og_file:
         for f in ["pos","rot","scale","pivot"]:
             if og_element is not None and og_element.get(f) and (str(element.get(f)) != str(og_element.get(f))):
-                print element.get("name"),"field",f,"has changed:",og_element.get(f),"!=",element.get(f)
+                print(element.get("name"),"field",f,"has changed:",og_element.get(f),"!=",element.get(f))
                 if fix:
                     element.set(f, og_element.get(f))
 
@@ -187,17 +187,17 @@ def validate_skel_tree(tree, ogtree, reftree, fix=False):
             fields.extend(["end","connected"])
         for f in fields:
             if not element.get(f):
-                print element.get("name"),"missing required field",f
+                print(element.get("name"),"missing required field",f)
                 if fix:
                     if og_element is not None and og_element.get(f):
-                        print "fix from ogtree"
+                        print("fix from ogtree")
                         element.set(f,og_element.get(f))
                     elif ref_element is not None and ref_element.get(f):
-                        print "fix from reftree"
+                        print("fix from reftree")
                         element.set(f,ref_element.get(f))
                     else:
                         if f in defaults:
-                            print "fix by using default value",f,"=",defaults[f]
+                            print("fix by using default value",f,"=",defaults[f])
                             element.set(f,defaults[f])
                         elif f == "support":
                             if og_element is not None:
@@ -205,7 +205,7 @@ def validate_skel_tree(tree, ogtree, reftree, fix=False):
                             else:
                                 element.set(f,"extended")
                         else:
-                            print "unfixable:",element.get("name"),"no value for field",f
+                            print("unfixable:",element.get("name"),"no value for field",f)
                             unfixable += 1
 
         fix_name(element)
@@ -214,7 +214,7 @@ def validate_skel_tree(tree, ogtree, reftree, fix=False):
             enforce_symmetry(tree, element, field, fix)
         if element.get("support")=="extended":
             if element.get("pos") != element.get("pivot"):
-                print "extended joint",element.get("name"),"has mismatched pos, pivot"
+                print("extended joint",element.get("name"),"has mismatched pos, pivot")
         
 
         if element.tag == "linden_skeleton":
@@ -223,19 +223,19 @@ def validate_skel_tree(tree, ogtree, reftree, fix=False):
             all_bones = [e for e in tree.getroot().iter() if e.tag=="bone"]
             all_cvs = [e for e in tree.getroot().iter() if e.tag=="collision_volume"]
             if num_bones != len(all_bones):
-                print "wrong bone count, expected",len(all_bones),"got",num_bones
+                print("wrong bone count, expected",len(all_bones),"got",num_bones)
                 if fix:
                     element.set("num_bones", str(len(all_bones)))
             if num_cvs != len(all_cvs):
-                print "wrong cv count, expected",len(all_cvs),"got",num_cvs
+                print("wrong cv count, expected",len(all_cvs),"got",num_cvs)
                 if fix:
                     element.set("num_collision_volumes", str(len(all_cvs)))
 
-    print "skipping child order code"
+    print("skipping child order code")
     #unfixable += validate_child_order(tree, ogtree, fix)
 
     if fix and (unfixable > 0):
-        print "BAD FILE:", unfixable,"errs could not be fixed"
+        print("BAD FILE:", unfixable,"errs could not be fixed")
             
 
 def slider_info(ladtree,skeltree):
@@ -243,37 +243,37 @@ def slider_info(ladtree,skeltree):
         for skel_param in param.iter("param_skeleton"):
             bones = [b for b in skel_param.iter("bone")]
         if bones:
-            print "param",param.get("name"),"id",param.get("id")
+            print("param",param.get("name"),"id",param.get("id"))
             value_min = float(param.get("value_min"))
             value_max = float(param.get("value_max"))
             neutral = 100.0 * (0.0-value_min)/(value_max-value_min)
-            print "  neutral",neutral
+            print("  neutral",neutral)
             for b in bones:
                 scale = float_tuple(b.get("scale","0 0 0"))
                 offset = float_tuple(b.get("offset","0 0 0"))
-                print "  bone", b.get("name"), "scale", scale, "offset", offset
+                print("  bone", b.get("name"), "scale", scale, "offset", offset)
                 scale_min = [value_min * s for s in scale]
                 scale_max = [value_max * s for s in scale]
                 offset_min = [value_min * t for t in offset]
                 offset_max = [value_max * t for t in offset]
                 if (scale_min != scale_max):
-                    print "    Scale MinX", scale_min[0]
-                    print "    Scale MinY", scale_min[1]
-                    print "    Scale MinZ", scale_min[2]
-                    print "    Scale MaxX", scale_max[0]
-                    print "    Scale MaxY", scale_max[1]
-                    print "    Scale MaxZ", scale_max[2]
+                    print("    Scale MinX", scale_min[0])
+                    print("    Scale MinY", scale_min[1])
+                    print("    Scale MinZ", scale_min[2])
+                    print("    Scale MaxX", scale_max[0])
+                    print("    Scale MaxY", scale_max[1])
+                    print("    Scale MaxZ", scale_max[2])
                 if (offset_min != offset_max):
-                    print "    Offset MinX", offset_min[0]
-                    print "    Offset MinY", offset_min[1]
-                    print "    Offset MinZ", offset_min[2]
-                    print "    Offset MaxX", offset_max[0]
-                    print "    Offset MaxY", offset_max[1]
-                    print "    Offset MaxZ", offset_max[2]
+                    print("    Offset MinX", offset_min[0])
+                    print("    Offset MinY", offset_min[1])
+                    print("    Offset MinZ", offset_min[2])
+                    print("    Offset MaxX", offset_max[0])
+                    print("    Offset MaxY", offset_max[1])
+                    print("    Offset MaxZ", offset_max[2])
     
 # Check contents of avatar_lad file relative to a specified skeleton
 def validate_lad_tree(ladtree,skeltree,orig_ladtree):
-    print "validate_lad_tree"
+    print("validate_lad_tree")
     bone_names = [elt.get("name") for elt in skeltree.iter("bone")]
     bone_names.append("mScreen")
     bone_names.append("mRoot")
@@ -285,7 +285,7 @@ def validate_lad_tree(ladtree,skeltree,orig_ladtree):
         #print "attachment",att_name
         joint_name = att.get("joint")
         if not joint_name in bone_names:
-            print "att",att_name,"linked to invalid joint",joint_name
+            print("att",att_name,"linked to invalid joint",joint_name)
     for skel_param in ladtree.iter("param_skeleton"):
         skel_param_id = skel_param.get("id")
         skel_param_name = skel_param.get("name")
@@ -297,13 +297,13 @@ def validate_lad_tree(ladtree,skeltree,orig_ladtree):
         for bone in skel_param.iter("bone"):
             bone_name = bone.get("name")
             if not bone_name in bone_names:
-                print "skel param references invalid bone",bone_name
-                print etree.tostring(bone)
+                print("skel param references invalid bone",bone_name)
+                print(etree.tostring(bone))
             bone_scale = float_tuple(bone.get("scale","0 0 0"))
             bone_offset = float_tuple(bone.get("offset","0 0 0"))
             param = bone.getparent().getparent()
             if bone_scale==(0, 0, 0) and bone_offset==(0, 0, 0):
-                print "no-op bone",bone_name,"in param",param.get("id","-1")
+                print("no-op bone",bone_name,"in param",param.get("id","-1"))
             # check symmetry of sliders
             if "Right" in bone.get("name"):
                 left_name = bone_name.replace("Right","Left")
@@ -312,12 +312,12 @@ def validate_lad_tree(ladtree,skeltree,orig_ladtree):
                     if b.get("name")==left_name:
                         left_bone = b
                 if left_bone is None:
-                    print "left_bone not found",left_name,"in",param.get("id","-1")
+                    print("left_bone not found",left_name,"in",param.get("id","-1"))
                 else:
                     left_scale = float_tuple(left_bone.get("scale","0 0 0"))
                     left_offset = float_tuple(left_bone.get("offset","0 0 0"))
                     if left_scale != bone_scale:
-                        print "scale mismatch between",bone_name,"and",left_name,"in param",param.get("id","-1")
+                        print("scale mismatch between",bone_name,"and",left_name,"in param",param.get("id","-1"))
                     param_id = int(param.get("id","-1"))
                     if param_id in [661]: # shear
                         expected_offset = tuple([bone_offset[0],bone_offset[1],-bone_offset[2]])
@@ -326,7 +326,7 @@ def validate_lad_tree(ladtree,skeltree,orig_ladtree):
                     else:
                         expected_offset = tuple([bone_offset[0],-bone_offset[1],bone_offset[2]])
                     if left_offset != expected_offset:
-                        print "offset mismatch between",bone_name,"and",left_name,"in param",param.get("id","-1")
+                        print("offset mismatch between",bone_name,"and",left_name,"in param",param.get("id","-1"))
                     
     drivers = {}
     for driven_param in ladtree.iter("driven"):
@@ -340,15 +340,15 @@ def validate_lad_tree(ladtree,skeltree,orig_ladtree):
             if (actual_param.get("value_min") != driver.get("value_min") or \
                 actual_param.get("value_max") != driver.get("value_max")):
                 if args.verbose:
-                    print "MISMATCH min max:",driver.get("id"),"drives",driven_param.get("id"),"min",driver.get("value_min"),actual_param.get("value_min"),"max",driver.get("value_max"),actual_param.get("value_max")
+                    print("MISMATCH min max:",driver.get("id"),"drives",driven_param.get("id"),"min",driver.get("value_min"),actual_param.get("value_min"),"max",driver.get("value_max"),actual_param.get("value_max"))
 
     for driven_id in drivers:
         dset = drivers[driven_id]
         if len(dset) != 1:
-            print "driven_id",driven_id,"has multiple drivers",dset
+            print("driven_id",driven_id,"has multiple drivers",dset)
         else:
             if args.verbose:
-                print "driven_id",driven_id,"has one driver",dset
+                print("driven_id",driven_id,"has one driver",dset)
     if orig_ladtree:
         # make sure expected message format is unchanged
         orig_message_params_by_id = dict((int(param.get("id")),param) for param in orig_ladtree.iter("param") if param.get("group") in ["0","3"])
@@ -358,25 +358,25 @@ def validate_lad_tree(ladtree,skeltree,orig_ladtree):
         message_ids = sorted(message_params_by_id.keys())
         #print "message_ids",message_ids
         if (set(message_ids) != set(orig_message_ids)):
-            print "mismatch in message ids!"
-            print "added",set(message_ids) - set(orig_message_ids)
-            print "removed",set(orig_message_ids) - set(message_ids)
+            print("mismatch in message ids!")
+            print("added",set(message_ids) - set(orig_message_ids))
+            print("removed",set(orig_message_ids) - set(message_ids))
         else:
-            print "message ids OK"
+            print("message ids OK")
     
 def remove_joint_by_name(tree, name):
-    print "remove joint:",name
+    print("remove joint:",name)
     elt = get_element_by_name(tree,name)
     while elt is not None:
         children = list(elt)
         parent = elt.getparent()
-        print "graft",[e.get("name") for e in children],"into",parent.get("name")
-        print "remove",elt.get("name")
+        print("graft",[e.get("name") for e in children],"into",parent.get("name"))
+        print("remove",elt.get("name"))
         #parent_children = list(parent)
         loc = parent.index(elt)
         parent[loc:loc+1] = children
         elt[:] = []
-        print "parent now:",[e.get("name") for e in list(parent)]
+        print("parent now:",[e.get("name") for e in list(parent)])
         elt = get_element_by_name(tree,name)
     
 def compare_skel_trees(atree,btree):
@@ -386,9 +386,9 @@ def compare_skel_trees(atree,btree):
     b_missing = set()
     a_names = set(e.get("name") for e in atree.getroot().iter() if e.get("name"))
     b_names = set(e.get("name") for e in btree.getroot().iter() if e.get("name"))
-    print "a_names\n  ",str("\n  ").join(sorted(list(a_names)))
-    print
-    print "b_names\n  ","\n  ".join(sorted(list(b_names)))
+    print("a_names\n  ",str("\n  ").join(sorted(list(a_names))))
+    print()
+    print("b_names\n  ","\n  ".join(sorted(list(b_names))))
     all_names = set.union(a_names,b_names)
     for name in all_names:
         if not name:
@@ -396,38 +396,38 @@ def compare_skel_trees(atree,btree):
         a_element = get_element_by_name(atree,name)
         b_element = get_element_by_name(btree,name)
         if a_element is None or b_element is None:
-            print "something not found for",name,a_element,b_element
+            print("something not found for",name,a_element,b_element)
         if a_element is not None and b_element is not None:
             all_attrib = set.union(set(a_element.attrib.keys()),set(b_element.attrib.keys()))
-            print name,all_attrib
+            print(name,all_attrib)
             for att in all_attrib:
                 if a_element.get(att) != b_element.get(att):
                     if not att in diffs:
                         diffs[att] = set()
                     diffs[att].add(name)
-                print "tuples",name,att,float_tuple(a_element.get(att)),float_tuple(b_element.get(att))
+                print("tuples",name,att,float_tuple(a_element.get(att)),float_tuple(b_element.get(att)))
                 if float_tuple(a_element.get(att)) != float_tuple(b_element.get(att)):
-                    print "diff in",name,att
+                    print("diff in",name,att)
                     if not att in realdiffs:
                         realdiffs[att] = set()
                     realdiffs[att].add(name)
     for att in diffs:
-        print "Differences in",att
+        print("Differences in",att)
         for name in sorted(diffs[att]):
-            print "  ",name
+            print("  ",name)
     for att in realdiffs:
-        print "Real differences in",att
+        print("Real differences in",att)
         for name in sorted(diffs[att]):
-            print "  ",name
+            print("  ",name)
     a_missing = b_names.difference(a_names)
     b_missing = a_names.difference(b_names)
     if len(a_missing) or len(b_missing):
-        print "Missing from comparison"
+        print("Missing from comparison")
         for name in a_missing:
-            print "  ",name
-        print "Missing from infile"
+            print("  ",name)
+        print("Missing from infile")
         for name in b_missing:
-            print "  ",name
+            print("  ",name)
 
 if __name__ == "__main__":
 
@@ -499,5 +499,5 @@ def compare_skel_trees(atree,btree):
         
     if args.outfilename:
         f = open(args.outfilename,"w")
-        print >>f, etree.tostring(tree, pretty_print=True) #need update to get: , short_empty_elements=True)
+        print(etree.tostring(tree, pretty_print=True).decode(), file=f) #need update to get: , short_empty_elements=True)
 
diff --git a/scripts/md5check.py b/scripts/md5check.py
index 1a54a2844c395e037a321a789f7015e642a0faa6..20ebfa665683893896a4e5242d8e3dcedaca96b7 100755
--- a/scripts/md5check.py
+++ b/scripts/md5check.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 """\
 @file md5check.py
 @brief Replacement for message template compatibility verifier.
@@ -29,14 +29,14 @@
 import hashlib
 
 if len(sys.argv) != 3:
-    print """Usage: %s --create|<hash-digest> <file>
+    print("""Usage: %s --create|<hash-digest> <file>
 
 Creates an md5sum hash digest of the specified file content
 and compares it with the given hash digest.
 
 If --create is used instead of a hash digest, it will simply
 print out the hash digest of specified file content.
-""" % sys.argv[0]
+""" % sys.argv[0])
     sys.exit(1)
 
 if sys.argv[2] == '-':
@@ -48,9 +48,9 @@
 
 hexdigest = hashlib.md5(fh.read()).hexdigest()
 if sys.argv[1] == '--create':
-    print hexdigest
+    print(hexdigest)
 elif hexdigest == sys.argv[1]:
-    print "md5sum check passed:", filename
+    print("md5sum check passed:", filename)
 else:
-    print "md5sum check FAILED:", filename
+    print("md5sum check FAILED:", filename)
     sys.exit(1)
diff --git a/scripts/metrics/viewer_asset_logs.py b/scripts/metrics/viewer_asset_logs.py
index e48286f696d19c05cf79e241aa447e3298a10a08..dbbdfb101866285aa865d3d1dae7ac57c15be8eb 100644
--- a/scripts/metrics/viewer_asset_logs.py
+++ b/scripts/metrics/viewer_asset_logs.py
@@ -1,4 +1,4 @@
-#!runpy.sh
+#!/usr/bin/env python3
 
 """\
 
@@ -40,7 +40,7 @@ def get_metrics_record(infiles):
         context = iter(context)
 
         # get the root element
-        event, root = context.next()
+        event, root = next(context)
         try:
             for event, elem in context:
                 if event == "end" and elem.tag == "llsd":
@@ -48,7 +48,7 @@ def get_metrics_record(infiles):
                     sd = llsd.parse_xml(xmlstr)
                     yield sd
         except etree.XMLSyntaxError:
-            print "Fell off end of document"
+            print("Fell off end of document")
 
         f.close()
 
@@ -56,7 +56,7 @@ def update_stats(stats,rec):
     for region in rec["regions"]:
         region_key = (region["grid_x"],region["grid_y"])
         #print "region",region_key
-        for field, val in region.iteritems():
+        for field, val in region.items():
             if field in ["duration","grid_x","grid_y"]:
                 continue
             if field == "fps":
@@ -96,7 +96,7 @@ def update_stats(stats,rec):
     for key in sorted(stats.keys()):
         val = stats[key]
         if val["count"] > 0:
-            print key,"count",val["count"],"mean_time",val["sum"]/val["count"],"mean_bytes",val["sum_bytes"]/val["count"],"net bytes/sec",val["sum_bytes"]/val["sum"],"enqueued",val["enqueued"],"dequeued",val["dequeued"]
+            print(key,"count",val["count"],"mean_time",val["sum"]/val["count"],"mean_bytes",val["sum_bytes"]/val["count"],"net bytes/sec",val["sum_bytes"]/val["sum"],"enqueued",val["enqueued"],"dequeued",val["dequeued"])
         else:
-            print key,"count",val["count"],"enqueued",val["enqueued"],"dequeued",val["dequeued"]
+            print(key,"count",val["count"],"enqueued",val["enqueued"],"dequeued",val["dequeued"])
 
diff --git a/scripts/metrics/viewerstats.py b/scripts/metrics/viewerstats.py
index f7be3d967e19b14edbf2c0a841943ab6d18bb5a2..576598a30b9f87391679a7eabd1e0c5fb6d85a18 100755
--- a/scripts/metrics/viewerstats.py
+++ b/scripts/metrics/viewerstats.py
@@ -1,4 +1,4 @@
-#!runpy.sh
+#!/usr/bin/env python3
 
 """\
 
diff --git a/scripts/packages-formatter.py b/scripts/packages-formatter.py
index b1eef3c7211cdba715bad43c890ffa04ccc4bf58..dafa0bf53b7bb687ce88c46416ba6d589849cdae 100755
--- a/scripts/packages-formatter.py
+++ b/scripts/packages-formatter.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 """\
 This module formats the package version and copyright information for the
 viewer and its dependent packages.
@@ -37,8 +37,11 @@
 args = parser.parse_args()
 
 _autobuild=os.getenv('AUTOBUILD', 'autobuild')
+_autobuild_env=os.environ.copy()
+# Coerce stdout encoding to utf-8 as cygwin's will be detected as cp1252 otherwise.
+_autobuild_env["PYTHONIOENCODING"] = "utf-8"
 
-pkg_line=re.compile('^([\w-]+):\s+(.*)$')
+pkg_line=re.compile(r'^([\w-]+):\s+(.*)$')
 
 def autobuild(*args):
     """
@@ -50,7 +53,7 @@ def autobuild(*args):
     try:
         child = subprocess.Popen(command,
                                  stdin=None, stdout=subprocess.PIPE,
-                                 universal_newlines=True)
+                                 universal_newlines=True, env=_autobuild_env)
     except OSError as err:
         if err.errno != errno.ENOENT:
             # Don't attempt to interpret anything but ENOENT
@@ -110,20 +113,20 @@ def add_info(key, pkg, lines):
                 break
 
 # Now that we've run through all of both outputs -- are there duplicates?
-if any(pkgs for pkgs in dups.values()):
-    for key, pkgs in dups.items():
+if any(pkgs for pkgs in list(dups.values())):
+    for key, pkgs in list(dups.items()):
         if pkgs:
-            print >>sys.stderr, "Duplicate %s for %s" % (key, ", ".join(pkgs))
+            print("Duplicate %s for %s" % (key, ", ".join(pkgs)), file=sys.stderr)
     sys.exit(1)
 
-print "%s %s" % (args.channel, args.version)
-print viewer_copyright
+print("%s %s" % (args.channel, args.version))
+print(viewer_copyright)
 version = list(info['versions'].items())
 version.sort()
 for pkg, pkg_version in version:
-    print ': '.join([pkg, pkg_version])
+    print(': '.join([pkg, pkg_version]))
     try:
-        print info['copyrights'][pkg]
+        print(info['copyrights'][pkg])
     except KeyError:
         sys.exit("No copyright for %s" % pkg)
-    print
+    print()
diff --git a/scripts/setup-path.py b/scripts/setup-path.py
index ce83d815bf8d0ac3c743d8848e715da48d2b29bb..427d1195200cd5eeeb76ca0ef968789200832e2c 100755
--- a/scripts/setup-path.py
+++ b/scripts/setup-path.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 """\
 @file setup-path.py
 @brief Get the python library directory in the path, so we don't have
diff --git a/scripts/template_verifier.py b/scripts/template_verifier.py
index f13f012ffa2923592d23e1233159751ebdc224e2..7a05bac5ad882a927d7cf88117293da429f22a3d 100755
--- a/scripts/template_verifier.py
+++ b/scripts/template_verifier.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 """\
 @file template_verifier.py
 @brief Message template compatibility verifier.
@@ -58,16 +58,15 @@ def add_indra_lib_path():
                 sys.path.insert(0, dir)
             break
     else:
-        print >>sys.stderr, "This script is not inside a valid installation."
+        print("This script is not inside a valid installation.", file=sys.stderr)
         sys.exit(1)
 
 add_indra_lib_path()
 
 import optparse
 import os
-import urllib
+import urllib.request, urllib.parse, urllib.error
 import hashlib
-import certifi
 
 from indra.ipc import compatibility
 from indra.ipc import tokenstream
@@ -91,7 +90,7 @@ def getstatusoutput(command):
 
 
 def die(msg):
-    print >>sys.stderr, msg
+    print(msg, file=sys.stderr)
     sys.exit(1)
 
 MESSAGE_TEMPLATE = 'message_template.msg'
@@ -107,7 +106,7 @@ def retry(times, function, *args, **kwargs):
     for i in range(times):
         try:
             return function(*args, **kwargs)
-        except Exception, e:
+        except Exception as e:
             if i == times - 1:
                 raise e  # we retried all the times we could
 
@@ -139,10 +138,14 @@ def fetch(url):
     if url.startswith('file://'):
         # just open the file directly because urllib is dumb about these things
         file_name = url[len('file://'):]
-        return open(file_name).read()
+        with open(file_name, 'rb') as f:
+            return f.read()
     else:
-        # *FIX: this doesn't throw an exception for a 404, and oddly enough the sl.com 404 page actually gets parsed successfully
-        return ''.join(urllib.urlopen(url, cafile = certifi.where()).readlines())   
+        with urllib.request.urlopen(url) as res:
+            body = res.read()
+            if res.status > 299:
+                sys.exit("ERROR: Unable to download %s. HTTP status %d.\n%s" % (url, res.status, body.decode("utf-8")))
+            return body
 
 def cache_master(master_url):
     """Using the url for the master, updates the local cache, and returns an url to the local cache."""
@@ -154,23 +157,22 @@ def cache_master(master_url):
         and time.time() - os.path.getmtime(master_cache) < MAX_MASTER_AGE):
         return master_cache_url  # our cache is fresh
     # new master doesn't exist or isn't fresh
-    print "Refreshing master cache from %s" % master_url
+    print("Refreshing master cache from %s" % master_url)
     def get_and_test_master():
         new_master_contents = fetch(master_url)
-        llmessage.parseTemplateString(new_master_contents)
+        llmessage.parseTemplateString(new_master_contents.decode("utf-8"))
         return new_master_contents
     try:
         new_master_contents = retry(3, get_and_test_master)
-    except IOError, e:
+    except IOError as e:
         # the refresh failed, so we should just soldier on
-        print "WARNING: unable to download new master, probably due to network error.  Your message template compatibility may be suspect."
-        print "Cause: %s" % e
+        print("WARNING: unable to download new master, probably due to network error.  Your message template compatibility may be suspect.")
+        print("Cause: %s" % e)
         return master_cache_url
     try:
         tmpname = '%s.%d' % (master_cache, os.getpid())
-        mc = open(tmpname, 'wb')
-        mc.write(new_master_contents)
-        mc.close()
+        with open(tmpname, "wb") as mc:
+            mc.write(new_master_contents)
         try:
             os.rename(tmpname, master_cache)
         except OSError:
@@ -181,9 +183,9 @@ def get_and_test_master():
             # a single day.
             os.unlink(master_cache)
             os.rename(tmpname, master_cache)
-    except IOError, e:
-        print "WARNING: Unable to write master message template to %s, proceeding without cache." % master_cache
-        print "Cause: %s" % e
+    except IOError as e:
+        print("WARNING: Unable to write master message template to %s, proceeding without cache." % master_cache)
+        print("Cause: %s" % e)
         return master_url
     return master_cache_url
 
@@ -247,16 +249,16 @@ def run(sysargs):
     # both current and master supplied in positional params
     if len(args) == 2:
         master_filename, current_filename = args
-        print "master:", master_filename
-        print "current:", current_filename
+        print("master:", master_filename)
+        print("current:", current_filename)
         master_url = 'file://%s' % master_filename
         current_url = 'file://%s' % current_filename
     # only current supplied in positional param
     elif len(args) == 1:
         master_url = None
         current_filename = args[0]
-        print "master:", options.master_url 
-        print "current:", current_filename
+        print("master:", options.master_url)
+        print("current:", current_filename)
         current_url = 'file://%s' % current_filename
     # nothing specified, use defaults for everything
     elif len(args) == 0:
@@ -270,8 +272,8 @@ def run(sysargs):
         
     if current_url is None:
         current_filename = local_template_filename()
-        print "master:", options.master_url
-        print "current:", current_filename
+        print("master:", options.master_url)
+        print("current:", current_filename)
         current_url = 'file://%s' % current_filename
 
     # retrieve the contents of the local template
@@ -282,46 +284,45 @@ def run(sysargs):
         sha_url = "%s.sha256" % current_url
         current_sha = fetch(sha_url)
         if hexdigest == current_sha:
-            print "Message template SHA_256 has not changed."
+            print("Message template SHA_256 has not changed.")
             sys.exit(0)
 
     # and check for syntax
-    current_parsed = llmessage.parseTemplateString(current)
+    current_parsed = llmessage.parseTemplateString(current.decode("utf-8"))
 
     if options.cache_master:
         # optionally return a url to a locally-cached master so we don't hit the network all the time
         master_url = cache_master(master_url)
 
     def parse_master_url():
-        master = fetch(master_url)
+        master = fetch(master_url).decode("utf-8")
         return llmessage.parseTemplateString(master)
     try:
         master_parsed = retry(3, parse_master_url)
-    except (IOError, tokenstream.ParseError), e:
+    except (IOError, tokenstream.ParseError) as e:
         if options.mode == 'production':
             raise e
         else:
-            print "WARNING: problems retrieving the master from %s."  % master_url
-            print "Syntax-checking the local template ONLY, no compatibility check is being run."
-            print "Cause: %s\n\n" % e
+            print("WARNING: problems retrieving the master from %s."  % master_url)
+            print("Syntax-checking the local template ONLY, no compatibility check is being run.")
+            print("Cause: %s\n\n" % e)
             return 0
         
     acceptable, compat = compare(
         master_parsed, current_parsed, options.mode)
 
     def explain(header, compat):
-        print header
+        print(header)
         # indent compatibility explanation
-        print '\n\t'.join(compat.explain().split('\n'))
+        print('\n\t'.join(compat.explain().split('\n')))
 
     if acceptable:
         explain("--- PASS ---", compat)
         if options.force_verification == False:
-            print "Updating sha256 to %s" % hexdigest
+            print("Updating sha256 to %s" % hexdigest)
             sha_filename = "%s.sha256" % current_filename
-            sha_file = open(sha_filename, 'w')
-            sha_file.write(hexdigest)
-            sha_file.close()
+            with open(sha_filename, 'w') as sha_file:
+                sha_file.write(hexdigest)
     else:
         explain("*** FAIL ***", compat)
         return 1