Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Quick fixes to make Python3 work #35

Open
wants to merge 4 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
32 changes: 16 additions & 16 deletions dsfileinformation.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,26 +50,26 @@
(wbuildnumber, ) = unpack('I', header[224:228])
(wservicepack, ) = unpack('I', header[228:232])

# Report the ESE/NTDS header fields. Offsets below index into the raw
# `header` bytes read earlier in this script; multi-byte fields are shown
# byte-reversed ([::-1]) so the little-endian values read naturally.
# bytes.hex() replaces the py2 hexlify()+str idiom.
print("Header checksum: %s" % header[:4][::-1].hex())
print("Signature: %s" % header[4:8][::-1].hex())
print("File format version: %s" % header[8:12][::-1].hex())
print("File type: %s" % header[12:16][::-1].hex())
print("Page size: %d bytes" % pagesize)
print("DB time: %s" % header[16:24][::-1].hex())
print("Windows version: %d.%d (%d) Service pack %d" % (
    wmajorversion,
    wminorversion,
    wbuildnumber,
    wservicepack
))
print("Creation time: %04d.%02d.%02d %02d:%02d:%02d" % dsGetDBLogTimeStampStr(header[24:52][4:12]))
print("Attach time: %04d.%02d.%02d %02d:%02d:%02d" % dsGetDBLogTimeStampStr(header[72:80]))
# A zero first byte in the detach timestamp marks a dirty (not cleanly
# detached) database.
if unpack("B", header[88:96][:1]) == (0, ):
    print("Detach time: database is in dirty state")
else:
    print("Detach time: %04d.%02d.%02d %02d:%02d:%02d" % dsGetDBLogTimeStampStr(header[88:96]))
print("Consistent time: %04d.%02d.%02d %02d:%02d:%02d" % dsGetDBLogTimeStampStr(header[64:72]))
print("Recovery time: %04d.%02d.%02d %02d:%02d:%02d" % dsGetDBLogTimeStampStr(header[244:252]))
print("Header dump (first 672 bytes):")
print(dump(header[:672], 16, 4))
f.close()
8 changes: 4 additions & 4 deletions framework/addrspace.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,11 +35,11 @@
class FileAddressSpace:
    def __init__(self, fname, mode='rb', fast=False):
        """Flat-file backed address space.

        fname -- path of the backing file (also exposed as .name)
        mode  -- mode passed straight to open(); default binary read
        fast  -- when True, open a second handle (fast_fhandle) used by
                 the fast read path elsewhere in this class
        """
        self.fname = fname
        self.name = fname
        self.fhandle = open(fname, mode)
        self.fsize = os.path.getsize(fname)

        # Optional extra handle; `if fast:` replaces the `== True` anti-idiom.
        if fast:
            self.fast_fhandle = open(fname, mode)
def fread(self,len):
Expand Down Expand Up @@ -80,7 +80,7 @@ def vtop(self, vaddr):

def read(self, vaddr, length, zero=False):
first_block = BLOCK_SIZE - vaddr % BLOCK_SIZE
full_blocks = ((length + (vaddr % BLOCK_SIZE)) / BLOCK_SIZE) - 1
full_blocks = ((length + (vaddr % BLOCK_SIZE)) // BLOCK_SIZE) - 1
left_over = (length + vaddr) % BLOCK_SIZE

paddr = self.vtop(vaddr)
Expand Down
8 changes: 4 additions & 4 deletions framework/win32/hashdump.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,8 +54,8 @@
# RC4 key-derivation constants used when decrypting SAM hashes; the
# trailing NUL is part of the key material.
antpassword = "NTPASSWORD\0"
almpassword = "LMPASSWORD\0"

# Well-known hashes of the empty password (LM and NT).
# bytes.fromhex() is the Python 3 replacement for str.decode('hex').
empty_lm = bytes.fromhex("aad3b435b51404eeaad3b435b51404ee")
empty_nt = bytes.fromhex("31d6cfe0d16ae931b73c59d7e0c089c0")

def str_to_key(s):
key = []
Expand Down Expand Up @@ -235,8 +235,8 @@ def dump_hashes(sysaddr, samaddr):
lmhash,nthash = get_user_hashes(user,hbootkey)
if not lmhash: lmhash = empty_lm
if not nthash: nthash = empty_nt
print "%s:%d:%s:%s:::" % (get_user_name(user), int(user.Name,16),
lmhash.encode('hex'), nthash.encode('hex'))
print("%s:%d:%s:%s:::" % (get_user_name(user), int(user.Name,16),
lmhash.encode('hex'), nthash.encode('hex')))

def dump_file_hashes(syshive_fname, samhive_fname):
sysaddr = HiveFileAddressSpace(syshive_fname)
Expand Down
8 changes: 4 additions & 4 deletions framework/win32/rawreg.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,9 +23,9 @@
from struct import unpack

# Offset of the root _CM_KEY_NODE within the hive address space.
ROOT_INDEX = 0x20
# Registry subkey-list signatures ("lh", "lf", "ri") as little-endian
# 16-bit integers; struct.unpack requires bytes input on Python 3.
LH_SIG = unpack("<H", b"lh")[0]
LF_SIG = unpack("<H", b"lf")[0]
RI_SIG = unpack("<H", b"ri")[0]

def get_root(address_space):
    """Return the hive's root key: a _CM_KEY_NODE object at ROOT_INDEX."""
    return Obj("_CM_KEY_NODE", ROOT_INDEX, address_space)
Expand All @@ -38,7 +38,7 @@ def open_key(root, key):
for s in subkeys(root):
if s.Name.upper() == keyname.upper():
return open_key(s, key)
print "ERR: Couldn't find subkey %s of %s" % (keyname, root.Name)
print("ERR: Couldn't find subkey %s of %s" % (keyname, root.Name))
return None

def subkeys(key,stable=True):
Expand Down
2 changes: 1 addition & 1 deletion ntds/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@
debug = True

def simple_exception(type, value, traceback):
    """Minimal sys.excepthook replacement: report the error value and exit.

    Matches the (type, value, traceback) excepthook signature but prints
    only the value; the traceback is deliberately suppressed for non-debug
    runs. Always terminates the process with status 1.
    """
    # file.write() takes exactly one string argument (the old
    # multi-argument call raised a TypeError), so build the message first.
    sys.stderr.write("[!] Error! " + str(value) + "\n")
    sys.exit(1)

if debug == False:
Expand Down
54 changes: 27 additions & 27 deletions ntds/dsdatabase.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,9 +26,9 @@
import time
import ntds.dsfielddictionary
from ntds.dsencryption import *
from lib.map import *
from lib.sid import *
from lib.guid import *
from .lib.map import *
from .lib.sid import *
from .lib.guid import *
import pickle

dsMapOffsetByLineId = {} #Map that can be used to find the offset for line
Expand All @@ -49,7 +49,7 @@ def dsInitDatabase(dsESEFile, workdir):
global dsDatabaseSize
dsDatabaseSize = stat(dsESEFile).st_size
sys.stderr.write("\n[+] Initialising engine...\n")
db = open(dsESEFile , 'rb', 0)
db = open(dsESEFile , 'r')
db.seek(0)
line = db.readline()
if line == "":
Expand Down Expand Up @@ -197,7 +197,7 @@ def dsCheckMaps(dsDatabase, workdir):
dsLoadMap(path.join(workdir, "ridguid.map"), dsMapRecordIdByGUID)
dsLoadMap(path.join(workdir, "ridtype.map"), dsMapRecordIdByTypeId)

pek = open(path.join(workdir, "pek.map"), "rb")
pek = open(path.join(workdir, "pek.map"), "r")
ntds.dsfielddictionary.dsEncryptedPEK = pek.read()
pek.close()

Expand Down Expand Up @@ -289,19 +289,19 @@ def dsBuildMaps(dsDatabase, workdir):
try:
dsMapRecordIdBySID[str(SID(record[ntds.dsfielddictionary.dsSIDIndex]))]
except KeyError:
dsMapRecordIdBySID[str(SID(record[ntds.dsfielddictionary.dsSIDIndex]))] = int(record[ntds.dsfielddictionary.dsRecordIdIndex])
dsMapRecordIdBySID[str(SID(record[ntds.dsfielddictionary.dsSIDIndex]))] = int(record[ntds.dsfielddictionary.dsRecordIdIndex])

try:
dsMapRecordIdByGUID[str(GUID(record[ntds.dsfielddictionary.dsObjectGUIDIndex]))]
except KeyError:
dsMapRecordIdByGUID[str(GUID(record[ntds.dsfielddictionary.dsObjectGUIDIndex]))] = int(record[ntds.dsfielddictionary.dsRecordIdIndex])
dsMapRecordIdByGUID[str(GUID(record[ntds.dsfielddictionary.dsObjectGUIDIndex]))] = int(record[ntds.dsfielddictionary.dsRecordIdIndex])

try:
if record[ntds.dsfielddictionary.dsObjectTypeIdIndex] != "":
dsMapRecordIdByTypeId[int(record[ntds.dsfielddictionary.dsObjectTypeIdIndex])].append(int(record[ntds.dsfielddictionary.dsRecordIdIndex]))
if record[ntds.dsfielddictionary.dsObjectTypeIdIndex] != "":
dsMapRecordIdByTypeId[int(record[ntds.dsfielddictionary.dsObjectTypeIdIndex])].append(int(record[ntds.dsfielddictionary.dsRecordIdIndex]))
except KeyError:
dsMapRecordIdByTypeId[int(record[ntds.dsfielddictionary.dsObjectTypeIdIndex])] = []
dsMapRecordIdByTypeId[int(record[ntds.dsfielddictionary.dsObjectTypeIdIndex])].append(int(record[ntds.dsfielddictionary.dsRecordIdIndex]))
dsMapRecordIdByTypeId[int(record[ntds.dsfielddictionary.dsObjectTypeIdIndex])] = []
dsMapRecordIdByTypeId[int(record[ntds.dsfielddictionary.dsObjectTypeIdIndex])].append(int(record[ntds.dsfielddictionary.dsRecordIdIndex]))

lineid += 1
sys.stderr.write("\n")
Expand All @@ -326,7 +326,7 @@ def dsBuildMaps(dsDatabase, workdir):
pickle.dump(dsMapChildsByRecordId, childsrid)
childsrid.close()

pek = open(path.join(workdir, "pek.map"), "wb")
pek = open(path.join(workdir, "pek.map"), "w")
pek.write(ntds.dsfielddictionary.dsEncryptedPEK)
pek.close()

Expand Down Expand Up @@ -355,28 +355,28 @@ def dsBuildTypeMap(dsDatabase, workdir):
sys.stderr.write("[+] Sanity checks...\n")

if dsSchemaTypeId == -1:
sys.stderr.write("[!] Error! The Schema object's type id cannot be found! The DB is inconsistent!\n")
sys.exit(1)
sys.stderr.write("[!] Error! The Schema object's type id cannot be found! The DB is inconsistent!\n")
sys.exit(1)
elif len(dsMapRecordIdByTypeId[dsSchemaTypeId]) > 1:
sys.stderr.write("[!] Warning! There are more than 1 schema objects! The DB is inconsistent!\n")
sys.stderr.write(" Schema record ids: " + str(dsMapRecordIdByTypeId[dsSchemaTypeId]) + "\n")
sys.stderr.write(" Please select the schema id you would like to use!\n")
tmp = raw_input()
while True:
try:
if int(tmp) in dsMapRecordIdByTypeId[dsSchemaTypeId]:
sys.stderr.write("[!] Warning! There are more than 1 schema objects! The DB is inconsistent!\n")
sys.stderr.write(" Schema record ids: " + str(dsMapRecordIdByTypeId[dsSchemaTypeId]) + "\n")
sys.stderr.write(" Please select the schema id you would like to use!\n")
tmp = input()
while True:
try:
if int(tmp) in dsMapRecordIdByTypeId[dsSchemaTypeId]:
schemarecid = int(tmp)
break
else:
sys.stderr.write(" Please enter a number that is in the list of ids!\n")
tmp = raw_input()
sys.stderr.write(" Please enter a number that is in the list of ids!\n")
tmp = input()
except:
sys.stderr.write(" Please enter a number!\n")
tmp = raw_input()
sys.stderr.write(" Please enter a number!\n")
tmp = input()
elif len(dsMapRecordIdByTypeId[dsSchemaTypeId]) == 0:
sys.stderr.write("[!] Warning! There is no schema object! The DB is inconsistent!\n")
sys.stderr.write("[!] Warning! There is no schema object! The DB is inconsistent!\n")
else:
schemarecid = dsMapRecordIdByTypeId[dsSchemaTypeId][0]
schemarecid = dsMapRecordIdByTypeId[dsSchemaTypeId][0]

sys.stderr.write(" Schema record id: %d\n" % schemarecid)
sys.stderr.write(" Schema type id: %d\n" % int(dsMapTypeByRecordId[schemarecid]))
Expand Down
4 changes: 2 additions & 2 deletions ntds/dslink.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,15 +21,15 @@
import ntds.dsfielddictionary
from ntds.dstime import *
import sys
from lib.map import *
from .lib.map import *
import pickle
from os import path

dsMapLinks = {}
dsMapBackwardLinks = {}

def dsInitLinks(dsESEFile, workdir):
dl = open(dsESEFile , 'rb', 0)
dl = open(dsESEFile, 'r')
dl.seek(0)
line = dl.readline()
if line == "":
Expand Down
44 changes: 22 additions & 22 deletions ntds/dsobjects.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,7 @@ def getAncestors(self, dsDatabase):
ancestorlist = []
ancestorvalue = self.Record[ntds.dsfielddictionary.dsAncestorsIndex]
if ancestorvalue != "":
l = len(ancestorvalue) / 8
l = len(ancestorvalue) // 8
for aid in range(0, l):
(ancestorid,) = unpack('I', unhexlify(ancestorvalue[aid * 8:aid * 8 + 8]))
ancestor = dsObject(dsDatabase, ancestorid)
Expand Down Expand Up @@ -213,12 +213,12 @@ def getPasswordHashes(self):
encnthash = unhexlify(self.Record[ntds.dsfielddictionary.dsNTHashIndex][16:])
if enclmhash != '':
lmhash = dsDecryptWithPEK(ntds.dsfielddictionary.dsPEK, enclmhash)
lmhash = hexlify(dsDecryptSingleHash(self.SID.RID, lmhash))
lmhash = dsDecryptSingleHash(self.SID.RID, lmhash).hex()
if lmhash == '':
lmhash = "NO PASSWORD"
if encnthash != '':
nthash = dsDecryptWithPEK(ntds.dsfielddictionary.dsPEK, encnthash)
nthash = hexlify(dsDecryptSingleHash(self.SID.RID, nthash))
nthash = dsDecryptSingleHash(self.SID.RID, nthash).hex()
if nthash == '':
nthash = "NO PASSWORD"
return (lmhash, nthash)
Expand All @@ -236,14 +236,14 @@ def getPasswordHistory(self):
if lmhash == '':
lmhistory.append('NO PASSWORD')
else:
lmhistory.append(hexlify(lmhash))
lmhistory.append(lmhash.hex())
if snthistory != "":
for hindex in range(0,len(snthistory)/16):
nthash = dsDecryptSingleHash(self.SID.RID, snthistory[hindex*16:(hindex+1)*16])
if nthash == '':
nthistory.append('NO PASSWORD')
else:
nthistory.append(hexlify(nthash))
nthistory.append(nthash.hex())
return (lmhistory, nthistory)

def getSupplementalCredentials(self):
Expand Down Expand Up @@ -477,19 +477,19 @@ def __init__(self):
self.OlderCredentials = []

def Print(self, indent=""):
print "{0}salt: {1}".format(indent, self.DefaultSalt)
print("{0}salt: {1}".format(indent, self.DefaultSalt))
if len(self.Credentials) > 0:
print "{0}Credentials".format(indent)
print("{0}Credentials".format(indent))
for key in self.Credentials:
print "{0} {1} {2}".format(indent, key.KeyType, hexlify(key.Key))
print("{0} {1} {2}".format(indent, key.KeyType, key.Key.hex()))
if len(self.OldCredentials) > 0:
print "{0}OldCredentials".format(indent)
print("{0}OldCredentials".format(indent))
for key in self.OldCredentials:
print "{0} {1} {2}".format(indent, key.KeyType, hexlify(key.Key))
print("{0} {1} {2}".format(indent, key.KeyType, key.Key.hex()))
if len(self.OlderCredentials) > 0:
print "{0}OlderCredentials".format(indent)
print("{0}OlderCredentials".format(indent))
for key in self.OlderCredentials:
print "{0} {1} {2}".format(indent, key.KeyType, hexlify(key.Key))
print("{0} {1} {2}".format(indent, key.KeyType, key.Key.hex()))

class dsSupplCredentials:
'''
Expand All @@ -507,23 +507,23 @@ def __init__(self, text):

def Print(self, indent=""):
if self.KerberosNewerKeys != None:
print "{0}Kerberos newer keys".format(indent)
print("{0}Kerberos newer keys".format(indent))
self.KerberosNewerKeys.Print(indent + " ")
if self.KerberosKeys != None:
print "{0}Kerberos keys".format(indent)
print("{0}Kerberos keys".format(indent))
self.KerberosKeys.Print(indent + " ")
if self.WDigestHashes != None:
print "{0}WDigest hashes".format(indent)
print("{0}WDigest hashes".format(indent))
for h in self.WDigestHashes:
print "{0} {1}".format(indent, hexlify(h))
print("{0} {1}".format(indent, h.hex()))
if self.Packages != None:
print "{0}Packages".format(indent)
print("{0}Packages".format(indent))
for p in self.Packages:
print "{0} {1}".format(indent, p)
print("{0} {1}".format(indent, p))
if self.Password != None:
print "{0}Password: {1}".format(indent, self.Password)
print "Debug: "
print dump(self.Text,16,16)
print("{0}Password: {1}".format(indent, self.Password))
print("Debug: ")
print(dump(self.Text,16,16))

def ParseUserProperties(self, text):
offset = 0
Expand Down Expand Up @@ -613,7 +613,7 @@ def ParseUserProperty(self, text, offset):
except:
self.Password = dump(unhexlify(text[offset:offset+ValueLength]),16,16)
else:
print Name
print(Name)
return offset + ValueLength

def ParseWDigestPropertyValue(self, text):
Expand Down
18 changes: 9 additions & 9 deletions ntds/dstime.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,12 +24,12 @@
from struct import *

class dsUTC(datetime.tzinfo):
    """Fixed-offset tzinfo representing UTC: zero offset, no DST."""

    def utcoffset(self, dt):
        # UTC is zero hours from UTC by definition.
        return datetime.timedelta(hours=0)

    def dst(self, dt):
        # UTC never observes daylight saving time.
        return datetime.timedelta(0)

    def tzname(self, dt):
        return "UTC"

tzinfoUTC=dsUTC()
_FILETIME_null_date = datetime.datetime(1601, 1, 1, 0, 0, 0, tzinfo=tzinfoUTC)
def dsGetDSDateTime(dsTimeStamp):
    """Convert a DS FILETIME-style timestamp to a tz-aware datetime.

    Returns the string "Never" when dsVerifyDSTimeStamp rejects the stamp.
    """
    if dsVerifyDSTimeStamp(dsTimeStamp) == -1:
        return "Never"
    # FILETIME ticks are 100 ns; integer-divide by 10 to get microseconds
    # (true division would hand timedelta a float on Python 3).
    return _FILETIME_null_date + datetime.timedelta(microseconds=int(dsTimeStamp) // 10)

def dsGetDSTimeStampStr(dsTimeStamp):
    """Render a DS FILETIME-style timestamp as a string ("Never" if invalid)."""
    if dsVerifyDSTimeStamp(dsTimeStamp) == -1:
        return "Never"
    # 100 ns ticks -> microseconds via integer division.
    delta = datetime.timedelta(microseconds=int(dsTimeStamp) // 10)
    return str(_FILETIME_null_date + delta)

def dsGetPOSIXTimeStamp(dsTimeStamp):
ts = 0
def dsGetDBTimeStampStr(dsDBTimeStamp):
    """Unpack an 8-byte DB timestamp into an (hours, mins, secs) tuple.

    Returns the empty string when fewer than 8 bytes are supplied.
    NOTE(review): despite the "Str" name this returns a tuple — callers
    appear to feed it to a %-format; confirm against call sites.
    """
    if len(dsDBTimeStamp) < 8:
        return ""
    # Three unsigned shorts followed by two pad bytes.
    hours, mins, secs = unpack('HHHxx', dsDBTimeStamp)
    return (hours, mins, secs)
Loading