Added shared drive crawler #141

Merged
13 commits merged on Sep 1, 2019
3 changes: 2 additions & 1 deletion README.md
@@ -6,8 +6,9 @@ It features a few tools:
- Logs credentials used when connecting
- Steals data copied to the clipboard
- Saves a copy of the files transferred over the network
- Crawls shared drives in the background and saves them locally
- Saves replays of connections so you can look at them later
- Run console commands or PowerShell payloads automatically on new connections
- Runs console commands or PowerShell payloads automatically on new connections
- RDP Player:
- See live RDP connections coming from the MITM
- View replays of RDP connections
34 changes: 31 additions & 3 deletions bin/pyrdp-mitm.py
@@ -46,9 +46,16 @@ def __init__(self, config: MITMConfig):

def buildProtocol(self, addr):
sessionID = f"{names.get_first_name()}{random.randrange(100000,999999)}"
logger = logging.getLogger(LOGGER_NAMES.MITM_CONNECTIONS)
logger = SessionLogger(logger, sessionID)
mitm = RDPMITM(logger, self.config)

# The main logger logs to a file and to stdout
mainlogger = logging.getLogger(LOGGER_NAMES.MITM_CONNECTIONS)
mainlogger = SessionLogger(mainlogger, sessionID)

# The crawler logger only logs to a file, for analysis purposes
crawlerLogger = logging.getLogger(LOGGER_NAMES.CRAWLER)
crawlerLogger = SessionLogger(crawlerLogger, sessionID)

mitm = RDPMITM(mainlogger, crawlerLogger, self.config)

return mitm.getProtocol()
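For context: SessionLogger (see the pyrdp/logging/adapters.py change below) appears to be a logging.LoggerAdapter that injects the session ID into every record, which is what the per-session formatting relies on. A minimal standalone sketch of that mechanism, with a made-up logger name and session ID:

import logging

# Standalone sketch, not pyrdp code: an adapter tags each record with a sessionID,
# which the formatters configured in prepareLoggers() can then print.
logging.basicConfig(format="[%(name)s] %(sessionID)s - %(message)s", level=logging.INFO)

crawlerLogger = logging.getLogger("crawler")
sessionLogger = logging.LoggerAdapter(crawlerLogger, {"sessionID": "Alice123456"})

sessionLogger.info("Crawling started")
# -> [crawler] Alice123456 - Crawling started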

@@ -87,6 +94,21 @@ def prepareLoggers(logLevel: int, logFilter: str, sensorID: str, outDir: Path):
connectionsLogger = logging.getLogger(LOGGER_NAMES.MITM_CONNECTIONS)
connectionsLogger.addHandler(jsonFileHandler)

crawlerFormatter = VariableFormatter("[{asctime}] - {sessionID} - {message}", style = "{", defaultVariables = {
"sessionID": "GLOBAL"
})

crawlerFileHandler = logging.FileHandler(logDir / "crawl.log")
crawlerFileHandler.setFormatter(crawlerFormatter)

jsonCrawlerFileHandler = logging.FileHandler(logDir / "crawl.json")
jsonCrawlerFileHandler.setFormatter(JSONFormatter({"sensor": sensorID}))

crawlerLogger = logging.getLogger(LOGGER_NAMES.CRAWLER)
crawlerLogger.addHandler(crawlerFileHandler)
crawlerLogger.addHandler(jsonCrawlerFileHandler)
crawlerLogger.setLevel(logging.INFO)

log.prepareSSLLogger(logDir / "ssl.log")
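The "GLOBAL" default above covers records logged outside any session. VariableFormatter's implementation is not part of this diff; a minimal sketch of a formatter behaving that way (the class name and handler below are made up for the example):

import logging

# Sketch only, assuming behaviour similar to pyrdp's VariableFormatter: variables
# missing from a record fall back to the supplied defaults (here sessionID -> "GLOBAL").
class DefaultingFormatter(logging.Formatter):
    def __init__(self, fmt, style="{", defaultVariables=None):
        super().__init__(fmt, style=style)
        self.defaultVariables = defaultVariables or {}

    def format(self, record):
        for name, value in self.defaultVariables.items():
            if not hasattr(record, name):
                setattr(record, name, value)
        return super().format(record)

handler = logging.StreamHandler()
handler.setFormatter(DefaultingFormatter("[{asctime}] - {sessionID} - {message}",
                                          defaultVariables={"sessionID": "GLOBAL"}))
logging.getLogger("crawler-demo").addHandler(handler)
logging.getLogger("crawler-demo").warning("no session attached")  # ... - GLOBAL - no session attached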


@@ -169,6 +191,9 @@ def main():
parser.add_argument("--payload-powershell-file", help="PowerShell script to run automatically upon connection (as -EncodedCommand)", default=None)
parser.add_argument("--payload-delay", help="Time to wait after a new connection before sending the payload, in milliseconds", default=None)
parser.add_argument("--payload-duration", help="Amount of time for which input / output should be dropped, in milliseconds. This can be used to hide the payload screen.", default=None)
parser.add_argument("--disable-crawler", help="Disable automatic shared drive scraping", action="store_true")
parser.add_argument("--crawler-match-file", help="File to be used by the crawler to chose what to download when scraping the client shared drives.", default=None)
parser.add_argument("--crawler-ignore-file", help="File to be used by the crawler to chose what folders to avoid when scraping the client shared drives.", default=None)
parser.add_argument("--no-replay", help="Disable replay recording", action="store_true")

args = parser.parse_args()
@@ -210,6 +235,9 @@ def main():
config.replacementUsername = args.username
config.replacementPassword = args.password
config.outDir = outDir
config.disableCrawler = args.disable_crawler
config.crawlerMatchFileName = args.crawler_match_file
config.crawlerIgnoreFileName = args.crawler_ignore_file
config.recordReplays = not args.no_replay
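The format expected in the match and ignore files is not shown in this diff. Purely as a hypothetical illustration, assuming one regular expression per line, logic of this shape could decide what the crawler downloads or skips:

import re
from pathlib import Path
from typing import List

# Hypothetical sketch only: the actual pyrdp crawler logic and file format may differ.
def loadPatterns(fileName: str) -> List[re.Pattern]:
    lines = Path(fileName).read_text().splitlines()
    return [re.compile(line) for line in lines if line.strip()]

def shouldDownload(path: str, matchPatterns: List[re.Pattern], ignorePatterns: List[re.Pattern]) -> bool:
    # Skip anything matching an ignore pattern, then download only what matches.
    if any(p.search(path) for p in ignorePatterns):
        return False
    return any(p.search(path) for p in matchPatterns)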


1 change: 1 addition & 0 deletions pyrdp/logging/adapters.py
@@ -19,6 +19,7 @@ def __init__(self, logger: logging.Logger, sessionID: str):
:param sessionID: session ID value.
"""
super().__init__(logger, {"sessionID": sessionID})
self.sessionID = sessionID

def createChild(self, childName: str, sessionID: str = None) -> 'SessionLogger':
"""
5 changes: 3 additions & 2 deletions pyrdp/logging/log.py
@@ -11,20 +11,22 @@


class LOGGER_NAMES:
# Root logger
PYRDP = "pyrdp"
MITM = f"{PYRDP}.mitm"
MITM_CONNECTIONS = f"{MITM}.connections"
PLAYER = f"{PYRDP}.player"
PLAYER_UI = f"{PLAYER}.ui"

# Independent logger
CRAWLER = "crawler"

def getSSLLogger():
"""
Get the SSL logger.
"""
return logging.getLogger("ssl")


def prepareSSLLogger(path: Path):
"""
Prepares the SSL master secret logger.
@@ -45,7 +47,6 @@ def prepareSSLLogger(path: Path):
logger.addHandler(streamHandler)
logger.setLevel(logging.INFO)


def info(*args):
logging.getLogger(LOGGER_NAMES.PYRDP).info(*args)
