Skip to content

Commit

Permalink
Check in some additional utility scripts
Browse files Browse the repository at this point in the history
The scripts are pretty self-explanatory.
- we can generate tokens for token-based authentication
- find active users
- print out invalid pipeline states
  • Loading branch information
shankari committed Sep 15, 2021
1 parent 7d11c89 commit 2d282c6
Show file tree
Hide file tree
Showing 3 changed files with 92 additions and 0 deletions.
21 changes: 21 additions & 0 deletions bin/auth/generate_random_tokens.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
import secrets
import argparse

def generateRandomToken(length):
    """Return a cryptographically secure, URL-safe random token.

    `length` is the number of random *bytes* fed to the generator; the
    returned base64-ish string is roughly 4/3 as long (padding stripped).
    """
    token = secrets.token_urlsafe(length)
    return token

def generateRandomTokensForProgram(program, token_length, count):
    """Return `count` auth tokens, each of the form '<program>_<random>'.

    `token_length` is the number of random bytes per token (see
    secrets.token_urlsafe); the program name acts as a namespace prefix.
    """
    prefix = program + "_"
    return [prefix + secrets.token_urlsafe(token_length) for _ in range(count)]

if __name__ == '__main__':
    # CLI: generate_random_tokens <program> <token_length> <count>
    # Prints one token per line so output can be piped or redirected.
    parser = argparse.ArgumentParser(prog="generate_random_tokens")
    parser.add_argument("program")                  # namespace prefix for every token
    parser.add_argument("token_length", type=int)   # random bytes per token
    parser.add_argument("count", type=int)          # how many tokens to emit

    args = parser.parse_args()

    for token in generateRandomTokensForProgram(args.program, args.token_length, args.count):
        print(token)
52 changes: 52 additions & 0 deletions bin/monitor/find_active_users.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
import argparse
import json
import logging
from uuid import UUID

import arrow
import bson.json_util as bju

import emission.core.get_database as edb
import emission.core.wrapper.user as ecwu
import emission.storage.decorations.user_queries as esdu

def find_last_get(uuid):
    """Return the newest 'POST_/usercache/get' server-timing entry for `uuid`.

    Returns the raw timeseries document, or None if the user has never
    made that call (i.e. no matching entries exist).
    """
    query = {"user_id": uuid,
             "metadata.key": "stats/server_api_time",
             "data.name": "POST_/usercache/get"}
    cursor = edb.get_timeseries_db().find(query).sort("data.ts", -1).limit(1)
    matches = list(cursor)
    return matches[0] if matches else None

def check_active(uuid_list, threshold):
    """Print one line per user: uuid, last-call time (or None), active/inactive.

    A user is "active" if their most recent /usercache/get call happened
    within `threshold` seconds of now; users with no calls at all are
    reported as inactive with a None timestamp.
    """
    # BUGFIX: the original computed `now` here but then re-evaluated
    # arrow.get().timestamp inside the loop, so each user was compared
    # against a slightly different "now". Snapshot it once and reuse it.
    now = arrow.get().timestamp
    last_get_entries = [find_last_get(npu) for npu in uuid_list]
    for uuid, lge in zip(uuid_list, last_get_entries):
        if lge is None:
            print(uuid, None, "inactive")
        else:
            last_call_diff = now - lge["metadata"]["write_ts"]
            if last_call_diff > threshold:
                print(uuid, lge["metadata"]["write_fmt_time"], "inactive")
            else:
                print(uuid, lge["metadata"]["write_fmt_time"], "active")

if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    parser = argparse.ArgumentParser(prog="find_active_users")

    # Exactly one way of selecting the user set must be supplied.
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument("-e", "--user_email", nargs="+")
    group.add_argument("-u", "--user_uuid", nargs="+")
    group.add_argument("-a", "--all", action="store_true")
    group.add_argument("-f", "--file")

    args = parser.parse_args()

    if args.user_uuid:
        # BUGFIX: the original called `uuid.UUID(...)`, but only
        # `from uuid import UUID` is imported, so -u raised a NameError.
        uuid_list = [UUID(uuid_str) for uuid_str in args.user_uuid]
    elif args.user_email:
        uuid_list = [ecwu.User.fromEmail(user_email).uuid for user_email in args.user_email]
    elif args.all:
        uuid_list = esdu.get_all_uuids()
    elif args.file:
        # One UUID string per line; blank/whitespace is stripped.
        with open(args.file) as fd:
            uuid_list = [UUID(line.strip()) for line in fd.readlines()]
    ONE_WEEK = 7 * 24 * 60 * 60  # activity threshold, in seconds
    check_active(uuid_list, ONE_WEEK)
19 changes: 19 additions & 0 deletions bin/monitor/find_invalid_pipeline_state.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
import arrow
import logging
import argparse
import emission.core.wrapper.pipelinestate as ecwp
import emission.core.get_database as edb

# Run in containers using:
# sudo docker exec $CONTAINER bash -c 'cd e-mission-server; source setup/activate.sh; ./e-mission-py.bash bin/monitor/find_invalid_pipeline_state.py'

def print_all_invalid_state():
    """Print every pipeline state whose curr_run_ts is still set.

    curr_run_ts is set when a pipeline stage starts and reset to None when
    it finishes; a non-None value therefore indicates a stage that crashed
    or is stuck mid-run.
    """
    all_invalid_states = edb.get_pipeline_state_db().find({"curr_run_ts": {"$ne": None}})
    for invalid_state in all_invalid_states:
        # BUGFIX: pymongo cursors yield plain dicts, so attribute access
        # (invalid_state.user_id, ...) raised AttributeError; use key access.
        print(f"{invalid_state['user_id']}: {ecwp.PipelineStage(invalid_state['pipeline_stage'])} set to {arrow.get(invalid_state['curr_run_ts'])}")

if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    # No arguments are accepted; parse_args() still runs so --help works
    # and stray arguments are rejected with a usage message.
    argparse.ArgumentParser(prog="find_invalid_pipeline_state").parse_args()
    print_all_invalid_state()

0 comments on commit 2d282c6

Please sign in to comment.