#!/usr/bin/env python
"""helpdesk_bycategory - Exports JSON files with data from Helpdesk
Requires mysql connector in python: pip install mysql-connector
"""
from credentials import WORKDESK_USER
from credentials import WORKDESK_PASSWORD
import json
import datetime
import time
import logging.config
import mysql.connector
__author__ = 'scott@flakshack.com (Scott Vintinner)'
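
# The credentials module is expected to define the two names imported above.
# A minimal credentials.py next to this script might look like the following
# (placeholder values, shown for illustration only):
#
#   WORKDESK_USER = 'helpdesk_reader'
#   WORKDESK_PASSWORD = 'secret'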
# =================================SETTINGS======================================
SAMPLE_INTERVAL = 120       # Seconds to wait between queries
MAX_RESULTS = 10            # Maximum number of classes to report
mysql_config = {
    'host': 'workdesk',
    'database': 'workdesk',
    'user': WORKDESK_USER,
    'password': WORKDESK_PASSWORD
}
# ===============================================================================
# Current versions of MySQL don't support GROUP BY in a subquery, so we break this into two SQL statements.
# SQL to grab top classes and count of tickets
# SELECT class, count(class)
# FROM calls
# WHERE category IN ("Helpdesk", "Application Support", "Network Services")
# AND track >= UNIX_TIMESTAMP('2014-05-8 00:00:00')
# GROUP BY class
# ORDER BY count(class) DESC
# LIMIT 10
# SQL to grab count of ticket responses for specified classes
# SELECT c.class, count(c.class)
# FROM calls c JOIN notes n ON c.id = n.call_id
# WHERE c.category IN ("Helpdesk", "Application Support", "Network Services")
# AND c.track >= UNIX_TIMESTAMP('2014-05-8 00:00:00')
# AND c.class IN ("VIEW")
# GROUP BY c.class
# ORDER BY count(c.class) DESC
# LIMIT 10


class MonitorJSON:
    """This is a simple class passed to Monitor threads so we can access the current JSON data in that thread"""
    def __init__(self):
        # Set default placeholder values until the first query completes
        category_data = [{"category": "----", "tickets": 0, "responses": 0}]
        self.json = json.dumps({"categories": category_data})
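

# For reference, a successful run of generate_json() stores a JSON string shaped
# like the following on the monitor object (class names and counts below are
# illustrative only):
#
#   {"categories": [{"category": "VIEW", "tickets": 42, "responses": 87},
#                   {"category": "EMAIL", "tickets": 30, "responses": 55}],
#    "total": 120}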
def generate_json(hd_monitor):
    """Query the Helpdesk database and store the results as JSON on hd_monitor."""
    logger = logging.getLogger("helpdesk_bycategory")
    total_tickets = cursor = conn = None
    fromdate = datetime.date.today() + datetime.timedelta(days=-7)     # Report on the last 7 days
    category_data = []
    class_bytickets = []
    classes = []
    try:
        # This first query pulls out a list of the top classes by ticket count
        conn = mysql.connector.connect(**mysql_config)
        query = (
            "SELECT class, count(class) AS count FROM calls "
            "WHERE category IN ('Helpdesk', 'Application Support', 'Network Services') "
            "AND track >= UNIX_TIMESTAMP(%s) "
            "GROUP BY class "
            "ORDER BY count(class) DESC "
            "LIMIT %s "
        )
        logger.debug("Query: " + query)
        cursor = conn.cursor()
        cursor.execute(query, (fromdate.isoformat(), MAX_RESULTS))
        for (hdclass, count) in cursor:
            class_bytickets.append([hdclass, count])
            classes.append(hdclass)
        # This second query counts ticket responses (notes) for the classes found above.
        # Build one %s placeholder per class so the placeholder count always matches the parameters.
        classes_string = ','.join(['%s'] * len(classes))    # Creates "%s,%s,%s,..."
        query = (
            "SELECT c.class, count(c.class) "
            "FROM calls c JOIN notes n ON c.id = n.call_id "
            "WHERE c.category IN ('Helpdesk', 'Application Support', 'Network Services') "
            "AND c.track >= UNIX_TIMESTAMP(%s) "
            "AND c.class IN (" + classes_string + ") "
            "GROUP BY c.class "
            "ORDER BY count(c.class) DESC "
        )
        logger.debug("Query: " + query)
        query_parameters = [fromdate.isoformat()]
        query_parameters.extend(classes)
        cursor.execute(query, query_parameters)
        # Merge the response counts with the ticket counts from the first query
        for (resp_class, resp_count) in cursor:
            for (tick_class, tick_count) in class_bytickets:
                if tick_class == resp_class:
                    category_data.append({"category": tick_class, "tickets": tick_count, "responses": resp_count})
                    break
        # This third query counts the total number of open tickets in the period
        query = (
            "SELECT COUNT(*) "
            "FROM calls "
            "WHERE category IN ('Helpdesk', 'Application Support', 'Network Services') "
            "AND status = 'OPEN' "
            "AND track >= UNIX_TIMESTAMP(%s) "
        )
        logger.debug("Query: " + query)
        cursor.execute(query, [fromdate.isoformat()])
        for (count,) in cursor:     # Unpack the single-column row
            total_tickets = count
    except mysql.connector.Error as error:
        logger.error("MySQL Error: " + str(error))
        hd_monitor.json = json.dumps({"categories": [{"error": str(error)}]})
    else:
        hd_monitor.json = json.dumps({"categories": category_data, "total": total_tickets})
    finally:
        if cursor is not None:
            cursor.close()
        if conn is not None:
            conn.close()
    logger.debug(hd_monitor.json)
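

# A minimal sketch of how a caller (such as the webserver.py module referenced
# below, which is not part of this file) might refresh this data in a background
# thread. The thread setup here is an assumption for illustration, not part of
# this module:
#
#   import threading
#
#   monitor = MonitorJSON()
#
#   def monitor_loop():
#       while True:
#           generate_json(monitor)      # Refresh monitor.json with current data
#           time.sleep(SAMPLE_INTERVAL)
#
#   threading.Thread(target=monitor_loop, daemon=True).start()
#   # A request handler can then simply return the latest monitor.json string.
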
# ======================================================
# __main__
#
# If you run this module by itself, it will instantiate
# the MonitorJSON class and start an infinite loop
# logging the generated JSON data.
# ======================================================
#
if __name__ == '__main__':
    # When run by itself, we need to configure logging (normally done in webserver.py)
    try:
        with open("log_settings.json", 'rt') as f:
            log_config = json.load(f)
        logging.config.dictConfig(log_config)
    except FileNotFoundError as e:
        print("Log configuration file not found: " + str(e))
        logging.basicConfig(level=logging.DEBUG)    # Fall back to basic settings
    except json.decoder.JSONDecodeError as e:
        print("Error parsing logger config file: " + str(e))
        raise

    main_logger = logging.getLogger(__name__)
    monitor = MonitorJSON()
    while True:
        generate_json(monitor)
        # Wait SAMPLE_INTERVAL seconds before the next iteration
        main_logger.debug("Waiting for " + str(SAMPLE_INTERVAL) + " seconds")
        time.sleep(SAMPLE_INTERVAL)