#!/usr/bin/env jython
#
# Author: Hari Sekhon
# Date: 2013-06-08 22:06:27 +0100 (Sat, 08 Jun 2013)
#
# https://github.com/harisekhon/devops-python-tools
#
# License: see accompanying LICENSE file
#
# vim:ts=4:sts=4:sw=4:et:filetype=python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
__author__ = 'Hari Sekhon'
__version__ = '0.9.2'
# pylint: disable=wrong-import-position
import os
import sys
import time
import socket
# Refusing to use either optparse or argparse since it's annoyingly non-portable across different versions of Python
import getopt
libdir = os.path.abspath(os.path.join(os.path.dirname(__file__), 'pylib'))
sys.path.append(libdir)
try:
    from harisekhon.utils import jython_only, die, printerr, ERRORS, \
                                 isJavaOOM, java_oom_fix_msg, get_jython_exception  # log_jython_exception
except ImportError as _:
    print('module import failed: %s' % _, file=sys.stderr)
    sys.exit(4)
jython_only()
#import array # using jarray for byte arrays
#import jarray
#try:
#    from java.nio import ByteBuffer
#except ImportError as _:
#    die("Couldn't find java.nio class, not running inside Jython?")
try:
    from org.apache.hadoop.conf import Configuration
    #from org.apache.hadoop.fs import FileSystem
    from org.apache.hadoop.fs import Path
    from org.apache.hadoop.hdfs import DistributedFileSystem
except ImportError as _:
    die("Couldn't find Hadoop Java classes, try: jython -J-cp `hadoop classpath` %s <args>" % sys.argv[0])
def usage(*msg):
    """ Print usage and exit """
    if msg:
        printerr(msg)
    die("""
Hari Sekhon - https://github.com/harisekhon/devops-python-tools

================================================================================
%s - version %s
================================================================================

Jython program to output HDFS stats for files under given directories - shows blocksize, data size and number of HDFS blocks.

USAGE: jython -J-cp `hadoop classpath` %s [options] <list of HDFS files and/or directories>

-n  --no-header     Don't output headers and timing summary

I tested this on one of my NN HA Kerberized clusters; it should work fine anywhere you have Jython and a correctly deployed Hadoop client configuration, as long as you have enough RAM (should be no more than 1GB, roughly the java -Xmx plus the HDFS blocksize).

Watch memory usage with top like so: top -p $(pgrep -f jython|tr '\\n' ','|sed 's/,$//')

================================================================================
""" % (os.path.basename(__file__), __version__, os.path.basename(__file__)), ERRORS["UNKNOWN"])
#-a --all-blocks Fetch all copies of all blocks from all datanodes
# (--one-block-per-DN shortcuts this) [Not implemented yet]
# TODO: add multiple files and dir recursion
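# Example invocation (the HDFS paths below are purely illustrative):
#
#   jython -J-cp `hadoop classpath` hadoop_hdfs_files_stats.jy /tmp /user/hive/warehouse
#
# Directories are recursed and one stats line is printed per file found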
def main():
    """ Parse cli args and print HDFS block stats for the given file / directory arguments """
    noheader = False
    try:
        opts, args = getopt.gnu_getopt(sys.argv[1:], "hn", ["help", "usage", "no-header"])
    except getopt.GetoptError as _:
        usage("error: %s" % _)
    for opt, arg in opts:
        if opt in ("-h", "--help", "--usage"):
            usage()
        elif opt in ("-n", "--no-header"):
            noheader = True
        else:
            usage()
    filelist = set(args)
    if not filelist:
        usage("no file / directory specified")
    filelist = sorted(filelist)
    try:
        HDFSBlockCounter().print_block_counts(filelist, noheader)
    except KeyboardInterrupt as _:
        printerr("Caught Control-C...", 1)
        sys.exit(ERRORS["OK"])
    except Exception as _:
        printerr("Error running HDFSBlockCounter: %s" % _, 1)
        if isJavaOOM(_.message):
            printerr(java_oom_fix_msg())
        sys.exit(ERRORS["CRITICAL"])
    except:
        printerr("Error: %s" % get_jython_exception())
        if isJavaOOM(get_jython_exception()):
            printerr(java_oom_fix_msg())
        sys.exit(ERRORS["CRITICAL"])
class HDFSBlockCounter(object):
    """ Class to hold HDFS Block Counter State """

    def __init__(self):
        """ Instantiate HDFSBlockCounter State """
        self.files = 0
        self.block_num = 0
        self.offset = 0
        self.length = 1
        self.filehandle = None
        self.filename = None
        try:
            self.fqdn = socket.getfqdn()
            # time.strftime("%z") doesn't work in Jython
            print(">> %s Running on %s\n" % (time.strftime("%Y/%m/%d %H:%M:%S %Z"), self.fqdn), file=sys.stderr)
        except:
            printerr("Failed to get fqdn of local host, continuing anyway...\n\nError: %s\n" % get_jython_exception())
            self.fqdn = None
        conf = Configuration()
        self.filesystem = DistributedFileSystem.get(conf)
        #self.filehandle = self.filesystem.open(path)
        # The client one tells you which DN you are reading from
        try:
            self.client = self.filesystem.getClient()
        except:
            raise Exception("Failed to create hdfs client: %s" % get_jython_exception())
        #in = client.DFSDataInputStream(self.filehandle)
    @staticmethod
    def get_path(filename):
        """ Return the HDFS Path object for a given filename, or None on failure """
        try:
            return Path(filename)
        except Exception:
            return None
    def print_block_counts(self, filelist, noheader):
        """ Recurses directories and calls print_file_block_counts() per file """
        if not noheader:
            start_time = time.time()
            print("=" * 80)
            print("Blocksize  Blocks Replication         Size         Size Small Filename")
            print("     (MB)         Factor           (bytes)         (MB) file")
            print("=" * 80)
        for filename in filelist:
            path = self.get_path(filename)
            if not path:
                printerr("Failed to get HDFS path object for file " + filename, 1)
                continue
            if not self.filesystem.exists(path):
                #raise IOError("HDFS File not found: %s" % filename)
                printerr("HDFS file/dir not found: %s" % filename, 1)
                continue
            self.recurse_path(filename, path)
        if not noheader:
            end_time = time.time()
            total_time = end_time - start_time
            plural = "s"
            if self.files == 1:
                plural = ""
            print("\nFinished reading block counts from %s file%s in %.4f secs\n" % (self.files, plural, total_time))
    def recurse_path(self, filename, path):
        """ Recurse filename, path """
        if self.filesystem.isFile(path):
            try:
                self.print_file_block_counts(filename, path)
            except Exception as _:
                printerr(_, 1)
        elif self.filesystem.isDirectory(path):
            # even if there is only one file under the whole directory tree, its path
            # now differs from the specified arg, so we should print it
            try:
                statuses = self.filesystem.listStatus(path)
                for status in statuses:
                    try:
                        path = status.getPath()
                        self.recurse_path(path.toUri().getPath(), path)
                    except:
                        printerr(get_jython_exception().split("\n")[0], 1)
            except:
                printerr(get_jython_exception().split("\n")[0], 1)
        else:
            raise IOError(">>> %s is not a file or directory" % filename)
    def print_file_block_counts(self, filename, path):
        """ Prints block counts for the given file """
        self.filename = filename
        self.block_num = 0
        self.offset = 0
        self.length = 1
        try:
            self.filehandle = self.client.open(filename)
        except:
            raise IOError("Failed to get client filehandle to HDFS file %s: %s" % (filename, get_jython_exception()))
        if self.filehandle is None:
            raise IOError("Failed to get client filehandle to HDFS file %s" % filename)
        try:
            filestatus = self.filesystem.getFileStatus(path)
            blocksize = filestatus.getBlockSize()  # in bytes
            size = filestatus.getLen()  # in bytes
            repl = filestatus.getReplication()
            # ceiling division so files that are an exact multiple of the blocksize
            # aren't overcounted by one block
            num_blocks = int(-(-size // blocksize))
        except:
            print("Failed to get file stats for HDFS file %s: %s" % (filename, get_jython_exception()))
            return 0
        finally:
            # close the client filehandle so one isn't leaked per file
            self.filehandle.close()
        if size < blocksize:
            smallfile = "YES"
        else:
            smallfile = "NO"
        #print("blocksize: %.0fMB blocks: %d replication factor: %d bytes: %d (%.2fMB) %s%s" %
        #      ((blocksize/(1024*1024)), num_blocks, repl, size, (size/(1024*1024)), filename, smallfile))
        print("%-9.2f %7d %6d %17d %12.2f %-3s %s" %
              ((blocksize/(1024*1024)), num_blocks, repl, size, (size/(1024*1024)), smallfile, filename))
        self.files += 1
        return 1
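# Worked example for the block arithmetic above (hypothetical figures): a 300MB
# file with a 128MB blocksize spans ceil(300/128) = 3 HDFS blocks, and since it
# is larger than a single block it is not flagged as a small file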
if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        pass