Added: easy to switch multi cluster by using HADOOP_CONF_DIR env
gary0416 committed Aug 26, 2019
1 parent 663d3a1 commit 161ec23
Showing 3 changed files with 82 additions and 77 deletions.
1 change: 1 addition & 0 deletions README.md
@@ -2,6 +2,7 @@
 ## Added
 - folder color is green (tree -C)
 - support files by -a (tree -a)
+- easy to switch multi cluster by using HADOOP_CONF_DIR env
 
 ## Changed
 - update hadoop version
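What the new HADOOP_CONF_DIR lookup buys: each Hadoop client-configuration directory carries its own core-site.xml and hdfs-site.xml, so pointing the variable at a different directory targets a different cluster. A minimal sketch of that idea, not part of the commit (the configuration directories and fs.defaultFS values are hypothetical):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;

public class ConfSwitchSketch {
    public static void main(String[] args) {
        // Each directory holds a different cluster's client configuration.
        String[] confDirs = {"/etc/hadoop-prod/conf/", "/etc/hadoop-staging/conf/"};
        for (String confDir : confDirs) {
            Configuration conf = new Configuration();
            conf.addResource(new Path(confDir + "core-site.xml"));
            conf.addResource(new Path(confDir + "hdfs-site.xml"));
            // fs.defaultFS shows which cluster this configuration targets,
            // e.g. hdfs://prod-nn:8020 vs hdfs://staging-nn:8020.
            System.out.println(confDir + " -> " + conf.get("fs.defaultFS"));
        }
    }
}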
2 changes: 1 addition & 1 deletion build.gradle
@@ -1,7 +1,7 @@
 apply plugin: 'java'
 apply plugin: 'application'
 
-version = '1.1'
+version = '1.2'
 
 mainClassName = "com.trovit.hdfstree.HdfsTree"
156 changes: 80 additions & 76 deletions src/main/java/com/trovit/hdfstree/fsinspectors/HDFSInspector.java
@@ -1,15 +1,15 @@
 /**
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
  */
 package com.trovit.hdfstree.fsinspectors;
 
@@ -24,82 +24,86 @@
 import java.util.List;
 
 public class HDFSInspector implements FSInspector {
     FileSystem fs;
 
     public HDFSInspector() {
         try {
             Configuration conf = getHadoopConf();
             fs = FileSystem.get(conf);
 
             Runtime.getRuntime().addShutdownHook(new Thread(() -> {
                 try {
                     fs.close();
                 } catch (IOException ignore) {
                 }
             }));
         } catch (Exception e) {
             System.out.println(e.getMessage());
             System.exit(1);
         }
     }
 
     @Override
     public boolean isDirectory(String path) {
         boolean isDir = false;
         try {
             isDir = fs.getFileStatus(new Path(path)).isDirectory();
         } catch (IOException e) {
             System.out.println(e.getMessage());
         }
         return isDir;
     }
 
     @Override
     public List<String> list(String currentPath) {
         List<String> subfiles = Lists.newArrayList();
         try {
             for (FileStatus fileStatus : fs.listStatus(new Path(currentPath))) {
                 subfiles.add(fileStatus.getPath().getName());
             }
         } catch (IOException e) {
             System.out.println(e.getMessage());
         }
         return subfiles;
     }
 
     @Override
     public String addSubdirToCurrent(String path, String subdir) {
         Path current = new Path(path, subdir);
         return current.toString();
     }
 
     @Override
     public long getFileSize(String file) {
         Path current = new Path(file);
         try {
             return fs.getFileStatus(current).getLen();
         } catch (IOException e) {
             System.out.println("Cannot read file: " + current.toString());
             return 0;
         }
     }
 
     private Configuration getHadoopConf() throws Exception {
         Configuration conf = new Configuration();
-        String hadoopHome = System.getenv("HADOOP_HOME");
-        String confDir;
-        if (hadoopHome != null) {
-            confDir = hadoopHome + "/etc/hadoop/";
-        } else {
-            confDir = "/etc/hadoop/";
-        }
+        String confDir = System.getenv("HADOOP_CONF_DIR");
+        if (confDir == null) {
+            String hadoopHome = System.getenv("HADOOP_HOME");
+            if (hadoopHome != null) {
+                confDir = hadoopHome + "/etc/hadoop/";
+            } else {
+                confDir = "/etc/hadoop/";
+            }
+        } else {
+            confDir += "/";
+        }
         String hdfsSitePath = confDir + "hdfs-site.xml";
         if (!new File(hdfsSitePath).exists()) {
-            throw new Exception("HADOOP_HOME is not defined in the system.");
+            throw new Exception("HADOOP_HOME or HADOOP_CONF_DIR is not defined in the system.");
         }
         conf.addResource(new Path(hdfsSitePath));
         conf.addResource(new Path(confDir + "mapred-site.xml"));
         conf.addResource(new Path(confDir + "core-site.xml"));
         return conf;
     }
 }
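Because getHadoopConf() reads HADOOP_CONF_DIR once at startup, switching clusters means launching the tool again with a different value. A hedged usage sketch, assuming a built jar named hdfstree.jar and a -p path flag (both assumptions, not taken from the commit):

import java.io.IOException;

public class RunPerCluster {
    public static void main(String[] args) throws IOException, InterruptedException {
        String[] clusters = {"/etc/hadoop-prod/conf", "/etc/hadoop-staging/conf"};
        for (String confDir : clusters) {
            // Assumed launch command; adjust to however hdfstree is installed.
            ProcessBuilder pb = new ProcessBuilder("java", "-jar", "hdfstree.jar", "-p", "/");
            pb.environment().put("HADOOP_CONF_DIR", confDir); // selects the cluster
            pb.inheritIO(); // stream the tree output to this console
            pb.start().waitFor();
        }
    }
}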
