Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion conf/zeppelin-site.xml.template
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@

<property>
<name>zeppelin.interpreters</name>
<value>com.nflabs.zeppelin.spark.SparkInterpreter,com.nflabs.zeppelin.spark.PySparkInterpreter,com.nflabs.zeppelin.spark.SparkSqlInterpreter,com.nflabs.zeppelin.spark.DepInterpreter,com.nflabs.zeppelin.markdown.Markdown,com.nflabs.zeppelin.shell.ShellInterpreter</value>
<value>com.nflabs.zeppelin.spark.SparkInterpreter,com.nflabs.zeppelin.spark.PySparkInterpreter,com.nflabs.zeppelin.spark.SparkSqlInterpreter,com.nflabs.zeppelin.spark.DepInterpreter,com.nflabs.zeppelin.markdown.Markdown,com.nflabs.zeppelin.shell.ShellInterpreter,org.apache.zeppelin.hive.HiveInterpreter</value>
<description>Comma separated interpreter configurations. The first interpreter becomes the default.</description>
</property>

Expand Down
130 changes: 130 additions & 0 deletions hive/pom.xml
Original file line number Diff line number Diff line change
@@ -0,0 +1,130 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>

<parent>
<artifactId>zeppelin</artifactId>
<groupId>com.nflabs.zeppelin</groupId>
<version>0.5.0-SNAPSHOT</version>
</parent>

<groupId>org.apache.zeppelin</groupId>
<artifactId>zeppelin-hive</artifactId>
<packaging>jar</packaging>
<version>0.5.0-SNAPSHOT</version>
<name>Zeppelin: Hive interpreter</name>
<url>http://www.apache.org</url>

<properties>
<hive.hive.version>0.14.0</hive.hive.version>
<hive.hadoop.version>2.6.0</hive.hadoop.version>
</properties>
<dependencies>
<dependency>
<groupId>com.nflabs.zeppelin</groupId>
<artifactId>zeppelin-interpreter</artifactId>
<version>${project.version}</version>
<scope>provided</scope>
</dependency>

<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-exec</artifactId>
<version>1.1</version>
</dependency>

<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>

<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</dependency>

<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<version>${hive.hive.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${hive.hadoop.version}</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
</dependencies>

<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-deploy-plugin</artifactId>
<version>2.7</version>
<configuration>
<skip>true</skip>
</configuration>
</plugin>

<plugin>
<artifactId>maven-enforcer-plugin</artifactId>
<version>1.3.1</version>
<executions>
<execution>
<id>enforce</id>
<phase>none</phase>
</execution>
</executions>
</plugin>

<plugin>
<artifactId>maven-dependency-plugin</artifactId>
<version>2.8</version>
<executions>
<execution>
<id>copy-dependencies</id>
<phase>package</phase>
<goals>
<goal>copy-dependencies</goal>
</goals>
<configuration>
<outputDirectory>${project.build.directory}/../../interpreter/hive</outputDirectory>
<overWriteReleases>false</overWriteReleases>
<overWriteSnapshots>false</overWriteSnapshots>
<overWriteIfNewer>true</overWriteIfNewer>
<includeScope>runtime</includeScope>
</configuration>
</execution>
<execution>
<id>copy-artifact</id>
<phase>package</phase>
<goals>
<goal>copy</goal>
</goals>
<configuration>
<outputDirectory>${project.build.directory}/../../interpreter/hive</outputDirectory>
<overWriteReleases>false</overWriteReleases>
<overWriteSnapshots>false</overWriteSnapshots>
<overWriteIfNewer>true</overWriteIfNewer>
<includeScope>runtime</includeScope>
<artifactItems>
<artifactItem>
<groupId>${project.groupId}</groupId>
<artifactId>${project.artifactId}</artifactId>
<version>${project.version}</version>
<type>${project.packaging}</type>
</artifactItem>
</artifactItems>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>

</project>
203 changes: 203 additions & 0 deletions hive/src/main/java/org/apache/zeppelin/hive/HiveInterpreter.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,203 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.hive;

import java.sql.*;
import java.util.List;
import java.util.Properties;

import com.nflabs.zeppelin.interpreter.*;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.nflabs.zeppelin.interpreter.InterpreterResult.Code;
import com.nflabs.zeppelin.scheduler.Scheduler;
import com.nflabs.zeppelin.scheduler.SchedulerFactory;

/**
 * Hive interpreter for Zeppelin.
 *
 * <p>Opens a single JDBC connection to a HiveServer2 instance (URL, user and
 * password come from the interpreter properties) and executes each paragraph
 * as one SQL statement. Query results are rendered as a Zeppelin
 * {@code %table} display; statements containing "EXPLAIN " are returned as
 * plain text instead.</p>
 */
public class HiveInterpreter extends Interpreter {
  Logger logger = LoggerFactory.getLogger(HiveInterpreter.class);
  int commandTimeOut = 600000;

  static final String HIVESERVER_URL = "hive.hiveserver2.url";
  static final String HIVESERVER_USER = "hive.hiveserver2.user";
  static final String HIVESERVER_PASSWORD = "hive.hiveserver2.password";

  static {
    // Register this interpreter under group "hive" with its configurable
    // connection properties and their defaults.
    Interpreter.register(
        "hive",
        "hive",
        HiveInterpreter.class.getName(),
        new InterpreterPropertyBuilder()
            .add(HIVESERVER_URL, "jdbc:hive2://localhost:10000", "The URL for HiveServer2.")
            .add(HIVESERVER_USER, "hive", "The hive user")
            .add(HIVESERVER_PASSWORD, "", "The password for the hive user").build());
  }

  public HiveInterpreter(Properties property) {
    super(property);
  }

  // Live connection established by open(); null when closed or failed.
  Connection jdbcConnection;
  // Remembered failure from open(); reported to the user on each run attempt.
  Exception exceptionOnConnect;

  //Test only method
  public Connection getJdbcConnection()
      throws SQLException {
    String url = getProperty(HIVESERVER_URL);
    String user = getProperty(HIVESERVER_USER);
    String password = getProperty(HIVESERVER_PASSWORD);

    return DriverManager.getConnection(url, user, password);
  }

  /**
   * Loads the Hive JDBC driver and opens the connection. On failure the
   * exception is stored in {@link #exceptionOnConnect} so that subsequent
   * interpret() calls can report it instead of throwing NPEs.
   */
  @Override
  public void open() {
    logger.info("Jdbc open connection called!");
    try {
      String driverName = "org.apache.hive.jdbc.HiveDriver";
      Class.forName(driverName);
    } catch (ClassNotFoundException e) {
      logger.error("Can not open connection", e);
      exceptionOnConnect = e;
      return;
    }
    try {
      jdbcConnection = getJdbcConnection();
      exceptionOnConnect = null;
      logger.info("Successfully created Jdbc connection");
    }
    catch (SQLException e) {
      logger.error("Cannot open connection", e);
      exceptionOnConnect = e;
    }
  }

  /** Closes the JDBC connection (if any) and resets the error state. */
  @Override
  public void close() {
    try {
      if (jdbcConnection != null) {
        jdbcConnection.close();
      }
    }
    catch (SQLException e) {
      logger.error("Cannot close connection", e);
    }
    finally {
      jdbcConnection = null;
      exceptionOnConnect = null;
    }
  }

  // Statement currently executing; kept in a field so cancel() can reach it.
  Statement currentStatement;

  /**
   * Executes one SQL statement and formats the result set.
   *
   * @param sql the statement to run
   * @return SUCCESS with a %table (or plain-text for EXPLAIN) rendering of the
   *     result set, or ERROR carrying the connection/SQL failure message
   */
  private InterpreterResult executeSql(String sql) {
    try {
      if (exceptionOnConnect != null) {
        return new InterpreterResult(Code.ERROR, exceptionOnConnect.getMessage());
      }
      currentStatement = jdbcConnection.createStatement();
      StringBuilder msg = null;
      if (StringUtils.containsIgnoreCase(sql, "EXPLAIN ")) {
        //return the explain as text, make this visual explain later
        msg = new StringBuilder();
      }
      else {
        // BUG FIX: was new StringBuilder("%table " + msg) while msg was still
        // null, which prefixed every table with the literal text "%table null".
        msg = new StringBuilder("%table ");
      }
      ResultSet res = currentStatement.executeQuery(sql);
      try {
        ResultSetMetaData md = res.getMetaData();
        // Header row: column names separated by tabs.
        for (int i = 1; i < md.getColumnCount() + 1; i++) {
          if (i == 1) {
            msg.append(md.getColumnName(i));
          } else {
            msg.append("\t" + md.getColumnName(i));
          }
        }
        msg.append("\n");
        // Data rows: tab BETWEEN values only (no trailing tab), matching the
        // header layout so %table columns line up correctly.
        while (res.next()) {
          for (int i = 1; i < md.getColumnCount() + 1; i++) {
            if (i > 1) {
              msg.append("\t");
            }
            msg.append(res.getString(i));
          }
          msg.append("\n");
        }
      }
      finally {
        try {
          res.close();
          currentStatement.close();
        }
        finally {
          currentStatement = null;
        }
      }

      InterpreterResult rett = new InterpreterResult(Code.SUCCESS, msg.toString());
      return rett;
    }
    catch (SQLException ex) {
      logger.error("Can not run " + sql, ex);
      return new InterpreterResult(Code.ERROR, ex.getMessage());
    }
  }

  @Override
  public InterpreterResult interpret(String cmd, InterpreterContext contextInterpreter) {
    logger.info("Run SQL command '" + cmd + "'");
    return executeSql(cmd);
  }

  /** Cancels the statement currently executing, if any. */
  @Override
  public void cancel(InterpreterContext context) {
    if (currentStatement != null) {
      try {
        currentStatement.cancel();
      }
      catch (SQLException ex) {
        // Was silently swallowed; log it so failed cancellations are visible.
        logger.error("Cannot cancel current statement", ex);
      }
      finally {
        currentStatement = null;
      }
    }
  }

  @Override
  public FormType getFormType() {
    return FormType.SIMPLE;
  }

  @Override
  public int getProgress(InterpreterContext context) {
    // Hive JDBC exposes no progress information; always report 0.
    return 0;
  }

  @Override
  public Scheduler getScheduler() {
    // One FIFO scheduler per interpreter instance: statements run serially
    // because they share a single JDBC connection.
    return SchedulerFactory.singleton().createOrGetFIFOScheduler(
        HiveInterpreter.class.getName() + this.hashCode());
  }

  @Override
  public List<String> completion(String buf, int cursor) {
    return null;
  }

}
Loading