diff --git a/hive-general-build-mods.patch b/hive-general-build-mods.patch deleted file mode 100644 index d5a3bb4..0000000 --- a/hive-general-build-mods.patch +++ /dev/null @@ -1,71 +0,0 @@ -diff --git a/ivy/ivysettings.xml b/ivy/ivysettings.xml -index d230f2c..44f3a04 100644 ---- a/ivy/ivysettings.xml -+++ b/ivy/ivysettings.xml -@@ -41,49 +41,25 @@ - - - -- -- -- -- -- -- -- -+ - -- -- -- -+ -+ - -- -- -- -+ -+ -+ -+ -+ -+ -+ -+ - - -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -+ -+ -+ - -- - - - --- -1.8.5.2 - diff --git a/hive-hcatalog-mods.patch b/hive-hcatalog-mods.patch deleted file mode 100644 index 8c37139..0000000 --- a/hive-hcatalog-mods.patch +++ /dev/null @@ -1,55 +0,0 @@ -From 0ce8b1eca852563e634016656149662f60a33bad Mon Sep 17 00:00:00 2001 -From: Peter MacKinnon -Date: Wed, 8 Jan 2014 12:11:57 -0500 -Subject: [PATCH 07/10] hcatalog mods - ---- - .../java/org/apache/hive/hcatalog/templeton/Main.java | 18 ++++++++++-------- - 1 file changed, 10 insertions(+), 8 deletions(-) - -diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java -index 0f37278..fb3f825 100644 ---- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java -+++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java -@@ -25,6 +25,8 @@ - import java.io.IOException; - import java.util.ArrayList; - import java.util.HashMap; -+import java.util.EnumSet; -+import javax.servlet.DispatcherType; - - import org.apache.commons.logging.Log; - import org.apache.commons.logging.LogFactory; -@@ -169,21 +171,21 @@ public Server runServer(int port) - * callbacks. So jetty would fail the request as unauthorized. 
- */ - root.addFilter(fHolder, "/" + SERVLET_PATH + "/v1/ddl/*", -- FilterMapping.REQUEST); -+ EnumSet.of(DispatcherType.REQUEST)); - root.addFilter(fHolder, "/" + SERVLET_PATH + "/v1/pig/*", -- FilterMapping.REQUEST); -+ EnumSet.of(DispatcherType.REQUEST)); - root.addFilter(fHolder, "/" + SERVLET_PATH + "/v1/hive/*", -- FilterMapping.REQUEST); -+ EnumSet.of(DispatcherType.REQUEST)); - root.addFilter(fHolder, "/" + SERVLET_PATH + "/v1/queue/*", -- FilterMapping.REQUEST); -+ EnumSet.of(DispatcherType.REQUEST)); - root.addFilter(fHolder, "/" + SERVLET_PATH + "/v1/jobs/*", -- FilterMapping.REQUEST); -+ EnumSet.of(DispatcherType.REQUEST)); - root.addFilter(fHolder, "/" + SERVLET_PATH + "/v1/mapreduce/*", -- FilterMapping.REQUEST); -+ EnumSet.of(DispatcherType.REQUEST)); - root.addFilter(fHolder, "/" + SERVLET_PATH + "/v1/status/*", -- FilterMapping.REQUEST); -+ EnumSet.of(DispatcherType.REQUEST)); - root.addFilter(fHolder, "/" + SERVLET_PATH + "/v1/version/*", -- FilterMapping.REQUEST); -+ EnumSet.of(DispatcherType.REQUEST)); - - // Connect Jersey - ServletHolder h = new ServletHolder(new ServletContainer(makeJerseyConfig())); --- -1.8.5.2 - diff --git a/hive-hwi-mods.patch b/hive-hwi-mods.patch deleted file mode 100644 index db7961d..0000000 --- a/hive-hwi-mods.patch +++ /dev/null @@ -1,39 +0,0 @@ -From 948c12866371a2722265f10303a8fca3b03360e3 Mon Sep 17 00:00:00 2001 -From: Peter MacKinnon -Date: Wed, 8 Jan 2014 12:12:35 -0500 -Subject: [PATCH 08/10] hwi mods - ---- - hwi/ivy.xml | 2 +- - hwi/src/java/org/apache/hadoop/hive/hwi/HWIServer.java | 2 +- - 2 files changed, 2 insertions(+), 2 deletions(-) - -diff --git a/hwi/ivy.xml b/hwi/ivy.xml -index 81fa89b..aa5435d 100644 ---- a/hwi/ivy.xml -+++ b/hwi/ivy.xml -@@ -28,7 +28,7 @@ - - -- -+ - - - + + +- ++ + +diff --git a/build.properties b/build.properties +index 008d1bb..4ae8e16 100644 +--- a/build.properties ++++ b/build.properties +@@ -75,8 +75,8 @@ common.jar=${hadoop.root}/lib/commons-httpclient-3.0.1.jar + # module names needed for build process + + # full profile +-iterate.hive.full.all=ant,shims,common,serde,metastore,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,testutils,hcatalog +-iterate.hive.full.modules=shims,common,serde,metastore,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,testutils,hcatalog ++iterate.hive.full.all=ant,shims,common,serde,metastore,ql,contrib,service,cli,jdbc,beeline,hwi ++iterate.hive.full.modules=shims,common,serde,metastore,ql,contrib,service,cli,jdbc,beeline,hwi + iterate.hive.full.tests=ql,contrib,hbase-handler,hwi,jdbc,beeline,metastore,odbc,serde,service,hcatalog + iterate.hive.full.thrift=ql,service,metastore,serde + iterate.hive.full.protobuf=ql +@@ -91,8 +91,8 @@ iterate.hive.nohcat.protobuf=ql + iterate.hive.nohcat.cpp=odbc + + # core profile +-iterate.hive.core.all=ant,shims,common,serde,metastore,ql,cli +-iterate.hive.core.modules=shims,common,serde,metastore,ql,cli ++iterate.hive.core.all=ant,shims,common,serde,metastore,ql,service,cli ++iterate.hive.core.modules=shims,common,serde,metastore,ql,service,cli + iterate.hive.core.tests=ql + iterate.hive.core.thrift=ql + iterate.hive.core.protobuf=ql +diff --git a/hbase-handler/ivy.xml b/hbase-handler/ivy.xml +index 7be8649..57c0a60 100644 +--- a/hbase-handler/ivy.xml ++++ b/hbase-handler/ivy.xml +@@ -36,7 +36,7 @@ + + +- ++ + + + +diff --git a/hcatalog/pom.xml b/hcatalog/pom.xml +index 499e8c9..d8271ee 100644 +--- a/hcatalog/pom.xml ++++ b/hcatalog/pom.xml +@@ -33,7 +33,7 @@ + ${project.version} + 1.9.2 + 1.14 +- 7.6.0.v20120127 ++ 
8.1.14.v20131031 + 1.1 + 0.10.1 + 1.6.1 +@@ -109,13 +109,6 @@ + + + org.apache.hadoop +- hadoop-hdfs +- ${hadoop23.version} +- tests +- compile +- +- +- org.apache.hadoop + hadoop-mapreduce-client-core + ${hadoop23.version} + compile +@@ -129,26 +122,12 @@ + + + org.apache.hadoop +- hadoop-yarn-server-tests +- ${hadoop23.version} +- tests +- compile +- +- +- org.apache.hadoop + hadoop-mapreduce-client-app + ${hadoop23.version} + compile + + + org.apache.hadoop +- hadoop-common +- ${hadoop23.version} +- tests +- compile +- +- +- org.apache.hadoop + hadoop-mapreduce-client-hs + ${hadoop23.version} + compile +@@ -163,7 +142,6 @@ + org.apache.pig + pig + ${pig.version} +- h2 + compile + + +@@ -176,7 +154,6 @@ + server-extensions + webhcat/java-client + webhcat/svr +- storage-handlers/hbase + + + +diff --git a/hcatalog/server-extensions/pom.xml b/hcatalog/server-extensions/pom.xml +index f9ec4a5..6b76bfa 100644 +--- a/hcatalog/server-extensions/pom.xml ++++ b/hcatalog/server-extensions/pom.xml +@@ -65,6 +65,12 @@ + ${hcatalog.version} + compile + ++ ++ org.apache.hadoop ++ hadoop-common ++ ${hadoop23.version} ++ compile ++ + + + +diff --git a/hcatalog/webhcat/java-client/pom.xml b/hcatalog/webhcat/java-client/pom.xml +index 97ec5f6..2b692e5 100644 +--- a/hcatalog/webhcat/java-client/pom.xml ++++ b/hcatalog/webhcat/java-client/pom.xml +@@ -41,5 +41,17 @@ + ${hcatalog.version} + compile + ++ ++ org.apache.hadoop ++ hadoop-common ++ ${hadoop23.version} ++ compile ++ ++ ++ org.apache.hadoop ++ hadoop-mapreduce-client-core ++ ${hadoop23.version} ++ compile ++ + + +diff --git a/hcatalog/webhcat/svr/pom.xml b/hcatalog/webhcat/svr/pom.xml +index da6038a..a560604 100644 +--- a/hcatalog/webhcat/svr/pom.xml ++++ b/hcatalog/webhcat/svr/pom.xml +@@ -89,8 +89,20 @@ + compile + + +- org.eclipse.jetty.aggregate +- jetty-all-server ++ org.eclipse.jetty ++ jetty-server ++ ${jetty.webhcat.version} ++ compile ++ ++ ++ org.eclipse.jetty ++ jetty-util ++ ${jetty.webhcat.version} ++ compile ++ ++ ++ org.eclipse.jetty ++ jetty-rewrite + ${jetty.webhcat.version} + compile + +@@ -106,5 +118,23 @@ + ${slf4j.version} + compile + ++ ++ org.apache.hadoop ++ hadoop-common ++ ${hadoop23.version} ++ compile ++ ++ ++ org.apache.hadoop ++ hadoop-mapreduce-client-core ++ ${hadoop23.version} ++ compile ++ ++ ++ org.apache.hadoop ++ hadoop-hdfs ++ ${hadoop23.version} ++ compile ++ + + +diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java +index 0f37278..fb3f825 100644 +--- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java ++++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java +@@ -25,6 +25,8 @@ + import java.io.IOException; + import java.util.ArrayList; + import java.util.HashMap; ++import java.util.EnumSet; ++import javax.servlet.DispatcherType; + + import org.apache.commons.logging.Log; + import org.apache.commons.logging.LogFactory; +@@ -169,21 +171,21 @@ public Server runServer(int port) + * callbacks. So jetty would fail the request as unauthorized. 
+ */ + root.addFilter(fHolder, "/" + SERVLET_PATH + "/v1/ddl/*", +- FilterMapping.REQUEST); ++ EnumSet.of(DispatcherType.REQUEST)); + root.addFilter(fHolder, "/" + SERVLET_PATH + "/v1/pig/*", +- FilterMapping.REQUEST); ++ EnumSet.of(DispatcherType.REQUEST)); + root.addFilter(fHolder, "/" + SERVLET_PATH + "/v1/hive/*", +- FilterMapping.REQUEST); ++ EnumSet.of(DispatcherType.REQUEST)); + root.addFilter(fHolder, "/" + SERVLET_PATH + "/v1/queue/*", +- FilterMapping.REQUEST); ++ EnumSet.of(DispatcherType.REQUEST)); + root.addFilter(fHolder, "/" + SERVLET_PATH + "/v1/jobs/*", +- FilterMapping.REQUEST); ++ EnumSet.of(DispatcherType.REQUEST)); + root.addFilter(fHolder, "/" + SERVLET_PATH + "/v1/mapreduce/*", +- FilterMapping.REQUEST); ++ EnumSet.of(DispatcherType.REQUEST)); + root.addFilter(fHolder, "/" + SERVLET_PATH + "/v1/status/*", +- FilterMapping.REQUEST); ++ EnumSet.of(DispatcherType.REQUEST)); + root.addFilter(fHolder, "/" + SERVLET_PATH + "/v1/version/*", +- FilterMapping.REQUEST); ++ EnumSet.of(DispatcherType.REQUEST)); + + // Connect Jersey + ServletHolder h = new ServletHolder(new ServletContainer(makeJerseyConfig())); +diff --git a/hwi/ivy.xml b/hwi/ivy.xml +index 81fa89b..aa5435d 100644 +--- a/hwi/ivy.xml ++++ b/hwi/ivy.xml +@@ -28,7 +28,7 @@ + + +- ++ + + + + + +- +- +- +- +- +- +- ++ + +- +- +- ++ ++ + +- +- +- ++ ++ ++ ++ ++ ++ ++ ++ + + +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- ++ ++ ++ + +- + + + +diff --git a/ivy/libraries.properties b/ivy/libraries.properties +index 92ba790..8c9789b 100644 +--- a/ivy/libraries.properties ++++ b/ivy/libraries.properties +@@ -50,7 +50,7 @@ javaewah.version=0.3.2 + jdo-api.version=3.0.1 + jdom.version=1.1 + jetty.version=6.1.26 +-jline.version=0.9.94 ++jline.version=1.0 + json.version=20090211 + junit.version=4.10 + libfb303.version=0.9.0 +diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java +index f155686..9a511bd 100644 +--- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java ++++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java +@@ -235,7 +235,6 @@ private void openSession() throws SQLException { + + try { + TOpenSessionResp openResp = client.OpenSession(openReq); +- + // validate connection + Utils.verifySuccess(openResp.getStatus()); + if (!supportedProtocols.contains(openResp.getServerProtocolVersion())) { +diff --git a/metastore/ivy.xml b/metastore/ivy.xml +index 4bbdfe6..1cd6399 100644 +--- a/metastore/ivy.xml ++++ b/metastore/ivy.xml +@@ -31,9 +31,6 @@ + + +- +- +- + + + +diff --git a/odbc/Makefile b/odbc/Makefile +index 2c55903..db6ba21 100644 +--- a/odbc/Makefile ++++ b/odbc/Makefile +@@ -46,7 +46,7 @@ ARXFLAGS = -x + CC = gcc + CFLAGS = -Wall -g -fPIC + CXX = g++ +-CXXFLAGS = -Wall -g -fPIC ++CXXFLAGS = -Wall -g -fPIC -DHAVE_STDINT_H + LD = g++ + INSTALL = /usr/bin/install -c + SHELL = /bin/sh +@@ -135,7 +135,7 @@ LIB_FB303_ADD = -L$(LIB_FB303_DIR) -lfb303 + LIB_FB303_AR = $(LIB_FB303_DIR)/libfb303.a + + +-all:: $(AR_TARGET) $(SO_TARGET) $(HIVE_CLIENT_TEST) ++all:: $(SO_TARGET) $(HIVE_CLIENT_TEST) + + $(AR_TARGET): $(METASTORE_OBJS) $(SERVICE_OBJS) $(QL_OBJS) $(ODBC_OBJS) + if test -z '$(THRIFT_HOME)'; then echo 'THRIFT_HOME directory?'; exit 1; else exit 0; fi +@@ -189,5 +189,5 @@ uninstall: + clean: + rm -rf $(ODBC_BUILD_DIR) $(OBJ_SERVICE_BUILD_DIR) $(OBJ_QL_BUILD_DIR) $(OBJ_METASTORE_BUILD_DIR) + +-test: $(AR_TARGET) $(SO_TARGET) $(HIVE_CLIENT_TEST) ++test: $(SO_TARGET) $(HIVE_CLIENT_TEST) + 
LD_LIBRARY_PATH=$(LIB_ODBC_BUILD_DIR):$(LIB_THRIFT_DIR):$(LIB_FB303_DIR):$(LD_LIBRARY_PATH) $(HIVE_CLIENT_TEST) +diff --git a/odbc/src/cpp/hiveclient.cpp b/odbc/src/cpp/hiveclient.cpp +index 450eb0b..dc5aeab 100644 +--- a/odbc/src/cpp/hiveclient.cpp ++++ b/odbc/src/cpp/hiveclient.cpp +@@ -18,6 +18,7 @@ + + #include + #include ++#include + #include + #include + +diff --git a/ql/build.xml b/ql/build.xml +index 64e7b59..95344fd 100644 +--- a/ql/build.xml ++++ b/ql/build.xml +@@ -197,7 +197,10 @@ + deprecation="${javac.deprecation}" + includeantruntime="false"> + +- ++ ++ ++ ++ + + + +@@ -208,108 +211,12 @@ + + + +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- + + + + + + +- +- +- +- +- +- +- +- +- +- +- +- +- + + +diff --git a/ql/ivy.xml b/ql/ivy.xml +index 08a8d6f..3ff4f40 100644 +--- a/ql/ivy.xml ++++ b/ql/ivy.xml +@@ -44,10 +44,10 @@ + + +- + +- ++ + + +@@ -57,13 +57,6 @@ + + + +- +- +- +- +- + +diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java +index 7e1f6ef..e5178b6 100644 +--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java ++++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java +@@ -48,9 +48,9 @@ + import org.apache.hadoop.hive.ql.plan.api.StageType; + import org.apache.hadoop.io.IOUtils; + import org.apache.hadoop.util.StringUtils; +-import org.json.JSONArray; +-import org.json.JSONException; +-import org.json.JSONObject; ++import org.codehaus.jettison.json.JSONArray; ++import org.codehaus.jettison.json.JSONException; ++import org.codehaus.jettison.json.JSONObject; + + /** + * ExplainTask implementation. 
+diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java +index 5beb48e..db0b623 100644 +--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java ++++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java +@@ -1129,9 +1129,7 @@ Object next(Object previous) throws IOException { + void seek(PositionProvider[] index) throws IOException { + super.seek(index); + for(TreeReader kid: fields) { +- if (kid != null) { +- kid.seek(index); +- } ++ kid.seek(index); + } + } + +@@ -1177,9 +1175,7 @@ void startStripe(Map streams, + void skipRows(long items) throws IOException { + items = countNonNulls(items); + for(TreeReader field: fields) { +- if (field != null) { +- field.skipRows(items); +- } ++ field.skipRows(items); + } + } + } +diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/SnappyCodec.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/SnappyCodec.java +index e3131a3..c8324ad 100644 +--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/SnappyCodec.java ++++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/SnappyCodec.java +@@ -18,7 +18,7 @@ + + package org.apache.hadoop.hive.ql.io.orc; + +-import org.iq80.snappy.Snappy; ++import org.xerial.snappy.Snappy; + + import java.io.IOException; + import java.nio.ByteBuffer; +diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java +index 3031d1c..222f276 100644 +--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java ++++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java +@@ -47,9 +47,9 @@ + import org.apache.thrift.TException; + import org.apache.thrift.TSerializer; + import org.apache.thrift.protocol.TJSONProtocol; +-import org.json.JSONArray; +-import org.json.JSONException; +-import org.json.JSONObject; ++import org.codehaus.jettison.json.JSONArray; ++import org.codehaus.jettison.json.JSONException; ++import org.codehaus.jettison.json.JSONObject; + + /** + * +diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFEWAHBitmapBop.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFEWAHBitmapBop.java +index 58ea3ba..198bab5 100644 +--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFEWAHBitmapBop.java ++++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFEWAHBitmapBop.java +@@ -22,7 +22,7 @@ + import java.util.ArrayList; + import java.util.List; + +-import javaewah.EWAHCompressedBitmap; ++import com.googlecode.javaewah.EWAHCompressedBitmap; + + import org.apache.hadoop.hive.ql.exec.UDFArgumentException; + import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; +diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java +index e4b412e..bb8afe7 100644 +--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java ++++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java +@@ -20,7 +20,7 @@ + import java.io.IOException; + import java.util.ArrayList; + +-import javaewah.EWAHCompressedBitmap; ++import com.googlecode.javaewah.EWAHCompressedBitmap; + + import org.apache.commons.logging.Log; + import org.apache.commons.logging.LogFactory; +diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapAnd.java 
b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapAnd.java +index 7838b54..b942988 100644 +--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapAnd.java ++++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapAnd.java +@@ -18,7 +18,7 @@ + + package org.apache.hadoop.hive.ql.udf.generic; + +-import javaewah.EWAHCompressedBitmap; ++import com.googlecode.javaewah.EWAHCompressedBitmap; + + import org.apache.hadoop.hive.ql.exec.Description; + +diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapEmpty.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapEmpty.java +index 4a14a65..f0617c1 100644 +--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapEmpty.java ++++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapEmpty.java +@@ -21,7 +21,7 @@ + import java.io.IOException; + import java.util.ArrayList; + +-import javaewah.EWAHCompressedBitmap; ++import com.googlecode.javaewah.EWAHCompressedBitmap; + + import org.apache.hadoop.hive.ql.exec.Description; + import org.apache.hadoop.hive.ql.exec.UDFArgumentException; +diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapOr.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapOr.java +index d438f82..47e9447 100644 +--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapOr.java ++++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapOr.java +@@ -18,7 +18,7 @@ + + package org.apache.hadoop.hive.ql.udf.generic; + +-import javaewah.EWAHCompressedBitmap; ++import com.googlecode.javaewah.EWAHCompressedBitmap; + + import org.apache.hadoop.hive.ql.exec.Description; + +diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java +index 857e627..1098f08 100644 +--- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java ++++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java +@@ -59,7 +59,7 @@ + protected int portNum; + protected InetSocketAddress serverAddress; + protected TServer server; +- protected org.mortbay.jetty.Server httpServer; ++ protected org.eclipse.jetty.server.Server httpServer; + + private boolean isStarted = false; + protected boolean isEmbedded = false; +diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java +index e487a7f..e6e139b 100644 +--- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java ++++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java +@@ -26,10 +26,11 @@ + import org.apache.thrift.protocol.TBinaryProtocol; + import org.apache.thrift.protocol.TProtocolFactory; + import org.apache.thrift.server.TServlet; +-import org.mortbay.jetty.nio.SelectChannelConnector; +-import org.mortbay.jetty.servlet.Context; +-import org.mortbay.jetty.servlet.ServletHolder; +-import org.mortbay.thread.QueuedThreadPool; ++import org.eclipse.jetty.server.Connector; ++import org.eclipse.jetty.server.nio.SelectChannelConnector; ++import org.eclipse.jetty.servlet.ServletContextHandler; ++import org.eclipse.jetty.servlet.ServletHolder; ++import org.eclipse.jetty.util.thread.QueuedThreadPool; + + + public class ThriftHttpCLIService extends ThriftCLIService { +@@ -75,11 +76,10 @@ public void 
run() { + } + } + +- httpServer = new org.mortbay.jetty.Server(); +- + QueuedThreadPool threadPool = new QueuedThreadPool(); + threadPool.setMinThreads(minWorkerThreads); + threadPool.setMaxThreads(maxWorkerThreads); ++ httpServer = new org.eclipse.jetty.server.Server(); + httpServer.setThreadPool(threadPool); + SelectChannelConnector connector = new SelectChannelConnector(); + connector.setPort(portNum); +@@ -93,7 +93,7 @@ public void run() { + + TProtocolFactory protocolFactory = new TBinaryProtocol.Factory(); + TServlet thriftHttpServlet = new ThriftHttpServlet(processor, protocolFactory); +- final Context context = new Context(httpServer, "/", Context.SESSIONS); ++ final ServletContextHandler context = new ServletContextHandler(httpServer, "/", ServletContextHandler.SESSIONS); + context.addServlet(new ServletHolder(thriftHttpServlet), httpPath); + + // TODO: check defaults: maxTimeout, keepalive, maxBodySize, bodyRecieveDuration, etc. +@@ -144,4 +144,4 @@ private static void verifyHttpConfiguration(HiveConf hiveConf) { + } + } + +-} +\ No newline at end of file ++} +diff --git a/shims/ivy.xml b/shims/ivy.xml +index c0312bc..842f336 100644 +--- a/shims/ivy.xml ++++ b/shims/ivy.xml +@@ -39,7 +39,7 @@ + + +- ++ + + + +@@ -48,9 +48,10 @@ + rev="${hadoop-0.23.version}" + conf="hadoop0.23.shim->default"> + +- ++ + + ++ + + + + ++ + + + + ++ + + + +- ++ + + ++ + + + + ++ + + + + ++ + + + +@@ -97,13 +103,7 @@ + + + +- +- +- +- +- ++ + + + + ++ + + + + ++ + + + +diff --git a/shims/src/0.23/java/org/apache/hadoop/hive/shims/Jetty23Shims.java b/shims/src/0.23/java/org/apache/hadoop/hive/shims/Jetty23Shims.java +index 9328749..786a9d6 100644 +--- a/shims/src/0.23/java/org/apache/hadoop/hive/shims/Jetty23Shims.java ++++ b/shims/src/0.23/java/org/apache/hadoop/hive/shims/Jetty23Shims.java +@@ -19,9 +19,11 @@ + + import java.io.IOException; + +-import org.mortbay.jetty.bio.SocketConnector; +-import org.mortbay.jetty.handler.RequestLogHandler; +-import org.mortbay.jetty.webapp.WebAppContext; ++import org.eclipse.jetty.server.Connector; ++import org.eclipse.jetty.server.Server; ++import org.eclipse.jetty.server.nio.SelectChannelConnector; ++import org.eclipse.jetty.server.handler.RequestLogHandler; ++import org.eclipse.jetty.webapp.WebAppContext; + + /** + * Jetty23Shims. 
+@@ -34,20 +36,20 @@ public Server startServer(String listen, int port) throws IOException { + return s; + } + +- private static class Server extends org.mortbay.jetty.Server implements JettyShims.Server { ++ private static class Server extends org.eclipse.jetty.server.Server implements JettyShims.Server { + public void addWar(String war, String contextPath) { + WebAppContext wac = new WebAppContext(); + wac.setContextPath(contextPath); + wac.setWar(war); + RequestLogHandler rlh = new RequestLogHandler(); + rlh.setHandler(wac); +- this.addHandler(rlh); ++ this.setHandler(rlh); + } + + public void setupListenerHostPort(String listen, int port) + throws IOException { + +- SocketConnector connector = new SocketConnector(); ++ Connector connector = new SelectChannelConnector(); + connector.setPort(port); + connector.setHost(listen); + this.addConnector(connector); diff --git a/hive-metastore-mods.patch b/hive-metastore-mods.patch deleted file mode 100644 index f645d71..0000000 --- a/hive-metastore-mods.patch +++ /dev/null @@ -1,26 +0,0 @@ -From a473b36b26ec609a13ae9729e66c090664e69d1d Mon Sep 17 00:00:00 2001 -From: Peter MacKinnon -Date: Wed, 8 Jan 2014 11:36:53 -0500 -Subject: [PATCH 03/10] metastore mods - ---- - metastore/ivy.xml | 3 --- - 1 file changed, 3 deletions(-) - -diff --git a/metastore/ivy.xml b/metastore/ivy.xml -index 4bbdfe6..1cd6399 100644 ---- a/metastore/ivy.xml -+++ b/metastore/ivy.xml -@@ -31,9 +31,6 @@ - - -- -- -- - - - --- -1.8.5.2 - diff --git a/hive-model-enhancer-asm.patch b/hive-model-enhancer-asm.patch deleted file mode 100644 index 2fea1d9..0000000 --- a/hive-model-enhancer-asm.patch +++ /dev/null @@ -1,28 +0,0 @@ -From 35410260d0e5617954c11bd9f99660eee6668907 Mon Sep 17 00:00:00 2001 -From: Peter MacKinnon -Date: Tue, 18 Feb 2014 19:10:06 +0000 -Subject: [PATCH] ensure model enhancer sees correct version of asm in - classpath - ---- - build-common.xml | 4 +++- - 1 file changed, 3 insertions(+), 1 deletion(-) - -diff --git a/build-common.xml b/build-common.xml -index 940f4e9..cb4d9b5 100644 ---- a/build-common.xml -+++ b/build-common.xml -@@ -191,7 +191,9 @@ - - - -- -+ - --- -1.8.5.3 - diff --git a/hive-ql-mods.patch b/hive-ql-mods.patch deleted file mode 100644 index c7bd4ed..0000000 --- a/hive-ql-mods.patch +++ /dev/null @@ -1,302 +0,0 @@ -From 29b3cd2a98dba63bc6a26202ff1b084dac5aa5d3 Mon Sep 17 00:00:00 2001 -From: Peter MacKinnon -Date: Wed, 8 Jan 2014 12:08:10 -0500 -Subject: [PATCH 04/10] ql mods - ---- - ql/build.xml | 101 +-------------------- - ql/ivy.xml | 4 +- - .../apache/hadoop/hive/ql/exec/ExplainTask.java | 6 +- - .../hadoop/hive/ql/io/orc/RecordReaderImpl.java | 8 +- - .../apache/hadoop/hive/ql/io/orc/SnappyCodec.java | 2 +- - .../org/apache/hadoop/hive/ql/parse/EximUtil.java | 6 +- - .../generic/AbstractGenericUDFEWAHBitmapBop.java | 2 +- - .../hive/ql/udf/generic/GenericUDAFEWAHBitmap.java | 2 +- - .../ql/udf/generic/GenericUDFEWAHBitmapAnd.java | 2 +- - .../ql/udf/generic/GenericUDFEWAHBitmapEmpty.java | 2 +- - .../ql/udf/generic/GenericUDFEWAHBitmapOr.java | 2 +- - 11 files changed, 20 insertions(+), 117 deletions(-) - -diff --git a/ql/build.xml b/ql/build.xml -index 64e7b59..95344fd 100644 ---- a/ql/build.xml -+++ b/ql/build.xml -@@ -197,7 +197,10 @@ - deprecation="${javac.deprecation}" - includeantruntime="false"> - -- -+ -+ -+ -+ - - - -@@ -208,108 +211,12 @@ - - - -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- 
-- -- -- -- -- -- -- -- -- -- -- -- -- -- - - - - - - -- -- -- -- -- -- -- -- -- -- -- -- -- - - -diff --git a/ql/ivy.xml b/ql/ivy.xml -index 08a8d6f..6fe43b4 100644 ---- a/ql/ivy.xml -+++ b/ql/ivy.xml -@@ -44,10 +44,10 @@ - - -- - -- -+ - - -diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java -index 7e1f6ef..e5178b6 100644 ---- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java -+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java -@@ -48,9 +48,9 @@ - import org.apache.hadoop.hive.ql.plan.api.StageType; - import org.apache.hadoop.io.IOUtils; - import org.apache.hadoop.util.StringUtils; --import org.json.JSONArray; --import org.json.JSONException; --import org.json.JSONObject; -+import org.codehaus.jettison.json.JSONArray; -+import org.codehaus.jettison.json.JSONException; -+import org.codehaus.jettison.json.JSONObject; - - /** - * ExplainTask implementation. -diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java -index 5beb48e..db0b623 100644 ---- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java -+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java -@@ -1129,9 +1129,7 @@ Object next(Object previous) throws IOException { - void seek(PositionProvider[] index) throws IOException { - super.seek(index); - for(TreeReader kid: fields) { -- if (kid != null) { -- kid.seek(index); -- } -+ kid.seek(index); - } - } - -@@ -1177,9 +1175,7 @@ void startStripe(Map streams, - void skipRows(long items) throws IOException { - items = countNonNulls(items); - for(TreeReader field: fields) { -- if (field != null) { -- field.skipRows(items); -- } -+ field.skipRows(items); - } - } - } -diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/SnappyCodec.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/SnappyCodec.java -index e3131a3..c8324ad 100644 ---- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/SnappyCodec.java -+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/SnappyCodec.java -@@ -18,7 +18,7 @@ - - package org.apache.hadoop.hive.ql.io.orc; - --import org.iq80.snappy.Snappy; -+import org.xerial.snappy.Snappy; - - import java.io.IOException; - import java.nio.ByteBuffer; -diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java -index 3031d1c..222f276 100644 ---- a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java -+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java -@@ -47,9 +47,9 @@ - import org.apache.thrift.TException; - import org.apache.thrift.TSerializer; - import org.apache.thrift.protocol.TJSONProtocol; --import org.json.JSONArray; --import org.json.JSONException; --import org.json.JSONObject; -+import org.codehaus.jettison.json.JSONArray; -+import org.codehaus.jettison.json.JSONException; -+import org.codehaus.jettison.json.JSONObject; - - /** - * -diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFEWAHBitmapBop.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFEWAHBitmapBop.java -index 58ea3ba..198bab5 100644 ---- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFEWAHBitmapBop.java -+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFEWAHBitmapBop.java -@@ -22,7 +22,7 @@ - import java.util.ArrayList; - import java.util.List; - --import javaewah.EWAHCompressedBitmap; 
-+import com.googlecode.javaewah.EWAHCompressedBitmap; - - import org.apache.hadoop.hive.ql.exec.UDFArgumentException; - import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; -diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java -index e4b412e..bb8afe7 100644 ---- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java -+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java -@@ -20,7 +20,7 @@ - import java.io.IOException; - import java.util.ArrayList; - --import javaewah.EWAHCompressedBitmap; -+import com.googlecode.javaewah.EWAHCompressedBitmap; - - import org.apache.commons.logging.Log; - import org.apache.commons.logging.LogFactory; -diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapAnd.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapAnd.java -index 7838b54..b942988 100644 ---- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapAnd.java -+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapAnd.java -@@ -18,7 +18,7 @@ - - package org.apache.hadoop.hive.ql.udf.generic; - --import javaewah.EWAHCompressedBitmap; -+import com.googlecode.javaewah.EWAHCompressedBitmap; - - import org.apache.hadoop.hive.ql.exec.Description; - -diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapEmpty.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapEmpty.java -index 4a14a65..f0617c1 100644 ---- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapEmpty.java -+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapEmpty.java -@@ -21,7 +21,7 @@ - import java.io.IOException; - import java.util.ArrayList; - --import javaewah.EWAHCompressedBitmap; -+import com.googlecode.javaewah.EWAHCompressedBitmap; - - import org.apache.hadoop.hive.ql.exec.Description; - import org.apache.hadoop.hive.ql.exec.UDFArgumentException; -diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapOr.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapOr.java -index d438f82..47e9447 100644 ---- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapOr.java -+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapOr.java -@@ -18,7 +18,7 @@ - - package org.apache.hadoop.hive.ql.udf.generic; - --import javaewah.EWAHCompressedBitmap; -+import com.googlecode.javaewah.EWAHCompressedBitmap; - - import org.apache.hadoop.hive.ql.exec.Description; - --- -1.8.5.2 - diff --git a/hive-service-mods.patch b/hive-service-mods.patch deleted file mode 100644 index fe27c59..0000000 --- a/hive-service-mods.patch +++ /dev/null @@ -1,70 +0,0 @@ -From d689c943b09f1b88d9294a43691b3964093fe021 Mon Sep 17 00:00:00 2001 -From: Peter MacKinnon -Date: Wed, 8 Jan 2014 12:10:20 -0500 -Subject: [PATCH 05/10] service mods - ---- - .../apache/hive/service/cli/thrift/ThriftCLIService.java | 2 +- - .../hive/service/cli/thrift/ThriftHttpCLIService.java | 16 +++++++--------- - 2 files changed, 8 insertions(+), 10 deletions(-) - -diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java -index 857e627..1098f08 100644 ---- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java -+++ 
b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java -@@ -59,7 +59,7 @@ - protected int portNum; - protected InetSocketAddress serverAddress; - protected TServer server; -- protected org.mortbay.jetty.Server httpServer; -+ protected org.eclipse.jetty.server.Server httpServer; - - private boolean isStarted = false; - protected boolean isEmbedded = false; -diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java -index e487a7f..b73df7d 100644 ---- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java -+++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java -@@ -26,10 +26,10 @@ - import org.apache.thrift.protocol.TBinaryProtocol; - import org.apache.thrift.protocol.TProtocolFactory; - import org.apache.thrift.server.TServlet; --import org.mortbay.jetty.nio.SelectChannelConnector; --import org.mortbay.jetty.servlet.Context; --import org.mortbay.jetty.servlet.ServletHolder; --import org.mortbay.thread.QueuedThreadPool; -+import org.eclipse.jetty.server.ServerConnector; -+import org.eclipse.jetty.servlet.ServletContextHandler; -+import org.eclipse.jetty.servlet.ServletHolder; -+import org.eclipse.jetty.util.thread.QueuedThreadPool; - - - public class ThriftHttpCLIService extends ThriftCLIService { -@@ -75,13 +75,11 @@ public void run() { - } - } - -- httpServer = new org.mortbay.jetty.Server(); -- - QueuedThreadPool threadPool = new QueuedThreadPool(); - threadPool.setMinThreads(minWorkerThreads); - threadPool.setMaxThreads(maxWorkerThreads); -- httpServer.setThreadPool(threadPool); -- SelectChannelConnector connector = new SelectChannelConnector(); -+ httpServer = new org.eclipse.jetty.server.Server(threadPool); -+ ServerConnector connector = new ServerConnector(httpServer); - connector.setPort(portNum); - - // Linux:yes, Windows:no -@@ -93,7 +91,7 @@ public void run() { - - TProtocolFactory protocolFactory = new TBinaryProtocol.Factory(); - TServlet thriftHttpServlet = new ThriftHttpServlet(processor, protocolFactory); -- final Context context = new Context(httpServer, "/", Context.SESSIONS); -+ final ServletContextHandler context = new ServletContextHandler(httpServer, "/", ServletContextHandler.SESSIONS); - context.addServlet(new ServletHolder(thriftHttpServlet), httpPath); - - // TODO: check defaults: maxTimeout, keepalive, maxBodySize, bodyRecieveDuration, etc. 
--- -1.8.5.2 - diff --git a/hive-shims-mods.patch b/hive-shims-mods.patch deleted file mode 100644 index fa65824..0000000 --- a/hive-shims-mods.patch +++ /dev/null @@ -1,78 +0,0 @@ -From 43dfa35c041afb7071be97eb1ecdac034d9513d5 Mon Sep 17 00:00:00 2001 -From: Peter MacKinnon -Date: Wed, 8 Jan 2014 11:36:28 -0500 -Subject: [PATCH 02/10] shims mods - ---- - shims/ivy.xml | 6 ++++++ - .../0.23/java/org/apache/hadoop/hive/shims/Jetty23Shims.java | 12 ++++++------ - 2 files changed, 12 insertions(+), 6 deletions(-) - -diff --git a/shims/ivy.xml b/shims/ivy.xml -index c0312bc..39ca73b 100644 ---- a/shims/ivy.xml -+++ b/shims/ivy.xml -@@ -74,6 +74,9 @@ - - - -+ - -@@ -82,6 +85,9 @@ - - - -+ - -diff --git a/shims/src/0.23/java/org/apache/hadoop/hive/shims/Jetty23Shims.java b/shims/src/0.23/java/org/apache/hadoop/hive/shims/Jetty23Shims.java -index 9328749..c1b3529 100644 ---- a/shims/src/0.23/java/org/apache/hadoop/hive/shims/Jetty23Shims.java -+++ b/shims/src/0.23/java/org/apache/hadoop/hive/shims/Jetty23Shims.java -@@ -19,9 +19,9 @@ - - import java.io.IOException; - --import org.mortbay.jetty.bio.SocketConnector; --import org.mortbay.jetty.handler.RequestLogHandler; --import org.mortbay.jetty.webapp.WebAppContext; -+import org.eclipse.jetty.server.ServerConnector; -+import org.eclipse.jetty.server.handler.RequestLogHandler; -+import org.eclipse.jetty.webapp.WebAppContext; - - /** - * Jetty23Shims. -@@ -34,20 +34,20 @@ public Server startServer(String listen, int port) throws IOException { - return s; - } - -- private static class Server extends org.mortbay.jetty.Server implements JettyShims.Server { -+ private static class Server extends org.eclipse.jetty.server.Server implements JettyShims.Server { - public void addWar(String war, String contextPath) { - WebAppContext wac = new WebAppContext(); - wac.setContextPath(contextPath); - wac.setWar(war); - RequestLogHandler rlh = new RequestLogHandler(); - rlh.setHandler(wac); -- this.addHandler(rlh); -+ this.setHandler(rlh); - } - - public void setupListenerHostPort(String listen, int port) - throws IOException { - -- SocketConnector connector = new SocketConnector(); -+ ServerConnector connector = new ServerConnector(this); - connector.setPort(port); - connector.setHost(listen); - this.addConnector(connector); --- -1.8.5.2 - diff --git a/hive.spec b/hive.spec index 6310bc5..2847787 100644 --- a/hive.spec +++ b/hive.spec @@ -1,4 +1,4 @@ -%global hadoop_version 2.2.0 +%global hadoop_version 2.4.1 %global hadoop_generation 23 %global pig_version 0.12.0 %global jetty_version 8.1.14.v20131031 @@ -6,22 +6,14 @@ Name: hive Version: 0.12.0 -Release: 4%{?dist} +Release: 5%{?dist} Summary: The Apache Hadoop data warehouse License: ASL 2.0 URL: http://hive.apache.org/ Source0: https://github.com/apache/%{name}/archive/release-%{version}.tar.gz -Patch0: %{name}-general-build-mods.patch -# following patches are organized per module -Patch1: %{name}-shims-mods.patch -Patch2: %{name}-metastore-mods.patch -Patch3: %{name}-ql-mods.patch -Patch4: %{name}-service-mods.patch -Patch5: %{name}-hcatalog-mods.patch -Patch6: %{name}-model-enhancer-asm.patch -Patch7: %{name}-hwi-mods.patch +Patch0: %{name}-integ.patch BuildRequires: activemq-core BuildRequires: activemq-kahadb @@ -45,6 +37,7 @@ BuildRequires: jetty8 BuildRequires: json-lib BuildRequires: json_simple BuildRequires: libthrift-java +BuildRequires: make BuildRequires: maven-clean-plugin BuildRequires: maven-install-plugin BuildRequires: maven-local >= 3.5.0-2 @@ -82,7 +75,6 @@ Requires: glassfish-el-api 
Requires: guava Requires: hadoop-common Requires: hadoop-mapreduce -Requires: hbase Requires: jackson Requires: javaewah Requires: javolution @@ -127,13 +119,6 @@ This package contains javadoc for %{name}. %setup -q -n %{name}-release-%{version} %patch0 -p1 -%patch1 -p1 -%patch2 -p1 -%patch3 -p1 -%patch4 -p1 -%patch5 -p1 -%patch6 -p1 -%patch7 -p1 find -name "*.jar" -delete @@ -143,7 +128,6 @@ sed -i 's/\r//' LICENSE NOTICE README.txt sed -i "/ 0.12.0-5 +- collapse separate patches into single integ +- xmvn2 updates + * Sat Jun 07 2014 Fedora Release Engineering - 0.12.0-4 - Rebuilt for https://fedoraproject.org/wiki/Fedora_21_Mass_Rebuild
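
For reference, a minimal sketch of the Jetty 8 embedding pattern that the consolidated hive-integ.patch moves WebHCat and HiveServer2 onto (org.eclipse.jetty replacing org.mortbay, EnumSet<DispatcherType> replacing FilterMapping.REQUEST, ServletContextHandler replacing Context). Class and method names follow the Jetty 8.1 API pinned above (jetty.webhcat.version 8.1.14.v20131031); the servlet, the "/status/*" path, and the pass-through filter are illustrative placeholders, not Hive code.

import java.util.EnumSet;

import javax.servlet.DispatcherType;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.nio.SelectChannelConnector;
import org.eclipse.jetty.servlet.FilterHolder;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.eclipse.jetty.util.thread.QueuedThreadPool;

public class Jetty8Sketch {
  public static void main(String[] args) throws Exception {
    Server server = new Server();

    // Jetty 8 still accepts a pool via setThreadPool (removed in Jetty 9).
    QueuedThreadPool threadPool = new QueuedThreadPool();
    threadPool.setMinThreads(5);
    threadPool.setMaxThreads(100);
    server.setThreadPool(threadPool);

    // SelectChannelConnector replaces the Jetty 6 org.mortbay connectors.
    SelectChannelConnector connector = new SelectChannelConnector();
    connector.setPort(8080);
    server.addConnector(connector);

    // ServletContextHandler replaces org.mortbay.jetty.servlet.Context.
    ServletContextHandler context =
        new ServletContextHandler(server, "/", ServletContextHandler.SESSIONS);

    // Placeholder servlet; WebHCat wires Jersey here instead.
    context.addServlet(new ServletHolder(new HttpServlet() {
      @Override
      protected void doGet(HttpServletRequest req, HttpServletResponse resp)
          throws java.io.IOException {
        resp.getWriter().println("ok");
      }
    }), "/status/*");

    // addFilter now takes EnumSet<DispatcherType> instead of FilterMapping.REQUEST.
    FilterHolder fHolder = new FilterHolder(new Filter() {
      public void init(FilterConfig cfg) {}
      public void doFilter(ServletRequest rq, ServletResponse rs, FilterChain chain)
          throws java.io.IOException, ServletException {
        chain.doFilter(rq, rs); // pass-through; a real deployment would authenticate here
      }
      public void destroy() {}
    });
    context.addFilter(fHolder, "/status/*", EnumSet.of(DispatcherType.REQUEST));

    server.start();
    server.join();
  }
}

Compiling the sketch only needs jetty-server, jetty-servlet, jetty-util, and the servlet 3.0 API on the classpath, which matches the dependency swap from jetty-all-server to the individual org.eclipse.jetty artifacts in hcatalog/webhcat/svr/pom.xml above.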