java - Dependency conflict in integrating with Cloudera HBase 1.0.0


I tried to connect a Play Framework (2.4.2) web application to a Cloudera HBase cluster. I included the HBase dependencies in the build.sbt file and used the HBase sample code to insert a cell into a table. However, I got an exception that seems to be a dependency conflict between Play Framework and HBase. I have attached the sample code and build.sbt file as well. I would be grateful for any help resolving this error.

    [error] [07/21/2015 12:03:05.919] [application-akka.actor.default-dispatcher-5] [ActorSystem(application)] Uncaught fatal error from thread [application-akka.actor.default-dispatcher-5] shutting down ActorSystem [application]
    java.lang.IllegalAccessError: tried to access method com.google.common.base.Stopwatch.<init>()V from class org.apache.hadoop.hbase.zookeeper.MetaTableLocator
        at org.apache.hadoop.hbase.zookeeper.MetaTableLocator.blockUntilAvailable(MetaTableLocator.java:434)
        at org.apache.hadoop.hbase.client.ZooKeeperRegistry.getMetaRegionLocation(ZooKeeperRegistry.java:60)
        at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegion(ConnectionManager.java:1123)
        at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegion(ConnectionManager.java:1110)
        at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegionInMeta(ConnectionManager.java:1262)
        at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegion(ConnectionManager.java:1126)
        at org.apache.hadoop.hbase.client.AsyncProcess.submit(AsyncProcess.java:369)
        at org.apache.hadoop.hbase.client.AsyncProcess.submit(AsyncProcess.java:320)
        at org.apache.hadoop.hbase.client.BufferedMutatorImpl.backgroundFlushCommits(BufferedMutatorImpl.java:206)
        at org.apache.hadoop.hbase.client.BufferedMutatorImpl.flush(BufferedMutatorImpl.java:183)
        at org.apache.hadoop.hbase.client.HTable.flushCommits(HTable.java:1496)
        at org.apache.hadoop.hbase.client.HTable.put(HTable.java:1107)
        at controllers.Application.index(Application.java:44)
        at router.Routes$$anonfun$routes$1$$anonfun$applyOrElse$1$$anonfun$apply$1.apply(Routes.scala:95)
        at router.Routes$$anonfun$routes$1$$anonfun$applyOrElse$1$$anonfun$apply$1.apply(Routes.scala:95)
        at play.core.routing.HandlerInvokerFactory$$anon$4.resultCall(HandlerInvoker.scala:136)
        at play.core.routing.HandlerInvokerFactory$JavaActionInvokerFactory$$anon$14$$anon$3$$anon$1.invocation(HandlerInvoker.scala:127)
        at play.core.j.JavaAction$$anon$1.call(JavaAction.scala:70)
        at play.http.DefaultHttpRequestHandler$1.call(DefaultHttpRequestHandler.java:20)
        at play.core.j.JavaAction$$anonfun$7.apply(JavaAction.scala:94)
        at play.core.j.JavaAction$$anonfun$7.apply(JavaAction.scala:94)
        at scala.concurrent.impl.Future$PromiseCompletingRunnable.liftedTree1$1(Future.scala:24)
        at scala.concurrent.impl.Future$PromiseCompletingRunnable.run(Future.scala:24)
        at play.core.j.HttpExecutionContext$$anon$2.run(HttpExecutionContext.scala:40)
        at play.api.libs.iteratee.Execution$trampoline$.execute(Execution.scala:70)
        at play.core.j.HttpExecutionContext.execute(HttpExecutionContext.scala:32)
        at scala.concurrent.impl.Future$.apply(Future.scala:31)
        at scala.concurrent.Future$.apply(Future.scala:492)
        at play.core.j.JavaAction.apply(JavaAction.scala:94)
        at play.api.mvc.Action$$anonfun$apply$1$$anonfun$apply$4$$anonfun$apply$5.apply(Action.scala:105)
        at play.api.mvc.Action$$anonfun$apply$1$$anonfun$apply$4$$anonfun$apply$5.apply(Action.scala:105)
        at play.utils.Threads$.withContextClassLoader(Threads.scala:21)
        at play.api.mvc.Action$$anonfun$apply$1$$anonfun$apply$4.apply(Action.scala:104)
        at play.api.mvc.Action$$anonfun$apply$1$$anonfun$apply$4.apply(Action.scala:103)
        at scala.Option.map(Option.scala:146)
        at play.api.mvc.Action$$anonfun$apply$1.apply(Action.scala:103)
        at play.api.mvc.Action$$anonfun$apply$1.apply(Action.scala:96)
        at play.api.libs.iteratee.Iteratee$$anonfun$mapM$1.apply(Iteratee.scala:524)
        at play.api.libs.iteratee.Iteratee$$anonfun$mapM$1.apply(Iteratee.scala:524)
        at play.api.libs.iteratee.Iteratee$$anonfun$flatMapM$1.apply(Iteratee.scala:560)
        at play.api.libs.iteratee.Iteratee$$anonfun$flatMapM$1.apply(Iteratee.scala:560)
        at play.api.libs.iteratee.Iteratee$$anonfun$flatMap$1$$anonfun$apply$13.apply(Iteratee.scala:536)
        at play.api.libs.iteratee.Iteratee$$anonfun$flatMap$1$$anonfun$apply$13.apply(Iteratee.scala:536)
        at scala.concurrent.impl.Future$PromiseCompletingRunnable.liftedTree1$1(Future.scala:24)
        at scala.concurrent.impl.Future$PromiseCompletingRunnable.run(Future.scala:24)
        at akka.dispatch.TaskInvocation.run(AbstractDispatcher.scala:40)
        at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:397)
        at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
        at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
        at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
        at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)

This is the build.sbt file:

    name := """hbasetest"""

    version := "1.0-SNAPSHOT"

    lazy val root = (project in file(".")).enablePlugins(PlayJava)

    scalaVersion := "2.11.6"

    libraryDependencies ++= Seq(
      javaJdbc,
      cache,
      javaWs
    )

    // HBase
    libraryDependencies += "org.apache.hbase" % "hbase-client" % "1.0.0-cdh5.4.4"
    libraryDependencies += "org.apache.hbase" % "hbase-annotations" % "1.0.0-cdh5.4.4"
    libraryDependencies += "org.apache.hbase" % "hbase-common" % "1.0.0-cdh5.4.4"
    libraryDependencies += "org.apache.hbase" % "hbase-protocol" % "1.0.0-cdh5.4.4"

    // Hadoop
    libraryDependencies += "org.apache.hadoop" % "hadoop-common" % "2.6.0-cdh5.4.4"
    libraryDependencies += "org.apache.hadoop" % "hadoop-annotations" % "2.6.0-cdh5.4.4"
    libraryDependencies += "org.apache.hadoop" % "hadoop-auth" % "2.6.0-cdh5.4.4"

    // Play provides two styles of routers: one expects its actions to be injected,
    // the other, legacy style, accesses its actions statically.
    routesGenerator := InjectedRoutesGenerator

This is the code:

    package controllers;

    import play.*;
    import play.mvc.*;
    import views.html.*;

    import java.io.IOException;
    import java.util.HashMap;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.HBaseAdmin;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
    import org.apache.hadoop.hbase.util.Bytes;

    public class Application extends Controller {

        public Result index() {
            String zookeeperIp = "10.12.7.43";
            String zookeeperPort = "2181";
            String hbaseMaster = "10.12.7.43:60000";
            Configuration hbaseConfig;
            Connection connection = null;
            //TableName TABLE_NAME = "sample";

            hbaseConfig = HBaseConfiguration.create();
            hbaseConfig.set("hbase.zookeeper.quorum", zookeeperIp);
            hbaseConfig.set("hbase.zookeeper.property.clientPort", zookeeperPort);
            hbaseConfig.set("hbase.master", hbaseMaster);

            //connection = ConnectionFactory.createConnection(hbaseConfig);
            try {
                connection = ConnectionFactory.createConnection(hbaseConfig);
                HTable table = new HTable(hbaseConfig, "sample");
                Put p = new Put(Bytes.toBytes("1"));
                p.add(Bytes.toBytes("a"), Bytes.toBytes("b"), Bytes.toBytes("4"));
                table.put(p);
            } catch (Exception e) {
                e.printStackTrace();
                System.out.println(e.getMessage());
            }
            return ok(index.render("Your new application is ready."));
        }
    }

As you can see, the trouble is with the dependencies, specifically the Guava library (which is a common problem with Hadoop).
Play uses a newer version of Guava, as you can see from the stack trace, and it no longer has the Stopwatch constructor that HBase requires.

You can approach the problem in multiple ways (all of them 'hacky' as far as I know, unfortunately).

The easy way is to use the hack from Zipkin: add the Stopwatch class ourselves, as sketched below.
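
For illustration, here is a minimal sketch of that shim, assuming the only members HBase touches here are the public no-arg constructor, start(), stop() and elapsedMillis() of the old Guava Stopwatch API. It goes into your own source tree (e.g. app/com/google/common/base/Stopwatch.java) so it is picked up instead of the constructor-less class in the newer Guava jar:

    // Hypothetical shim: re-introduces the pre-Guava-17 Stopwatch API that
    // HBase's MetaTableLocator expects. It lives in our own sources under the
    // com.google.common.base package so it can shadow the newer Guava class.
    package com.google.common.base;

    public class Stopwatch {

        private long startNanos;     // when the current measurement started
        private long elapsedNanos;   // time accumulated over previous runs
        private boolean running;

        // The whole point of the shim: a public no-arg constructor again.
        public Stopwatch() {
        }

        public Stopwatch start() {
            running = true;
            startNanos = System.nanoTime();
            return this;
        }

        public Stopwatch stop() {
            elapsedNanos += System.nanoTime() - startNanos;
            running = false;
            return this;
        }

        public long elapsedMillis() {
            long nanos = running
                    ? elapsedNanos + (System.nanoTime() - startNanos)
                    : elapsedNanos;
            return nanos / 1000000L;
        }
    }

Keep in mind that this copy shadows the real Stopwatch for everything on the classpath, so any library relying on the newer Guava API may break in turn; that is exactly what makes it a hack.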

Another way is to somehow separate the HBase operations from the Play application (which would require a lot of work and design changes).

It would have been easier if SBT supported 'shading', but as far as I know it doesn't yet.
There may still be a workaround with some SBT effort; look at how Spark deals with a similar problem.
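
For reference only, here is a rough sketch of what such shading could look like with the sbt-assembly plugin. This assumes a plugin version with ShadeRule support (0.14.x or later), and it only affects the fat jar produced by the assembly task, not sbt run:

    // project/plugins.sbt (assumed plugin version)
    addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.0")

    // build.sbt: rename Guava's packages inside the assembled jar so the
    // version HBase was compiled against cannot clash with the one Play uses.
    assemblyShadeRules in assembly := Seq(
      ShadeRule.rename("com.google.common.**" -> "shaded.guava.@1").inAll
    )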

