Spark reading from Hbase throws java.lang.NoSuchMethodError: org.json4s.jackson.JsonMethods

classic Classic list List threaded Threaded
9 messages Options
Reply | Threaded
Open this post in threaded view
|

Spark reading from Hbase throws java.lang.NoSuchMethodError: org.json4s.jackson.JsonMethods

Mich Talebzadeh
Hi,

Spark version 2.4.3
Hbase 1.2.7

Data is stored in Hbase as Json. example of a row shown below
image.png
I am trying to read this table in Spark Scala

import org.apache.spark.sql.{SQLContext, _}
import org.apache.spark.sql.execution.datasources.hbase._
import org.apache.spark.{SparkConf, SparkContext}
import spark.sqlContext.implicits._
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.json4s.jackson.Serialization.{read => JsonRead}
import org.json4s.jackson.Serialization.{read, write}
def catalog = s"""{
     | "table":{"namespace":"trading", "name":"MARKETDATAHBASEBATCH",
     | "rowkey":"key",
     | "columns":{
     | "rowkey":{"cf":"rowkey", "col":"key", "type":"string"},
     |     |"ticker":{"cf":"PRICE_INFO", "col":"ticker", "type":"string"},
     |     |"timeissued":{"cf":"PRICE_INFO", "col":"timeissued", "type":"string"},
     |     |"price":{"cf":"PRICE_INFO", "col":"price", "type":"string"}
     |     |}
     | |}""".stripMargin
def withCatalog(cat: String): DataFrame = {
           spark.sqlContext
           .read
           .options(Map(HBaseTableCatalog.tableCatalog->cat))
           .format("org.apache.spark.sql.execution.datasources.hbase")
           .load()
        }
val df = withCatalog(catalog)


However, I am getting this error

Spark session available as 'spark'.
java.lang.NoSuchMethodError: org.json4s.jackson.JsonMethods$.parse(Lorg/json4s/JsonInput;Z)Lorg/json4s/JsonAST$JValue;
  at org.apache.spark.sql.execution.datasources.hbase.HBaseTableCatalog$.apply(HBaseTableCatalog.scala:257)
  at org.apache.spark.sql.execution.datasources.hbase.HBaseRelation.<init>(HBaseRelation.scala:80)
  at org.apache.spark.sql.execution.datasources.hbase.DefaultSource.createRelation(HBaseRelation.scala:51)
  at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:318)
  at org.apache.spark.sql.DataFrameReader.loadV1Source(DataFrameReader.scala:223)
  at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:211)
  at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:167)
  at withCatalog(testme.scala:49)
  ... 65 elided

I have Googled it but with little luck!

Thanks,
Mich

http://talebzadehmich.wordpress.com


Disclaimer: Use it at your own risk. Any and all responsibility for any loss, damage or destruction of data or any other property which may arise from relying on this email's technical content is explicitly disclaimed. The author will in no case be liable for any monetary damages arising from such loss, damage or destruction.

 

Reply | Threaded
Open this post in threaded view
|

Re: Spark reading from Hbase throws java.lang.NoSuchMethodError: org.json4s.jackson.JsonMethods

muthu
I suspect the spark job is somehow having an incorrect (newer) version of json4s in the classpath. json4s 3.5.3 is the highest version that can be used.

Thanks,
Muthu

On Mon, Feb 17, 2020, 06:43 Mich Talebzadeh <[hidden email]> wrote:
Hi,

Spark version 2.4.3
Hbase 1.2.7

Data is stored in Hbase as Json. example of a row shown below
image.png
I am trying to read this table in Spark Scala

import org.apache.spark.sql.{SQLContext, _}
import org.apache.spark.sql.execution.datasources.hbase._
import org.apache.spark.{SparkConf, SparkContext}
import spark.sqlContext.implicits._
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.json4s.jackson.Serialization.{read => JsonRead}
import org.json4s.jackson.Serialization.{read, write}
def catalog = s"""{
     | "table":{"namespace":"trading", "name":"MARKETDATAHBASEBATCH",
     | "rowkey":"key",
     | "columns":{
     | "rowkey":{"cf":"rowkey", "col":"key", "type":"string"},
     |     |"ticker":{"cf":"PRICE_INFO", "col":"ticker", "type":"string"},
     |     |"timeissued":{"cf":"PRICE_INFO", "col":"timeissued", "type":"string"},
     |     |"price":{"cf":"PRICE_INFO", "col":"price", "type":"string"}
     |     |}
     | |}""".stripMargin
def withCatalog(cat: String): DataFrame = {
           spark.sqlContext
           .read
           .options(Map(HBaseTableCatalog.tableCatalog->cat))
           .format("org.apache.spark.sql.execution.datasources.hbase")
           .load()
        }
val df = withCatalog(catalog)


However, I am getting this error

Spark session available as 'spark'.
java.lang.NoSuchMethodError: org.json4s.jackson.JsonMethods$.parse(Lorg/json4s/JsonInput;Z)Lorg/json4s/JsonAST$JValue;
  at org.apache.spark.sql.execution.datasources.hbase.HBaseTableCatalog$.apply(HBaseTableCatalog.scala:257)
  at org.apache.spark.sql.execution.datasources.hbase.HBaseRelation.<init>(HBaseRelation.scala:80)
  at org.apache.spark.sql.execution.datasources.hbase.DefaultSource.createRelation(HBaseRelation.scala:51)
  at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:318)
  at org.apache.spark.sql.DataFrameReader.loadV1Source(DataFrameReader.scala:223)
  at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:211)
  at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:167)
  at withCatalog(testme.scala:49)
  ... 65 elided

I have Googled it but with little luck!

Thanks,
Mich

http://talebzadehmich.wordpress.com


Disclaimer: Use it at your own risk. Any and all responsibility for any loss, damage or destruction of data or any other property which may arise from relying on this email's technical content is explicitly disclaimed. The author will in no case be liable for any monetary damages arising from such loss, damage or destruction.

 

Reply | Threaded
Open this post in threaded view
|

Re: Spark reading from Hbase throws java.lang.NoSuchMethodError: org.json4s.jackson.JsonMethods

Mich Talebzadeh
Thanks Muthu,


I am using the following jar files for now in local mode i.e.  spark-shell_local --jars …..

json4s-jackson_2.10-3.2.10.jar
json4s_2.11-3.2.11.jar
json4s-native_2.10-3.4.0.jar

Which one is the incorrect one, please?

Regards,

Mich



Disclaimer: Use it at your own risk. Any and all responsibility for any loss, damage or destruction of data or any other property which may arise from relying on this email's technical content is explicitly disclaimed. The author will in no case be liable for any monetary damages arising from such loss, damage or destruction.

 



On Mon, 17 Feb 2020 at 20:28, Muthu Jayakumar <[hidden email]> wrote:
I suspect the spark job is somehow having an incorrect (newer) version of json4s in the classpath. json4s 3.5.3 is the highest version that can be used.

Thanks,
Muthu

On Mon, Feb 17, 2020, 06:43 Mich Talebzadeh <[hidden email]> wrote:
Hi,

Spark version 2.4.3
Hbase 1.2.7

Data is stored in Hbase as Json. example of a row shown below
image.png
I am trying to read this table in Spark Scala

import org.apache.spark.sql.{SQLContext, _}
import org.apache.spark.sql.execution.datasources.hbase._
import org.apache.spark.{SparkConf, SparkContext}
import spark.sqlContext.implicits._
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.json4s.jackson.Serialization.{read => JsonRead}
import org.json4s.jackson.Serialization.{read, write}
def catalog = s"""{
     | "table":{"namespace":"trading", "name":"MARKETDATAHBASEBATCH",
     | "rowkey":"key",
     | "columns":{
     | "rowkey":{"cf":"rowkey", "col":"key", "type":"string"},
     |     |"ticker":{"cf":"PRICE_INFO", "col":"ticker", "type":"string"},
     |     |"timeissued":{"cf":"PRICE_INFO", "col":"timeissued", "type":"string"},
     |     |"price":{"cf":"PRICE_INFO", "col":"price", "type":"string"}
     |     |}
     | |}""".stripMargin
def withCatalog(cat: String): DataFrame = {
           spark.sqlContext
           .read
           .options(Map(HBaseTableCatalog.tableCatalog->cat))
           .format("org.apache.spark.sql.execution.datasources.hbase")
           .load()
        }
val df = withCatalog(catalog)


However, I am getting this error

Spark session available as 'spark'.
java.lang.NoSuchMethodError: org.json4s.jackson.JsonMethods$.parse(Lorg/json4s/JsonInput;Z)Lorg/json4s/JsonAST$JValue;
  at org.apache.spark.sql.execution.datasources.hbase.HBaseTableCatalog$.apply(HBaseTableCatalog.scala:257)
  at org.apache.spark.sql.execution.datasources.hbase.HBaseRelation.<init>(HBaseRelation.scala:80)
  at org.apache.spark.sql.execution.datasources.hbase.DefaultSource.createRelation(HBaseRelation.scala:51)
  at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:318)
  at org.apache.spark.sql.DataFrameReader.loadV1Source(DataFrameReader.scala:223)
  at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:211)
  at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:167)
  at withCatalog(testme.scala:49)
  ... 65 elided

I have Googled it but with little luck!

Thanks,
Mich

http://talebzadehmich.wordpress.com


Disclaimer: Use it at your own risk. Any and all responsibility for any loss, damage or destruction of data or any other property which may arise from relying on this email's technical content is explicitly disclaimed. The author will in no case be liable for any monetary damages arising from such loss, damage or destruction.

 

Reply | Threaded
Open this post in threaded view
|

Re: Spark reading from Hbase throws java.lang.NoSuchMethodError: org.json4s.jackson.JsonMethods

muthu
Hello Mich,

Thank you for the mail. From, what I can understand from json4s history, spark and the versions you have...
1. Apache Spark 2.4.3 uses json4s 3.5.3 (to be specific it uses json4s-jackson)
2. json4s 3.2.11 and 3.2.10 is not compatible (ref: https://github.com/json4s/json4s/issues/212)
3. I notice that you are using scala 2.10 and scala 2.11 versions on jars. I believe spark 2.4.3 supports scala 2.11 or 2.12 only.

I would suggest using json4s-jackson, json4s and json4s-native be in version 3.5.3 (for scala 2.11 or 2.12 depending on your spark version). In case, if you want to use older version, make sure all of them are older than 3.2.11 at the least.

Hope it helps.

Thanks,
Muthu


On Mon, Feb 17, 2020 at 1:15 PM Mich Talebzadeh <[hidden email]> wrote:
Thanks Muthu,


I am using the following jar files for now in local mode i.e.  spark-shell_local --jars …..

json4s-jackson_2.10-3.2.10.jar
json4s_2.11-3.2.11.jar
json4s-native_2.10-3.4.0.jar

Which one is the incorrect one, please?

Regards,

Mich



Disclaimer: Use it at your own risk. Any and all responsibility for any loss, damage or destruction of data or any other property which may arise from relying on this email's technical content is explicitly disclaimed. The author will in no case be liable for any monetary damages arising from such loss, damage or destruction.

 



On Mon, 17 Feb 2020 at 20:28, Muthu Jayakumar <[hidden email]> wrote:
I suspect the spark job is somehow having an incorrect (newer) version of json4s in the classpath. json4s 3.5.3 is the highest version that can be used.

Thanks,
Muthu

On Mon, Feb 17, 2020, 06:43 Mich Talebzadeh <[hidden email]> wrote:
Hi,

Spark version 2.4.3
Hbase 1.2.7

Data is stored in Hbase as Json. example of a row shown below
image.png
I am trying to read this table in Spark Scala

import org.apache.spark.sql.{SQLContext, _}
import org.apache.spark.sql.execution.datasources.hbase._
import org.apache.spark.{SparkConf, SparkContext}
import spark.sqlContext.implicits._
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.json4s.jackson.Serialization.{read => JsonRead}
import org.json4s.jackson.Serialization.{read, write}
def catalog = s"""{
     | "table":{"namespace":"trading", "name":"MARKETDATAHBASEBATCH",
     | "rowkey":"key",
     | "columns":{
     | "rowkey":{"cf":"rowkey", "col":"key", "type":"string"},
     |     |"ticker":{"cf":"PRICE_INFO", "col":"ticker", "type":"string"},
     |     |"timeissued":{"cf":"PRICE_INFO", "col":"timeissued", "type":"string"},
     |     |"price":{"cf":"PRICE_INFO", "col":"price", "type":"string"}
     |     |}
     | |}""".stripMargin
def withCatalog(cat: String): DataFrame = {
           spark.sqlContext
           .read
           .options(Map(HBaseTableCatalog.tableCatalog->cat))
           .format("org.apache.spark.sql.execution.datasources.hbase")
           .load()
        }
val df = withCatalog(catalog)


However, I am getting this error

Spark session available as 'spark'.
java.lang.NoSuchMethodError: org.json4s.jackson.JsonMethods$.parse(Lorg/json4s/JsonInput;Z)Lorg/json4s/JsonAST$JValue;
  at org.apache.spark.sql.execution.datasources.hbase.HBaseTableCatalog$.apply(HBaseTableCatalog.scala:257)
  at org.apache.spark.sql.execution.datasources.hbase.HBaseRelation.<init>(HBaseRelation.scala:80)
  at org.apache.spark.sql.execution.datasources.hbase.DefaultSource.createRelation(HBaseRelation.scala:51)
  at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:318)
  at org.apache.spark.sql.DataFrameReader.loadV1Source(DataFrameReader.scala:223)
  at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:211)
  at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:167)
  at withCatalog(testme.scala:49)
  ... 65 elided

I have Googled it but with little luck!

Thanks,
Mich

http://talebzadehmich.wordpress.com


Disclaimer: Use it at your own risk. Any and all responsibility for any loss, damage or destruction of data or any other property which may arise from relying on this email's technical content is explicitly disclaimed. The author will in no case be liable for any monetary damages arising from such loss, damage or destruction.

 

Reply | Threaded
Open this post in threaded view
|

Re: Spark reading from Hbase throws java.lang.NoSuchMethodError: org.json4s.jackson.JsonMethods

Jörn Franke
In reply to this post by Mich Talebzadeh
Is there a reason why different Scala (it seems at least 2.10/2.11) versions are mixed? This never works.
Do you by accident include a dependency with an old Scala version? I.e. the HBase datasource, maybe?


Am 17.02.2020 um 22:15 schrieb Mich Talebzadeh <[hidden email]>:


Thanks Muthu,


I am using the following jar files for now in local mode i.e.  spark-shell_local --jars …..

json4s-jackson_2.10-3.2.10.jar
json4s_2.11-3.2.11.jar
json4s-native_2.10-3.4.0.jar

Which one is the incorrect one, please?

Regards,

Mich



Disclaimer: Use it at your own risk. Any and all responsibility for any loss, damage or destruction of data or any other property which may arise from relying on this email's technical content is explicitly disclaimed. The author will in no case be liable for any monetary damages arising from such loss, damage or destruction.

 



On Mon, 17 Feb 2020 at 20:28, Muthu Jayakumar <[hidden email]> wrote:
I suspect the spark job is somehow having an incorrect (newer) version of json4s in the classpath. json4s 3.5.3 is the highest version that can be used.

Thanks,
Muthu

On Mon, Feb 17, 2020, 06:43 Mich Talebzadeh <[hidden email]> wrote:
Hi,

Spark version 2.4.3
Hbase 1.2.7

Data is stored in Hbase as Json. example of a row shown below
<image.png>

I am trying to read this table in Spark Scala

import org.apache.spark.sql.{SQLContext, _}
import org.apache.spark.sql.execution.datasources.hbase._
import org.apache.spark.{SparkConf, SparkContext}
import spark.sqlContext.implicits._
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.json4s.jackson.Serialization.{read => JsonRead}
import org.json4s.jackson.Serialization.{read, write}
def catalog = s"""{
     | "table":{"namespace":"trading", "name":"MARKETDATAHBASEBATCH",
     | "rowkey":"key",
     | "columns":{
     | "rowkey":{"cf":"rowkey", "col":"key", "type":"string"},
     |     |"ticker":{"cf":"PRICE_INFO", "col":"ticker", "type":"string"},
     |     |"timeissued":{"cf":"PRICE_INFO", "col":"timeissued", "type":"string"},
     |     |"price":{"cf":"PRICE_INFO", "col":"price", "type":"string"}
     |     |}
     | |}""".stripMargin
def withCatalog(cat: String): DataFrame = {
           spark.sqlContext
           .read
           .options(Map(HBaseTableCatalog.tableCatalog->cat))
           .format("org.apache.spark.sql.execution.datasources.hbase")
           .load()
        }
val df = withCatalog(catalog)


However, I am getting this error

Spark session available as 'spark'.
java.lang.NoSuchMethodError: org.json4s.jackson.JsonMethods$.parse(Lorg/json4s/JsonInput;Z)Lorg/json4s/JsonAST$JValue;
  at org.apache.spark.sql.execution.datasources.hbase.HBaseTableCatalog$.apply(HBaseTableCatalog.scala:257)
  at org.apache.spark.sql.execution.datasources.hbase.HBaseRelation.<init>(HBaseRelation.scala:80)
  at org.apache.spark.sql.execution.datasources.hbase.DefaultSource.createRelation(HBaseRelation.scala:51)
  at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:318)
  at org.apache.spark.sql.DataFrameReader.loadV1Source(DataFrameReader.scala:223)
  at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:211)
  at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:167)
  at withCatalog(testme.scala:49)
  ... 65 elided

I have Googled it but with little luck!

Thanks,
Mich

http://talebzadehmich.wordpress.com


Disclaimer: Use it at your own risk. Any and all responsibility for any loss, damage or destruction of data or any other property which may arise from relying on this email's technical content is explicitly disclaimed. The author will in no case be liable for any monetary damages arising from such loss, damage or destruction.

 

Reply | Threaded
Open this post in threaded view
|

Re: Spark reading from Hbase throws java.lang.NoSuchMethodError: org.json4s.jackson.JsonMethods

Mich Talebzadeh

Dr Mich Talebzadeh

 

LinkedIn  https://www.linkedin.com/profile/view?id=AAEAAAAWh2gBxianrbJd6zP6AcPCCdOABUrV8Pw

 

http://talebzadehmich.wordpress.com


Disclaimer: Use it at your own risk. Any and all responsibility for any loss, damage or destruction of data or any other property which may arise from relying on this email's technical content is explicitly disclaimed. The author will in no case be liable for any monetary damages arising from such loss, damage or destruction.

 

Many thanks both.

Let me check and confirm. 

regards,

Mich


On Mon, 17 Feb 2020 at 21:33, Jörn Franke <[hidden email]> wrote:
Is there a reason why different Scala (it seems at least 2.10/2.11) versions are mixed? This never works.
Do you by accident include a dependency with an old Scala version? I.e. the HBase datasource, maybe?


Am 17.02.2020 um 22:15 schrieb Mich Talebzadeh <[hidden email]>:


Thanks Muthu,


I am using the following jar files for now in local mode i.e.  spark-shell_local --jars …..

json4s-jackson_2.10-3.2.10.jar
json4s_2.11-3.2.11.jar
json4s-native_2.10-3.4.0.jar

Which one is the incorrect one, please?

Regards,

Mich



Disclaimer: Use it at your own risk. Any and all responsibility for any loss, damage or destruction of data or any other property which may arise from relying on this email's technical content is explicitly disclaimed. The author will in no case be liable for any monetary damages arising from such loss, damage or destruction.

 



On Mon, 17 Feb 2020 at 20:28, Muthu Jayakumar <[hidden email]> wrote:
I suspect the spark job is somehow having an incorrect (newer) version of json4s in the classpath. json4s 3.5.3 is the highest version that can be used.

Thanks,
Muthu

On Mon, Feb 17, 2020, 06:43 Mich Talebzadeh <[hidden email]> wrote:
Hi,

Spark version 2.4.3
Hbase 1.2.7

Data is stored in Hbase as Json. example of a row shown below
<image.png>

I am trying to read this table in Spark Scala

import org.apache.spark.sql.{SQLContext, _}
import org.apache.spark.sql.execution.datasources.hbase._
import org.apache.spark.{SparkConf, SparkContext}
import spark.sqlContext.implicits._
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.json4s.jackson.Serialization.{read => JsonRead}
import org.json4s.jackson.Serialization.{read, write}
def catalog = s"""{
     | "table":{"namespace":"trading", "name":"MARKETDATAHBASEBATCH",
     | "rowkey":"key",
     | "columns":{
     | "rowkey":{"cf":"rowkey", "col":"key", "type":"string"},
     |     |"ticker":{"cf":"PRICE_INFO", "col":"ticker", "type":"string"},
     |     |"timeissued":{"cf":"PRICE_INFO", "col":"timeissued", "type":"string"},
     |     |"price":{"cf":"PRICE_INFO", "col":"price", "type":"string"}
     |     |}
     | |}""".stripMargin
def withCatalog(cat: String): DataFrame = {
           spark.sqlContext
           .read
           .options(Map(HBaseTableCatalog.tableCatalog->cat))
           .format("org.apache.spark.sql.execution.datasources.hbase")
           .load()
        }
val df = withCatalog(catalog)


However, I am getting this error

Spark session available as 'spark'.
java.lang.NoSuchMethodError: org.json4s.jackson.JsonMethods$.parse(Lorg/json4s/JsonInput;Z)Lorg/json4s/JsonAST$JValue;
  at org.apache.spark.sql.execution.datasources.hbase.HBaseTableCatalog$.apply(HBaseTableCatalog.scala:257)
  at org.apache.spark.sql.execution.datasources.hbase.HBaseRelation.<init>(HBaseRelation.scala:80)
  at org.apache.spark.sql.execution.datasources.hbase.DefaultSource.createRelation(HBaseRelation.scala:51)
  at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:318)
  at org.apache.spark.sql.DataFrameReader.loadV1Source(DataFrameReader.scala:223)
  at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:211)
  at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:167)
  at withCatalog(testme.scala:49)
  ... 65 elided

I have Googled it but with little luck!

Thanks,
Mich

http://talebzadehmich.wordpress.com


Disclaimer: Use it at your own risk. Any and all responsibility for any loss, damage or destruction of data or any other property which may arise from relying on this email's technical content is explicitly disclaimed. The author will in no case be liable for any monetary damages arising from such loss, damage or destruction.

 

Reply | Threaded
Open this post in threaded view
|

Re: Spark reading from Hbase throws java.lang.NoSuchMethodError: org.json4s.jackson.JsonMethods

Mich Talebzadeh
I stripped everything from the jar list. This is all I have

spark-shell --jars shc-core-1.1.1-2.1-s_2.11.jar, \
              json4s-native_2.11-3.5.3.jar, \
              json4s-jackson_2.11-3.5.3.jar, \
              hbase-client-1.2.3.jar, \
              hbase-common-1.2.3.jar

Now I still get the same error!

scala> val df = withCatalog(catalog)
java.lang.NoSuchMethodError: org.json4s.jackson.JsonMethods$.parse(Lorg/json4s/JsonInput;Z)Lorg/json4s/JsonAST$JValue;
  at org.apache.spark.sql.execution.datasources.hbase.HBaseTableCatalog$.apply(HBaseTableCatalog.scala:257)
  at org.apache.spark.sql.execution.datasources.hbase.HBaseRelation.<init>(HBaseRelation.scala:80)
  at org.apache.spark.sql.execution.datasources.hbase.DefaultSource.createRelation(HBaseRelation.scala:51)
  at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:318)
  at org.apache.spark.sql.DataFrameReader.loadV1Source(DataFrameReader.scala:223)
  at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:211)
  at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:167)
  at withCatalog(<console>:54)

Thanks


Disclaimer: Use it at your own risk. Any and all responsibility for any loss, damage or destruction of data or any other property which may arise from relying on this email's technical content is explicitly disclaimed. The author will in no case be liable for any monetary damages arising from such loss, damage or destruction.

 



On Mon, 17 Feb 2020 at 21:37, Mich Talebzadeh <[hidden email]> wrote:

Dr Mich Talebzadeh

 

LinkedIn  https://www.linkedin.com/profile/view?id=AAEAAAAWh2gBxianrbJd6zP6AcPCCdOABUrV8Pw

 

http://talebzadehmich.wordpress.com


Disclaimer: Use it at your own risk. Any and all responsibility for any loss, damage or destruction of data or any other property which may arise from relying on this email's technical content is explicitly disclaimed. The author will in no case be liable for any monetary damages arising from such loss, damage or destruction.

 

Many thanks both.

Let me check and confirm. 

regards,

Mich


On Mon, 17 Feb 2020 at 21:33, Jörn Franke <[hidden email]> wrote:
Is there a reason why different Scala (it seems at least 2.10/2.11) versions are mixed? This never works.
Do you by accident include a dependency with an old Scala version? I.e. the HBase datasource, maybe?


Am 17.02.2020 um 22:15 schrieb Mich Talebzadeh <[hidden email]>:


Thanks Muthu,


I am using the following jar files for now in local mode i.e.  spark-shell_local --jars …..

json4s-jackson_2.10-3.2.10.jar
json4s_2.11-3.2.11.jar
json4s-native_2.10-3.4.0.jar

Which one is the incorrect one, please?

Regards,

Mich



Disclaimer: Use it at your own risk. Any and all responsibility for any loss, damage or destruction of data or any other property which may arise from relying on this email's technical content is explicitly disclaimed. The author will in no case be liable for any monetary damages arising from such loss, damage or destruction.

 



On Mon, 17 Feb 2020 at 20:28, Muthu Jayakumar <[hidden email]> wrote:
I suspect the spark job is somehow having an incorrect (newer) version of json4s in the classpath. json4s 3.5.3 is the highest version that can be used.

Thanks,
Muthu

On Mon, Feb 17, 2020, 06:43 Mich Talebzadeh <[hidden email]> wrote:
Hi,

Spark version 2.4.3
Hbase 1.2.7

Data is stored in Hbase as Json. example of a row shown below
<image.png>

I am trying to read this table in Spark Scala

import org.apache.spark.sql.{SQLContext, _}
import org.apache.spark.sql.execution.datasources.hbase._
import org.apache.spark.{SparkConf, SparkContext}
import spark.sqlContext.implicits._
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.json4s.jackson.Serialization.{read => JsonRead}
import org.json4s.jackson.Serialization.{read, write}
def catalog = s"""{
     | "table":{"namespace":"trading", "name":"MARKETDATAHBASEBATCH",
     | "rowkey":"key",
     | "columns":{
     | "rowkey":{"cf":"rowkey", "col":"key", "type":"string"},
     |     |"ticker":{"cf":"PRICE_INFO", "col":"ticker", "type":"string"},
     |     |"timeissued":{"cf":"PRICE_INFO", "col":"timeissued", "type":"string"},
     |     |"price":{"cf":"PRICE_INFO", "col":"price", "type":"string"}
     |     |}
     | |}""".stripMargin
def withCatalog(cat: String): DataFrame = {
           spark.sqlContext
           .read
           .options(Map(HBaseTableCatalog.tableCatalog->cat))
           .format("org.apache.spark.sql.execution.datasources.hbase")
           .load()
        }
val df = withCatalog(catalog)


However, I am getting this error

Spark session available as 'spark'.
java.lang.NoSuchMethodError: org.json4s.jackson.JsonMethods$.parse(Lorg/json4s/JsonInput;Z)Lorg/json4s/JsonAST$JValue;
  at org.apache.spark.sql.execution.datasources.hbase.HBaseTableCatalog$.apply(HBaseTableCatalog.scala:257)
  at org.apache.spark.sql.execution.datasources.hbase.HBaseRelation.<init>(HBaseRelation.scala:80)
  at org.apache.spark.sql.execution.datasources.hbase.DefaultSource.createRelation(HBaseRelation.scala:51)
  at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:318)
  at org.apache.spark.sql.DataFrameReader.loadV1Source(DataFrameReader.scala:223)
  at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:211)
  at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:167)
  at withCatalog(testme.scala:49)
  ... 65 elided

I have Googled it but with little luck!

Thanks,
Mich

http://talebzadehmich.wordpress.com


Disclaimer: Use it at your own risk. Any and all responsibility for any loss, damage or destruction of data or any other property which may arise from relying on this email's technical content is explicitly disclaimed. The author will in no case be liable for any monetary damages arising from such loss, damage or destruction.

 

Reply | Threaded
Open this post in threaded view
|

Re: Spark reading from Hbase throws java.lang.NoSuchMethodError: org.json4s.jackson.JsonMethods

Sean Busbey
Hi Mich!

Please try to keep your thread on a single mailing list. It's much easier to have things show up on a new list if you give a brief summary of the discussion and a pointer to the original thread (lists.apache.org is great for this).

It looks like you're using "SHC" aka the "Spark HBase Connector". This is a toolset from a third-party and isn't associated with either the Apache Spark or Apache HBase communities. You should address your concerns to the provider of said tool.

If you are interested in reading/writing with HBase from Spark jobs, the Apache HBase community provides its own integration through our "Apache HBase Connectors" project.

The project's reference guide includes some examples of using the integration:


And the bits are available from our download page:


The current documentation for deployment is thin, but if you can bring specific questions about it to the user@hbase mailing list, that will help push along improving it.


On Sun, Feb 23, 2020, 13:13 Mich Talebzadeh <[hidden email]> wrote:
Hi,

Does anyone has any more suggestion for the error I reported below please?

Thanks,

Mich



*Disclaimer:* Use it at your own risk. Any and all responsibility for any
loss, damage or destruction of data or any other property which may arise
from relying on this email's technical content is explicitly disclaimed.
The author will in no case be liable for any monetary damages arising from
such loss, damage or destruction.




On Mon, 17 Feb 2020 at 22:27, Mich Talebzadeh <[hidden email]>
wrote:

> I stripped everything from the jar list. This is all I have
>
> spark-shell --jars shc-core-1.1.1-2.1-s_2.11.jar, \
>               json4s-native_2.11-3.5.3.jar, \
>               json4s-jackson_2.11-3.5.3.jar, \
>               hbase-client-1.2.3.jar, \
>               hbase-common-1.2.3.jar
>
> Now I still get the same error!
>
> scala> val df = withCatalog(catalog)
> java.lang.NoSuchMethodError:
> org.json4s.jackson.JsonMethods$.parse(Lorg/json4s/JsonInput;Z)Lorg/json4s/JsonAST$JValue;
>   at
> org.apache.spark.sql.execution.datasources.hbase.HBaseTableCatalog$.apply(HBaseTableCatalog.scala:257)
>   at
> org.apache.spark.sql.execution.datasources.hbase.HBaseRelation.<init>(HBaseRelation.scala:80)
>   at
> org.apache.spark.sql.execution.datasources.hbase.DefaultSource.createRelation(HBaseRelation.scala:51)
>   at
> org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:318)
>   at
> org.apache.spark.sql.DataFrameReader.loadV1Source(DataFrameReader.scala:223)
>   at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:211)
>   at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:167)
>   at withCatalog(<console>:54)
>
> Thanks
>
>
> *Disclaimer:* Use it at your own risk. Any and all responsibility for any
> loss, damage or destruction of data or any other property which may arise
> from relying on this email's technical content is explicitly disclaimed.
> The author will in no case be liable for any monetary damages arising from
> such loss, damage or destruction.
>
>
>
>
> On Mon, 17 Feb 2020 at 21:37, Mich Talebzadeh <[hidden email]>
> wrote:
>
>>
>> Dr Mich Talebzadeh
>>
>>
>>
>> LinkedIn * https://www.linkedin.com/profile/view?id=AAEAAAAWh2gBxianrbJd6zP6AcPCCdOABUrV8Pw
>> <https://www.linkedin.com/profile/view?id=AAEAAAAWh2gBxianrbJd6zP6AcPCCdOABUrV8Pw>*
>>
>>
>>
>> http://talebzadehmich.wordpress.com
>>
>>
>> *Disclaimer:* Use it at your own risk. Any and all responsibility for
>> any loss, damage or destruction of data or any other property which may
>> arise from relying on this email's technical content is explicitly
>> disclaimed. The author will in no case be liable for any monetary damages
>> arising from such loss, damage or destruction.
>>
>>
>> Many thanks both.
>>
>> Let me check and confirm.
>>
>> regards,
>>
>> Mich
>>
>>
>> On Mon, 17 Feb 2020 at 21:33, Jörn Franke <[hidden email]> wrote:
>>
>>> Is there a reason why different Scala (it seems at least 2.10/2.11)
>>> versions are mixed? This never works.
>>> Do you accidentally include a dependency with an old Scala version? E.g.
>>> the HBase datasource maybe?
>>>
>>>
>>> Am 17.02.2020 um 22:15 schrieb Mich Talebzadeh <
>>> [hidden email]>:
>>>
>>> 
>>> Thanks Muthu,
>>>
>>>
>>> I am using the following jar files for now in local mode i.e.  spark-shell_local
>>> --jars …..
>>>
>>> json4s-jackson_2.10-3.2.10.jar
>>> json4s_2.11-3.2.11.jar
>>> json4s-native_2.10-3.4.0.jar
>>>
>>> Which one is the incorrect one, please?
>>>
>>> Regards,
>>>
>>> Mich
>>>
>>>
>>>
>>> *Disclaimer:* Use it at your own risk. Any and all responsibility for
>>> any loss, damage or destruction of data or any other property which may
>>> arise from relying on this email's technical content is explicitly
>>> disclaimed. The author will in no case be liable for any monetary damages
>>> arising from such loss, damage or destruction.
>>>
>>>
>>>
>>>
>>> On Mon, 17 Feb 2020 at 20:28, Muthu Jayakumar <[hidden email]>
>>> wrote:
>>>
>>>> I suspect the spark job is somehow having an incorrect (newer) version
>>>> of json4s in the classpath. json4s 3.5.3 is the utmost version that can be
>>>> used.
>>>>
>>>> Thanks,
>>>> Muthu
>>>>
>>>> On Mon, Feb 17, 2020, 06:43 Mich Talebzadeh <[hidden email]>
>>>> wrote:
>>>>
>>>>> Hi,
>>>>>
>>>>> Spark version 2.4.3
>>>>> Hbase 1.2.7
>>>>>
>>>>> Data is stored in Hbase as Json. example of a row shown below
>>>>> <image.png>
>>>>>
>>>>> I am trying to read this table in Spark Scala
>>>>>
>>>>> import org.apache.spark.sql.{SQLContext, _}
>>>>> import org.apache.spark.sql.execution.datasources.hbase._
>>>>> import org.apache.spark.{SparkConf, SparkContext}
>>>>> import spark.sqlContext.implicits._
>>>>> import org.json4s._
>>>>> import org.json4s.jackson.JsonMethods._
>>>>> import org.json4s.jackson.Serialization.{read => JsonRead}
>>>>> import org.json4s.jackson.Serialization.{read, write}
>>>>> def catalog = s"""{
>>>>>      | "table":{"namespace":"trading", "name":"MARKETDATAHBASEBATCH",
>>>>>      | "rowkey":"key",
>>>>>      | "columns":{
>>>>>      | "rowkey":{"cf":"rowkey", "col":"key", "type":"string"},
>>>>>      |     |"ticker":{"cf":"PRICE_INFO", "col":"ticker",
>>>>> "type":"string"},
>>>>>      |     |"timeissued":{"cf":"PRICE_INFO", "col":"timeissued",
>>>>> "type":"string"},
>>>>>      |     |"price":{"cf":"PRICE_INFO", "col":"price", "type":"string"}
>>>>>      |     |}
>>>>>      | |}""".stripMargin
>>>>> def withCatalog(cat: String): DataFrame = {
>>>>>            spark.sqlContext
>>>>>            .read
>>>>>            .options(Map(HBaseTableCatalog.tableCatalog->cat))
>>>>>            .format("org.apache.spark.sql.execution.datasources.hbase")
>>>>>            .load()
>>>>>         }
>>>>> val df = withCatalog(catalog)
>>>>>
>>>>>
>>>>> However, I am getting this error
>>>>>
>>>>> Spark session available as 'spark'.
>>>>> java.lang.NoSuchMethodError:
>>>>> org.json4s.jackson.JsonMethods$.parse(Lorg/json4s/JsonInput;Z)Lorg/json4s/JsonAST$JValue;
>>>>>   at
>>>>> org.apache.spark.sql.execution.datasources.hbase.HBaseTableCatalog$.apply(HBaseTableCatalog.scala:257)
>>>>>   at
>>>>> org.apache.spark.sql.execution.datasources.hbase.HBaseRelation.<init>(HBaseRelation.scala:80)
>>>>>   at
>>>>> org.apache.spark.sql.execution.datasources.hbase.DefaultSource.createRelation(HBaseRelation.scala:51)
>>>>>   at
>>>>> org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:318)
>>>>>   at
>>>>> org.apache.spark.sql.DataFrameReader.loadV1Source(DataFrameReader.scala:223)
>>>>>   at
>>>>> org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:211)
>>>>>   at
>>>>> org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:167)
>>>>>   at withCatalog(testme.scala:49)
>>>>>   ... 65 elided
>>>>>
>>>>> I have Googled it but with little luck!
>>>>>
>>>>> Thanks,
>>>>> Mich
>>>>>
>>>>> http://talebzadehmich.wordpress.com
>>>>>
>>>>>
>>>>> *Disclaimer:* Use it at your own risk. Any and all responsibility for
>>>>> any loss, damage or destruction of data or any other property which may
>>>>> arise from relying on this email's technical content is explicitly
>>>>> disclaimed. The author will in no case be liable for any monetary damages
>>>>> arising from such loss, damage or destruction.
>>>>>
>>>>>
>>>>>
>>>>
Reply | Threaded
Open this post in threaded view
|

Re: Spark reading from Hbase throws java.lang.NoSuchMethodError: org.json4s.jackson.JsonMethods

Jörn Franke
In reply to this post by Mich Talebzadeh
Yes I fear you have to shade and create an uberjar 

Am 17.02.2020 um 23:27 schrieb Mich Talebzadeh <[hidden email]>:


I stripped everything from the jar list. This is all I have

spark-shell --jars shc-core-1.1.1-2.1-s_2.11.jar, \
              json4s-native_2.11-3.5.3.jar, \
              json4s-jackson_2.11-3.5.3.jar, \
              hbase-client-1.2.3.jar, \
              hbase-common-1.2.3.jar

Now I still get the same error!

scala> val df = withCatalog(catalog)
java.lang.NoSuchMethodError: org.json4s.jackson.JsonMethods$.parse(Lorg/json4s/JsonInput;Z)Lorg/json4s/JsonAST$JValue;
  at org.apache.spark.sql.execution.datasources.hbase.HBaseTableCatalog$.apply(HBaseTableCatalog.scala:257)
  at org.apache.spark.sql.execution.datasources.hbase.HBaseRelation.<init>(HBaseRelation.scala:80)
  at org.apache.spark.sql.execution.datasources.hbase.DefaultSource.createRelation(HBaseRelation.scala:51)
  at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:318)
  at org.apache.spark.sql.DataFrameReader.loadV1Source(DataFrameReader.scala:223)
  at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:211)
  at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:167)
  at withCatalog(<console>:54)

Thanks


Disclaimer: Use it at your own risk. Any and all responsibility for any loss, damage or destruction of data or any other property which may arise from relying on this email's technical content is explicitly disclaimed. The author will in no case be liable for any monetary damages arising from such loss, damage or destruction.

 



On Mon, 17 Feb 2020 at 21:37, Mich Talebzadeh <[hidden email]> wrote:

Dr Mich Talebzadeh

 

LinkedIn  https://www.linkedin.com/profile/view?id=AAEAAAAWh2gBxianrbJd6zP6AcPCCdOABUrV8Pw

 

http://talebzadehmich.wordpress.com


Disclaimer: Use it at your own risk. Any and all responsibility for any loss, damage or destruction of data or any other property which may arise from relying on this email's technical content is explicitly disclaimed. The author will in no case be liable for any monetary damages arising from such loss, damage or destruction.

 

Many thanks both.

Let me check and confirm. 

regards,

Mich


On Mon, 17 Feb 2020 at 21:33, Jörn Franke <[hidden email]> wrote:
Is there a reason why different Scala (it seems at least 2.10/2.11) versions are mixed? This never works.
Do you accidentally include a dependency with an old Scala version? E.g. the HBase datasource maybe?


Am 17.02.2020 um 22:15 schrieb Mich Talebzadeh <[hidden email]>:


Thanks Muthu,


I am using the following jar files for now in local mode i.e.  spark-shell_local --jars …..

json4s-jackson_2.10-3.2.10.jar
json4s_2.11-3.2.11.jar
json4s-native_2.10-3.4.0.jar

Which one is the incorrect one, please?

Regards,

Mich



Disclaimer: Use it at your own risk. Any and all responsibility for any loss, damage or destruction of data or any other property which may arise from relying on this email's technical content is explicitly disclaimed. The author will in no case be liable for any monetary damages arising from such loss, damage or destruction.

 



On Mon, 17 Feb 2020 at 20:28, Muthu Jayakumar <[hidden email]> wrote:
I suspect the spark job is somehow having an incorrect (newer) version of json4s in the classpath. json4s 3.5.3 is the utmost version that can be used.

Thanks,
Muthu

On Mon, Feb 17, 2020, 06:43 Mich Talebzadeh <[hidden email]> wrote:
Hi,

Spark version 2.4.3
Hbase 1.2.7

Data is stored in Hbase as Json. example of a row shown below
<image.png>

I am trying to read this table in Spark Scala

import org.apache.spark.sql.{SQLContext, _}
import org.apache.spark.sql.execution.datasources.hbase._
import org.apache.spark.{SparkConf, SparkContext}
import spark.sqlContext.implicits._
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.json4s.jackson.Serialization.{read => JsonRead}
import org.json4s.jackson.Serialization.{read, write}
def catalog = s"""{
     | "table":{"namespace":"trading", "name":"MARKETDATAHBASEBATCH",
     | "rowkey":"key",
     | "columns":{
     | "rowkey":{"cf":"rowkey", "col":"key", "type":"string"},
     |     |"ticker":{"cf":"PRICE_INFO", "col":"ticker", "type":"string"},
     |     |"timeissued":{"cf":"PRICE_INFO", "col":"timeissued", "type":"string"},
     |     |"price":{"cf":"PRICE_INFO", "col":"price", "type":"string"}
     |     |}
     | |}""".stripMargin
def withCatalog(cat: String): DataFrame = {
           spark.sqlContext
           .read
           .options(Map(HBaseTableCatalog.tableCatalog->cat))
           .format("org.apache.spark.sql.execution.datasources.hbase")
           .load()
        }
val df = withCatalog(catalog)


However, I am getting this error

Spark session available as 'spark'.
java.lang.NoSuchMethodError: org.json4s.jackson.JsonMethods$.parse(Lorg/json4s/JsonInput;Z)Lorg/json4s/JsonAST$JValue;
  at org.apache.spark.sql.execution.datasources.hbase.HBaseTableCatalog$.apply(HBaseTableCatalog.scala:257)
  at org.apache.spark.sql.execution.datasources.hbase.HBaseRelation.<init>(HBaseRelation.scala:80)
  at org.apache.spark.sql.execution.datasources.hbase.DefaultSource.createRelation(HBaseRelation.scala:51)
  at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:318)
  at org.apache.spark.sql.DataFrameReader.loadV1Source(DataFrameReader.scala:223)
  at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:211)
  at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:167)
  at withCatalog(testme.scala:49)
  ... 65 elided

I have Googled it but with little luck!

Thanks,
Mich

http://talebzadehmich.wordpress.com


Disclaimer: Use it at your own risk. Any and all responsibility for any loss, damage or destruction of data or any other property which may arise from relying on this email's technical content is explicitly disclaimed. The author will in no case be liable for any monetary damages arising from such loss, damage or destruction.