
java - Apache Ignite and Spark integration error: java.lang.NoSuchMethodError: org.apache.spark.sql.SQLContext.createDataFrame


We are trying to integrate Apache Ignite and Apache Spark, but we get the following error.

Maven configuration:

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>

  <groupId>com.me</groupId>
  <artifactId>igniteclient1</artifactId>
  <version>0.0.1-SNAPSHOT</version>
  <packaging>jar</packaging>

  <name>igniteclient1</name>
  <url>http://maven.apache.org</url>

  <properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
  </properties>

  <dependencies>
    <!-- https://mvnrepository.com/artifact/org.apache.spark/spark-core_2.10 -->
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-core_2.10</artifactId>
      <version>2.1.0</version>
    </dependency>

    <!-- https://mvnrepository.com/artifact/org.apache.spark/spark-sql_2.10 -->
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-sql_2.10</artifactId>
      <version>2.1.0</version>
    </dependency>

    <!-- https://mvnrepository.com/artifact/org.apache.ignite/ignite-core -->
    <dependency>
      <groupId>org.apache.ignite</groupId>
      <artifactId>ignite-core</artifactId>
      <version>1.8.0</version>
    </dependency>

    <!-- https://mvnrepository.com/artifact/org.apache.ignite/ignite-spring -->
    <dependency>
      <groupId>org.apache.ignite</groupId>
      <artifactId>ignite-spring</artifactId>
      <version>1.8.0</version>
    </dependency>

    <!-- https://mvnrepository.com/artifact/org.apache.ignite/ignite-indexing -->
    <dependency>
      <groupId>org.apache.ignite</groupId>
      <artifactId>ignite-indexing</artifactId>
      <version>1.8.0</version>
    </dependency>

    <!-- https://mvnrepository.com/artifact/org.apache.ignite/ignite-rest-http -->
    <dependency>
      <groupId>org.apache.ignite</groupId>
      <artifactId>ignite-rest-http</artifactId>
      <version>1.8.0</version>
    </dependency>

    <!-- https://mvnrepository.com/artifact/org.apache.ignite/ignite-spark -->
    <dependency>
      <groupId>org.apache.ignite</groupId>
      <artifactId>ignite-spark</artifactId>
      <version>1.8.0</version>
    </dependency>

    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>3.8.1</version>
      <scope>test</scope>
    </dependency>
  </dependencies>

  <build>
    <plugins>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-shade-plugin</artifactId>
        <version>3.0.0</version>
        <configuration>
          <filters>
            <filter>
              <artifact>*:*</artifact>
              <excludes>
                <exclude>META-INF/*.SF</exclude>
                <exclude>META-INF/*.DSA</exclude>
                <exclude>META-INF/*.RSA</exclude>
              </excludes>
            </filter>
          </filters>
          <!-- Additional configuration. -->
        </configuration>
        <executions>
          <execution>
            <phase>package</phase>
            <goals>
              <goal>shade</goal>
            </goals>
          </execution>
        </executions>
      </plugin>
    </plugins>
  </build>
</project>

Code:

package com.me.igniteclient1;

import org.apache.spark.api.java.*;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.Row;

import com.me.igniteclient1.ratio.Ratio;

import org.apache.ignite.Ignite;
import org.apache.ignite.Ignition;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.lang.IgniteOutClosure;
import org.apache.ignite.spark.JavaIgniteContext;
import org.apache.ignite.spark.JavaIgniteRDD;
import org.apache.spark.SparkConf;
import org.apache.spark.sql.SQLContext;

/**
 *
 */
public class App {

    public static void main(String[] args) {

        SparkConf conf = new SparkConf().setAppName("Stockrank styles");
        JavaSparkContext sc = new JavaSparkContext(conf);

        JavaIgniteContext igniteContext = new JavaIgniteContext(sc, new IgniteConfigProvider());
        JavaIgniteRDD<String, Ratio> javaRdd = igniteContext.fromCache("RatioCache");

        DataFrame df = javaRdd.sql(
            "select ric, ratio1, ratio2, ratio3, ratio4 from Ratio where date = ? and ratios__Exchange = ? and ratio5 > ?",
            "2017-01-25", "LSE", 10);

        df.printSchema();
        df.show();

        sc.stop();
    }

    /**
     * Ignite configuration provider.
     */
    static class IgniteConfigProvider implements IgniteOutClosure<IgniteConfiguration> {

        public IgniteConfiguration apply() {

            Ignition.setClientMode(true);

            // Start Ignite in client mode.
            Ignite ignite = Ignition.start();
            return ignite.configuration();
        }
    }
}

Error:

 Exception in thread "main" java.lang.NoSuchMethodError: org.apache.spark.sql.SQLContext.createDataFrame(Lorg/apache/spark/rdd/RDD;Lorg/apache/spark/sql/types/StructType;)Lorg/apache/spark/sql/DataFrame;
at org.apache.ignite.spark.IgniteRDD.sql(IgniteRDD.scala:152)
at org.apache.ignite.spark.JavaIgniteRDD.sql(JavaIgniteRDD.scala:79)
at org.apache.ignite.spark.JavaIgniteRDD.sql(JavaIgniteRDD.scala:79)
at com.me.igniteclient1.App.main(App.java:38)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:738)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:187)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:212)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:126)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
17/02/10 17:55:10 INFO GridUpdateNotifier: Your version is up to date.

Do these tests pass in the Ignite Spark module?

https://github.com/apache/ignite/blob/master/modules/spark/src/test/java/org/apache/ignite/spark/JavaStandaloneIgniteRDDSelfTest.java

https://github.com/apache/ignite/blob/master/modules/spark/src/test/java/org/apache/ignite/spark/JavaEmbeddedIgniteRDDSelfTest.java

I have tried different Spark versions (1.6, 1.5, 1.4), but nothing works. I get the same error every time.

We are evaluating the possibility of using Ignite (as an in-memory data structure) together with Spark (with MLlib for analytics), but the integration module does not seem very active. What are the future plans for it? Will it be deprecated?

Update: Ignite 1.9 has been released: https://blogs.apache.org/ignite/entry/apache-ignite-1-9-released

Spark

Ignite’s spark integration was upgraded to the latest Spark version. Presently, you can leverage from Ignite Shared RDDs in applications using latest Spark version.
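
For reference, assuming the 1.9 artifacts keep the same coordinates and target Spark 2.x with Scala 2.11 as the release notes suggest, the upgraded dependencies might look roughly like this (the exact version pins are assumptions, not verified against the 1.9 dependency tree):

<!-- Ignite 1.9 Spark integration (assumed to target Spark 2.x / Scala 2.11) -->
<dependency>
  <groupId>org.apache.ignite</groupId>
  <artifactId>ignite-spark</artifactId>
  <version>1.9.0</version>
</dependency>
<dependency>
  <groupId>org.apache.spark</groupId>
  <artifactId>spark-core_2.11</artifactId>
  <version>2.1.0</version>
</dependency>
<dependency>
  <groupId>org.apache.spark</groupId>
  <artifactId>spark-sql_2.11</artifactId>
  <version>2.1.0</version>
</dependency>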

Best Answer

According to your POM, you are using Spark 2.1.0. Ignite is built against Spark 1.5.2 and does not currently support 2.x.

Also, the ignite-spark module is built for Scala 2.11. If you are on Scala 2.10, use ignite-spark_2.10 instead.
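
For example, staying on the Scala 2.10 artifacts you already have, a consistent combination would look roughly like this (a sketch based only on the versions mentioned above; double-check the exact coordinates on Maven Central):

<!-- Spark at the version the Ignite 1.8 Spark module was built against -->
<dependency>
  <groupId>org.apache.spark</groupId>
  <artifactId>spark-core_2.10</artifactId>
  <version>1.5.2</version>
</dependency>
<dependency>
  <groupId>org.apache.spark</groupId>
  <artifactId>spark-sql_2.10</artifactId>
  <version>1.5.2</version>
</dependency>

<!-- Scala 2.10 build of the Ignite Spark integration -->
<dependency>
  <groupId>org.apache.ignite</groupId>
  <artifactId>ignite-spark_2.10</artifactId>
  <version>1.8.0</version>
</dependency>

Alternatively, switch the Spark artifacts to the _2.11 builds and keep the default ignite-spark artifact; either way, the Scala suffix and the Spark version must match what ignite-spark was compiled against.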

Overall, this is definitely a classpath issue. Make sure all the versions are consistent and it should work.
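
To see which Spark and Scala versions actually end up on the classpath, Maven's dependency tree helps (the includes filter below is just an example):

mvn dependency:tree -Dincludes=org.apache.spark

If a Spark 2.x jar is still pulled in, the error above is expected: in Spark 2.x, DataFrame became an alias for Dataset<Row>, so the binary signature of SQLContext.createDataFrame changed, and that is exactly the method the Ignite 1.8 module fails to find.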

A similar question about "java - Apache Ignite and Spark integration error java.lang.NoSuchMethodError: org.apache.spark.sql.SQLContext.createDataFrame" can be found on Stack Overflow: https://stackoverflow.com/questions/42159566/
