
java - Spark Job Server with Java


I am using Spark with Java, and I want to use the Spark Job Server. To do so, I followed everything in this link: https://github.com/spark-jobserver/spark-jobserver

Here is the Scala class in my project:

import com.typesafe.config._
import org.apache.spark._
import org.apache.spark.api.java.JavaSparkContext
import spark.jobserver.{SparkJob, SparkJobValid, SparkJobValidation}

object JavaWord extends SparkJob {
  // Entry point for running the job locally, outside the job server.
  def main(args: Array[String]) {
    val ctx = new SparkContext("local[4]", "JavaWordCount")
    val config = ConfigFactory.parseString("")
    val results = runJob(ctx, config)
  }

  // Called by the job server before runJob; this job accepts any config.
  override def validate(sc: SparkContext, config: Config): SparkJobValidation = {
    SparkJobValid
  }

  // Wrap the Scala SparkContext in a JavaSparkContext and delegate to the Java class.
  override def runJob(sc: SparkContext, config: Config): Any = {
    val jsc = new JavaSparkContext(sc)
    val j = new JavaCount()
    j.Mafonction(jsc)
  }
}
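
As an aside, the WordCountExample in the job server README does a real check in validate rather than always returning SparkJobValid. A sketch along those lines (input.string is the README's example key, not one my job reads; SparkJobInvalid comes from the same spark.jobserver package):

  import scala.util.Try
  import spark.jobserver.SparkJobInvalid

  override def validate(sc: SparkContext, config: Config): SparkJobValidation = {
    // Reject the job up front, with a clear message, if the expected parameter is missing.
    Try(config.getString("input.string"))
      .map(_ => SparkJobValid)
      .getOrElse(SparkJobInvalid("No input.string config param"))
  }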

And the Java "word count" class:

import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import scala.Tuple2;
import java.io.Serializable;
import java.util.Arrays;

public final class JavaCount implements Serializable {

    public Object Mafonction(JavaSparkContext sc) {
        String s = "a a a a b b c a";
        // Split the string into words, pair each word with 1, then sum the counts per word.
        JavaPairRDD<String, Integer> counts = sc.parallelize(Arrays.asList(s.split(" ")))
            .mapToPair(new PairFunction<String, String, Integer>() {
                @Override
                public Tuple2<String, Integer> call(String word) {
                    return new Tuple2<String, Integer>(word, 1);
                }
            })
            .reduceByKey(new Function2<Integer, Integer, Integer>() {
                @Override
                public Integer call(Integer i1, Integer i2) {
                    return i1 + i2;
                }
            });
        return counts.collect();
    }
}
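
I build these two classes into a jar and submit it to the job server through its REST API as the README describes, roughly like this (the jar path and app name are placeholders from my setup; 8090 is the server's default port):

  curl --data-binary @target/mon-projet.jar localhost:8090/jars/monapp
  curl -d "" 'localhost:8090/jobs?appName=monapp&classPath=JavaWord'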

But when I execute it, I get curl: (52) Empty reply from server, and this error in the Spark Job Server:

> job-server[ERROR] Uncaught error from thread [JobServer-akka.actor.default-dispatcher-13] shutting down JVM since 'akka.jvm-exit-on-fatal-error' is enabled for ActorSystem[JobServer]
job-server[ERROR] java.lang.IncompatibleClassChangeError: Implementing class
job-server[ERROR] at java.lang.ClassLoader.defineClass1(Native Method)
job-server[ERROR] at java.lang.ClassLoader.defineClass(ClassLoader.java:800)
job-server[ERROR] at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
job-server[ERROR] at java.net.URLClassLoader.defineClass(URLClassLoader.java:449)
job-server[ERROR] at java.net.URLClassLoader.access$100(URLClassLoader.java:71)
job-server[ERROR] at java.net.URLClassLoader$1.run(URLClassLoader.java:361)
job-server[ERROR] at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
job-server[ERROR] at java.security.AccessController.doPrivileged(Native Method)
job-server[ERROR] at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
job-server[ERROR] at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
job-server[ERROR] at java.lang.ClassLoader.loadClass(ClassLoader.java:358)

job-server[ERROR] at spark.jobserver.JobManagerActor$$anonfun$spark$jobserver$JobManagerActor$$getJobFuture$4.apply(JobManagerActor.scala:222)
job-server[ERROR] at scala.concurrent.impl.Future$PromiseCompletingRunnable.liftedTree1$1(Future.scala:24)
job-server[ERROR] at scala.concurrent.impl.Future$PromiseCompletingRunnable.run(Future.scala:24)
job-server[ERROR] at akka.dispatch.TaskInvocation.run(AbstractDispatcher.scala:42)
job-server[ERROR] at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
job-server[ERROR] at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
job-server[ERROR] at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
job-server[ERROR] at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
job-server[ERROR] at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
job-server ... finished with exit code 255

Best Answer

I solved the problem. I just deleted everything under /tmp/Spark-JobServe and recompiled the JobServer, and now it works ^^ Thanks a lot for your help.
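
For context: a java.lang.IncompatibleClassChangeError raised while defining a class usually means the JVM is loading class files that were compiled against a different shape of an interface or trait than the one now on the classpath (for example, job jars built against another job-server or Scala version), so stale build artifacts are a prime suspect. Roughly what I did (the path is the one from my setup, and job-server/reStart is the README's dev-mode command, so adjust to how you deploy):

  rm -rf /tmp/Spark-JobServe/*
  sbt clean
  sbt job-server/reStart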

Regarding java - Spark Job Server with Java, we found a similar question on Stack Overflow: https://stackoverflow.com/questions/27904861/
