
java - Problems connecting to MongoDB using Spark and Java


I have been trying to connect Spark to Mongo.

My Spring configuration is:

@Bean
public SparkConf sparkConf() {
    return new SparkConf()
            .setMaster("local[*]")
            .setAppName("test")
            .set("spark.app.id", "test")
            .set("spark.mongodb.input.uri", "mongodb://127.0.0.1/")
            .set("spark.mongodb.output.uri", "mongodb://127.0.0.1/")
            .set("spark.mongodb.input.database", "myDataBase")
            .set("spark.mongodb.output.database", "myDataBase");
}

@Bean
public JavaSparkContext javaSparkContext() {
    return new JavaSparkContext(sparkConf());
}

@Bean
public SQLContext sqlContext() {
    return new SQLContext(SparkSession
            .builder()
            .appName("eat")
            .master("local[*]")
            .config(sparkConf())
            .getOrCreate());
}

I'm using the mongo-spark connector:

<dependency>
    <groupId>org.mongodb.spark</groupId>
    <artifactId>mongo-spark-connector_2.11</artifactId>
    <version>2.0.0</version>
</dependency>

When I try to retrieve data from Mongo in my service class like this -

ReadConfig readConfig = ReadConfig.create(sparkContext)
        .withOption("spark.mongodb.output.collection", "myCollection");
JavaRDD<Document> rdd = MongoSpark.load(sparkContext, readConfig);

I get this exception -

"Missing collection name. Set via the 'spark.mongodb.input.uri'
or 'spark.mongodb.input.collection' property"
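
The message names two ways to hand the read side a collection. A minimal sketch of both, reusing the placeholder host, database, and collection names from above (the class and method names are only illustrative):

import org.apache.spark.SparkConf;

public class MongoInputConfigSketch {

    // Option 1: embed the collection in the input URI as <database>.<collection>.
    static SparkConf viaUri() {
        return new SparkConf()
                .setMaster("local[*]")
                .setAppName("test")
                .set("spark.mongodb.input.uri", "mongodb://127.0.0.1/myDataBase.myCollection");
    }

    // Option 2: keep a bare host URI and name the database and collection
    // through the dedicated input properties.
    static SparkConf viaProperties() {
        return new SparkConf()
                .setMaster("local[*]")
                .setAppName("test")
                .set("spark.mongodb.input.uri", "mongodb://127.0.0.1/")
                .set("spark.mongodb.input.database", "myDataBase")
                .set("spark.mongodb.input.collection", "myCollection");
    }
}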

If I set the SparkConf in the Spring configuration like this -

new SparkConf()
        .setMaster("local[*]")
        .setAppName("test")
        .set("spark.app.id", "test")
        .set("spark.mongodb.input.uri", "mongodb://127.0.0.1/myDataBase.myCollection")
        .set("spark.mongodb.output.uri", "mongodb://127.0.0.1/myDataBase.myCollection")

everything works fine. But I want to manage different collections. What am I doing wrong?
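
A sketch of what per-collection reads can look like, under two assumptions: the context is created with some default input collection (which is exactly what the update below ends up doing), and the per-read override uses the read-side "collection" key rather than the output-side key used in the failing snippet above. The helper class, method, and collection argument are placeholders:

import com.mongodb.spark.MongoSpark;
import com.mongodb.spark.config.ReadConfig;
import com.mongodb.spark.rdd.api.java.JavaMongoRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.bson.Document;

import java.util.List;

public class PerCollectionReadSketch {

    // Reads the named collection, falling back to the context's default
    // input URI and database for everything else.
    static List<Document> read(JavaSparkContext sparkContext, String collectionName) {
        ReadConfig readConfig = ReadConfig.create(sparkContext)
                .withOption("collection", collectionName);
        JavaMongoRDD<Document> rdd = MongoSpark.load(sparkContext, readConfig);
        return rdd.collect();
    }
}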

------------------------ UPDATE ------------------------

Solved. The Spark context has to be created with some default collection; the collection to read can then be chosen per query through the collection option, without redefining the context. Example:

@SpringBootConfiguration
public class SparkConfiguration {

    private final String MONGO_PREFIX = "mongodb://";
    private final String MONGO_INPUT_COLLECTION = "faqs";

    @Value(value = "${spring.data.mongodb.name}")
    private String mongoName;

    @Value(value = "${spring.data.mongodb.net.bindIp}")
    private String mongoHost;

    @Bean
    public SparkSession sparkSession() {
        return SparkSession.builder()
                .master("local[*]")
                .appName("eat-spark-cluster")
                .config("spark.app.id", "Eat")
                .config("spark.mongodb.input.uri", MONGO_PREFIX.concat(mongoHost).concat("/"))
                .config("spark.mongodb.input.database", mongoName)
                .config("spark.mongodb.input.collection", MONGO_INPUT_COLLECTION)
                .getOrCreate();
    }

    @Bean
    public JavaSparkContext javaSparkContext() {
        return JavaSparkContext.fromSparkContext(sparkSession().sparkContext());
    }
}


ReadConfig readConfig = ReadConfig.create(getJavaSparkContext())
        .withOption("collection", "my_collection");
JavaMongoRDD<Document> placesRdd = MongoSpark.load(getJavaSparkContext(), readConfig);

return placesRdd.collect();
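
The same override pattern presumably works on the write side, provided the session also carries the output-side defaults (the spark.mongodb.output.* settings, including a default output collection, as the accepted answer's connection() below sets). A sketch under that assumption, with placeholder class, method, and collection names:

import com.mongodb.spark.MongoSpark;
import com.mongodb.spark.config.WriteConfig;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.bson.Document;

public class PerCollectionWriteSketch {

    // Saves the given documents into the named collection, reusing the
    // context's default output URI and database.
    static void write(JavaSparkContext sparkContext, JavaRDD<Document> documents, String collectionName) {
        WriteConfig writeConfig = WriteConfig.create(sparkContext)
                .withOption("collection", collectionName);
        MongoSpark.save(documents, writeConfig);
    }
}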

Best Answer

Mongo version 3.4.8, Spark version 2.2

package mongo;

import com.mongodb.spark.MongoSpark;
import com.mongodb.spark.config.WriteConfig;
import com.mongodb.spark.rdd.api.java.JavaMongoRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;
import org.bson.Document;

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Scanner;

public class Connector {
    String db1 = "mongodb://127.0.0.1/";
    String db2 = "mongodb://192.168.4.180/";
    String dbUrl = db1;

    String user = "";
    String pass = "";
    String dbName = "test";
    String collName = "spark";

    public static void main(String[] args) {
        Connector con = new Connector();
        JavaSparkContext jsc = con.connection();
        // con.writeToMongo(jsc);
        con.readFromMongo(jsc);

        // Block so the application (and the Spark UI) stays up until a key is entered.
        Scanner sc = new Scanner(System.in);
        sc.next();
    }

    // Builds a SparkSession with a default database and collection for both
    // input and output, then wraps its SparkContext for the Java API.
    JavaSparkContext connection() {
        SparkSession ss = SparkSession.builder()
                .master("local")
                .appName("MongoConnector")
                .config("spark.mongodb.input.uri", dbUrl + dbName)
                .config("spark.mongodb.output.uri", dbUrl + dbName)
                .config("spark.mongodb.output.collection", collName)
                .config("spark.mongodb.input.collection", collName)
                .getOrCreate();

        JavaSparkContext jsc = new JavaSparkContext(ss.sparkContext());
        return jsc;
        // jsc.close();
    }

    // Loads the default input collection and prints its documents.
    void readFromMongo(JavaSparkContext jsc) {
        JavaMongoRDD<Document> rdd = MongoSpark.load(jsc);
        System.out.print(rdd.collect());
    }

    // Writes three small documents into the "spark" collection with majority write concern.
    void writeToMongo(JavaSparkContext jsc) {
        JavaRDD<Document> rdd = jsc.parallelize(Arrays.asList(1, 2, 3))
                .map(x -> Document.parse("{spark: " + x + "}"));

        Map<String, String> writeconf = new HashMap<String, String>();
        writeconf.put("collection", "spark");
        writeconf.put("writeConcern.w", "majority");

        WriteConfig writeConfig = WriteConfig.create(jsc).withOptions(writeconf);
        MongoSpark.save(rdd, writeConfig);
    }
}
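
One more thing worth double-checking (my assumption, not stated in the answer): the question's pom pins mongo-spark-connector_2.11 at 2.0.0, while this answer runs against Spark 2.2; a connector from the 2.2.x line would be the matching choice, e.g.:

<dependency>
    <groupId>org.mongodb.spark</groupId>
    <artifactId>mongo-spark-connector_2.11</artifactId>
    <version>2.2.0</version>
</dependency>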

For java - Problems connecting to MongoDB using Spark and Java, we found a similar question on Stack Overflow: https://stackoverflow.com/questions/42752375/
