
java - Storm topology fails when run in production


Hello, I am facing a problem while running my Storm cluster. It is similar to

My topology is defined as:

    package com.abc.newsclassification;

    import StormBase.KnowledgeGraph.ClassifierBolt;
    import StormBase.KnowledgeGraph.ClientSpecificTwitterSpout;
    import StormBase.KnowledgeGraph.LiveTwitterSpout;
    import StormBase.KnowledgeGraph.NewsTwitterSpout;
    import StormBase.KnowledgeGraph.TwitterTrainingBolt;
    import StormBase.KnowledgeGraph.UrlExtractorBolt;
    import backtype.storm.Config;
    import backtype.storm.LocalCluster;
    import backtype.storm.StormSubmitter;
    import backtype.storm.generated.AlreadyAliveException;
    import backtype.storm.generated.InvalidTopologyException;
    import backtype.storm.topology.TopologyBuilder;
    import backtype.storm.tuple.Fields;

    public class ClassifierTopology {

        public static void main(String[] args) throws Exception {
            TopologyBuilder builder = new TopologyBuilder();

            // add a spout
            builder.setSpout("spout", new NewsTwitterSpout(), 1);

            // configure
            Config conf = new Config();
            conf.setDebug(false);

            // submit it to the cluster, or submit it locally
            conf.setMaxTaskParallelism(10);
            LocalCluster cluster = new LocalCluster();
            System.out.println(conf.entrySet());
            cluster.submitTopology("testTopology", conf,
                    builder.createTopology());
        }
    }

------------------------------------------------------------------------------------------------------
** END OF FILE **
------------------------------------------------------------------------------------------------------
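
The main method shown above only submits to an in-process LocalCluster, yet the stack trace further down shows StormSubmitter.submitTopology being called, so the production run presumably went through a cluster-submission path that is not pasted here. The following is a minimal sketch of the usual dual-mode pattern for Storm 0.8.x, assuming a hypothetical class name (ClassifierTopologySketch) and argument handling; only the spout and Config calls are taken from the code above.

    package com.abc.newsclassification;

    import StormBase.KnowledgeGraph.NewsTwitterSpout;
    import backtype.storm.Config;
    import backtype.storm.LocalCluster;
    import backtype.storm.StormSubmitter;
    import backtype.storm.topology.TopologyBuilder;
    import backtype.storm.utils.Utils;

    // Sketch only; not the author's actual production code.
    public class ClassifierTopologySketch {

        public static void main(String[] args) throws Exception {
            TopologyBuilder builder = new TopologyBuilder();
            builder.setSpout("spout", new NewsTwitterSpout(), 1);

            Config conf = new Config();
            conf.setDebug(false);
            conf.setMaxTaskParallelism(10);

            if (args.length > 0) {
                // Cluster mode: requires a running Nimbus reachable at the
                // nimbus.host / nimbus.thrift.port (default 6627) from storm.yaml.
                conf.setNumWorkers(3);
                StormSubmitter.submitTopology(args[0], conf, builder.createTopology());
            } else {
                // Local mode: runs an in-process cluster, no Nimbus required.
                LocalCluster cluster = new LocalCluster();
                cluster.submitTopology("testTopology", conf, builder.createTopology());
                Utils.sleep(60000); // let the topology run for a minute
                cluster.shutdown();
            }
        }
    }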

pom.xml:

    <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
             xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
      <modelVersion>4.0.0</modelVersion>

      <groupId>StormBase</groupId>
      <artifactId>KnowledgeGraph</artifactId>
      <version>0.0.1-SNAPSHOT</version>
      <packaging>jar</packaging>

      <name>KnowledgeGraph</name>
      <url>http://maven.apache.org</url>

      <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
      </properties>

      <repositories>
        <repository>
          <id>clojars.org</id>
          <url>http://clojars.org/repo</url>
        </repository>
      </repositories>

      <build>
        <plugins>
          <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-compiler-plugin</artifactId>
            <configuration>
              <source>1.6</source>
              <target>1.6</target>
            </configuration>
          </plugin>
          <plugin>
            <artifactId>maven-assembly-plugin</artifactId>
            <configuration>
              <archive>
                <manifest>
                  <mainClass>com.abc.newsclassification.ClassifierTopology</mainClass>
                </manifest>
              </archive>
              <descriptorRefs>
                <descriptorRef>jar-with-dependencies</descriptorRef>
              </descriptorRefs>
            </configuration>
            <executions>
              <execution>
                <id>make-assembly</id> <!-- this is used for inheritance merges -->
                <phase>package</phase> <!-- bind to the packaging phase -->
                <goals>
                  <goal>single</goal>
                </goals>
              </execution>
            </executions>
          </plugin>
        </plugins>
      </build>

      <dependencies>
        <dependency>
          <groupId>junit</groupId>
          <artifactId>junit</artifactId>
          <version>3.8.1</version>
          <scope>test</scope>
        </dependency>

        <dependency>
          <groupId>org.slf4j</groupId>
          <artifactId>slf4j-jdk14</artifactId>
          <version>1.6.1</version>
        </dependency>
        <dependency>
          <groupId>org.slf4j</groupId>
          <artifactId>slf4j-nop</artifactId>
          <version>1.6.0</version>
          <exclusions>
            <exclusion>
              <groupId>org.slf4j</groupId>
              <artifactId>slf4j-api</artifactId>
            </exclusion>
          </exclusions>
        </dependency>

        <dependency>
          <groupId>storm</groupId>
          <artifactId>storm</artifactId>
          <version>0.8.2</version>
        </dependency>
        <dependency>
          <groupId>org.apache.zookeeper</groupId>
          <artifactId>zookeeper</artifactId>
          <version>3.3.3</version>
          <exclusions>
            <exclusion>
              <groupId>com.sun.jmx</groupId>
              <artifactId>jmxri</artifactId>
            </exclusion>
            <exclusion>
              <groupId>com.sun.jdmk</groupId>
              <artifactId>jmxtools</artifactId>
            </exclusion>
            <exclusion>
              <groupId>javax.jms</groupId>
              <artifactId>jms</artifactId>
            </exclusion>
          </exclusions>
        </dependency>
        <dependency>
          <groupId>com.yammer.metrics</groupId>
          <artifactId>metrics-core</artifactId>
          <version>2.2.0</version>
        </dependency>
        <dependency>
          <groupId>com.101tec</groupId>
          <artifactId>zkclient</artifactId>
          <version>0.3</version>
        </dependency>
        <dependency>
          <groupId>net.sf.jopt-simple</groupId>
          <artifactId>jopt-simple</artifactId>
          <version>4.5</version>
        </dependency>
        <dependency>
          <groupId>org.codehaus.jackson</groupId>
          <artifactId>jackson-core-asl</artifactId>
          <version>1.9.2</version>
        </dependency>
        <dependency>
          <groupId>org.codehaus.jackson</groupId>
          <artifactId>jackson-mapper-asl</artifactId>
          <version>1.9.2</version>
        </dependency>
        <dependency>
          <groupId>com.netflix.curator</groupId>
          <artifactId>curator-test</artifactId>
          <version>1.2.5</version>
          <exclusions>
            <exclusion>
              <groupId>org.slf4j</groupId>
              <artifactId>slf4j-log4j12</artifactId>
            </exclusion>
            <exclusion>
              <groupId>log4j</groupId>
              <artifactId>log4j</artifactId>
            </exclusion>
          </exclusions>
        </dependency>
        <dependency>
          <groupId>org.twitter4j</groupId>
          <artifactId>twitter4j-stream</artifactId>
          <version>3.0.5</version>
        </dependency>

        <dependency>
          <groupId>redis.clients</groupId>
          <artifactId>jedis</artifactId>
          <version>2.2.1</version>
          <type>jar</type>
          <scope>compile</scope>
        </dependency>

        <dependency>
          <groupId>org.languagetool</groupId>
          <artifactId>language-en</artifactId>
          <version>2.3.1</version>
        </dependency>

        <dependency>
          <groupId>org.twitter4j</groupId>
          <artifactId>twitter4j-core</artifactId>
          <version>3.0.5</version>
        </dependency>

        <dependency>
          <groupId>org.apache.lucene</groupId>
          <artifactId>lucene-core</artifactId>
          <version>4.6.0</version>
        </dependency>
        <dependency>
          <groupId>org.apache.lucene</groupId>
          <artifactId>lucene-analyzers</artifactId>
          <version>3.6.2</version>
        </dependency>
        <dependency>
          <groupId>org.apache.lucene</groupId>
          <artifactId>lucene-snowball</artifactId>
          <version>3.0.3</version>
        </dependency>
        <dependency>
          <groupId>org.apache.lucene</groupId>
          <artifactId>lucene-analyzers-common</artifactId>
          <version>4.6.0</version>
        </dependency>

        <dependency>
          <groupId>com.gravity</groupId>
          <artifactId>goose</artifactId>
          <version>2.1.23</version>
        </dependency>

        <dependency>
          <groupId>nz.ac.waikato.cms.weka</groupId>
          <artifactId>weka-dev</artifactId>
          <version>3.7.9</version>
        </dependency>

        <dependency>
          <groupId>org.apache.directory.studio</groupId>
          <artifactId>org.apache.commons.io</artifactId>
          <version>2.4</version>
        </dependency>

        <dependency>
          <groupId>org.apache.commons</groupId>
          <artifactId>commons-lang3</artifactId>
          <version>3.1</version>
        </dependency>

        <dependency>
          <groupId>mysql</groupId>
          <artifactId>mysql-connector-java</artifactId>
          <version>5.0.8</version>
        </dependency>

        <dependency>
          <groupId>org.javatuples</groupId>
          <artifactId>javatuples</artifactId>
          <version>1.2</version>
        </dependency>
        <dependency>
          <groupId>org.apache.opennlp</groupId>
          <artifactId>opennlp-tools</artifactId>
          <version>1.5.3</version>
        </dependency>

        <dependency>
          <groupId>org.apache.commons</groupId>
          <artifactId>commons-io</artifactId>
          <version>1.3.2</version>
        </dependency>

        <dependency>
          <groupId>org.apache.directory.studio</groupId>
          <artifactId>org.apache.commons.lang</artifactId>
          <version>2.6</version>
        </dependency>

        <dependency>
          <groupId>edu.stanford.nlp</groupId>
          <artifactId>stanford-corenlp</artifactId>
          <version>3.3.0</version>
        </dependency>
        <dependency>
          <groupId>edu.stanford.nlp</groupId>
          <artifactId>stanford-corenlp</artifactId>
          <version>3.3.0</version>
          <classifier>models</classifier>
        </dependency>
        <dependency>
          <groupId>edu.washington.cs.knowitall.stanford-corenlp</groupId>
          <artifactId>stanford-postag-models</artifactId>
          <version>1.3.5</version>
        </dependency>

        <dependency>
          <groupId>com.google.code.google-collections</groupId>
          <artifactId>google-collect</artifactId>
          <version>snapshot-20071022</version>
        </dependency>

        <dependency>
          <groupId>com.googlecode.concurrent-trees</groupId>
          <artifactId>concurrent-trees</artifactId>
          <version>1.0.0</version>
        </dependency>

        <dependency>
          <groupId>log4j</groupId>
          <artifactId>log4j</artifactId>
          <version>1.2.16</version>
        </dependency>

        <dependency>
          <groupId>com.datastax.cassandra</groupId>
          <artifactId>cassandra-driver-core</artifactId>
          <version>1.0.3</version>
        </dependency>

        <dependency>
          <groupId>org.hectorclient</groupId>
          <artifactId>hector-core</artifactId>
          <version>1.1-0</version>
          <exclusions>
            <exclusion>
              <groupId>org.slf4j</groupId>
              <artifactId>slf4j-log4j12</artifactId>
            </exclusion>
          </exclusions>
        </dependency>

        <dependency>
          <groupId>me.prettyprint</groupId>
          <artifactId>hector-test</artifactId>
          <version>1.0-5</version>
        </dependency>

        <dependency>
          <groupId>FBKeyWords</groupId>
          <artifactId>FBKeyWords</artifactId>
          <version>0.0.1-SNAPSHOT</version>
          <classifier>jar-with-dependencies</classifier>
        </dependency>

        <dependency>
          <groupId>KeyWordExtractor</groupId>
          <artifactId>KeyWordExtractor</artifactId>
          <version>0.0.1-SNAPSHOT</version>
          <classifier>jar-with-dependencies</classifier>
        </dependency>

        <dependency>
          <groupId>mysql</groupId>
          <artifactId>mysql-connector-java</artifactId>
          <version>5.1.6</version>
        </dependency>
      </dependencies>
    </project>



---------------------------------------------------------------

** END OF pom.xml **
---------------------------------------------------------------
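
For reference, with the assembly plugin configured as above, the topology would typically be built and submitted roughly like this (a sketch; the jar name is assumed from the standard artifactId-version-jar-with-dependencies naming, not quoted from the post):

    mvn clean package
    storm jar target/KnowledgeGraph-0.0.1-SNAPSHOT-jar-with-dependencies.jar \
        com.abc.newsclassification.ClassifierTopology

As a side note, the storm dependency is usually declared with <scope>provided</scope> so that Storm's own classes and defaults.yaml are not bundled into the fat jar that storm jar uploads.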

Here is the error I am getting:

-----------------------------------------------------------------------------------------
[topology.workers=3, topology.debug=false]
Exception in thread "main" java.lang.RuntimeException: org.apache.thrift7.transport.TTransportException: java.net.ConnectException: Connection refused
at backtype.storm.utils.NimbusClient.getConfiguredClient(NimbusClient.java:21)
at backtype.storm.StormSubmitter.submitTopology(StormSubmitter.java:70)
at backtype.storm.StormSubmitter.submitTopology(StormSubmitter.java:41)
at com.tookitaki.newsclassification.ClassifierTopology.main(ClassifierTopology.java:92)
Caused by: org.apache.thrift7.transport.TTransportException: java.net.ConnectException: Connection refused
at org.apache.thrift7.transport.TSocket.open(TSocket.java:183)
at org.apache.thrift7.transport.TFramedTransport.open(TFramedTransport.java:81)
at backtype.storm.security.auth.SimpleTransportPlugin.connect(SimpleTransportPlugin.java:66)
at backtype.storm.security.auth.ThriftClient.<init>(ThriftClient.java:46)
at backtype.storm.utils.NimbusClient.<init>(NimbusClient.java:30)
at backtype.storm.utils.NimbusClient.<init>(NimbusClient.java:26)
at backtype.storm.utils.NimbusClient.getConfiguredClient(NimbusClient.java:19)
... 3 more
Caused by: java.net.ConnectException: Connection refused
at java.net.PlainSocketImpl.socketConnect(Native Method)
at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:339)
at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:200)
at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:182)
at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392)
at java.net.Socket.connect(Socket.java:579)
at org.apache.thrift7.transport.TSocket.open(TSocket.java:178)
... 9 more

Please let me know which version of Storm / other packages I should be using, or whether something else is causing this error. Any suggestions are welcome.

Best Answer

I ran into the same problem.

I checked iptables, the supervisors and Nimbus, but the problem was not there.

I tried submitting half of the topology (by removing a certain bolt) and it went through.

I checked nimbus.log and found an error line indicating that the Thrift buffer was full, so you can change your storm.yaml with this setting:

nimbus.thrift.max_buffer_size: 20480000
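
For context, this setting belongs in conf/storm.yaml on the Nimbus machine, and Nimbus typically has to be restarted for it to take effect. A sketch with common neighbouring entries, where the host names are placeholders rather than values from the post:

    storm.zookeeper.servers:
        - "zk1.example.com"
    nimbus.host: "nimbus.example.com"
    nimbus.thrift.max_buffer_size: 20480000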

Regarding "java - Storm topology fails when run in production", a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/22264893/
