gpt4 book ai didi

java - Nifi JSON ETL : Custom Transformation Class not found with JoltTransformJSON Processor

转载 作者:行者123 更新时间:2023-12-03 18:26:57 25 4
gpt4 key购买 nike

我想使用实现 com.bazaarvoice.jolt.Transform 接口(interface)的自定义 JSON 转换。
我像这样使用“自定义转换类名称”和“自定义模块目录”:
（截图缺失：原图展示了 JoltTransformJSON 处理器属性面板中上述“自定义转换类名称”与“自定义模块目录”两项的配置。）
但是,我无法让 JoltTransformJSON 处理器使用它;我得到一个 ClassNotFoundException:

2019-04-01 14:30:54,196 ERROR [Timer-Driven Process Thread-4] o.a.n.p.standard.JoltTransformJSON JoltTransformJSON[id=b407714f-0169-1000-d9b2-1709069238d7] Unable to transform StandardFlowFileRecord[uuid=72dc471b-c587-4da9-b54c-eb46247b0cf4,claim=StandardContentClaim [resourceClaim=StandardResourceClaim[id=1554129053747-21203, container=default, section=723], offset=607170, length=5363],offset=0,name=72dc471b-c587-4da9-b54c-eb46247b0cf4,size=5363] due to java.util.concurrent.CompletionException: java.lang.ClassNotFoundException: org.sentilo.nifi.elasticsearch.ElasticsearchToOpenTSDB: java.util.concurrent.CompletionException: java.lang.ClassNotFoundException: org.sentilo.nifi.elasticsearch.ElasticsearchToOpenTSDB
java.util.concurrent.CompletionException: java.lang.ClassNotFoundException: org.sentilo.nifi.elasticsearch.ElasticsearchToOpenTSDB
at com.github.benmanes.caffeine.cache.BoundedLocalCache$BoundedLocalLoadingCache.lambda$new$0(BoundedLocalCache.java:3373)
at com.github.benmanes.caffeine.cache.BoundedLocalCache.lambda$doComputeIfAbsent$14(BoundedLocalCache.java:2039)
at java.util.concurrent.ConcurrentHashMap.compute(ConcurrentHashMap.java:1853)
at com.github.benmanes.caffeine.cache.BoundedLocalCache.doComputeIfAbsent(BoundedLocalCache.java:2037)
at com.github.benmanes.caffeine.cache.BoundedLocalCache.computeIfAbsent(BoundedLocalCache.java:2020)
at com.github.benmanes.caffeine.cache.LocalCache.computeIfAbsent(LocalCache.java:112)
at com.github.benmanes.caffeine.cache.LocalLoadingCache.get(LocalLoadingCache.java:67)
at org.apache.nifi.processors.standard.JoltTransformJSON.getTransform(JoltTransformJSON.java:316)
at org.apache.nifi.processors.standard.JoltTransformJSON.onTrigger(JoltTransformJSON.java:277)
at org.apache.nifi.processor.AbstractProcessor.onTrigger(AbstractProcessor.java:27)
at org.apache.nifi.controller.StandardProcessorNode.onTrigger(StandardProcessorNode.java:1162)
at org.apache.nifi.controller.tasks.ConnectableTask.invoke(ConnectableTask.java:205)
at org.apache.nifi.controller.scheduling.TimerDrivenSchedulingAgent$1.run(TimerDrivenSchedulingAgent.java:117)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.runAndReset(FutureTask.java:308)
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:180)
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:294)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.ClassNotFoundException: org.sentilo.nifi.elasticsearch.ElasticsearchToOpenTSDB
at java.net.URLClassLoader.findClass(URLClassLoader.java:382)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at org.apache.nifi.processors.standard.util.jolt.TransformFactory.getCustomTransform(TransformFactory.java:65)
at org.apache.nifi.processors.standard.JoltTransformJSON.createTransform(JoltTransformJSON.java:346)
at org.apache.nifi.processors.standard.JoltTransformJSON.lambda$setup$0(JoltTransformJSON.java:324)
at com.github.benmanes.caffeine.cache.BoundedLocalCache$BoundedLocalLoadingCache.lambda$new$0(BoundedLocalCache.java:3366)
... 19 common frames omitted
我使用 maven-assembly-plugin 编译了该类及其所有依赖项,并将其放在目录“/data/bin/nifi-1.9.1/jolt_modules”中。
目录和 jar 是可读的。
我还尝试按照 here 所示的方法，将类名添加到规范（spec）的操作中，但“自定义模块目录”属性似乎由于某种原因没有生效……
编辑:我用 ElasticsearchToOpenTSDB 的代码完成了答案,以防有人发现它有用。只是将存储在 Elasticsearch 中的 Sentilo 消息转换为 OpenTSDB 数据点,在途中将一些嵌套的 JSON 结构展平。
package org.sentilo.nifi.elasticsearch;

import com.bazaarvoice.jolt.SpecDriven;
import com.bazaarvoice.jolt.Transform;
import com.bazaarvoice.jolt.exception.TransformException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.beanutils.BeanUtils;
import org.sentilo.agent.historian.domain.OpenTSDBDataPoint;
import org.sentilo.agent.historian.utils.OpenTSDBValueConverter;
import org.sentilo.common.domain.EventMessage;
import org.sentilo.nifi.elasticsearch.model.Hits;
import org.springframework.util.StringUtils;

import javax.inject.Inject;
import java.lang.reflect.InvocationTargetException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import static org.sentilo.agent.historian.utils.OpenTSDBValueConverter.replaceIllegalCharacters;


public class ElasticsearchToOpenTSDB implements SpecDriven, Transform {

private final Object spec;

private final ObjectMapper mapper = new ObjectMapper();

public ElasticsearchToOpenTSDB() {
this.spec = "{}";
}

@Inject
public ElasticsearchToOpenTSDB( Object spec ) {
this.spec = spec;
}

public Object transform( final Object input ) {

try{
Hits hits = mapper.readValue(input.toString(), Hits.class);
List<EventMessage> newEventList = new ArrayList<EventMessage>();
List<OpenTSDBDataPoint> dataPoints = new ArrayList<OpenTSDBDataPoint>();

for(EventMessage event : hits.hits) {

if (OpenTSDBValueConverter.isComplexValue(event.getMessage())) {
addComplexValueToQueue(event,newEventList);
} else {
addSimpleValueToQueue(event, newEventList);
}
}

for(EventMessage event2 : newEventList) {
OpenTSDBDataPoint dp = unmarshal(event2);
dataPoints.add(dp);
}

return dataPoints;

}catch(Exception e) {
throw new TransformException(e.getMessage());
}


}


private void addComplexValueToQueue(final EventMessage event, List<EventMessage> eventList) throws IllegalAccessException, InvocationTargetException {
// Flatten JSON message into N measures
final String metricName = OpenTSDBValueConverter.createMetricName(event);
final Map<String, Object> unfoldValues = OpenTSDBValueConverter.extractMeasuresFromComplexType(metricName, event.getMessage());
for (final Map.Entry<String, Object> e : unfoldValues.entrySet()) {
final EventMessage newEvent = new EventMessage();
BeanUtils.copyProperties(newEvent, event);
newEvent.setTopic(e.getKey());
newEvent.setMessage(e.getValue().toString());
eventList.add(newEvent);
}
}

private void addSimpleValueToQueue(final EventMessage event, List<EventMessage> eventList) {
// The value should be long, float or boolean
try {
final Object numericValue = OpenTSDBValueConverter.getSimpleValue(event.getMessage());
final String metricName = OpenTSDBValueConverter.createMetricName(event);
event.setMessage(numericValue.toString());
event.setTopic(metricName);
eventList.add(event);

} catch (final ParseException e) {
// Probably String or some non-numeric value that we cannot store in OpenTSDB. Pass
return;
}
}

public static OpenTSDBDataPoint unmarshal(final EventMessage event) throws ParseException {
final OpenTSDBDataPoint dataPoint = new OpenTSDBDataPoint();

dataPoint.setMetric(event.getTopic());
dataPoint.setValue(OpenTSDBValueConverter.getSimpleValue(event.getMessage()));
if (event.getPublishedAt() != null) {
dataPoint.setTimestamp(event.getPublishedAt());
} else {
dataPoint.setTimestamp(event.getTime());
}

dataPoint.setTags(createTags(event));

return dataPoint;

}

private static Map<String, String> createTags(final EventMessage event) {
final Map<String, String> tags = new LinkedHashMap<String, String>();
putTag(tags, OpenTSDBDataPoint.Tags.type.name(), replaceIllegalCharacters(event.getType()));
putTag(tags, OpenTSDBDataPoint.Tags.sensor.name(), replaceIllegalCharacters(event.getSensor()));
putTag(tags, OpenTSDBDataPoint.Tags.provider.name(), replaceIllegalCharacters(event.getProvider()));
putTag(tags, OpenTSDBDataPoint.Tags.component.name(), replaceIllegalCharacters(event.getComponent()));
putTag(tags, OpenTSDBDataPoint.Tags.alertType.name(), replaceIllegalCharacters(event.getAlertType()));
putTag(tags, OpenTSDBDataPoint.Tags.sensorType.name(), replaceIllegalCharacters(event.getSensorType()));
putTag(tags, OpenTSDBDataPoint.Tags.publisher.name(), replaceIllegalCharacters(event.getPublisher()));
putTag(tags, OpenTSDBDataPoint.Tags.tenant.name(), replaceIllegalCharacters(event.getTenant()));
putTag(tags, OpenTSDBDataPoint.Tags.publisherTenant.name(), replaceIllegalCharacters(event.getPublisherTenant()));

return tags;
}

private static void putTag(final Map<String, String> tags, final String tagName, final String tagValue) {
if (StringUtils.hasText(tagValue)) {
tags.put(tagName, tagValue);
}
}
}
更新
如评论中所示,该问题尚未解决,已作为错误报告提交。最新状态可见: https://issues.apache.org/jira/browse/NIFI-6213

最佳答案

该问题尚未解决,已作为错误报告提交。最新状态可以在这里看到:

https://issues.apache.org/jira/browse/NIFI-6213

关于java - Nifi JSON ETL : Custom Transformation Class not found with JoltTransformJSON Processor,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/55458351/

25 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com