gpt4 book ai didi

org.apache.gobblin.util.WritableShimSerialization类的使用及代码示例

转载 作者:知者 更新时间:2024-03-25 03:43:05 25 4
gpt4 key购买 nike

本文整理了Java中org.apache.gobblin.util.WritableShimSerialization类的一些代码示例,展示了WritableShimSerialization类的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度帮忙到你。WritableShimSerialization类的具体详情如下:
包路径:org.apache.gobblin.util.WritableShimSerialization
类名称:WritableShimSerialization

WritableShimSerialization介绍

[英]A serializer that understands how to write objects that implement WritableShim out to a Hadoop stream. This class must be present in the io.serializations key of a Hadoop config for the Hadoop runtime to find and instantiate it.
[中]一个序列化器，知道如何将实现 WritableShim 的对象写出到 Hadoop 流中。此类必须出现在 Hadoop 配置的 io.serializations 键中，Hadoop 运行时才能找到并实例化它。

代码示例

代码示例来源:origin: apache/incubator-gobblin

/**
 * Get a Hadoop configuration that understands how to (de)serialize WritableShim objects.
 *
 * @param otherConf an existing configuration to copy settings from; may be null
 * @return a new Configuration with the WritableShim serializer registered
 */
private Configuration getConf(Configuration otherConf) {
 // Start from a copy of the caller's configuration when one was supplied.
 Configuration conf = (otherConf == null) ? new Configuration() : new Configuration(otherConf);
 // Register the WritableShim (de)serializer so the Hadoop runtime can find it.
 WritableShimSerialization.addToHadoopConfiguration(conf);
 return conf;
}

代码示例来源:origin: apache/incubator-gobblin

@Override
 public Void call() throws Exception {
  // Build a configuration that knows how to deserialize WritableShim objects.
  Configuration conf = new Configuration(ParallelRunner.this.fs.getConf());
  WritableShimSerialization.addToHadoopConfiguration(conf);
  try (@SuppressWarnings("deprecation") SequenceFile.Reader reader = new SequenceFile.Reader(
    ParallelRunner.this.fs, inputFilePath, conf)) {
   Writable key = keyClass.newInstance();
   // Deserialize every record into a fresh state instance and collect it;
   // the loop update allocates the next instance exactly as the original did.
   for (T state = stateClass.newInstance(); reader.next(key); state = stateClass.newInstance()) {
    states.add((T) reader.getCurrentValue(state));
   }
   if (deleteAfter) {
    // Caller requested the input file be removed once fully consumed.
    HadoopUtils.deletePath(ParallelRunner.this.fs, inputFilePath, false);
   }
  }
  return null;
 }
}), "Deserialize state from file " + inputFilePath));

代码示例来源:origin: apache/incubator-gobblin

WritableShimSerialization.addToHadoopConfiguration(deserializeConf);
try (@SuppressWarnings("deprecation") SequenceFile.Reader reader = new SequenceFile.Reader(this.fs, tablePath,
  deserializeConf)) {

代码示例来源:origin: apache/incubator-gobblin

WritableShimSerialization.addToHadoopConfiguration(deserializeConfig);
try (@SuppressWarnings("deprecation") GobblinSequenceFileReader reader = new GobblinSequenceFileReader(this.fs,
  tablePath, deserializeConfig)) {

代码示例来源:origin: apache/incubator-gobblin

/**
 * Verifies that WorkUnitState values carrying a custom watermark can be appended
 * to SequenceFiles once WritableShimSerialization is registered on the config.
 */
@Test
@SuppressWarnings("deprecation")
public void testSerializeToSequenceFile() throws IOException {
 Configuration conf = new Configuration();
 // Register the shim serializer so Hadoop can write WorkUnitState values.
 WritableShimSerialization.addToHadoopConfiguration(conf);
 Closer closer = Closer.create();
 try {
  Text key = new Text();
  TestWatermark watermark = new TestWatermark();
  WorkUnitState workUnitState = new WorkUnitState();

  // First file: state with a low watermark of 10.
  SequenceFile.Writer firstWriter = closer.register(SequenceFile.createWriter(this.fs, conf,
    new Path(this.outputPath, "seq1"), Text.class, WorkUnitState.class));
  watermark.setLongWatermark(10L);
  workUnitState.setActualHighWatermark(watermark);
  firstWriter.append(key, workUnitState);

  // Second file: same state object, watermark bumped to 100.
  SequenceFile.Writer secondWriter = closer.register(SequenceFile.createWriter(this.fs, conf,
    new Path(this.outputPath, "seq2"), Text.class, WorkUnitState.class));
  watermark.setLongWatermark(100L);
  workUnitState.setActualHighWatermark(watermark);
  secondWriter.append(key, workUnitState);
 } catch (Throwable t) {
  // Preserve the original failure while still closing registered writers.
  throw closer.rethrow(t);
 } finally {
  closer.close();
 }
}

代码示例来源:origin: org.apache.gobblin/gobblin-metastore

/**
 * Get a Hadoop configuration that understands how to (de)serialize WritableShim objects.
 *
 * @param otherConf an existing configuration to copy settings from; may be null
 * @return a new Configuration with the WritableShim serializer registered
 */
private Configuration getConf(Configuration otherConf) {
 // Start from a copy of the caller's configuration when one was supplied.
 Configuration conf = (otherConf == null) ? new Configuration() : new Configuration(otherConf);
 // Register the WritableShim (de)serializer so the Hadoop runtime can find it.
 WritableShimSerialization.addToHadoopConfiguration(conf);
 return conf;
}

代码示例来源:origin: org.apache.gobblin/gobblin-utility

@Override
 public Void call() throws Exception {
  // Build a configuration that knows how to deserialize WritableShim objects.
  Configuration conf = new Configuration(ParallelRunner.this.fs.getConf());
  WritableShimSerialization.addToHadoopConfiguration(conf);
  try (@SuppressWarnings("deprecation") SequenceFile.Reader reader = new SequenceFile.Reader(
    ParallelRunner.this.fs, inputFilePath, conf)) {
   Writable key = keyClass.newInstance();
   // Deserialize every record into a fresh state instance and collect it;
   // the loop update allocates the next instance exactly as the original did.
   for (T state = stateClass.newInstance(); reader.next(key); state = stateClass.newInstance()) {
    states.add((T) reader.getCurrentValue(state));
   }
   if (deleteAfter) {
    // Caller requested the input file be removed once fully consumed.
    HadoopUtils.deletePath(ParallelRunner.this.fs, inputFilePath, false);
   }
  }
  return null;
 }
}), "Deserialize state from file " + inputFilePath));

代码示例来源:origin: org.apache.gobblin/gobblin-runtime

WritableShimSerialization.addToHadoopConfiguration(deserializeConf);
try (@SuppressWarnings("deprecation") SequenceFile.Reader reader = new SequenceFile.Reader(this.fs, tablePath,
  deserializeConf)) {

代码示例来源:origin: org.apache.gobblin/gobblin-runtime

WritableShimSerialization.addToHadoopConfiguration(deserializeConfig);
try (@SuppressWarnings("deprecation") GobblinSequenceFileReader reader = new GobblinSequenceFileReader(this.fs,
  tablePath, deserializeConfig)) {

25 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com