
Usage and code examples of the org.apache.spark.sql.catalyst.expressions.XXH64.hashUnsafeWords() method

Reposted. Author: 知者. Updated: 2024-03-19 13:36:40

This article collects some Java code examples of the org.apache.spark.sql.catalyst.expressions.XXH64.hashUnsafeWords() method and shows how it is used in practice. The examples are drawn from selected projects on platforms such as GitHub, Stack Overflow, and Maven, and should serve as useful references. Details of the XXH64.hashUnsafeWords() method are as follows:
Package: org.apache.spark.sql.catalyst.expressions
Class: XXH64
Method: hashUnsafeWords

Introduction to XXH64.hashUnsafeWords

hashUnsafeWords computes a 64-bit xxHash (XXH64) value over a word-aligned memory region described by a base object, a byte offset, and a length in bytes; the length is expected to be a multiple of 8. The instance method uses the seed supplied to the XXH64 constructor, while an overload takes the seed as an explicit fourth argument (this is the method the instance version delegates to, as the first example below shows).
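To make the call signature concrete, here is a minimal, self-contained sketch (not taken from the article's sources) that hashes a 16-byte array through the four-argument overload that the instance method below delegates to. The seed value 42L and the array contents are arbitrary choices, and Platform.BYTE_ARRAY_OFFSET is used to address the array's data, just as in the test snippets later in this article.

import org.apache.spark.sql.catalyst.expressions.XXH64;
import org.apache.spark.unsafe.Platform;

public class XXH64WordsExample {
  public static void main(String[] args) {
    // hashUnsafeWords expects the length in bytes to be a multiple of 8.
    byte[] data = new byte[16];
    data[0] = 1;
    data[8] = 2;
    // Overload with an explicit seed (42L is an arbitrary choice).
    long hash = XXH64.hashUnsafeWords(data, Platform.BYTE_ARRAY_OFFSET, data.length, 42L);
    System.out.println("XXH64 word hash: " + hash);
  }
}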

Code examples

Code example source (origin): org.apache.spark/spark-catalyst (identical in spark-catalyst_2.10 and spark-catalyst_2.11)

public long hashUnsafeWords(Object base, long offset, int length) {
 return hashUnsafeWords(base, offset, length, seed);
}
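The instance method above simply delegates to the seeded overload, passing the seed that was given to the constructor. A minimal sketch of the instance form, assuming the XXH64(long seed) constructor (the hasher object in the tests below is such an instance):

XXH64 hasher = new XXH64(42L);  // the seed 42L is an arbitrary choice
byte[] data = new byte[8];      // length must be a multiple of 8
long hash = hasher.hashUnsafeWords(data, Platform.BYTE_ARRAY_OFFSET, data.length);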

Code example source (origin): org.apache.spark/spark-catalyst (identical in spark-catalyst_2.10 and spark-catalyst_2.11)

@Test
public void randomizedStressTestBytes() {
  int size = 65536;
  Random rand = new Random();
  // A set used to track collision rate.
  Set<Long> hashcodes = new HashSet<>();
  for (int i = 0; i < size; i++) {
    int byteArrSize = rand.nextInt(100) * 8;
    byte[] bytes = new byte[byteArrSize];
    rand.nextBytes(bytes);
    Assert.assertEquals(
        hasher.hashUnsafeWords(bytes, Platform.BYTE_ARRAY_OFFSET, byteArrSize),
        hasher.hashUnsafeWords(bytes, Platform.BYTE_ARRAY_OFFSET, byteArrSize));
    hashcodes.add(hasher.hashUnsafeWords(
        bytes, Platform.BYTE_ARRAY_OFFSET, byteArrSize));
  }
  // A very loose bound.
  Assert.assertTrue(hashcodes.size() > size * 0.95d);
}
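This test checks two properties: the hash is deterministic (hashing the same buffer twice yields the same value), and distinct random buffers rarely collide (at least 95% of the 65536 inputs must produce distinct hash values). The hasher field is an XXH64 instance, presumably constructed with a fixed seed. Note that byteArrSize is always a multiple of 8, because hashUnsafeWords only accepts word-aligned lengths; the class also provides a byte-oriented hashUnsafeBytes variant for arbitrary lengths, sketched here under the assumption that it takes the same base/offset/length/seed arguments:

// Byte-oriented variant for lengths that are not a multiple of 8 (assumed signature).
byte[] bytes = "hello".getBytes(java.nio.charset.StandardCharsets.UTF_8);
long h = XXH64.hashUnsafeBytes(bytes, Platform.BYTE_ARRAY_OFFSET, bytes.length, 42L);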

Code example source (origin): org.apache.spark/spark-catalyst (identical in spark-catalyst_2.10 and spark-catalyst_2.11)

@Test
public void randomizedStressTestPaddedStrings() {
  int size = 64000;
  // A set used to track collision rate.
  Set<Long> hashcodes = new HashSet<>();
  for (int i = 0; i < size; i++) {
    int byteArrSize = 8;
    byte[] strBytes = String.valueOf(i).getBytes(StandardCharsets.UTF_8);
    byte[] paddedBytes = new byte[byteArrSize];
    System.arraycopy(strBytes, 0, paddedBytes, 0, strBytes.length);

    Assert.assertEquals(
        hasher.hashUnsafeWords(paddedBytes, Platform.BYTE_ARRAY_OFFSET, byteArrSize),
        hasher.hashUnsafeWords(paddedBytes, Platform.BYTE_ARRAY_OFFSET, byteArrSize));

    hashcodes.add(hasher.hashUnsafeWords(
        paddedBytes, Platform.BYTE_ARRAY_OFFSET, byteArrSize));
  }

  // A very loose bound.
  Assert.assertTrue(hashcodes.size() > size * 0.95d);
}
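Here the UTF-8 bytes of each decimal string (at most 5 bytes for values below 64000) are copied into a zero-filled 8-byte buffer so that the word-aligned hash can still be applied. The same idea generalizes to inputs of any length; the helper below is a hypothetical illustration (padToWords is not part of Spark) that rounds a buffer up to the next multiple of 8 before hashing. Note that the padding is lossy: inputs that differ only in trailing zero bytes hash to the same value once padded.

import java.nio.charset.StandardCharsets;

// Hypothetical helper: zero-pad a byte array to the next multiple of 8 bytes.
static byte[] padToWords(byte[] input) {
  int paddedLength = ((input.length + 7) / 8) * 8;  // round up to a multiple of 8
  byte[] out = new byte[paddedLength];              // new arrays are zero-filled
  System.arraycopy(input, 0, out, 0, input.length);
  return out;
}

// Usage: hash an arbitrary-length string through the word-aligned API.
byte[] words = padToWords("example".getBytes(StandardCharsets.UTF_8));
long h = XXH64.hashUnsafeWords(words, Platform.BYTE_ARRAY_OFFSET, words.length, 42L);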
