Java class org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope instance source code
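
The snippets below, collected from several HBase forks, show WALProtos.FamilyScope being built while a WAL key is serialized to protobuf and being read back into a scopes map during deserialization. As a minimal standalone sketch (the "cf" family name is made up for illustration), a FamilyScope message simply pairs a column-family name with its replication scope:

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType;

// Pair a column-family name with its replication scope.
FamilyScope scope = FamilyScope.newBuilder()
    .setFamily(ByteString.copyFromUtf8("cf"))
    .setScopeType(ScopeType.REPLICATION_SCOPE_GLOBAL)
    .build();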

Project: ditb    File: WALKey.java
public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder getBuilder(
    WALCellCodec.ByteStringCompressor compressor) throws IOException {
  org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder builder =
      org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.newBuilder();
  if (compressionContext == null) {
    builder.setEncodedRegionName(ByteStringer.wrap(this.encodedRegionName));
    builder.setTableName(ByteStringer.wrap(this.tablename.getName()));
  } else {
    builder.setEncodedRegionName(compressor.compress(this.encodedRegionName,
        compressionContext.regionDict));
    builder.setTableName(compressor.compress(this.tablename.getName(),
        compressionContext.tableDict));
  }
  builder.setLogSequenceNumber(this.logSeqNum);
  builder.setWriteTime(writeTime);
  if (this.origLogSeqNum > 0) {
    builder.setOrigSequenceNumber(this.origLogSeqNum);
  }
  if (this.nonce != HConstants.NO_NONCE) {
    builder.setNonce(nonce);
  }
  if (this.nonceGroup != HConstants.NO_NONCE) {
    builder.setNonceGroup(nonceGroup);
  }
  HBaseProtos.UUID.Builder uuidBuilder = HBaseProtos.UUID.newBuilder();
  for (UUID clusterId : clusterIds) {
    uuidBuilder.setLeastSigBits(clusterId.getLeastSignificantBits());
    uuidBuilder.setMostSigBits(clusterId.getMostSignificantBits());
    builder.addClusterIds(uuidBuilder.build());
  }
  if (scopes != null) {
    for (Map.Entry<byte[], Integer> e : scopes.entrySet()) {
      ByteString family = (compressionContext == null) ? ByteStringer.wrap(e.getKey())
          : compressor.compress(e.getKey(), compressionContext.familyDict);
      builder.addScopes(FamilyScope.newBuilder()
          .setFamily(family).setScopeType(ScopeType.valueOf(e.getValue())));
    }
  }
  return builder;
}
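
A hedged usage sketch for the snippet above: when compressionContext is null the compressor argument is never dereferenced, so (under that assumption) the protobuf key can be built with a null compressor and framed with protobuf's standard delimited encoding. walKey and out are hypothetical stand-ins for a WALKey instance and an OutputStream:

// Assumes compression is disabled, so the null compressor is never used.
org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey pbKey =
    walKey.getBuilder(null).build();
pbKey.writeDelimitedTo(out);  // length-prefixed protobuf framing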
Project: pbase    File: WALKey.java
public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder getBuilder(
    WALCellCodec.ByteStringCompressor compressor) throws IOException {
  org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder builder =
      org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.newBuilder();
  if (compressionContext == null) {
    builder.setEncodedRegionName(ByteStringer.wrap(this.encodedRegionName));
    builder.setTableName(ByteStringer.wrap(this.tablename.getName()));
  } else {
    builder.setEncodedRegionName(compressor.compress(this.encodedRegionName,
      compressionContext.regionDict));
    builder.setTableName(compressor.compress(this.tablename.getName(),
      compressionContext.tableDict));
  }
  builder.setLogSequenceNumber(this.logSeqNum);
  builder.setWriteTime(writeTime);
  if (this.origLogSeqNum > 0) {
    builder.setOrigSequenceNumber(this.origLogSeqNum);
  }
  if (this.nonce != HConstants.NO_NONCE) {
    builder.setNonce(nonce);
  }
  if (this.nonceGroup != HConstants.NO_NONCE) {
    builder.setNonceGroup(nonceGroup);
  }
  HBaseProtos.UUID.Builder uuidBuilder = HBaseProtos.UUID.newBuilder();
  for (UUID clusterId : clusterIds) {
    uuidBuilder.setLeastSigBits(clusterId.getLeastSignificantBits());
    uuidBuilder.setMostSigBits(clusterId.getMostSignificantBits());
    builder.addClusterIds(uuidBuilder.build());
  }
  if (scopes != null) {
    for (Map.Entry<byte[], Integer> e : scopes.entrySet()) {
      ByteString family = (compressionContext == null) ? ByteStringer.wrap(e.getKey())
          : compressor.compress(e.getKey(), compressionContext.familyDict);
      builder.addScopes(FamilyScope.newBuilder()
          .setFamily(family).setScopeType(ScopeType.valueOf(e.getValue())));
    }
  }
  return builder;
}
Project: HIndex    File: HLogKey.java
public WALKey.Builder getBuilder(
    WALCellCodec.ByteStringCompressor compressor) throws IOException {
  WALKey.Builder builder = WALKey.newBuilder();
  if (compressionContext == null) {
    builder.setEncodedRegionName(HBaseZeroCopyByteString.wrap(this.encodedRegionName));
    builder.setTableName(HBaseZeroCopyByteString.wrap(this.tablename.getName()));
  } else {
    builder.setEncodedRegionName(
        compressor.compress(this.encodedRegionName, compressionContext.regionDict));
    builder.setTableName(compressor.compress(this.tablename.getName(),
        compressionContext.tableDict));
  }
  builder.setLogSequenceNumber(this.logSeqNum);
  builder.setWriteTime(writeTime);
  if (this.nonce != HConstants.NO_NONCE) {
    builder.setNonce(nonce);
  }
  if (this.nonceGroup != HConstants.NO_NONCE) {
    builder.setNonceGroup(nonceGroup);
  }
  HBaseProtos.UUID.Builder uuidBuilder = HBaseProtos.UUID.newBuilder();
  for (UUID clusterId : clusterIds) {
    uuidBuilder.setLeastSigBits(clusterId.getLeastSignificantBits());
    uuidBuilder.setMostSigBits(clusterId.getMostSignificantBits());
    builder.addClusterIds(uuidBuilder.build());
  }
  if (scopes != null) {
    for (Map.Entry<byte[], Integer> e : scopes.entrySet()) {
      ByteString family = (compressionContext == null) ? HBaseZeroCopyByteString.wrap(e.getKey())
          : compressor.compress(e.getKey(), compressionContext.familyDict);
      builder.addScopes(FamilyScope.newBuilder()
          .setFamily(family).setScopeType(ScopeType.valueOf(e.getValue())));
    }
  }
  return builder;
}
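
The main difference between the forks is the zero-copy helper used to turn a byte[] into a ByteString: ditb and pbase use ByteStringer.wrap, HIndex and PyroDB use HBaseZeroCopyByteString.wrap, and c5 uses ZeroCopyLiteralByteString.wrap. All wrap the backing array rather than copying it; only the naming changed across HBase/protobuf versions. A sketch of the distinction, with a made-up regionNameBytes array:

byte[] regionNameBytes = Bytes.toBytes("region");
ByteString copied  = ByteString.copyFrom(regionNameBytes);  // defensive copy
ByteString wrapped = ByteStringer.wrap(regionNameBytes);    // shares the array; do not mutate it afterwards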
Project: HIndex    File: HLogKey.java
public void readFieldsFromPb(
    WALKey walKey, WALCellCodec.ByteStringUncompressor uncompressor) throws IOException {
  if (this.compressionContext != null) {
    this.encodedRegionName = uncompressor.uncompress(
        walKey.getEncodedRegionName(), compressionContext.regionDict);
    byte[] tablenameBytes = uncompressor.uncompress(
        walKey.getTableName(), compressionContext.tableDict);
    this.tablename = TableName.valueOf(tablenameBytes);
  } else {
    this.encodedRegionName = walKey.getEncodedRegionName().toByteArray();
    this.tablename = TableName.valueOf(walKey.getTableName().toByteArray());
  }
  clusterIds.clear();
  if (walKey.hasClusterId()) {
    // When we are reading an older log (0.95.1 release),
    // this is definitely the originating cluster.
    clusterIds.add(new UUID(walKey.getClusterId().getMostSigBits(), walKey.getClusterId()
        .getLeastSigBits()));
  }
  for (HBaseProtos.UUID clusterId : walKey.getClusterIdsList()) {
    clusterIds.add(new UUID(clusterId.getMostSigBits(), clusterId.getLeastSigBits()));
  }
  if (walKey.hasNonceGroup()) {
    this.nonceGroup = walKey.getNonceGroup();
  }
  if (walKey.hasNonce()) {
    this.nonce = walKey.getNonce();
  }
  this.scopes = null;
  if (walKey.getScopesCount() > 0) {
    this.scopes = new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);
    for (FamilyScope scope : walKey.getScopesList()) {
      byte[] family = (compressionContext == null) ? scope.getFamily().toByteArray() :
        uncompressor.uncompress(scope.getFamily(), compressionContext.familyDict);
      this.scopes.put(family, scope.getScopeType().getNumber());
    }
  }
  this.logSeqNum = walKey.getLogSequenceNumber();
  this.writeTime = walKey.getWriteTime();
}
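
A hedged round-trip sketch for the reader side: the serialized key is parsed back with protobuf's delimited decoding (the counterpart of writeDelimitedTo) and handed to readFieldsFromPb. hlogKey and in are hypothetical stand-ins for an HLogKey instance and an InputStream, and the null uncompressor again assumes compression is off:

WALKey pbKey = WALKey.parseDelimitedFrom(in);  // matches writeDelimitedTo on the writer
hlogKey.readFieldsFromPb(pbKey, null);         // null uncompressor: compression disabled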
Project: PyroDB    File: HLogKey.java
public WALKey.Builder getBuilder(WALCellCodec.ByteStringCompressor compressor)
throws IOException {
  WALKey.Builder builder = WALKey.newBuilder();
  if (compressionContext == null) {
    builder.setEncodedRegionName(HBaseZeroCopyByteString.wrap(this.encodedRegionName));
    builder.setTableName(HBaseZeroCopyByteString.wrap(this.tablename.getName()));
  } else {
    builder.setEncodedRegionName(compressor.compress(this.encodedRegionName,
      compressionContext.regionDict));
    builder.setTableName(compressor.compress(this.tablename.getName(),
      compressionContext.tableDict));
  }
  builder.setLogSequenceNumber(this.logSeqNum);
  builder.setWriteTime(writeTime);
  if (this.nonce != HConstants.NO_NONCE) {
    builder.setNonce(nonce);
  }
  if (this.nonceGroup != HConstants.NO_NONCE) {
    builder.setNonceGroup(nonceGroup);
  }
  HBaseProtos.UUID.Builder uuidBuilder = HBaseProtos.UUID.newBuilder();
  for (UUID clusterId : clusterIds) {
    uuidBuilder.setLeastSigBits(clusterId.getLeastSignificantBits());
    uuidBuilder.setMostSigBits(clusterId.getMostSignificantBits());
    builder.addClusterIds(uuidBuilder.build());
  }
  if (scopes != null) {
    for (Map.Entry<byte[], Integer> e : scopes.entrySet()) {
      ByteString family = (compressionContext == null) ?
          HBaseZeroCopyByteString.wrap(e.getKey())
          : compressor.compress(e.getKey(), compressionContext.familyDict);
      builder.addScopes(FamilyScope.newBuilder()
          .setFamily(family).setScopeType(ScopeType.valueOf(e.getValue())));
    }
  }
  return builder;
}
Project: PyroDB    File: HLogKey.java
public void readFieldsFromPb(
    WALKey walKey, WALCellCodec.ByteStringUncompressor uncompressor) throws IOException {
  if (this.compressionContext != null) {
    this.encodedRegionName = uncompressor.uncompress(
        walKey.getEncodedRegionName(), compressionContext.regionDict);
    byte[] tablenameBytes = uncompressor.uncompress(
        walKey.getTableName(), compressionContext.tableDict);
    this.tablename = TableName.valueOf(tablenameBytes);
  } else {
    this.encodedRegionName = walKey.getEncodedRegionName().toByteArray();
    this.tablename = TableName.valueOf(walKey.getTableName().toByteArray());
  }
  clusterIds.clear();
  if (walKey.hasClusterId()) {
    // When we are reading an older log (0.95.1 release),
    // this is definitely the originating cluster.
    clusterIds.add(new UUID(walKey.getClusterId().getMostSigBits(), walKey.getClusterId()
        .getLeastSigBits()));
  }
  for (HBaseProtos.UUID clusterId : walKey.getClusterIdsList()) {
    clusterIds.add(new UUID(clusterId.getMostSigBits(), clusterId.getLeastSigBits()));
  }
  if (walKey.hasNonceGroup()) {
    this.nonceGroup = walKey.getNonceGroup();
  }
  if (walKey.hasNonce()) {
    this.nonce = walKey.getNonce();
  }
  this.scopes = null;
  if (walKey.getScopesCount() > 0) {
    this.scopes = new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);
    for (FamilyScope scope : walKey.getScopesList()) {
      byte[] family = (compressionContext == null) ? scope.getFamily().toByteArray() :
        uncompressor.uncompress(scope.getFamily(), compressionContext.familyDict);
      this.scopes.put(family, scope.getScopeType().getNumber());
    }
  }
  this.logSeqNum = walKey.getLogSequenceNumber();
  this.writeTime = walKey.getWriteTime();
}
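
Note why every fork passes Bytes.BYTES_COMPARATOR when rebuilding the scopes map: byte[] has no natural ordering and uses identity-based equals, so a TreeMap keyed on byte[] without a content comparator could never find a family by value. A small sketch:

// Without the comparator, two equal-content arrays would be distinct keys.
TreeMap<byte[], Integer> scopes = new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);
scopes.put(Bytes.toBytes("cf"), ScopeType.REPLICATION_SCOPE_GLOBAL.getNumber());
Integer type = scopes.get(Bytes.toBytes("cf"));  // found, because contents are compared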
Project: c5    File: HLogKey.java
public WALKey.Builder getBuilder(
    WALCellCodec.ByteStringCompressor compressor) throws IOException {
  WALKey.Builder builder = WALKey.newBuilder();
  if (compressionContext == null) {
    builder.setEncodedRegionName(ZeroCopyLiteralByteString.wrap(this.encodedRegionName));
    builder.setTableName(ZeroCopyLiteralByteString.wrap(this.tablename.getName()));
  } else {
    builder.setEncodedRegionName(
        compressor.compress(this.encodedRegionName, compressionContext.regionDict));
    builder.setTableName(compressor.compress(this.tablename.getName(),
        compressionContext.tableDict));
  }
  builder.setLogSequenceNumber(this.logSeqNum);
  builder.setWriteTime(writeTime);
  HBaseProtos.UUID.Builder uuidBuilder = HBaseProtos.UUID.newBuilder();
  for (UUID clusterId : clusterIds) {
    uuidBuilder.setLeastSigBits(clusterId.getLeastSignificantBits());
    uuidBuilder.setMostSigBits(clusterId.getMostSignificantBits());
    builder.addClusterIds(uuidBuilder.build());
  }
  if (scopes != null) {
    for (Map.Entry<byte[], Integer> e : scopes.entrySet()) {
      ByteString family = (compressionContext == null) ? ZeroCopyLiteralByteString.wrap(e.getKey())
          : compressor.compress(e.getKey(), compressionContext.familyDict);
      builder.addScopes(FamilyScope.newBuilder()
          .setFamily(family).setScopeType(ScopeType.valueOf(e.getValue())));
    }
  }
  return builder;
}
Project: c5    File: HLogKey.java
public void readFieldsFromPb(
    WALKey walKey, WALCellCodec.ByteStringUncompressor uncompressor) throws IOException {
  if (this.compressionContext != null) {
    this.encodedRegionName = uncompressor.uncompress(
        walKey.getEncodedRegionName(), compressionContext.regionDict);
    byte[] tablenameBytes = uncompressor.uncompress(
        walKey.getTableName(), compressionContext.tableDict);
    this.tablename = TableName.valueOf(tablenameBytes);
  } else {
    this.encodedRegionName = walKey.getEncodedRegionName().toByteArray();
    this.tablename = TableName.valueOf(walKey.getTableName().toByteArray());
  }
  clusterIds.clear();
  if (walKey.hasClusterId()) {
    // When we are reading an older log (0.95.1 release),
    // this is definitely the originating cluster.
    clusterIds.add(new UUID(walKey.getClusterId().getMostSigBits(), walKey.getClusterId()
        .getLeastSigBits()));
  }
  for (HBaseProtos.UUID clusterId : walKey.getClusterIdsList()) {
    clusterIds.add(new UUID(clusterId.getMostSigBits(), clusterId.getLeastSigBits()));
  }
  this.scopes = null;
  if (walKey.getScopesCount() > 0) {
    this.scopes = new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);
    for (FamilyScope scope : walKey.getScopesList()) {
      byte[] family = (compressionContext == null) ? scope.getFamily().toByteArray() :
        uncompressor.uncompress(scope.getFamily(), compressionContext.familyDict);
      this.scopes.put(family, scope.getScopeType().getNumber());
    }
  }
  this.logSeqNum = walKey.getLogSequenceNumber();
  this.writeTime = walKey.getWriteTime();
}
Project: ditb    File: WALKey.java
public void readFieldsFromPb(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey walKey,
                             WALCellCodec.ByteStringUncompressor uncompressor)
    throws IOException {
  if (this.compressionContext != null) {
    this.encodedRegionName = uncompressor.uncompress(
        walKey.getEncodedRegionName(), compressionContext.regionDict);
    byte[] tablenameBytes = uncompressor.uncompress(
        walKey.getTableName(), compressionContext.tableDict);
    this.tablename = TableName.valueOf(tablenameBytes);
  } else {
    this.encodedRegionName = walKey.getEncodedRegionName().toByteArray();
    this.tablename = TableName.valueOf(walKey.getTableName().toByteArray());
  }
  clusterIds.clear();
  if (walKey.hasClusterId()) {
    // When we are reading an older log (0.95.1 release),
    // this is definitely the originating cluster.
    clusterIds.add(new UUID(walKey.getClusterId().getMostSigBits(), walKey.getClusterId()
        .getLeastSigBits()));
  }
  for (HBaseProtos.UUID clusterId : walKey.getClusterIdsList()) {
    clusterIds.add(new UUID(clusterId.getMostSigBits(), clusterId.getLeastSigBits()));
  }
  if (walKey.hasNonceGroup()) {
    this.nonceGroup = walKey.getNonceGroup();
  }
  if (walKey.hasNonce()) {
    this.nonce = walKey.getNonce();
  }
  this.scopes = null;
  if (walKey.getScopesCount() > 0) {
    this.scopes = new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);
    for (FamilyScope scope : walKey.getScopesList()) {
      byte[] family = (compressionContext == null) ? scope.getFamily().toByteArray() :
        uncompressor.uncompress(scope.getFamily(), compressionContext.familyDict);
      this.scopes.put(family, scope.getScopeType().getNumber());
    }
  }
  this.logSeqNum = walKey.getLogSequenceNumber();
  this.writeTime = walKey.getWriteTime();
  if (walKey.hasOrigSequenceNumber()) {
    this.origLogSeqNum = walKey.getOrigSequenceNumber();
  }
}
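
The scopes map stores each scope as a plain int, so the two directions convert through the generated enum: the writers call ScopeType.valueOf(int) to turn the stored number into the enum, and the readers call getNumber() to store it back as an int. A quick sketch of that round trip:

int stored = ScopeType.REPLICATION_SCOPE_GLOBAL.getNumber();  // 1
ScopeType type = ScopeType.valueOf(stored);                   // REPLICATION_SCOPE_GLOBAL
assert type != null && type.getNumber() == stored;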
Project: pbase    File: WALKey.java
public void readFieldsFromPb(
    org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey walKey,
    WALCellCodec.ByteStringUncompressor uncompressor) throws IOException {
  if (this.compressionContext != null) {
    this.encodedRegionName = uncompressor.uncompress(
        walKey.getEncodedRegionName(), compressionContext.regionDict);
    byte[] tablenameBytes = uncompressor.uncompress(
        walKey.getTableName(), compressionContext.tableDict);
    this.tablename = TableName.valueOf(tablenameBytes);
  } else {
    this.encodedRegionName = walKey.getEncodedRegionName().toByteArray();
    this.tablename = TableName.valueOf(walKey.getTableName().toByteArray());
  }
  clusterIds.clear();
  if (walKey.hasClusterId()) {
    // When we are reading an older log (0.95.1 release),
    // this is definitely the originating cluster.
    clusterIds.add(new UUID(walKey.getClusterId().getMostSigBits(), walKey.getClusterId()
        .getLeastSigBits()));
  }
  for (HBaseProtos.UUID clusterId : walKey.getClusterIdsList()) {
    clusterIds.add(new UUID(clusterId.getMostSigBits(), clusterId.getLeastSigBits()));
  }
  if (walKey.hasNonceGroup()) {
    this.nonceGroup = walKey.getNonceGroup();
  }
  if (walKey.hasNonce()) {
    this.nonce = walKey.getNonce();
  }
  this.scopes = null;
  if (walKey.getScopesCount() > 0) {
    this.scopes = new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);
    for (FamilyScope scope : walKey.getScopesList()) {
      byte[] family = (compressionContext == null) ? scope.getFamily().toByteArray() :
        uncompressor.uncompress(scope.getFamily(), compressionContext.familyDict);
      this.scopes.put(family, scope.getScopeType().getNumber());
    }
  }
  this.logSeqNum = walKey.getLogSequenceNumber();
  this.writeTime = walKey.getWriteTime();
  if (walKey.hasOrigSequenceNumber()) {
    this.origLogSeqNum = walKey.getOrigSequenceNumber();
  }
}