Java class org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.TokenizerNode: example source code

Project: ditb    File: RowNodeWriter.java
public void reset(TokenizerNode node) {
  this.blockMeta = prefixTreeEncoder.getBlockMeta();// changes between blocks
  this.tokenizerNode = node;
  this.tokenWidth = 0;
  this.fanOut = 0;
  this.numCells = 0;
  this.width = 0;
  calculateOffsetsAndLengths();
}
Project: ditb    File: RowNodeWriter.java
/**
 * UVInt: numFanBytes/fanOut
 * bytes: each fan byte
 */
public void writeFan(OutputStream os) throws IOException {
  UVIntTool.writeBytes(fanOut, os);
  if (fanOut <= 0) {
    return;
  }
  ArrayList<TokenizerNode> children = tokenizerNode.getChildren();
  for (int i = 0; i < children.size(); ++i) {
    TokenizerNode child = children.get(i);
    os.write(child.getToken().get(0));// first byte of each child's token
  }
}
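
The fan section described in the javadoc above has a simple layout: the fan-out written as a variable-width unsigned int (UVInt), followed by one byte per child, namely the first byte of that child's token. Below is a minimal, self-contained sketch of that layout; writeVarUInt is a hypothetical LEB128-style stand-in for UVIntTool.writeBytes (the real UVInt wire format may differ), and plain byte[] tokens stand in for TokenizerNode children.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Arrays;
import java.util.List;

public class FanSectionSketch {
  // Hypothetical stand-in for UVIntTool.writeBytes: LEB128-style varint,
  // not necessarily the exact UVInt wire format used by the prefix tree.
  static void writeVarUInt(int value, OutputStream os) throws IOException {
    while ((value & ~0x7F) != 0) {
      os.write((value & 0x7F) | 0x80); // low 7 bits, continuation bit set
      value >>>= 7;
    }
    os.write(value); // final byte, continuation bit clear
  }

  // Fan section layout: fan-out count, then the first byte of each child's token.
  static void writeFan(List<byte[]> childTokens, OutputStream os) throws IOException {
    writeVarUInt(childTokens.size(), os);
    for (byte[] token : childTokens) {
      os.write(token[0]);
    }
  }

  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    writeFan(Arrays.asList("apple".getBytes(), "banana".getBytes()), os);
    System.out.println(Arrays.toString(os.toByteArray())); // [2, 97, 98] -> fanOut=2, 'a', 'b'
  }
}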
Project: ditb    File: RowNodeWriter.java
/**
 * If a branch or a nub, the last thing we append are the UFInt offsets to the child row nodes.
 */
protected void writeNextRowTrieNodeOffsets(OutputStream os) throws IOException {
  ArrayList<TokenizerNode> children = tokenizerNode.getChildren();
  for (int i = 0; i < children.size(); ++i) {
    TokenizerNode child = children.get(i);
    int distanceToChild = tokenizerNode.getNegativeIndex() - child.getNegativeIndex();
    UFIntTool.writeBytes(blockMeta.getNextNodeOffsetWidth(), distanceToChild, os);
  }
}
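
Each child offset above is written with a fixed byte width taken from blockMeta.getNextNodeOffsetWidth(), and the stored value is the distance between the parent's and the child's negative index (positions counted back from the end of the row section). A minimal sketch of that idea follows; writeFixedWidthInt is a hypothetical big-endian stand-in for UFIntTool.writeBytes, and the index values are made up for illustration.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Arrays;

public class ChildOffsetSketch {
  // Hypothetical stand-in for UFIntTool.writeBytes: fixed-width, big-endian unsigned
  // int. The real tool may differ in detail; this only shows that every child offset
  // occupies exactly `width` bytes.
  static void writeFixedWidthInt(int width, long value, OutputStream os) throws IOException {
    for (int shift = 8 * (width - 1); shift >= 0; shift -= 8) {
      os.write((int) (value >>> shift) & 0xFF);
    }
  }

  public static void main(String[] args) throws IOException {
    // Distances are differences of "negative indexes" (positions counted back from the
    // end of the row section), so parent minus child gives a forward jump.
    int parentNegativeIndex = 120;
    int[] childNegativeIndexes = {95, 60, 10};
    int offsetWidth = 2; // would come from blockMeta.getNextNodeOffsetWidth()

    ByteArrayOutputStream os = new ByteArrayOutputStream();
    for (int childNegativeIndex : childNegativeIndexes) {
      writeFixedWidthInt(offsetWidth, parentNegativeIndex - childNegativeIndex, os);
    }
    System.out.println(Arrays.toString(os.toByteArray())); // [0, 25, 0, 60, 0, 110]
  }
}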
Project: ditb    File: RowSectionWriter.java
protected RowNodeWriter initializeWriter(List<RowNodeWriter> list, int index,
    TokenizerNode builderNode) {
  RowNodeWriter rowNodeWriter = null;
  //check if there is an existing node we can recycle
  if (index >= list.size()) {
    //there are not enough existing nodes, so add a new one which will be retrieved below
    list.add(new RowNodeWriter(prefixTreeEncoder, builderNode));
  }
  rowNodeWriter = list.get(index);
  rowNodeWriter.reset(builderNode);
  return rowNodeWriter;
}
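
initializeWriter recycles writers between blocks instead of allocating a new one per row node: when index is one past the end of the list, exactly one writer is appended so that the following list.get(index) always succeeds, and the chosen writer is then reset for the new node. Here is a small self-contained sketch of the same recycle-or-grow pattern, using a hypothetical Writer class in place of RowNodeWriter.

import java.util.ArrayList;
import java.util.List;

public class WriterPoolSketch {
  // Hypothetical stand-in for RowNodeWriter: holds per-node state that reset() replaces.
  static class Writer {
    String node;
    void reset(String node) { this.node = node; }
  }

  // Recycle-or-grow: reuse the writer at `index` if it exists, otherwise append one.
  // Callers advance `index` by one per call, so the list never lags by more than one slot.
  static Writer initializeWriter(List<Writer> pool, int index, String node) {
    if (index >= pool.size()) {
      pool.add(new Writer());
    }
    Writer w = pool.get(index);
    w.reset(node);
    return w;
  }

  public static void main(String[] args) {
    List<Writer> pool = new ArrayList<>();
    initializeWriter(pool, 0, "row-a");
    initializeWriter(pool, 1, "row-b");
    initializeWriter(pool, 0, "row-c"); // next block: slot 0 is reused, not reallocated
    System.out.println(pool.size() + " writers, slot 0 now holds " + pool.get(0).node);
  }
}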
Project: ditb    File: RowSectionWriter.java
/***************** static ******************************/

  protected static ArrayList<TokenizerNode> filterByLeafAndReverse(
      ArrayList<TokenizerNode> ins, boolean leaves) {
    ArrayList<TokenizerNode> outs = Lists.newArrayList();
    for (int i = ins.size() - 1; i >= 0; --i) {
      TokenizerNode n = ins.get(i);
      if (n.isLeaf() && leaves || (!n.isLeaf() && !leaves)) {
        outs.add(ins.get(i));
      }
    }
    return outs;
  }
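
The loop condition n.isLeaf() && leaves || (!n.isLeaf() && !leaves) reduces to n.isLeaf() == leaves, so the method keeps only leaves (or only non-leaves) while walking the input backwards, which also reverses the order. A self-contained sketch of the same filter-and-reverse behaviour over a hypothetical Node record:

import java.util.ArrayList;
import java.util.List;

public class FilterReverseSketch {
  // Hypothetical minimal node: just a name and a leaf flag.
  record Node(String name, boolean leaf) {}

  // Walk backwards and keep nodes whose leaf flag matches `leaves`:
  // the result is filtered and in reverse input order.
  static List<Node> filterByLeafAndReverse(List<Node> ins, boolean leaves) {
    List<Node> outs = new ArrayList<>();
    for (int i = ins.size() - 1; i >= 0; --i) {
      Node n = ins.get(i);
      if (n.leaf() == leaves) {
        outs.add(n);
      }
    }
    return outs;
  }

  public static void main(String[] args) {
    List<Node> ins = List.of(new Node("root", false), new Node("a", true),
        new Node("branch", false), new Node("b", true));
    System.out.println(filterByLeafAndReverse(ins, true));   // leaves only, reversed: b then a
    System.out.println(filterByLeafAndReverse(ins, false));  // non-leaves only, reversed: branch then root
  }
}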
Project: ditb    File: ColumnNodeWriter.java
/*************** construct **************************/

  public ColumnNodeWriter(PrefixTreeBlockMeta blockMeta, TokenizerNode builderNode,
      ColumnNodeType nodeType) {
    this.blockMeta = blockMeta;
    this.builderNode = builderNode;
    this.nodeType = nodeType;
    calculateTokenLength();
  }
Project: ditb    File: TestTokenizer.java
@Test
public void testSearching() {
  for (byte[] input : inputs) {
    TokenizerRowSearchResult resultHolder = new TokenizerRowSearchResult();
    builder.getNode(resultHolder, input, 0, input.length);
    TokenizerNode n = resultHolder.getMatchingNode();
    byte[] output = n.getNewByteArray();
    Assert.assertTrue(Bytes.equals(input, output));
  }
}
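
The assertion is a round trip: every row previously fed into builder (a Tokenizer populated in the test fixture's setup, which is not shown here) must be found again, and getNewByteArray() on the matching node must reproduce the original bytes exactly. The sketch below illustrates the same property against a deliberately simple stand-in structure; it does not use the real Tokenizer API.

import java.util.Arrays;
import java.util.List;

public class RoundTripSketch {
  public static void main(String[] args) {
    // Made-up sample rows; any byte[] inputs would do for the round-trip check.
    List<byte[]> inputs = List.of("Abc".getBytes(), "Abcde".getBytes(), "Bcd".getBytes());
    for (byte[] input : inputs) {
      byte[] output = search(inputs, input);   // analogue of getNode + getNewByteArray
      if (!Arrays.equals(input, output)) {     // analogue of Bytes.equals(input, output)
        throw new AssertionError("row did not round-trip: " + new String(input));
      }
    }
    System.out.println("all rows round-tripped");
  }

  // Linear scan standing in for the trie lookup; returns a fresh copy, like getNewByteArray().
  static byte[] search(List<byte[]> rows, byte[] key) {
    for (byte[] row : rows) {
      if (Arrays.equals(row, key)) {
        return row.clone();
      }
    }
    return null;
  }
}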
Project: pbase    File: RowNodeWriter.java
public void reset(TokenizerNode node) {
  this.blockMeta = prefixTreeEncoder.getBlockMeta();// changes between blocks
  this.tokenizerNode = node;
  this.tokenWidth = 0;
  this.fanOut = 0;
  this.numCells = 0;
  this.width = 0;
  calculateOffsetsAndLengths();
}
Project: pbase    File: RowNodeWriter.java
/**
 * UVInt: numFanBytes/fanOut
 * bytes: each fan byte
 */
public void writeFan(OutputStream os) throws IOException {
  UVIntTool.writeBytes(fanOut, os);
  if (fanOut <= 0) {
    return;
  }
  ArrayList<TokenizerNode> children = tokenizerNode.getChildren();
  for (int i = 0; i < children.size(); ++i) {
    TokenizerNode child = children.get(i);
    os.write(child.getToken().get(0));// first byte of each child's token
  }
}
Project: pbase    File: RowNodeWriter.java
/**
 * If a branch or a nub, the last thing we append are the UFInt offsets to the child row nodes.
 */
protected void writeNextRowTrieNodeOffsets(OutputStream os) throws IOException {
  ArrayList<TokenizerNode> children = tokenizerNode.getChildren();
  for (int i = 0; i < children.size(); ++i) {
    TokenizerNode child = children.get(i);
    int distanceToChild = tokenizerNode.getNegativeIndex() - child.getNegativeIndex();
    UFIntTool.writeBytes(blockMeta.getNextNodeOffsetWidth(), distanceToChild, os);
  }
}
Project: pbase    File: RowSectionWriter.java
protected RowNodeWriter initializeWriter(List<RowNodeWriter> list, int index,
    TokenizerNode builderNode) {
  RowNodeWriter rowNodeWriter = null;
  //check if there is an existing node we can recycle
  if (index >= list.size()) {
    //there are not enough existing nodes, so add a new one which will be retrieved below
    list.add(new RowNodeWriter(prefixTreeEncoder, builderNode));
  }
  rowNodeWriter = list.get(index);
  rowNodeWriter.reset(builderNode);
  return rowNodeWriter;
}
Project: pbase    File: RowSectionWriter.java
/***************** static ******************************/

  protected static ArrayList<TokenizerNode> filterByLeafAndReverse(
      ArrayList<TokenizerNode> ins, boolean leaves) {
    ArrayList<TokenizerNode> outs = Lists.newArrayList();
    for (int i = ins.size() - 1; i >= 0; --i) {
      TokenizerNode n = ins.get(i);
      if (n.isLeaf() && leaves || (!n.isLeaf() && !leaves)) {
        outs.add(ins.get(i));
      }
    }
    return outs;
  }
Project: pbase    File: ColumnNodeWriter.java
/*************** construct **************************/

  public ColumnNodeWriter(PrefixTreeBlockMeta blockMeta, TokenizerNode builderNode,
      ColumnNodeType nodeType) {
    this.blockMeta = blockMeta;
    this.builderNode = builderNode;
    this.nodeType = nodeType;
    calculateTokenLength();
  }
Project: pbase    File: TestTokenizer.java
@Test
public void testSearching() {
  for (byte[] input : inputs) {
    TokenizerRowSearchResult resultHolder = new TokenizerRowSearchResult();
    builder.getNode(resultHolder, input, 0, input.length);
    TokenizerNode n = resultHolder.getMatchingNode();
    byte[] output = n.getNewByteArray();
    Assert.assertTrue(Bytes.equals(input, output));
  }
}
Project: HIndex    File: RowNodeWriter.java
public void reset(TokenizerNode node) {
  this.blockMeta = prefixTreeEncoder.getBlockMeta();// changes between blocks
  this.tokenizerNode = node;
  this.tokenWidth = 0;
  this.fanOut = 0;
  this.numCells = 0;
  this.width = 0;
  calculateOffsetsAndLengths();
}
Project: HIndex    File: RowNodeWriter.java
/**
 * UVInt: numFanBytes/fanOut
 * bytes: each fan byte
 */
public void writeFan(OutputStream os) throws IOException {
  UVIntTool.writeBytes(fanOut, os);
  if (fanOut <= 0) {
    return;
  }
  ArrayList<TokenizerNode> children = tokenizerNode.getChildren();
  for (int i = 0; i < children.size(); ++i) {
    TokenizerNode child = children.get(i);
    os.write(child.getToken().get(0));// first byte of each child's token
  }
}
Project: HIndex    File: RowNodeWriter.java
/**
 * If a branch or a nub, the last thing we append are the UFInt offsets to the child row nodes.
 */
protected void writeNextRowTrieNodeOffsets(OutputStream os) throws IOException {
  ArrayList<TokenizerNode> children = tokenizerNode.getChildren();
  for (int i = 0; i < children.size(); ++i) {
    TokenizerNode child = children.get(i);
    int distanceToChild = tokenizerNode.getNegativeIndex() - child.getNegativeIndex();
    UFIntTool.writeBytes(blockMeta.getNextNodeOffsetWidth(), distanceToChild, os);
  }
}
Project: HIndex    File: RowSectionWriter.java
protected RowNodeWriter initializeWriter(List<RowNodeWriter> list, int index,
    TokenizerNode builderNode) {
  RowNodeWriter rowNodeWriter = null;
  //check if there is an existing node we can recycle
  if (index >= list.size()) {
    //there are not enough existing nodes, so add a new one which will be retrieved below
    list.add(new RowNodeWriter(prefixTreeEncoder, builderNode));
  }
  rowNodeWriter = list.get(index);
  rowNodeWriter.reset(builderNode);
  return rowNodeWriter;
}
Project: HIndex    File: RowSectionWriter.java
/***************** static ******************************/

  protected static ArrayList<TokenizerNode> filterByLeafAndReverse(
      ArrayList<TokenizerNode> ins, boolean leaves) {
    ArrayList<TokenizerNode> outs = Lists.newArrayList();
    for (int i = ins.size() - 1; i >= 0; --i) {
      TokenizerNode n = ins.get(i);
      if (n.isLeaf() && leaves || (!n.isLeaf() && !leaves)) {
        outs.add(ins.get(i));
      }
    }
    return outs;
  }
Project: HIndex    File: ColumnNodeWriter.java
/*************** construct **************************/

  public ColumnNodeWriter(PrefixTreeBlockMeta blockMeta, TokenizerNode builderNode,
      ColumnNodeType nodeType) {
    this.blockMeta = blockMeta;
    this.builderNode = builderNode;
    this.nodeType = nodeType;
    calculateTokenLength();
  }
Project: HIndex    File: TestTokenizer.java
@Test
public void testSearching() {
  for (byte[] input : inputs) {
    TokenizerRowSearchResult resultHolder = new TokenizerRowSearchResult();
    builder.getNode(resultHolder, input, 0, input.length);
    TokenizerNode n = resultHolder.getMatchingNode();
    byte[] output = n.getNewByteArray();
    Assert.assertTrue(Bytes.equals(input, output));
  }
}
Project: PyroDB    File: RowNodeWriter.java
public void reset(TokenizerNode node) {
  this.blockMeta = prefixTreeEncoder.getBlockMeta();// changes between blocks
  this.tokenizerNode = node;
  this.tokenWidth = 0;
  this.fanOut = 0;
  this.numCells = 0;
  this.width = 0;
  calculateOffsetsAndLengths();
}
Project: PyroDB    File: RowNodeWriter.java
/**
 * UVInt: numFanBytes/fanOut
 * bytes: each fan byte
 */
public void writeFan(OutputStream os) throws IOException {
  UVIntTool.writeBytes(fanOut, os);
  if (fanOut <= 0) {
    return;
  }
  ArrayList<TokenizerNode> children = tokenizerNode.getChildren();
  for (int i = 0; i < children.size(); ++i) {
    TokenizerNode child = children.get(i);
    os.write(child.getToken().get(0));// first byte of each child's token
  }
}
Project: PyroDB    File: RowNodeWriter.java
/**
 * If a branch or a nub, the last thing we append are the UFInt offsets to the child row nodes.
 */
protected void writeNextRowTrieNodeOffsets(OutputStream os) throws IOException {
  ArrayList<TokenizerNode> children = tokenizerNode.getChildren();
  for (int i = 0; i < children.size(); ++i) {
    TokenizerNode child = children.get(i);
    int distanceToChild = tokenizerNode.getNegativeIndex() - child.getNegativeIndex();
    UFIntTool.writeBytes(blockMeta.getNextNodeOffsetWidth(), distanceToChild, os);
  }
}
Project: PyroDB    File: RowSectionWriter.java
protected RowNodeWriter initializeWriter(List<RowNodeWriter> list, int index,
    TokenizerNode builderNode) {
  RowNodeWriter rowNodeWriter = null;
  //check if there is an existing node we can recycle
  if (index >= list.size()) {
    //there are not enough existing nodes, so add a new one which will be retrieved below
    list.add(new RowNodeWriter(prefixTreeEncoder, builderNode));
  }
  rowNodeWriter = list.get(index);
  rowNodeWriter.reset(builderNode);
  return rowNodeWriter;
}
Project: PyroDB    File: RowSectionWriter.java
/***************** static ******************************/

  protected static ArrayList<TokenizerNode> filterByLeafAndReverse(
      ArrayList<TokenizerNode> ins, boolean leaves) {
    ArrayList<TokenizerNode> outs = Lists.newArrayList();
    for (int i = ins.size() - 1; i >= 0; --i) {
      TokenizerNode n = ins.get(i);
      if (n.isLeaf() && leaves || (!n.isLeaf() && !leaves)) {
        outs.add(ins.get(i));
      }
    }
    return outs;
  }
Project: PyroDB    File: ColumnNodeWriter.java
/*************** construct **************************/

  public ColumnNodeWriter(PrefixTreeBlockMeta blockMeta, TokenizerNode builderNode,
      ColumnNodeType nodeType) {
    this.blockMeta = blockMeta;
    this.builderNode = builderNode;
    this.nodeType = nodeType;
    calculateTokenLength();
  }
Project: PyroDB    File: TestTokenizer.java
@Test
public void testSearching() {
  for (byte[] input : inputs) {
    TokenizerRowSearchResult resultHolder = new TokenizerRowSearchResult();
    builder.getNode(resultHolder, input, 0, input.length);
    TokenizerNode n = resultHolder.getMatchingNode();
    byte[] output = n.getNewByteArray();
    Assert.assertTrue(Bytes.equals(input, output));
  }
}
Project: c5    File: RowNodeWriter.java
public void reset(TokenizerNode node) {
  this.blockMeta = prefixTreeEncoder.getBlockMeta();// changes between blocks
  this.tokenizerNode = node;
  this.tokenWidth = 0;
  this.fanOut = 0;
  this.numCells = 0;
  this.width = 0;
  calculateOffsetsAndLengths();
}
Project: c5    File: RowNodeWriter.java
/**
 * UVInt: numFanBytes/fanOut
 * bytes: each fan byte
 */
public void writeFan(OutputStream os) throws IOException {
  UVIntTool.writeBytes(fanOut, os);
  if (fanOut <= 0) {
    return;
  }
  ArrayList<TokenizerNode> children = tokenizerNode.getChildren();
  for (int i = 0; i < children.size(); ++i) {
    TokenizerNode child = children.get(i);
    os.write(child.getToken().get(0));// first byte of each child's token
  }
}
Project: c5    File: RowNodeWriter.java
/**
 * If a branch or a nub, the last thing we append are the UFInt offsets to the child row nodes.
 */
protected void writeNextRowTrieNodeOffsets(OutputStream os) throws IOException {
  ArrayList<TokenizerNode> children = tokenizerNode.getChildren();
  for (int i = 0; i < children.size(); ++i) {
    TokenizerNode child = children.get(i);
    int distanceToChild = tokenizerNode.getNegativeIndex() - child.getNegativeIndex();
    UFIntTool.writeBytes(blockMeta.getNextNodeOffsetWidth(), distanceToChild, os);
  }
}
Project: c5    File: RowSectionWriter.java
protected RowNodeWriter initializeWriter(List<RowNodeWriter> list, int index,
    TokenizerNode builderNode) {
  RowNodeWriter rowNodeWriter = null;
  //check if there is an existing node we can recycle
  if (index >= list.size()) {
    //there are not enough existing nodes, so add a new one which will be retrieved below
    list.add(new RowNodeWriter(prefixTreeEncoder, builderNode));
  }
  rowNodeWriter = list.get(index);
  rowNodeWriter.reset(builderNode);
  return rowNodeWriter;
}
Project: c5    File: RowSectionWriter.java
/***************** static ******************************/

  protected static ArrayList<TokenizerNode> filterByLeafAndReverse(
      ArrayList<TokenizerNode> ins, boolean leaves) {
    ArrayList<TokenizerNode> outs = Lists.newArrayList();
    for (int i = ins.size() - 1; i >= 0; --i) {
      TokenizerNode n = ins.get(i);
      if (n.isLeaf() && leaves || (!n.isLeaf() && !leaves)) {
        outs.add(ins.get(i));
      }
    }
    return outs;
  }
Project: c5    File: ColumnNodeWriter.java
/*************** construct **************************/

  public ColumnNodeWriter(PrefixTreeBlockMeta blockMeta, TokenizerNode builderNode,
      boolean familyVsQualifier) {
    this.blockMeta = blockMeta;
    this.builderNode = builderNode;
    this.familyVsQualifier = familyVsQualifier;
    calculateTokenLength();
  }
Project: c5    File: TestTokenizer.java
@Test
public void testSearching() {
  for (byte[] input : inputs) {
    TokenizerRowSearchResult resultHolder = new TokenizerRowSearchResult();
    builder.getNode(resultHolder, input, 0, input.length);
    TokenizerNode n = resultHolder.getMatchingNode();
    byte[] output = n.getNewByteArray();
    Assert.assertTrue(Bytes.equals(input, output));
  }
}
Project: ditb    File: RowNodeWriter.java
/*********************** construct *************************/

  public RowNodeWriter(PrefixTreeEncoder keyValueBuilder, TokenizerNode tokenizerNode) {
    reconstruct(keyValueBuilder, tokenizerNode);
  }
Project: ditb    File: RowNodeWriter.java
public void reconstruct(PrefixTreeEncoder prefixTreeEncoder, TokenizerNode tokenizerNode) {
  this.prefixTreeEncoder = prefixTreeEncoder;
  reset(tokenizerNode);
}
Project: ditb    File: RowSectionWriter.java
public ArrayList<TokenizerNode> getNonLeaves() {
  return nonLeaves;
}
Project: ditb    File: RowSectionWriter.java
public ArrayList<TokenizerNode> getLeaves() {
  return leaves;
}
Project: ditb    File: ColumnSectionWriter.java
public ArrayList<TokenizerNode> getNonLeaves() {
  return nonLeaves;
}