PREV | NEXT | FRAMES | NO FRAMES
Packages that use TokenizerNode:
- org.apache.hadoop.hbase.codec.prefixtree.encode.column
- org.apache.hadoop.hbase.codec.prefixtree.encode.row
- org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize
Uses of TokenizerNode in org.apache.hadoop.hbase.codec.prefixtree.encode.column
Fields in org.apache.hadoop.hbase.codec.prefixtree.encode.column declared as TokenizerNode:
protected TokenizerNode |
ColumnNodeWriter.builderNode
fields |
Methods in org.apache.hadoop.hbase.codec.prefixtree.encode.column that return types with arguments of type TokenizerNode:
ArrayList<TokenizerNode> |
ColumnSectionWriter.getLeaves()
|
ArrayList<TokenizerNode> |
ColumnSectionWriter.getNonLeaves()
|
Constructors in org.apache.hadoop.hbase.codec.prefixtree.encode.column with parameters of type TokenizerNode:
ColumnNodeWriter(PrefixTreeBlockMeta blockMeta,
TokenizerNode builderNode,
ColumnNodeType nodeType)
construct |
Uses of TokenizerNode in org.apache.hadoop.hbase.codec.prefixtree.encode.row
Fields in org.apache.hadoop.hbase.codec.prefixtree.encode.row declared as TokenizerNode:
protected TokenizerNode |
RowNodeWriter.tokenizerNode
|
Fields in org.apache.hadoop.hbase.codec.prefixtree.encode.row with type parameters of type TokenizerNode:
protected ArrayList<TokenizerNode> |
RowSectionWriter.leaves
|
protected ArrayList<TokenizerNode> |
RowSectionWriter.nonLeaves
|
Methods in org.apache.hadoop.hbase.codec.prefixtree.encode.row that return types with arguments of type TokenizerNode:
protected static ArrayList<TokenizerNode> |
RowSectionWriter.filterByLeafAndReverse(ArrayList<TokenizerNode> ins,
boolean leaves)
static |
ArrayList<TokenizerNode> |
RowSectionWriter.getLeaves()
|
ArrayList<TokenizerNode> |
RowSectionWriter.getNonLeaves()
|
Methods in org.apache.hadoop.hbase.codec.prefixtree.encode.row with parameters of type TokenizerNode:
protected RowNodeWriter |
RowSectionWriter.initializeWriter(List<RowNodeWriter> list,
int index,
TokenizerNode builderNode)
|
void |
RowNodeWriter.reconstruct(PrefixTreeEncoder prefixTreeEncoder,
TokenizerNode tokenizerNode)
|
void |
RowNodeWriter.reset(TokenizerNode node)
|
Method parameters in org.apache.hadoop.hbase.codec.prefixtree.encode.row with type arguments of type TokenizerNode:
protected static ArrayList<TokenizerNode> |
RowSectionWriter.filterByLeafAndReverse(ArrayList<TokenizerNode> ins,
boolean leaves)
static |
Constructors in org.apache.hadoop.hbase.codec.prefixtree.encode.row with parameters of type TokenizerNode:
RowNodeWriter(PrefixTreeEncoder keyValueBuilder,
TokenizerNode tokenizerNode)
construct |
Uses of TokenizerNode in org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize
Fields in org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize declared as TokenizerNode:
protected TokenizerNode |
TokenizerRowSearchResult.matchingNode
|
protected TokenizerNode |
TokenizerNode.parent
Tree content/structure used during tokenization |
protected TokenizerNode |
Tokenizer.root
|
Fields in org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize with type parameters of type TokenizerNode:
protected ArrayList<TokenizerNode> |
TokenizerNode.children
|
protected ArrayList<TokenizerNode> |
Tokenizer.nodes
|
Methods in org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize that return TokenizerNode:
protected TokenizerNode |
Tokenizer.addNode(TokenizerNode parent,
int nodeDepth,
int tokenStartOffset,
ByteRange token,
int inputTokenOffset)
|
TokenizerNode |
TokenizerNode.getLastChild()
|
TokenizerNode |
TokenizerRowSearchResult.getMatchingNode()
|
TokenizerNode |
TokenizerNode.getParent()
|
TokenizerNode |
Tokenizer.getRoot()
get/set |
Methods in org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize that return types with arguments of type TokenizerNode:
ArrayList<TokenizerNode> |
TokenizerNode.getChildren()
|
ArrayList<TokenizerNode> |
Tokenizer.getNodes(boolean includeNonLeaves,
boolean includeLeaves)
|
Methods in org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize with parameters of type TokenizerNode:
protected void |
TokenizerNode.addChild(TokenizerNode node)
|
protected TokenizerNode |
Tokenizer.addNode(TokenizerNode parent,
int nodeDepth,
int tokenStartOffset,
ByteRange token,
int inputTokenOffset)
|
int |
TokenDepthComparator.compare(TokenizerNode a,
TokenizerNode b)
|
protected void |
TokenizerNode.moveChildrenToDifferentParent(TokenizerNode newParent)
|
void |
TokenizerNode.reconstruct(Tokenizer builder,
TokenizerNode parent,
int nodeDepth,
int tokenStartOffset,
int tokenOffset,
int tokenLength)
|
void |
TokenizerRowSearchResult.set(TokenizerRowSearchPosition difference,
TokenizerNode matchingNode)
|
void |
TokenizerNode.setParent(TokenizerNode parent)
|
Method parameters in org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize with type arguments of type TokenizerNode:
void |
Tokenizer.appendNodes(List<TokenizerNode> appendTo,
boolean includeNonLeaves,
boolean includeLeaves)
|
void |
TokenizerNode.appendNodesToExternalList(List<TokenizerNode> appendTo,
boolean includeNonLeaves,
boolean includeLeaves)
moving nodes around |
Constructors in org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize with parameters of type TokenizerNode:
TokenizerNode(Tokenizer builder,
TokenizerNode parent,
int nodeDepth,
int tokenStartOffset,
int tokenOffset,
int tokenLength)
construct |
|
TokenizerRowSearchResult(TokenizerNode matchingNode)
|
PREV | NEXT | FRAMES | NO FRAMES