Three columns: `Unnamed: 0` — an int64 row index ranging from 0 to ~305k; `body` — the method source as a string (7 to 52.9k characters) with the method's name stripped from its signature; `name` — the stripped method name (1 to 185 characters). Rows 294,000–294,099 are shown below; a loading sketch follows.
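As a minimal sketch of how a slice like this could be consumed — assuming a local Parquet export; the file name `methods.parquet` and the `reconstruct` helper are illustrative, not part of the dataset:

```python
import pandas as pd

# Assumed local export of the dataset; the path is hypothetical.
df = pd.read_parquet("methods.parquet")

def reconstruct(row) -> str:
    """Re-insert the stripped name into the body's signature.

    Bodies look like "boolean () { ... }": the return type, a space, then
    the parameter list where the name used to be. Splitting on the first
    " (" is enough for the rows shown here; unusual signatures may need a
    real Java parser.
    """
    ret_type, rest = row["body"].split(" (", 1)
    return f"{ret_type} {row['name']}({rest}"

# Row 294,000 would round-trip to:
# boolean hasNext() { return delegate.hasNext() && !filter.accepts(delegate.current()); }
```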
| Unnamed: 0 | body | name |
|---|---|---|
| 294,000 | `boolean () { return delegate.hasNext() && !filter.accepts(delegate.current()); }` | hasNext |
| 294,001 | `void (int number) { while(number > 0) { delegate.rewind(); rewindToPrevious(); --number; } }` | rewind |
| 294,002 | `PsiElement () { return delegate.current(); }` | current |
| 294,003 | `void () { delegate.advance(); advanceToNext(); }` | advance |
| 294,004 | `void () { delegate.rewind(); rewindToPrevious(); }` | rewind |
| 294,005 | `void () { delegate.reset(); advanceToNext(); }` | reset |
| 294,006 | `MyType () { return myType; }` | getType |
| 294,007 | `MyType () { return myType; }` | getType |
| 294,008 | `PsiElement () { return myElement; }` | getElement |
| 294,009 | `TokenSet () { return TokenSet.EMPTY; }` | getIgnoredTokens |
| 294,010 | `List<SingleChildDescriptor> () { return mySingleChildDescriptors; }` | getSingleChildDescriptors |
| 294,011 | `List<MultiChildDescriptor> () { return myMultiChildDescriptors; }` | getMultiChildDescriptors |
| 294,012 | `List<Object> () { return myConstants; }` | getConstants |
| 294,013 | `List<PsiElement[]> () { return myCodeBlocks; }` | getCodeBlocks |
| 294,014 | `EquivalenceDescriptorBuilder (PsiElement @Nullable [] block) { myCodeBlocks.add(block); return this; }` | codeBlock |
| 294,015 | `EquivalenceDescriptorBuilder (@Nullable PsiElement element) { return add(SingleChildDescriptor.MyType.DEFAULT, element); }` | element |
| 294,016 | `EquivalenceDescriptorBuilder (PsiElement @Nullable [] elements) { return add(MultiChildDescriptor.MyType.DEFAULT, elements); }` | elements |
| 294,017 | `EquivalenceDescriptorBuilder (@Nullable PsiElement element) { return add(SingleChildDescriptor.MyType.CHILDREN, element); }` | children |
| 294,018 | `EquivalenceDescriptorBuilder (@Nullable PsiElement element) { return add(SingleChildDescriptor.MyType.OPTIONALLY, element); }` | optionally |
| 294,019 | `EquivalenceDescriptorBuilder (@Nullable PsiElement element) { return add(SingleChildDescriptor.MyType.OPTIONALLY_IN_PATTERN, element); }` | optionallyInPattern |
| 294,020 | `EquivalenceDescriptorBuilder (PsiElement @Nullable [] elements) { return add(MultiChildDescriptor.MyType.OPTIONALLY, elements); }` | optionally |
| 294,021 | `EquivalenceDescriptorBuilder (PsiElement @Nullable [] elements) { return add(MultiChildDescriptor.MyType.OPTIONALLY_IN_PATTERN, elements); }` | optionallyInPattern |
| 294,022 | `EquivalenceDescriptorBuilder (@Nullable PsiElement element) { return add(SingleChildDescriptor.MyType.CHILDREN_OPTIONALLY, element); }` | childrenOptionally |
| 294,023 | `EquivalenceDescriptorBuilder (@Nullable PsiElement element) { return add(SingleChildDescriptor.MyType.CHILDREN_OPTIONALLY_IN_PATTERN, element); }` | childrenOptionallyInPattern |
| 294,024 | `EquivalenceDescriptorBuilder (PsiElement[] elements) { return add(MultiChildDescriptor.MyType.IN_ANY_ORDER, elements); }` | inAnyOrder |
| 294,025 | `EquivalenceDescriptorBuilder (@Nullable PsiElement element) { return add(SingleChildDescriptor.MyType.CHILDREN_IN_ANY_ORDER, element); }` | childrenInAnyOrder |
| 294,026 | `EquivalenceDescriptorBuilder (@Nullable Object constant) { myConstants.add(constant); return this; }` | constant |
| 294,027 | `EquivalenceDescriptorBuilder (MultiChildDescriptor.MyType type, PsiElement[] elements) { myMultiChildDescriptors.add(new MultiChildDescriptor(type, elements)); return this; }` | add |
| 294,028 | `EquivalenceDescriptorBuilder (SingleChildDescriptor.MyType type, PsiElement element) { mySingleChildDescriptors.add(new SingleChildDescriptor(type, element)); return this; }` | add |
| 294,029 | `TreeHashResult (AbstractTreeHasher treeHasher, FragmentsCollector callBack, List<? extends PsiElement> statements, PsiFragment upper, NodeSpecificHasher hasher) { final int statementsSize = statements.size(); if (statementsSize > 0) { final PsiFragment fragment = treeHasher.buildFragment(hasher, statements, 0, statementsSize - 1); fragment.setParent(upper); int cost = 0; int hash = 0; for (PsiElement statement : statements) { final TreeHashResult res = treeHasher.hash(statement, null, hasher); hash = hash* 31 + res.getHash(); cost += res.getCost(); } TreeHashResult result = new TreeHashResult(hash, cost, treeHasher.buildFragment(hasher, statements, 0, statementsSize - 1)); if (callBack != null && statementsSize > 1) callBack.add(hash, cost, fragment); return result; } return new TreeHashResult(1, 0, treeHasher.buildFragment(hasher, statements, 0, statementsSize - 1)); }` | hashCodeBlockForIndexing |
| 294,030 | `TreeHashResult (AbstractTreeHasher base, FragmentsCollector callBack, PsiElement root, PsiFragment upper, NodeSpecificHasher hasher ) { final List<PsiElement> children = hasher.getNodeChildren(root); final PsiFragment fragment = base.buildFragment(hasher, root, base.getCost(root)); if (upper != null) { fragment.setParent(upper); } final int size = children.size(); if (size == 0 && !(root instanceof LeafElement)) { // contains only whitespaces and other unmeaning children return new TreeHashResult(0, hasher.getNodeCost(root), fragment); } final int discardCost = base.getDiscardCost(root); int c = hasher.getNodeCost(root); int h = hasher.getNodeHash(root); for (int i = 0; i < size; i++) { PsiElement child = children.get(i); final TreeHashResult res = base.hash(child, fragment, hasher); int childCost = res.getCost(); c += childCost; if (childCost > discardCost \|\| !base.ignoreChildHash(child)) { h += res.getHash(); } } if (base.shouldAnonymize(root, hasher)) { h = 0; } if (callBack != null) { callBack.add(h, c, fragment); } return new TreeHashResult(h, c, fragment); }` | computeElementHashForIndexing |
| 294,031 | `boolean (PsiElement element) { return DuplocatorUtil.isIgnoredNode(element) \|\| isToSkipAsLiteral(element); }` | accepts |
| 294,032 | `boolean (PsiElement element) { return isLiteral(element) && !myDuplicatesProfile.getDuplocatorState(myDuplicatesProfile.getLanguage(element)).distinguishLiterals(); }` | isToSkipAsLiteral |
| 294,033 | `NodeFilter () { return myNodeFilter; }` | getNodeFilter |
| 294,034 | `int (PsiElement node) { if (node == null) { return 0; } if (node instanceof PsiWhiteSpace \|\| node instanceof PsiErrorElement) { return 0; } else if (node instanceof LeafElement) { if (isToSkipAsLiteral(node)) { return 0; } return node.getText().hashCode(); } return node.getClass().getName().hashCode(); }` | getNodeHash |
| 294,035 | `boolean (PsiElement node) { if (node instanceof LeafElement) { final IElementType elementType = ((LeafElement)node).getElementType(); if (myDuplicatesProfile.getLiterals().contains(elementType)) { return true; } } return false; }` | isLiteral |
| 294,036 | `int (PsiElement node) { return node != null ? myDuplicatesProfile.getNodeCost(node) : 0; }` | getNodeCost |
| 294,037 | `List<PsiElement> (PsiElement node) { final List<PsiElement> result = new ArrayList<>(); final FilteringNodeIterator it = new FilteringNodeIterator(SiblingNodeIterator.create(node.getFirstChild()), myNodeFilter); while (it.hasNext()) { result.add(it.current()); it.advance(); } return result; }` | getNodeChildren |
| 294,038 | `boolean (@NotNull PsiElement node1, @NotNull PsiElement node2) { return false; }` | areNodesEqual |
| 294,039 | `boolean (@NotNull PsiElement root1, @NotNull PsiElement root2, int discardCost) { if (root1 == root2) { return true; } return new DuplicatesMatchingVisitor(this, myNodeFilter, discardCost).match(root1, root2); }` | areTreesEqual |
| 294,040 | `DuplicatesProfileBase () { return myDuplicatesProfile; }` | getDuplicatesProfile |
| 294,041 | `boolean (PsiElement node1, PsiElement node2) { // todo: try to optimize this return true; }` | checkDeep |
| 294,042 | `void (@NotNull PsiElement node) { Language language = null; if (node instanceof PsiFile) { FileType fileType = ((PsiFile)node).getFileType(); if (fileType instanceof LanguageFileType) { language = ((LanguageFileType)fileType).getLanguage(); } } if (language == null) language = node.getLanguage(); if ((myForIndexing \|\| mySettings.SELECTED_PROFILES.contains(language.getDisplayName())) && myDuplicatesProfile.isMyLanguage(language)) { myTreeHasher.hash(node, this); } }` | visitNode |
| 294,043 | `void () { }` | hashingFinished |
| 294,044 | `void (final boolean readOnly) { myReadOnly = readOnly; }` | setReadOnly |
| 294,045 | `void (int hash, int cost, PsiFragment frag, NodeSpecificHasher visitor) { forceAdd(hash, cost, frag); }` | add |
| 294,046 | `void (int hash, int cost, PsiFragment frag) { if (frag == null) { //fake fragment myDuplicates.put(hash, new ArrayList<>()); return; } frag.setCost(cost); List<List<PsiFragment>> fragments = myDuplicates.get(hash); if (fragments == null) { // do not add new hash codes if (!myReadOnly) { List<List<PsiFragment>> list = new ArrayList<>(); List<PsiFragment> listF = new ArrayList<>(); listF.add(frag); list.add(listF); myDuplicates.put(hash, list); } return; } boolean found = false; final PsiElement[] elements = frag.getElements(); int discardCost = 0; if (myDiscardCost >= 0) { discardCost = myDiscardCost; } else { final DuplocatorState state = DuplocatorUtil.getDuplocatorState(frag); if (state != null) { discardCost = state.getDiscardCost(); } } for (Iterator<List<PsiFragment>> i = fragments.iterator(); i.hasNext() && !found; ) { List<PsiFragment> fi = i.next(); PsiFragment aFrag = fi.get(0); if (aFrag.isEqual(elements, discardCost)) { boolean skipNew = false; for (Iterator<PsiFragment> frags = fi.iterator(); frags.hasNext() && !skipNew; ) { final PsiFragment old = frags.next(); if (frag.intersectsWith(old)) { if (old.getCost() < frag.getCost() \|\| frag.contains(old)) { frags.remove(); } else { skipNew = true; } } } if (!skipNew) fi.add(frag); found = true; } } if (!found) { List<PsiFragment> newFrags = new ArrayList<>(); newFrags.add(frag); fragments.add(newFrags); } }` | forceAdd |
| 294,047 | `void (int hash, int cost, PsiFragment frag) { int bound; if (myBound >= 0) { bound = myBound; } else { final DuplocatorState duplocatorState = DuplocatorUtil.getDuplocatorState(frag); if (duplocatorState == null) { return; } bound = duplocatorState.getLowerBound(); } if (cost >= bound) { forceAdd(hash, cost, frag); } }` | add |
| 294,048 | `DupInfo () { Object2IntMap<PsiFragment[]> duplicateList = new Object2IntOpenHashMap<>(); for (Int2ObjectMap.Entry<List<List<PsiFragment>>> entry : myDuplicates.int2ObjectEntrySet()) { for (List<PsiFragment> list : entry.getValue()) { int len = list.size(); if (len > 1) { PsiFragment[] filtered = new PsiFragment[len]; int idx = 0; for (PsiFragment fragment : list) { fragment.markDuplicate(); filtered[idx++] = fragment; } duplicateList.put(filtered, entry.getIntKey()); } } } myDuplicates = null; for (ObjectIterator<Object2IntMap.Entry<PsiFragment[]>> iterator = duplicateList.object2IntEntrySet().iterator(); iterator.hasNext(); ) { Object2IntMap.Entry<PsiFragment[]> entry = iterator.next(); PsiFragment[] fragments = entry.getKey(); LOG.assertTrue(fragments.length > 1); boolean nested = false; for (PsiFragment fragment : fragments) { if (fragment.isNested()) { nested = true; break; } } if (nested) { iterator.remove(); } } PsiFragment[][] duplicates = duplicateList.keySet().toArray(new PsiFragment[][]{}); Arrays.sort(duplicates, (x, y) -> y[0].getCost() - x[0].getCost()); return new DupInfo() { private final Int2ObjectMap<GroupNodeDescription> myPattern2Description = new Int2ObjectOpenHashMap<>(); @Override public int getPatterns() { return duplicates.length; } @Override public int getPatternCost(int number) { return ((PsiFragment[])duplicates[number])[0].getCost(); } @Override public int getPatternDensity(int number) { return duplicates[number].length; } @Override public PsiFragment[] getFragmentOccurences(int pattern) { return duplicates[pattern]; } @Override public UsageInfo[] getUsageOccurences(int pattern) { PsiFragment[] occurrences = getFragmentOccurences(pattern); UsageInfo[] infos = new UsageInfo[occurrences.length]; for (int i = 0; i < infos.length; i++) { infos[i] = occurrences[i].getUsageInfo(); } return infos; } @Override public int getFileCount(final int pattern) { if (myPattern2Description.containsKey(pattern)) { return myPattern2Description.get(pattern).getFilesCount(); } return cacheGroupNodeDescription(pattern).getFilesCount(); } private GroupNodeDescription cacheGroupNodeDescription(final int pattern) { final Set<PsiFile> files = new HashSet<>(); final PsiFragment[] occurrences = getFragmentOccurences(pattern); for (PsiFragment occurrence : occurrences) { final PsiFile file = occurrence.getFile(); if (file != null) { files.add(file); } } final int fileCount = files.size(); final PsiFile psiFile = occurrences[0].getFile(); DuplicatesProfile profile = DuplicatesProfile.findProfileForDuplicate(this, pattern); String comment = profile != null ? profile.getComment(this, pattern) : ""; String filename = psiFile != null ? psiFile.getName() : DupLocatorBundle.message("duplicates.unknown.file.node.title"); final GroupNodeDescription description = new GroupNodeDescription(fileCount, filename, comment); myPattern2Description.put(pattern, description); return description; } @Override public @Nullable @Nls String getTitle(int pattern) { if (getFileCount(pattern) == 1) { if (myPattern2Description.containsKey(pattern)) { return myPattern2Description.get(pattern).getTitle(); } return cacheGroupNodeDescription(pattern).getTitle(); } return null; } @Override public @Nullable @Nls String getComment(int pattern) { if (getFileCount(pattern) == 1) { if (myPattern2Description.containsKey(pattern)) { return myPattern2Description.get(pattern).getComment(); } return cacheGroupNodeDescription(pattern).getComment(); } return null; } @Override public int getHash(final int i) { return duplicateList.getInt(duplicates[i]); } }; }` | getInfo |
| 294,049 | `int () { return duplicates.length; }` | getPatterns |
| 294,050 | `int (int number) { return ((PsiFragment[])duplicates[number])[0].getCost(); }` | getPatternCost |
| 294,051 | `int (int number) { return duplicates[number].length; }` | getPatternDensity |
| 294,052 | `PsiFragment[] (int pattern) { return duplicates[pattern]; }` | getFragmentOccurences |
| 294,053 | `UsageInfo[] (int pattern) { PsiFragment[] occurrences = getFragmentOccurences(pattern); UsageInfo[] infos = new UsageInfo[occurrences.length]; for (int i = 0; i < infos.length; i++) { infos[i] = occurrences[i].getUsageInfo(); } return infos; }` | getUsageOccurences |
| 294,054 | `int (final int pattern) { if (myPattern2Description.containsKey(pattern)) { return myPattern2Description.get(pattern).getFilesCount(); } return cacheGroupNodeDescription(pattern).getFilesCount(); }` | getFileCount |
| 294,055 | `GroupNodeDescription (final int pattern) { final Set<PsiFile> files = new HashSet<>(); final PsiFragment[] occurrences = getFragmentOccurences(pattern); for (PsiFragment occurrence : occurrences) { final PsiFile file = occurrence.getFile(); if (file != null) { files.add(file); } } final int fileCount = files.size(); final PsiFile psiFile = occurrences[0].getFile(); DuplicatesProfile profile = DuplicatesProfile.findProfileForDuplicate(this, pattern); String comment = profile != null ? profile.getComment(this, pattern) : ""; String filename = psiFile != null ? psiFile.getName() : DupLocatorBundle.message("duplicates.unknown.file.node.title"); final GroupNodeDescription description = new GroupNodeDescription(fileCount, filename, comment); myPattern2Description.put(pattern, description); return description; }` | cacheGroupNodeDescription |
| 294,056 | `int (final int i) { return duplicateList.getInt(duplicates[i]); }` | getHash |
| 294,057 | `int () { return myFilesCount; }` | getFilesCount |
| 294,058 | `int (PsiElement root) { if (myDiscardCost >= 0) { return myDiscardCost; } return myProfile.getDuplocatorState(myProfile.getLanguage(root)).getDiscardCost(); }` | getDiscardCost |
| 294,059 | `TreeHashResult (@NotNull PsiElement root, PsiFragment upper, @NotNull NodeSpecificHasher hasher) { final TreeHashResult result = computeHash(root, upper, hasher); // todo: try to optimize (ex. compute cost and hash separately) final int discardCost = getDiscardCost(root); if (result.getCost() < discardCost) { return new TreeHashResult(0, result.getCost(), result.getFragment()); } return result; }` | hash |
| 294,060 | `TreeHashResult (PsiElement root, PsiFragment upper, NodeSpecificHasher hasher) { final EquivalenceDescriptorProvider descriptorProvider = EquivalenceDescriptorProvider.getInstance(root); if (descriptorProvider != null) { final EquivalenceDescriptor descriptor = descriptorProvider.buildDescriptor(root); if (descriptor != null) { return computeHash(root, upper, descriptor, hasher); } } if (root instanceof PsiFile) { final List<PsiElement> children = hasher.getNodeChildren(root); if (children.size() <= 20) { return hashCodeBlock(children, upper, hasher, true); } } final NodeSpecificHasherBase ssrNodeSpecificHasher = (NodeSpecificHasherBase)hasher; if (shouldBeAnonymized(root, ssrNodeSpecificHasher)) { return computeElementHash(root, upper, hasher); } if (myForIndexing) { return computeElementHash(root, upper, hasher); } final PsiElement element = DuplocatorUtil.getOnlyChild(root, ssrNodeSpecificHasher.getNodeFilter()); if (element != root) { final TreeHashResult result = hash(element, upper, hasher); final int cost = hasher.getNodeCost(root); return new TreeHashResult(result.getHash(), result.getCost() + cost, result.getFragment()); } return computeElementHash(element, upper, hasher); }` | computeHash |
| 294,061 | `boolean (PsiElement root, NodeSpecificHasher hasher) { return shouldBeAnonymized(root, (NodeSpecificHasherBase)hasher); }` | shouldAnonymize |
| 294,062 | `TreeHashResult (@NotNull PsiElement root, PsiFragment upper, NodeSpecificHasher hasher) { if (myForIndexing) { return TreeHashingUtils.computeElementHashForIndexing(this, myCallBack, root, upper, hasher); } final List<PsiElement> children = hasher.getNodeChildren(root); final int size = children.size(); final int[] childHashes = new int[size]; final int[] childCosts = new int[size]; final PsiFragment fragment = buildFragment(hasher, root, getCost(root)); if (upper != null) { fragment.setParent(upper); } if (size == 0 && !(root instanceof LeafElement)) { // contains only whitespaces and other unmeaning children return new TreeHashResult(0, hasher.getNodeCost(root), fragment); } for (int i = 0; i < size; i++) { final TreeHashResult res = this.hash(children.get(i), fragment, hasher); childHashes[i] = res.getHash(); childCosts[i] = res.getCost(); } final int c = hasher.getNodeCost(root) + AbstractTreeHasher.vector(childCosts); final int h1 = hasher.getNodeHash(root); final int discardCost = getDiscardCost(root); for (int i = 0; i < size; i++) { if (childCosts[i] <= discardCost && ignoreChildHash(children.get(i))) { childHashes[i] = 0; } } int h = h1 + AbstractTreeHasher.vector(childHashes); if (shouldBeAnonymized(root, (NodeSpecificHasherBase)hasher)) { h = 0; } if (myCallBack != null) { myCallBack.add(h, c, fragment); } return new TreeHashResult(h, c, fragment); }` | computeElementHash |
| 294,063 | `TreeHashResult (List<? extends PsiElement> statements, PsiFragment upper, NodeSpecificHasher hasher, boolean forceHash) { if (!myForIndexing) return super.hashCodeBlock(statements, upper, hasher, forceHash); return TreeHashingUtils.hashCodeBlockForIndexing(this, myCallBack, statements, upper, hasher); }` | hashCodeBlock |
| 294,064 | `TreeHashResult (PsiElement element, PsiFragment parent, EquivalenceDescriptor descriptor, NodeSpecificHasher hasher) { final NodeSpecificHasherBase ssrHasher = (NodeSpecificHasherBase)hasher; final PsiElement element2 = DuplocatorUtil.skipNodeIfNecessary(element, descriptor, ssrHasher.getNodeFilter()); final boolean canSkip = element2 != element; final PsiFragment fragment = buildFragment(hasher, element, 0); if (parent != null) { fragment.setParent(parent); } int hash = canSkip ? 0 : hasher.getNodeHash(element); int cost = hasher.getNodeCost(element); for (SingleChildDescriptor childDescriptor : descriptor.getSingleChildDescriptors()) { final Couple<Integer> childHashResult = computeHash(childDescriptor, fragment, hasher); hash = hash * 31 + childHashResult.first; cost += childHashResult.second; } for (MultiChildDescriptor childDescriptor : descriptor.getMultiChildDescriptors()) { final Couple<Integer> childHashResult = computeHash(childDescriptor, fragment, hasher); hash = hash * 31 + childHashResult.first; cost += childHashResult.second; } for (Object constant : descriptor.getConstants()) { final int constantHash = constant != null ? constant.hashCode() : 0; hash = hash * 31 + constantHash; } for (PsiElement[] codeBlock : descriptor.getCodeBlocks()) { final List<PsiElement> filteredBlock = filter(codeBlock, ssrHasher); final TreeHashResult childHashResult = hashCodeBlock(filteredBlock, fragment, hasher); hash = hash * 31 + childHashResult.getHash(); cost += childHashResult.getCost(); } if (myCallback != null) { myCallback.add(hash, cost, fragment); } return new TreeHashResult(hash, cost, fragment); }` | computeHash |
| 294,065 | `List<PsiElement> (PsiElement[] elements, NodeSpecificHasherBase hasher) { List<PsiElement> filteredElements = new ArrayList<>(); for (PsiElement element : elements) { if (!hasher.getNodeFilter().accepts(element)) { filteredElements.add(element); } } return filteredElements; }` | filter |
| 294,066 | `Couple<Integer> (SingleChildDescriptor childDescriptor, PsiFragment parentFragment, NodeSpecificHasher nodeSpecificHasher) { final PsiElement element = childDescriptor.getElement(); if (element == null) { return Couple.of(0, 0); } final Couple<Integer> result = doComputeHash(childDescriptor, parentFragment, nodeSpecificHasher); final DuplicatesProfileBase duplicatesProfile = ((NodeSpecificHasherBase)nodeSpecificHasher).getDuplicatesProfile(); final PsiElementRole role = duplicatesProfile.getRole(element); if (role != null && !duplicatesProfile.getDuplocatorState(duplicatesProfile.getLanguage(element)).distinguishRole(role)) { return Couple.of(0, result.second); } return result; }` | computeHash |
| 294,067 | `boolean (PsiElement element, NodeSpecificHasherBase nodeSpecificHasher) { final DuplicatesProfileBase duplicatesProfile = nodeSpecificHasher.getDuplicatesProfile(); final PsiElementRole role = duplicatesProfile.getRole(element); return role != null && !duplicatesProfile.getDuplocatorState(duplicatesProfile.getLanguage(element)).distinguishRole(role); }` | shouldBeAnonymized |
| 294,068 | `Couple<Integer> (SingleChildDescriptor childDescriptor, PsiFragment parentFragment, NodeSpecificHasher nodeSpecificHasher) { final PsiElement element = childDescriptor.getElement(); if (element == null) { return Couple.of(0, 0); } return switch (childDescriptor.getType()) { case OPTIONALLY_IN_PATTERN, DEFAULT -> { final TreeHashResult result = hash(element, parentFragment, nodeSpecificHasher); yield Couple.of(result.getHash(), result.getCost()); } case CHILDREN_OPTIONALLY_IN_PATTERN, CHILDREN -> hashChildResults(computeHashesForChildren(element, parentFragment, nodeSpecificHasher), 31); case CHILDREN_IN_ANY_ORDER -> hashChildResults(computeHashesForChildren(element, parentFragment, nodeSpecificHasher), 1); default -> Couple.of(0, 0); }; }` | doComputeHash |
| 294,069 | `Couple<Integer> (MultiChildDescriptor childDescriptor, PsiFragment parentFragment, NodeSpecificHasher nodeSpecificHasher) { final PsiElement[] elements = childDescriptor.getElements(); return switch (childDescriptor.getType()) { case OPTIONALLY_IN_PATTERN, DEFAULT -> hashChildResults(computeHashes(elements, parentFragment, nodeSpecificHasher), 31); case IN_ANY_ORDER -> hashChildResults(computeHashes(elements, parentFragment, nodeSpecificHasher), 1); default -> Couple.of(0, 0); }; }` | computeHash |
| 294,070 | `Couple<Integer> (TreeHashResult[] childResults, int multiplier) { int[] hashes = getHashes(childResults); int[] costs = getCosts(childResults); int hash = AbstractTreeHasher.vector(hashes, multiplier); int cost = AbstractTreeHasher.vector(costs); return Couple.of(hash, cost); }` | hashChildResults |
| 294,071 | `int[] (TreeHashResult[] results) { int[] hashes = new int[results.length]; for (int i = 0; i < results.length; i++) { hashes[i] = results[i].getHash(); } return hashes; }` | getHashes |
| 294,072 | `int[] (TreeHashResult[] results) { int[] costs = new int[results.length]; for (int i = 0; i < results.length; i++) { costs[i] = results[i].getCost(); } return costs; }` | getCosts |
| 294,073 | `int () { return myHash; }` | getHash |
| 294,074 | `int () { return myCost; }` | getCost |
| 294,075 | `PsiFragment () { return myFragment; }` | getFragment |
| 294,076 | `DuplocateVisitor (@NotNull FragmentsCollector collector) { return new NodeSpecificHasherBase(DuplocatorSettings.getInstance(), collector, this); }` | createVisitor |
| 294,077 | `TokenSet () { return TokenSet.EMPTY; }` | getLiterals |
| 294,078 | `ExternalizableDuplocatorState (@NotNull Language language) { return DuplocatorUtil.registerAndGetState(language); }` | getDuplocatorState |
| 294,079 | `void (@NotNull final PsiElement root, @NotNull final NodeSpecificHasher hasher) { hash(root, null, hasher); }` | hash |
| 294,080 | `TreeHashResult (@NotNull final PsiElement root, final PsiFragment upper, final NodeSpecificHasher hasher) { if (myForIndexing) { return TreeHashingUtils.computeElementHashForIndexing(this, myCallBack, root, upper, hasher); } ProgressManager.checkCanceled(); final List<PsiElement> children = hasher.getNodeChildren(root); final int size = children.size(); final int[] childHashes = new int[size]; final int[] childCosts = new int[size]; final PsiFragment fragment = buildFragment(hasher, root, getCost(root)); if (upper != null) { fragment.setParent(upper); } if (size == 0 && !(root instanceof LeafElement)) { return new TreeHashResult(hasher.getNodeHash(root), hasher.getNodeCost(root), fragment); } for (int i = 0; i < size; i++) { final TreeHashResult res = hash(children.get(i), fragment, hasher); childHashes[i] = res.getHash(); childCosts[i] = res.getCost(); } final int c = hasher.getNodeCost(root) + vector(childCosts); final int h1 = hasher.getNodeHash(root); final int discardCost = getDiscardCost(root); for (int i = 0; i < size; i++) { if (childCosts[i] <= discardCost && ignoreChildHash(children.get(i))) { childHashes[i] = 0; } } final int h = h1 + vector(childHashes); if (myCallBack != null) { myCallBack.add(h, c, fragment); } return new TreeHashResult(h, c, fragment); }` | computeElementHash |
| 294,081 | `TreePsiFragment (NodeSpecificHasher hasher, PsiElement root,int cost) { if (myForIndexing) { return new TreePsiFragment(hasher, root, cost) { @Override protected PsiAnchor createAnchor(PsiElement element) { return new PsiAnchor.HardReference(element); } @Override protected Language calcLanguage(PsiElement element) { return null; // for performance } }; } return new TreePsiFragment(hasher, root, cost); }` | buildFragment |
| 294,082 | `PsiAnchor (PsiElement element) { return new PsiAnchor.HardReference(element); }` | createAnchor |
| 294,083 | `Language (PsiElement element) { return null; // for performance }` | calcLanguage |
| 294,084 | `TreePsiFragment (NodeSpecificHasher hasher, List<? extends PsiElement> elements, int from, int to) { if (myForIndexing) { return new TreePsiFragment(hasher, elements, from, to) { @Override protected PsiAnchor createAnchor(PsiElement element) { return new PsiAnchor.HardReference(element); } @Override protected Language calcLanguage(PsiElement element) { return null; // for performance } }; } return new TreePsiFragment(hasher, elements, from, to); }` | buildFragment |
| 294,085 | `PsiAnchor (PsiElement element) { return new PsiAnchor.HardReference(element); }` | createAnchor |
| 294,086 | `Language (PsiElement element) { return null; // for performance }` | calcLanguage |
| 294,087 | `boolean (PsiElement element) { return false; }` | ignoreChildHash |
| 294,088 | `TreeHashResult (final List<? extends PsiElement> statements, final PsiFragment upper, final NodeSpecificHasher hasher) { return hashCodeBlock(statements, upper, hasher, false); }` | hashCodeBlock |
| 294,089 | `TreeHashResult (final List<? extends PsiElement> statements, final PsiFragment upper, final NodeSpecificHasher hasher, boolean forceHash) { final int statementsSize = statements.size(); if (statementsSize == 1) { return hash(statements.get(0), upper, hasher); } if (statementsSize > 0) { // Here we compute all the possible code fragments using statements if (statementsSize < 20 \|\| forceHash) { //todo should be configurable final PsiFragment[] frags = new PsiFragment[statementsSize]; final PsiFragment fragment = buildFragment(hasher, statements, 0, statementsSize - 1); fragment.setParent(upper); // Fill all the statements costs and hashes final int[] hashes = new int[statementsSize]; final int[] costs = new int[statementsSize]; for (int i = 0; i < statementsSize; i++) { final TreeHashResult res = hash(statements.get(i), null, hasher); hashes[i] = res.getHash(); costs[i] = res.getCost(); frags[i] = res.getFragment(); } if (myCallBack != null) { final PsiFragment[] parents = new PsiFragment[statementsSize]; //parent(end) = [beg, end] for (int beg = 0; beg < statementsSize; beg++) { int hash = 0; int cost = 0; for (int end = beg; end < statementsSize && end - beg < 20; end++) { hash = 31 * hash + hashes[end]; cost += costs[end]; final PsiFragment curr = beg == end ? frags[beg] : beg == 0 && end == statementsSize - 1 ? fragment : buildFragment(hasher, statements, beg, end); if (beg > 0) { curr.setParent(parents[end]); //[beg, end].setParent([beg - 1, end]) } parents[end] = curr; if (end > beg) { parents[end - 1].setParent(curr);//[beg, end - 1].setParent([beg, end]) } myCallBack.add(hash, cost, curr); } } } return new TreeHashResult(vector(hashes, 31), vector(costs), fragment); } } return new TreeHashResult(1, 0, buildFragment(hasher, statements, 0, statementsSize - 1)); }` | hashCodeBlock |
| 294,090 | `int (final PsiElement root) { return 0; }` | getCost |
| 294,091 | `int (int[] args) { return vector(args, 1); }` | vector |
| 294,092 | `int (int[] args, int mult) { int sum = 0; for (int arg : args) { sum = mult * sum + arg; } return sum; }` | vector |
| 294,093 | `boolean (PsiElement root, NodeSpecificHasher hasher) { return false; }` | shouldAnonymize |
| 294,094 | `TreeHashResult (@NotNull PsiElement root, PsiFragment upper, @NotNull NodeSpecificHasher hasher) { TreeHashResult result = myPsiElement2HashAndCost.get(root); if (result == null) { result = super.hash(root, upper, hasher); myPsiElement2HashAndCost.put(root, result); } return result; }` | hash |
| 294,095 | `boolean (@NotNull NodeIterator nodes, @NotNull NodeIterator nodes2) { while (true) { if (!nodes.hasNext() \|\| !nodes2.hasNext()) { return !nodes.hasNext() && !nodes2.hasNext(); } skipIfNecessary(nodes, nodes2); skipIfNecessary(nodes2, nodes); if (!nodes.hasNext() \|\| !nodes2.hasNext()) { return !nodes.hasNext() && !nodes2.hasNext(); } if (!match(nodes.current(), nodes2.current())) { return false; } nodes.advance(); nodes2.advance(); } }` | matchSequentially |
| 294,096 | `void (NodeIterator nodes, NodeIterator nodes2) { while (DuplocatorUtil.shouldSkip(nodes2.current(), nodes.current())) { nodes2.advance(); } }` | skipIfNecessary |
| 294,097 | `boolean (PsiElement element1, PsiElement element2) { if (element1 == null \|\| element2 == null) { return element1 == element2; } if (myDiscardCost > 0) { final int cost1 = myTreeHasher.hash(element1, null, myNodeSpecificHasher).getCost(); final int cost2 = myTreeHasher.hash(element2, null, myNodeSpecificHasher).getCost(); if (cost1 < myDiscardCost \|\| cost2 < myDiscardCost) { return true; } } final DuplicatesProfileBase duplicatesProfile = myNodeSpecificHasher.getDuplicatesProfile(); final PsiElementRole role1 = duplicatesProfile.getRole(element1); final PsiElementRole role2 = duplicatesProfile.getRole(element2); final Set<PsiElementRole> skippedRoles = EnumSet.noneOf(PsiElementRole.class); final ExternalizableDuplocatorState duplocatorState = duplicatesProfile.getDuplocatorState(duplicatesProfile.getLanguage(element1)); for (PsiElementRole role : PsiElementRole.values()) { if (!duplocatorState.distinguishRole(role)) { skippedRoles.add(role); } } if (role1 == role2 && skippedRoles.contains(role1)) { return true; } final EquivalenceDescriptorProvider descriptorProvider = EquivalenceDescriptorProvider.getInstance(element1); EquivalenceDescriptor descriptor1 = descriptorProvider != null ? descriptorProvider.buildDescriptor(element1) : null; EquivalenceDescriptor descriptor2 = descriptorProvider != null ? descriptorProvider.buildDescriptor(element2) : null; PsiElement newElement1 = DuplocatorUtil.skipNodeIfNecessary(element1, descriptor1, myNodeFilter); PsiElement newElement2 = DuplocatorUtil.skipNodeIfNecessary(element2, descriptor2, myNodeFilter); if (newElement1 != element1 \|\| newElement2 != element2) { return match(newElement1, newElement2); } if (!element1.getClass().equals(element2.getClass())) { return false; } if (descriptor1 != null && descriptor2 != null) { return DuplocatorUtil.match(descriptor1, descriptor2, this, skippedRoles, duplicatesProfile); } if (element1 instanceof LeafElement) { IElementType elementType1 = ((LeafElement)element1).getElementType(); IElementType elementType2 = ((LeafElement)element2).getElementType(); if (!duplocatorState.distinguishLiterals() && duplicatesProfile.getLiterals().contains(elementType1) && duplicatesProfile.getLiterals().contains(elementType2)) { return true; } return element1.getText().equals(element2.getText()); } if (element1.getFirstChild() == null && element1.getTextLength() == 0) { return element2.getFirstChild() == null && element2.getTextLength() == 0; } return matchSequentially(new FilteringNodeIterator(SiblingNodeIterator.create(element1.getFirstChild()), getNodeFilter()), new FilteringNodeIterator(SiblingNodeIterator.create(element2.getFirstChild()), getNodeFilter())); }` | match |
| 294,098 | `boolean (@NotNull NodeIterator it1, @NotNull NodeIterator it2) { final List<PsiElement> elements1 = new ArrayList<>(); final List<PsiElement> elements2 = new ArrayList<>(); while (it1.hasNext()) { final PsiElement element = it1.current(); if (element != null) { elements1.add(element); } it1.advance(); } while (it2.hasNext()) { final PsiElement element = it2.current(); if (element != null) { elements2.add(element); } it2.advance(); } if (elements1.size() != elements2.size()) { return false; } Int2ObjectMap<List<PsiElement>> hashToElement = new Int2ObjectOpenHashMap<>(elements1.size()); for (PsiElement element : elements1) { TreeHashResult result = myTreeHasher.hash(element, null, myNodeSpecificHasher); if (result != null) { hashToElement.computeIfAbsent(result.getHash(), __ -> new ArrayList<>()).add(element); } } for (PsiElement element : elements2) { TreeHashResult result = myTreeHasher.hash(element, null, myNodeSpecificHasher); if (result != null) { int hash = result.getHash(); List<PsiElement> list = hashToElement.get(hash); if (list == null) { return false; } boolean found = false; for (Iterator<PsiElement> it = list.iterator(); it.hasNext();) { if (match(element, it.next())) { it.remove(); found = true; } } if (!found) { return false; } if (list.size() == 0) { hashToElement.remove(hash); } } } return hashToElement.size() == 0; }` | doMatchInAnyOrder |
| 294,099 | `NodeFilter () { return myNodeFilter; }` | getNodeFilter |
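The slice repeats several names (`rewind`, `getType`, `add`, `computeHash`, `hashCodeBlock`), i.e. overloads and copied accessors. Continuing the hypothetical `df` from the sketch above, a quick way to surface them:

```python
# Names appearing more than once in this slice point at overloads
# or duplicated accessors.
counts = df["name"].value_counts()
print(counts[counts > 1])
```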