idx int64 0 41.2k | question stringlengths 73 5.81k | target stringlengths 5 918 |
|---|---|---|
5,600 | static int decompressBBitSlotsWithHardCodes ( int [ ] decompressedSlots , int [ ] compBlock , int blockSize , int bits ) { int compressedBitSize = 0 ; PForDeltaUnpack128 . unpack ( decompressedSlots , compBlock , bits ) ; compressedBitSize = bits * blockSize ; return compressedBitSize ; } | Decompress the b - bit slots using hardcoded unpack methods |
5,601 | public static final int readBits ( int [ ] in , final int inOffset , final int bits ) { final int index = inOffset >>> 5 ; final int skip = inOffset & 0x1f ; int val = in [ index ] >>> skip ; if ( 32 - skip < bits ) { val |= ( in [ index + 1 ] << ( 32 - skip ) ) ; } return val & ( 0xffffffff >>> ( 32 - bits ) ) ; } | Read a certain number of bits of an integer into an integer array starting from the given start offset |
5,602 | public static void main ( final String [ ] args ) { int [ ] example = ( new ClusteredDataGenerator ( ) ) . generateClustered ( 20 , 1000 ) ; for ( int k = 0 ; k < example . length ; ++ k ) System . out . println ( example [ k ] ) ; } | Little test program . |
5,603 | public static int compress ( int [ ] in , int currentPos , int inlength , int out [ ] , int tmpoutpos ) { int origtmpoutpos = tmpoutpos ; int finalpos = currentPos + inlength ; outer : while ( currentPos < finalpos ) { mainloop : for ( int selector = 0 ; selector < 8 ; selector ++ ) { int res = 0 ; int compressedNum = codeNum [ selector ] ; if ( finalpos <= currentPos + compressedNum - 1 ) compressedNum = finalpos - currentPos ; int b = bitLength [ selector ] ; int max = 1 << b ; int i = 0 ; for ( ; i < compressedNum ; i ++ ) { if ( Util . smallerorequalthan ( max , in [ currentPos + i ] ) ) continue mainloop ; res = ( res << b ) + in [ currentPos + i ] ; } if ( compressedNum != codeNum [ selector ] ) res <<= ( codeNum [ selector ] - compressedNum ) * b ; res |= selector << 28 ; out [ tmpoutpos ++ ] = res ; currentPos += compressedNum ; continue outer ; } final int selector = 8 ; if ( in [ currentPos ] >= 1 << bitLength [ selector ] ) throw new RuntimeException ( "Too big a number" ) ; out [ tmpoutpos ++ ] = in [ currentPos ++ ] | ( selector << 28 ) ; } return tmpoutpos - origtmpoutpos ; } | Compress an integer array using Simple9 |
5,604 | public static void run ( final PrintWriter csvWriter , final int count , final int length ) { IntegerCODEC [ ] codecs = { new JustCopy ( ) , new BinaryPacking ( ) , new DeltaZigzagBinaryPacking ( ) , new DeltaZigzagVariableByte ( ) , new IntegratedBinaryPacking ( ) , new XorBinaryPacking ( ) , new FastPFOR128 ( ) , new FastPFOR ( ) } ; csvWriter . format ( "\"Dataset\",\"CODEC\",\"Bits per int\"," + "\"Compress speed (MiS)\",\"Decompress speed (MiS)\"\n" ) ; benchmark ( csvWriter , codecs , count , length , DEFAULT_MEAN , DEFAULT_RANGE ) ; benchmark ( csvWriter , codecs , count , length , DEFAULT_MEAN >> 5 , DEFAULT_RANGE ) ; IntegerCODEC [ ] codecs2 = { new JustCopy ( ) , new BinaryPacking ( ) , new DeltaZigzagBinaryPacking ( ) , new DeltaZigzagVariableByte ( ) , new IntegratedBinaryPacking ( ) , new XorBinaryPacking ( ) , new FastPFOR128 ( ) , new FastPFOR ( ) , } ; int freq = length / 4 ; benchmarkSine ( csvWriter , codecs2 , count , length , DEFAULT_MEAN >> 0 , DEFAULT_RANGE >> 0 , freq ) ; benchmarkSine ( csvWriter , codecs2 , count , length , DEFAULT_MEAN >> 5 , DEFAULT_RANGE >> 0 , freq ) ; benchmarkSine ( csvWriter , codecs2 , count , length , DEFAULT_MEAN >> 10 , DEFAULT_RANGE >> 0 , freq ) ; benchmarkSine ( csvWriter , codecs2 , count , length , DEFAULT_MEAN >> 0 , DEFAULT_RANGE >> 2 , freq ) ; benchmarkSine ( csvWriter , codecs2 , count , length , DEFAULT_MEAN >> 5 , DEFAULT_RANGE >> 2 , freq ) ; benchmarkSine ( csvWriter , codecs2 , count , length , DEFAULT_MEAN >> 10 , DEFAULT_RANGE >> 2 , freq ) ; benchmarkSine ( csvWriter , codecs2 , count , length , DEFAULT_MEAN >> 0 , DEFAULT_RANGE >> 4 , freq ) ; benchmarkSine ( csvWriter , codecs2 , count , length , DEFAULT_MEAN >> 5 , DEFAULT_RANGE >> 4 , freq ) ; benchmarkSine ( csvWriter , codecs2 , count , length , DEFAULT_MEAN >> 10 , DEFAULT_RANGE >> 4 , freq ) ; } | Run benchmark . |
5,605 | protected void checkDataDefinitions ( StreamTokenizer streamTokenizer ) throws FSMParseException { if ( acceptIndex == - 1 ) { throw new FSMParseException ( String . format ( ACCEPT_NOT_FOUND , acceptingDataVariableName ) , streamTokenizer ) ; } } | Checks the data definition by ensuring the index in the state vector containing acceptance information is defined . |
5,606 | protected void parseTransition ( StreamTokenizer streamTokenizer ) throws FSMParseException , IOException { try { if ( streamTokenizer . nextToken ( ) != StreamTokenizer . TT_WORD ) { throw new FSMParseException ( EXPECT_NUMBER , streamTokenizer ) ; } int from = Integer . parseInt ( streamTokenizer . sval ) ; if ( ! states . isEmpty ( ) && ! states . containsKey ( from ) ) { throw new FSMParseException ( String . format ( NO_SUCH_STATE , from ) , streamTokenizer ) ; } if ( streamTokenizer . nextToken ( ) != StreamTokenizer . TT_WORD ) { throw new FSMParseException ( EXPECT_NUMBER , streamTokenizer ) ; } int to = Integer . parseInt ( streamTokenizer . sval ) ; if ( ! states . isEmpty ( ) && ! states . containsKey ( to ) ) { throw new FSMParseException ( String . format ( NO_SUCH_STATE , to ) , streamTokenizer ) ; } if ( streamTokenizer . nextToken ( ) != '"' ) { throw new FSMParseException ( EXPECT_STRING , streamTokenizer ) ; } final I input = getInputParser ( ) . apply ( streamTokenizer . sval ) ; getInputs ( ) . add ( input ) ; final Integer prev = transitions . put ( Pair . of ( from , input ) , to ) ; if ( prev != null ) { throw new FSMParseException ( String . format ( NON_DETERMINISM_DETECTED , prev ) , streamTokenizer ) ; } } catch ( NumberFormatException nfe ) { throw new FSMParseException ( nfe , streamTokenizer ) ; } } | Parse a transition by searching the current line for the source state target state and the input . |
5,607 | public static < I , O > CompactMealy < I , O > pruneTransitionsWithOutput ( MealyMachine < ? , I , ? , O > in , Alphabet < I > inputs , Collection < ? super O > outputs ) { return filterByOutput ( in , inputs , o -> ! outputs . contains ( o ) ) ; } | Returns a Mealy machine with all transitions removed that have one of the specified output values . The resulting Mealy machine will not contain any unreachable states . |
5,608 | public static < I , O > CompactMealy < I , O > retainTransitionsWithOutput ( MealyMachine < ? , I , ? , O > in , Alphabet < I > inputs , Collection < ? super O > outputs ) { return filterByOutput ( in , inputs , outputs :: contains ) ; } | Returns a Mealy machine with all transitions removed that have an output not among the specified values . The resulting Mealy machine will not contain any unreachable states . |
5,609 | public Node < I > getChild ( int idx ) { if ( children == null ) { return null ; } return children . array [ idx ] ; } | Retrieves for a given index the respective child of this node . |
5,610 | public void setChild ( int idx , int alphabetSize , Node < I > child ) { if ( children == null ) { children = new ResizingArrayStorage < > ( Node . class , alphabetSize ) ; } children . array [ idx ] = child ; } | Sets the child for a given index . |
5,611 | public WordBuilder < I > append ( Word < ? extends I > word ) { int wLen = word . length ( ) ; ensureAdditionalCapacity ( wLen ) ; word . writeToArray ( 0 , array , length , wLen ) ; length += wLen ; return this ; } | Appends a word to the contents of the internal storage . |
5,612 | public final WordBuilder < I > append ( Word < ? extends I > ... words ) { if ( words . length == 0 ) { return this ; } int allLen = 0 ; for ( Word < ? extends I > w : words ) { allLen += w . length ( ) ; } ensureAdditionalCapacity ( allLen ) ; for ( Word < ? extends I > word : words ) { int wLen = word . length ( ) ; word . writeToArray ( 0 , array , length , wLen ) ; length += wLen ; } return this ; } | Appends several words to the contents of the internal storage . |
5,613 | public final WordBuilder < I > append ( I ... symbols ) { if ( symbols . length == 0 ) { return this ; } ensureAdditionalCapacity ( symbols . length ) ; System . arraycopy ( symbols , 0 , array , length , symbols . length ) ; length += symbols . length ; return this ; } | Appends several symbols to the contents of the internal storage . |
5,614 | public void ensureCapacity ( int cap ) { if ( array . length < cap ) { final int newCap = ArrayUtil . computeNewCapacity ( array . length , cap ) ; array = Arrays . copyOf ( array , newCap ) ; lock = false ; } } | Ensures that the internal storage has in total the given capacity . |
5,615 | public WordBuilder < I > truncate ( int truncLen ) { if ( truncLen >= length ) { return this ; } ensureUnlocked ( ) ; for ( int i = truncLen ; i < length ; i ++ ) { array [ i ] = null ; } length = truncLen ; return this ; } | Truncates the contents of the initial storage to the given length . |
5,616 | public WordBuilder < I > setSymbol ( int index , I symbol ) { ensureUnlocked ( ) ; array [ index ] = symbol ; return this ; } | Sets the symbol at the given index . Note that this index must exist . |
5,617 | public WordBuilder < I > reverse ( ) { ensureUnlocked ( ) ; int lowIdx = 0 , highIdx = length - 1 ; while ( lowIdx < highIdx ) { Object tmp = array [ lowIdx ] ; array [ lowIdx ++ ] = array [ highIdx ] ; array [ highIdx -- ] = tmp ; } return this ; } | Reverses the contents of the internal buffer . |
5,618 | public static < S , I , T , D > boolean breadthFirst ( TransitionSystem < S , ? super I , T > ts , int limit , Collection < ? extends I > inputs , TSTraversalVisitor < S , I , T , D > vis ) { Deque < BFSRecord < S , D > > bfsQueue = new ArrayDeque < > ( ) ; boolean complete = true ; int stateCount = 0 ; Holder < D > dataHolder = new Holder < > ( ) ; for ( S initS : ts . getInitialStates ( ) ) { dataHolder . value = null ; TSTraversalAction act = vis . processInitial ( initS , dataHolder ) ; switch ( act ) { case ABORT_TRAVERSAL : return complete ; case EXPLORE : if ( stateCount != limit ) { bfsQueue . offer ( new BFSRecord < > ( initS , dataHolder . value ) ) ; stateCount ++ ; } else { complete = false ; } break ; default : } } while ( ! bfsQueue . isEmpty ( ) ) { BFSRecord < S , D > current = bfsQueue . poll ( ) ; S state = current . state ; D data = current . data ; if ( ! vis . startExploration ( state , data ) ) { continue ; } inputs_loop : for ( I input : inputs ) { Collection < T > transitions = ts . getTransitions ( state , input ) ; for ( T trans : transitions ) { S succ = ts . getSuccessor ( trans ) ; dataHolder . value = null ; TSTraversalAction act = vis . processTransition ( state , data , input , trans , succ , dataHolder ) ; switch ( act ) { case ABORT_INPUT : continue inputs_loop ; case ABORT_STATE : break inputs_loop ; case ABORT_TRAVERSAL : return complete ; case EXPLORE : if ( stateCount != limit ) { bfsQueue . offer ( new BFSRecord < > ( succ , dataHolder . value ) ) ; stateCount ++ ; } else { complete = false ; } break ; case IGNORE : break ; default : throw new IllegalStateException ( "Unknown action " + act ) ; } } } } return complete ; } | Traverses the given transition system in a breadth - first fashion . The traversal is steered by the specified visitor . |
5,619 | public static RandomICAutomatonGenerator < Boolean , Void > forDFA ( ) { return new RandomICAutomatonGenerator < Boolean , Void > ( ) . withStateProperties ( Random :: nextBoolean ) ; } | Creates a random IC automaton generator instance for generating DFAs . States in generated automata will be accepting or rejecting with equal probability . |
5,620 | private State < O > updateSignature ( State < O > state , int idx , State < O > succ ) { StateSignature < O > sig = state . getSignature ( ) ; if ( sig . successors . array [ idx ] == succ ) { return state ; } register . remove ( sig ) ; if ( sig . successors . array [ idx ] != null ) { sig . successors . array [ idx ] . decreaseIncoming ( ) ; } sig . successors . array [ idx ] = succ ; succ . increaseIncoming ( ) ; sig . updateHashCode ( ) ; return replaceOrRegister ( state ) ; } | Update the signature of a state changing only the successor state of a single transition index . |
5,621 | private void updateInitSignature ( int idx , State < O > succ , O out ) { StateSignature < O > sig = init . getSignature ( ) ; State < O > oldSucc = sig . successors . array [ idx ] ; if ( oldSucc == succ && Objects . equals ( out , sig . outputs . array [ idx ] ) ) { return ; } if ( oldSucc != null ) { oldSucc . decreaseIncoming ( ) ; } sig . successors . array [ idx ] = succ ; sig . outputs . array [ idx ] = out ; succ . increaseIncoming ( ) ; } | Updates the signature of the initial state changing both the successor state and the output symbol . |
5,622 | public void setImage ( BufferedImage img ) { this . img = img ; Dimension dim ; if ( img != null ) { dim = new Dimension ( img . getWidth ( ) , img . getHeight ( ) ) ; } else { dim = new Dimension ( DEFAULT_WIDTH , DEFAULT_HEIGHT ) ; } setSize ( dim ) ; setPreferredSize ( dim ) ; repaint ( ) ; } | Sets the image to be displayed . |
5,623 | public static < S , I , O > LYResult < S , I , O > compute ( final MealyMachine < S , I , ? , O > automaton , final Alphabet < I > input ) { final SplitTreeResult < S , I , O > str = computeSplitTree ( automaton , input ) ; if ( str . isPresent ( ) ) { final Set < S > states = new HashSet < > ( automaton . getStates ( ) ) ; return new LYResult < > ( extractADS ( automaton , str . get ( ) , states , states . stream ( ) . collect ( Collectors . toMap ( Function . identity ( ) , Function . identity ( ) ) ) , null ) ) ; } return new LYResult < > ( str . getIndistinguishableStates ( ) ) ; } | Computes an ADS using the algorithm of Lee and Yannakakis . |
5,624 | @ SuppressWarnings ( "unchecked" ) public static < T > Mapping < T , T > identity ( ) { return ( Mapping < T , T > ) IDENTITY_MAPPING ; } | Retrieves the identity mapping which maps each domain value to itself . |
5,625 | @ SuppressWarnings ( "unchecked" ) public static < S , T extends S > Mapping < T , S > upcast ( ) { return ( Mapping < T , S > ) IDENTITY_MAPPING ; } | Returns a mapping that maps objects to a supertype representation . |
5,626 | public static < D , R > List < R > apply ( final Mapping < ? super D , R > mapping , final List < ? extends D > list ) { return new AbstractList < R > ( ) { public R get ( int index ) { return mapping . get ( list . get ( index ) ) ; } public int size ( ) { return list . size ( ) ; } } ; } | Applies a mapping to a list resulting in a list containing the result of applying the specified mapping to each element in the list . |
5,627 | public static < D , R > Iterable < R > apply ( final Mapping < ? super D , R > mapping , final Iterable < ? extends D > it ) { return ( ) -> apply ( mapping , it . iterator ( ) ) ; } | Applies a mapping to an iterable . The result is an iterable whose iterator returns the results of applying the specified mapping to each of the elements returned by the original iterable . |
5,628 | public Acceptance lookup ( Word < ? extends I > word ) { State s = getState ( word ) ; if ( s == null ) { return Acceptance . DONT_KNOW ; } return s . getAcceptance ( ) ; } | Checks the ternary acceptance status for a given word . |
5,629 | protected void writeEdge ( PrintWriter pw ) { pw . println ( "begin edge" ) ; pw . println ( "letter:letter" ) ; pw . println ( "end edge" ) ; } | Writes the type of the edge . A DFA edge contains one label named letter of type letter . |
5,630 | protected void writeETF ( PrintWriter pw , DFA < ? , I > dfa , Alphabet < I > inputs ) { writeETFInternal ( pw , dfa , inputs ) ; } | Write DFA specific parts in the ETF . |
5,631 | @ SuppressWarnings ( "unchecked" ) public V get ( Block < ? , ? > block ) { return ( V ) storage [ block . getId ( ) ] ; } | Retrieves a value . |
5,632 | public V put ( Block < ? , ? > block , V value ) { @ SuppressWarnings ( "unchecked" ) V old = ( V ) storage [ block . getId ( ) ] ; storage [ block . getId ( ) ] = value ; return old ; } | Stores a value . |
5,633 | public static < T , U extends T > int safeWrite ( ArrayWritable < U > aw , T [ ] array ) { int num = aw . size ( ) ; if ( num <= 0 ) { return 0 ; } if ( num > array . length ) { num = array . length ; } aw . writeToArray ( 0 , array , 0 , num ) ; return num ; } | Writes the complete container data to an array . This method ensures that the array s capacity is not exceeded . |
5,634 | public static < T , U extends T > int safeWrite ( int num , ArrayWritable < U > aw , T [ ] array ) { final int elementsToCopy = Math . min ( num , Math . min ( aw . size ( ) , array . length ) ) ; if ( elementsToCopy <= 0 ) { return 0 ; } aw . writeToArray ( 0 , array , 0 , elementsToCopy ) ; return elementsToCopy ; } | Writes a given maximum amount of data items from a container to an array . This method ensures that the array s capacity is not exceeded . |
5,635 | protected void writeETF ( PrintWriter pw , MealyMachine < ? , I , ? , O > mealy , Alphabet < I > inputs ) { writeETFInternal ( pw , mealy , inputs ) ; } | Write ETF parts specific for Mealy machines with IO semantics . |
5,636 | public R findCounterExample ( A automaton , Collection < ? extends I > inputs , P property ) { if ( automaton . size ( ) > size ) { counterExamples . clear ( ) ; } size = automaton . size ( ) ; return counterExamples . computeIfAbsent ( Pair . of ( inputs , property ) , key -> Optional . ofNullable ( modelChecker . findCounterExample ( automaton , inputs , property ) ) ) . orElse ( null ) ; } | The cached implementation for finding counter examples . |
5,637 | public static < N , E > SSSPResult < N , E > findSSSP ( Graph < N , E > graph , N init , EdgeWeights < E > edgeWeights ) { DijkstraSSSP < N , E > dijkstra = new DijkstraSSSP < > ( graph , init , edgeWeights ) ; dijkstra . findSSSP ( ) ; return dijkstra ; } | Search for the shortest paths from a single source node in a graph . |
5,638 | public void findSSSP ( ) { Record < N , E > initRec = new Record < > ( init , 0.0f ) ; if ( records . put ( init , initRec ) != null ) { throw new IllegalStateException ( "Search has already been performed!" ) ; } SmartDynamicPriorityQueue < Record < N , E > > pq = BinaryHeap . create ( graph . size ( ) ) ; initRec . ref = pq . referencedAdd ( initRec ) ; while ( ! pq . isEmpty ( ) ) { Record < N , E > rec = pq . extractMin ( ) ; float dist = rec . dist ; N node = rec . node ; for ( E edge : graph . getOutgoingEdges ( node ) ) { float w = edgeWeights . getEdgeWeight ( edge ) ; float newDist = dist + w ; N tgt = graph . getTarget ( edge ) ; Record < N , E > tgtRec = records . get ( tgt ) ; if ( tgtRec == null ) { tgtRec = new Record < > ( tgt , newDist , edge , rec ) ; tgtRec . ref = pq . referencedAdd ( tgtRec ) ; records . put ( tgt , tgtRec ) ; } else if ( newDist < tgtRec . dist ) { tgtRec . dist = newDist ; tgtRec . reach = edge ; tgtRec . depth = rec . depth + 1 ; tgtRec . parent = rec ; pq . keyChanged ( tgtRec . ref ) ; } } } } | Start the search . This method may only be invoked once . |
5,639 | public WeightedSupplier < T > add ( Supplier < ? extends T > supplier , int weight ) { if ( weight < 0 ) { return this ; } int low = totalWeight ; totalWeight += weight ; SubSupplier < T > ss = new SubSupplier < > ( low , totalWeight , supplier ) ; subSuppliers . add ( ss ) ; return this ; } | Adds a sub - supplier with a given weight . |
5,640 | public boolean insert ( E object ) { storage . ensureCapacity ( size + 1 ) ; storage . array [ size ++ ] = object ; if ( ! upHeap ( ) ) { size -- ; return false ; } return true ; } | Inserts an element into the queue . |
5,641 | private boolean upHeap ( ) { int currIdx = size - 1 ; E elem = storage . array [ currIdx ] ; int steps = 0 ; while ( currIdx > 0 ) { int parentIdx = currIdx / 2 ; E parent = storage . array [ parentIdx ] ; int cmp = comparator . compare ( elem , parent ) ; if ( cmp == 0 ) { storage . array [ parentIdx ] = mergeOp . merge ( parent , elem ) ; return false ; } else if ( cmp > 0 ) { break ; } currIdx = parentIdx ; steps ++ ; } currIdx = size - 1 ; for ( int i = 0 ; i < steps ; i ++ ) { int parentIdx = currIdx / 2 ; storage . array [ currIdx ] = storage . array [ parentIdx ] ; currIdx = parentIdx ; } storage . array [ currIdx ] = elem ; return true ; } | Moves the last element upwards in the heap until the heap condition is restored . |
5,642 | private void downHeap ( ) { E elem = storage . array [ 0 ] ; int currIdx = 0 ; while ( 2 * currIdx < size ) { int leftChildIdx = 2 * currIdx ; E leftChild = storage . array [ leftChildIdx ] ; if ( comparator . compare ( elem , leftChild ) > 0 ) { storage . array [ currIdx ] = leftChild ; storage . array [ leftChildIdx ] = elem ; currIdx = leftChildIdx ; } else if ( 2 * currIdx + 1 < size ) { int rightChildIdx = 2 * currIdx + 1 ; E rightChild = storage . array [ rightChildIdx ] ; if ( comparator . compare ( elem , rightChild ) > 0 ) { storage . array [ currIdx ] = rightChild ; storage . array [ rightChildIdx ] = elem ; currIdx = rightChildIdx ; } else { return ; } } else { return ; } } } | Sifts the topmost element down into the heap until the heap condition is restored . |
5,643 | public void setAll ( int value ) { for ( int i = 0 ; i < array . length ; i ++ ) { array [ i ] = value ; } } | Sets all the elements in the array to the specified value . |
5,644 | protected final void write ( PrintWriter printWriter , A a , Alphabet < I > inputs ) { writeState ( printWriter ) ; writeEdge ( printWriter ) ; writeETF ( printWriter , a , inputs ) ; printWriter . close ( ) ; } | Write the full ETF . |
5,645 | public static void initCompleteDeterministic ( PaigeTarjan pt , UniversalDeterministicAutomaton . FullIntAbstraction < ? , ? , ? > absAutomaton , AutomatonInitialPartitioning ip , boolean pruneUnreachable ) { initCompleteDeterministic ( pt , absAutomaton , ip . initialClassifier ( absAutomaton ) , pruneUnreachable ) ; } | Initializes the partition refinement data structure from a given abstracted deterministic automaton using a predefined initial partitioning mode . |
5,646 | public boolean addToBucket ( State < S , EP > state ) { boolean first = false ; if ( bucket . isEmpty ( ) ) { first = true ; } bucket . pushBack ( state ) ; return first ; } | Adds a state to this label s bucket . |
5,647 | public boolean addToSet ( State < S , EP > state ) { boolean first = list . isEmpty ( ) ; if ( first || ! setContents . get ( state . getId ( ) ) ) { list . add ( state ) ; setContents . set ( state . getId ( ) ) ; } return first ; } | Adds a state to the associated state set . Note that a state can be in the sets of various transition labels . |
5,648 | public static < T1 , T2 , T3 > Triple < T1 , T2 , T3 > of ( T1 first , T2 second , T3 third ) { return new Triple < > ( first , second , third ) ; } | Convenience function for creating a triple allowing the user to omit the type parameters . |
5,649 | public static void skip ( InputStream is ) throws IOException { while ( is . available ( ) > 0 ) { is . skip ( Long . MAX_VALUE ) ; } is . close ( ) ; } | Skips the content of the stream as long as there is data available . Afterwards the stream is closed . |
5,650 | public static void copy ( InputStream is , OutputStream os , boolean close ) throws IOException { byte [ ] buf = new byte [ DEFAULT_BUFFER_SIZE ] ; int len ; try { while ( ( len = is . read ( buf ) ) != - 1 ) { os . write ( buf , 0 , len ) ; } } finally { if ( close ) { closeQuietly ( is ) ; closeQuietly ( os ) ; } } } | Copies all data from the given input stream to the given output stream . |
5,651 | public static void copy ( Reader r , Writer w , boolean close ) throws IOException { char [ ] buf = new char [ DEFAULT_BUFFER_SIZE ] ; int len ; try { while ( ( len = r . read ( buf ) ) != - 1 ) { w . write ( buf , 0 , len ) ; } } finally { if ( close ) { closeQuietly ( r ) ; closeQuietly ( w ) ; } } } | Copies all text from the given reader to the given writer . |
5,652 | public static InputStream asBufferedInputStream ( File file ) throws IOException { return asBufferedInputStream ( Files . newInputStream ( file . toPath ( ) ) ) ; } | Returns an input stream that reads the contents of the given file . Additionally buffers the input stream to improve performance . |
5,653 | public static OutputStream asBufferedOutputStream ( File file ) throws IOException { return asBufferedOutputStream ( Files . newOutputStream ( file . toPath ( ) ) ) ; } | Returns an output stream that writes the contents to the given file . Additionally buffers the input stream to improve performance . |
5,654 | public static InputStream runDOT ( Reader r , String format , String ... additionalOpts ) throws IOException { String [ ] dotCommand = buildDOTCommand ( format , additionalOpts ) ; Process p = ProcessUtil . buildProcess ( dotCommand , r , null , LOGGER :: warn ) ; return p . getInputStream ( ) ; } | Invokes the GraphVIZ DOT utility for rendering graphs . |
5,655 | public static void runDOT ( Reader r , String format , File out ) throws IOException { String [ ] dotCommand = buildDOTCommand ( format , "-o" + out . getAbsolutePath ( ) ) ; try { ProcessUtil . invokeProcess ( dotCommand , r , LOGGER :: warn ) ; } catch ( InterruptedException ex ) { LOGGER . error ( "Interrupted while waiting for 'dot' process to exit." , ex ) ; } } | Invokes the GraphVIZ DOT utility for rendering graphs writing output to the specified file . |
5,656 | public static void renderDOT ( Reader r , boolean modal ) { final DOTComponent cmp = createDOTComponent ( r ) ; if ( cmp == null ) { return ; } final JDialog frame = new JDialog ( ( Dialog ) null , modal ) ; JScrollPane scrollPane = new JScrollPane ( cmp ) ; frame . setContentPane ( scrollPane ) ; frame . setMaximumSize ( new Dimension ( MAX_WIDTH , MAX_HEIGHT ) ) ; frame . pack ( ) ; JMenu menu = new JMenu ( "File" ) ; menu . add ( cmp . getSavePngAction ( ) ) ; menu . add ( cmp . getSaveDotAction ( ) ) ; menu . addSeparator ( ) ; menu . add ( new AbstractAction ( "Close" ) { private static final long serialVersionUID = - 1L ; public void actionPerformed ( ActionEvent e ) { frame . dispatchEvent ( new WindowEvent ( frame , WindowEvent . WINDOW_CLOSING ) ) ; } } ) ; JMenuBar menuBar = new JMenuBar ( ) ; menuBar . add ( menu ) ; frame . setJMenuBar ( menuBar ) ; frame . setDefaultCloseOperation ( JFrame . DISPOSE_ON_CLOSE ) ; frame . setVisible ( true ) ; frame . addKeyListener ( new KeyAdapter ( ) { public void keyTyped ( KeyEvent e ) { if ( e . getKeyChar ( ) == KeyEvent . VK_ESCAPE ) { frame . dispatchEvent ( new WindowEvent ( frame , WindowEvent . WINDOW_CLOSING ) ) ; } } } ) ; } | Renders a GraphVIZ description and displays it in a Swing window . |
5,657 | public static Writer createDotWriter ( final boolean modal ) { return new StringWriter ( ) { public void close ( ) throws IOException { renderDOT ( toString ( ) , modal ) ; super . close ( ) ; } } ; } | Creates a Writer that can be used to write a DOT description to . Upon closing the writer a window with the rendering result appears . |
5,658 | public void computeCoarsestStablePartition ( ) { Block curr ; while ( ( curr = poll ( ) ) != null ) { int blockRange = curr . high - curr . low ; int [ ] blockCopy = new int [ blockRange ] ; System . arraycopy ( blockData , curr . low , blockCopy , 0 , blockRange ) ; int predOfsBase = predOfsDataLow ; for ( int i = 0 ; i < numInputs ; i ++ ) { for ( int j = 0 ; j < blockRange ; j ++ ) { int state = blockCopy [ j ] ; int predOfsIdx = predOfsBase + state ; int predLow = predOfsData [ predOfsIdx ] , predHigh = predOfsData [ predOfsIdx + 1 ] ; for ( int k = predLow ; k < predHigh ; k ++ ) { int pred = predData [ k ] ; moveLeft ( pred ) ; } } predOfsBase += numStates ; processTouched ( ) ; } } } | Refines the partition until it stabilizes . |
5,659 | public Spliterator . OfInt statesInBlockSpliterator ( Block b ) { return Arrays . spliterator ( blockData , b . low , b . high ) ; } | Retrieves a spliterator for the contents of the given block . |
5,660 | public static < S , I , O > Optional < ADSNode < S , I , O > > compute ( final MealyMachine < S , I , ? , O > automaton , final Alphabet < I > input , final Set < S > states ) { if ( states . isEmpty ( ) ) { return Optional . empty ( ) ; } else if ( states . size ( ) == 1 ) { final S singleState = states . iterator ( ) . next ( ) ; return Optional . of ( new ADSLeafNode < > ( null , singleState ) ) ; } else if ( states . size ( ) == 2 ) { return StateEquivalence . compute ( automaton , input , states ) ; } else if ( states . size ( ) == automaton . getStates ( ) . size ( ) ) { final LYResult < S , I , O > result = LeeYannakakis . compute ( automaton , input ) ; if ( result . isPresent ( ) ) { return Optional . of ( result . get ( ) ) ; } return Optional . empty ( ) ; } else { return BacktrackingSearch . compute ( automaton , input , states ) ; } } | Compute an adaptive distinguishing sequence for the given automaton and the given set of states . |
5,661 | public static < T1 , T2 > Pair < T1 , T2 > of ( T1 first , T2 second ) { return new Pair < > ( first , second ) ; } | Convenience function for creating a pair allowing the user to omit the type parameters . |
5,662 | public static < S , I > boolean isPrefixClosed ( DFA < S , I > dfa , Alphabet < I > alphabet ) { return dfa . getStates ( ) . parallelStream ( ) . allMatch ( s -> dfa . isAccepting ( s ) || alphabet . parallelStream ( ) . noneMatch ( i -> dfa . isAccepting ( dfa . getSuccessors ( s , i ) ) ) ) ; } | Computes whether the language of the given DFA is prefix - closed . |
5,663 | public Block < S , L > getBlockForState ( S origState ) { State < S , L > state = stateStorage . get ( origState ) ; return state . getBlock ( ) ; } | Retrieves the block to which a given original state belongs . |
5,664 | public static < T extends Iterable < U > , U > Comparator < T > lexComparator ( Comparator < U > elemComp ) { return new LexComparator < > ( elemComp ) ; } | Retrieves a lexicographical comparator for the given type . |
5,665 | public static < T extends List < ? extends U > , U > Comparator < T > canonicalComparator ( Comparator < ? super U > elemComp ) { return new CanonicalComparator < > ( elemComp ) ; } | Retrieves a canonical comparator for the given list type . |
5,666 | public static < I > Word < I > fromSymbols ( I ... symbols ) { if ( symbols . length == 0 ) { return epsilon ( ) ; } if ( symbols . length == 1 ) { return fromLetter ( symbols [ 0 ] ) ; } return new SharedWord < > ( symbols . clone ( ) ) ; } | Creates a word from an array of symbols . |
5,667 | @ SuppressWarnings ( "unchecked" ) public static < I > Word < I > epsilon ( ) { return ( Word < I > ) EmptyWord . INSTANCE ; } | Retrieves the empty word . |
5,668 | public static < I > Word < I > fromArray ( I [ ] symbols , int offset , int length ) { if ( length == 0 ) { return epsilon ( ) ; } if ( length == 1 ) { return fromLetter ( symbols [ offset ] ) ; } Object [ ] array = new Object [ length ] ; System . arraycopy ( symbols , offset , array , 0 , length ) ; return new SharedWord < > ( array ) ; } | Creates a word from a subrange of an array of symbols . Note that to ensure immutability internally a copy of the array is made . |
5,669 | public static < I > Word < I > fromList ( List < ? extends I > symbolList ) { int siz = symbolList . size ( ) ; if ( siz == 0 ) { return epsilon ( ) ; } if ( siz == 1 ) { return Word . fromLetter ( symbolList . get ( 0 ) ) ; } return new SharedWord < > ( symbolList ) ; } | Creates a word from a list of symbols . |
5,670 | @ SuppressWarnings ( "unchecked" ) protected Word < I > concatInternal ( Word < ? extends I > ... words ) { if ( words . length == 0 ) { return this ; } int len = length ( ) ; int totalSize = len ; for ( Word < ? extends I > word : words ) { totalSize += word . length ( ) ; } Object [ ] array = new Object [ totalSize ] ; writeToArray ( 0 , array , 0 , len ) ; int currOfs = len ; for ( Word < ? extends I > w : words ) { int wLen = w . length ( ) ; w . writeToArray ( 0 , array , currOfs , wLen ) ; currOfs += wLen ; } return new SharedWord < > ( array ) ; } | Realizes the concatenation of this word with several other words . |
5,671 | public boolean isPrefixOf ( Word < ? > other ) { int len = length ( ) , otherLen = other . length ( ) ; if ( otherLen < len ) { return false ; } for ( int i = 0 ; i < len ; i ++ ) { I sym1 = getSymbol ( i ) ; Object sym2 = other . getSymbol ( i ) ; if ( ! Objects . equals ( sym1 , sym2 ) ) { return false ; } } return true ; } | Checks if this word is a prefix of another word . |
5,672 | public Word < I > longestCommonPrefix ( Word < ? > other ) { int len = length ( ) , otherLen = other . length ( ) ; int maxIdx = ( len < otherLen ) ? len : otherLen ; int i = 0 ; while ( i < maxIdx ) { I sym1 = getSymbol ( i ) ; Object sym2 = other . getSymbol ( i ) ; if ( ! Objects . equals ( sym1 , sym2 ) ) { break ; } i ++ ; } return prefix ( i ) ; } | Determines the longest common prefix of this word and another word . |
5,673 | public Word < I > longestCommonSuffix ( Word < ? > other ) { int len = length ( ) , otherLen = other . length ( ) ; int minLen = ( len < otherLen ) ? len : otherLen ; int idx1 = len , idx2 = otherLen ; int i = 0 ; while ( i < minLen ) { I sym1 = getSymbol ( -- idx1 ) ; Object sym2 = other . getSymbol ( -- idx2 ) ; if ( ! Objects . equals ( sym1 , sym2 ) ) { break ; } i ++ ; } return suffix ( i ) ; } | Determines the longest common suffix of this word and another word . |
5,674 | public int [ ] toIntArray ( ToIntFunction < ? super I > toInt ) { int len = length ( ) ; int [ ] result = new int [ len ] ; int i = 0 ; for ( I sym : this ) { int symIdx = toInt . applyAsInt ( sym ) ; result [ i ++ ] = symIdx ; } return result ; } | Transforms this word into an array of integers using the specified function for translating an individual symbol to an integer . |
5,675 | public < T > Word < T > transform ( Function < ? super I , ? extends T > transformer ) { int len = length ( ) ; Object [ ] array = new Object [ len ] ; int i = 0 ; for ( I symbol : this ) { array [ i ++ ] = transformer . apply ( symbol ) ; } return new SharedWord < > ( array ) ; } | Transforms a word symbol - by - symbol using the specified transformation function . |
5,676 | protected State updateSignature ( State state , Acceptance acc ) { assert ( state != init ) ; StateSignature sig = state . getSignature ( ) ; if ( sig . acceptance == acc ) { return state ; } register . remove ( sig ) ; sig . acceptance = acc ; sig . updateHashCode ( ) ; return replaceOrRegister ( state ) ; } | Updates the signature for a given state . |
5,677 | protected State replaceOrRegister ( State state ) { StateSignature sig = state . getSignature ( ) ; State other = register . get ( sig ) ; if ( other != null ) { if ( state != other ) { for ( int i = 0 ; i < sig . successors . array . length ; i ++ ) { State succ = sig . successors . array [ i ] ; if ( succ != null ) { succ . decreaseIncoming ( ) ; } } } return other ; } register . put ( sig , state ) ; return state ; } | Returns the canonical state for the given state's signature or registers the state as canonical if no state with that signature exists .
5,678 | private < E > Collection < Block < S , L > > initialize ( UniversalGraph < S , E , ? , L > graph , Collection < ? extends S > initialNodes ) { Iterable < ? extends S > origStates ; if ( initialNodes == null || initialNodes . isEmpty ( ) ) { origStates = graph . getNodes ( ) ; } else { origStates = GraphTraversal . depthFirstOrder ( graph , initialNodes ) ; } Map < L , TransitionLabel < S , L > > transitionMap = new HashMap < > ( ) ; stateStorage = graph . createStaticNodeMapping ( ) ; int numStates = 0 ; for ( S origState : origStates ) { State < S , L > state = new State < > ( numStates ++ , origState ) ; stateStorage . put ( origState , state ) ; stateList . add ( state ) ; } InitialPartitioning < S , L > initPartitioning = new HashMapInitialPartitioning < > ( graph ) ; for ( State < S , L > state : stateList ) { S origState = state . getOriginalState ( ) ; Block < S , L > block = initPartitioning . getBlock ( origState ) ; block . addState ( state ) ; for ( E edge : graph . getOutgoingEdges ( origState ) ) { S origTarget = graph . getTarget ( edge ) ; State < S , L > target = stateStorage . get ( origTarget ) ; L label = graph . getEdgeProperty ( edge ) ; TransitionLabel < S , L > transition = transitionMap . computeIfAbsent ( label , TransitionLabel :: new ) ; Edge < S , L > edgeObj = new Edge < > ( state , target , transition ) ; state . addOutgoingEdge ( edgeObj ) ; target . addIncomingEdge ( edgeObj ) ; } } stateList . quickClear ( ) ; return initPartitioning . getInitialBlocks ( ) ; } | Builds the initial data structures and performs the initial partitioning . |
5,679 | private void addToPartition ( Block < S , L > block ) { ElementReference ref = partition . referencedAdd ( block ) ; block . setPartitionReference ( ref ) ; } | Adds a block to the partition . |
5,680 | private void addToSplitterQueue ( Block < S , L > block ) { ElementReference ref = splitters . referencedAdd ( block ) ; block . setSplitterQueueReference ( ref ) ; } | Adds a block as a potential splitter . |
5,681 | private boolean removeFromSplitterQueue ( Block < S , L > block ) { ElementReference ref = block . getSplitterQueueReference ( ) ; if ( ref == null ) { return false ; } splitters . remove ( ref ) ; block . setSplitterQueueReference ( null ) ; return true ; } | Removes a block from the splitter queue . This is done when it is split completely and thus no longer existent .
5,682 | private void updateBlocks ( ) { for ( Block < S , L > block : splitBlocks ) { int inSubBlocks = block . getElementsInSubBlocks ( ) ; if ( inSubBlocks == 0 ) { continue ; } boolean blockRemains = ( inSubBlocks < block . size ( ) ) ; boolean reuseBlock = ! blockRemains ; List < UnorderedCollection < State < S , L > > > subBlocks = block . getSubBlocks ( ) ; if ( ! blockRemains && subBlocks . size ( ) == 1 ) { block . clearSubBlocks ( ) ; continue ; } Iterator < UnorderedCollection < State < S , L > > > subBlockIt = subBlocks . iterator ( ) ; if ( reuseBlock ) { UnorderedCollection < State < S , L > > first = subBlockIt . next ( ) ; block . getStates ( ) . swap ( first ) ; updateBlockReferences ( block ) ; } while ( subBlockIt . hasNext ( ) ) { UnorderedCollection < State < S , L > > subBlockStates = subBlockIt . next ( ) ; if ( blockRemains ) { for ( State < S , L > state : subBlockStates ) { block . removeState ( state . getBlockReference ( ) ) ; } } Block < S , L > subBlock = new Block < > ( numBlocks ++ , subBlockStates ) ; updateBlockReferences ( subBlock ) ; newBlocks . add ( subBlock ) ; addToPartition ( subBlock ) ; } newBlocks . add ( block ) ; block . clearSubBlocks ( ) ; if ( removeFromSplitterQueue ( block ) ) { addAllToSplitterQueue ( newBlocks ) ; } else { addAllButLargest ( newBlocks ) ; } newBlocks . clear ( ) ; } splitBlocks . clear ( ) ; } | This method performs the actual splitting of blocks using the sub block information stored in each block object . |
5,683 | private static < S , L > void updateBlockReferences ( Block < S , L > block ) { UnorderedCollection < State < S , L > > states = block . getStates ( ) ; for ( ElementReference ref : states . references ( ) ) { State < S , L > state = states . get ( ref ) ; state . setBlockReference ( ref ) ; state . setBlock ( block ) ; } } | Sets the blockReference - attribute of each state in the collection to the corresponding ElementReference of the collection . |
5,684 | private void addAllToSplitterQueue ( Collection < Block < S , L > > blocks ) { for ( Block < S , L > block : blocks ) { addToSplitterQueue ( block ) ; } } | Adds all blocks of a given collection to the splitter queue .
5,685 | public static < N , E > List < N > toNodeList ( List < E > edgeList , Graph < N , E > graph , N init ) { List < N > result = new ArrayList < > ( edgeList . size ( ) + 1 ) ; result . add ( init ) ; for ( E edge : edgeList ) { N tgt = graph . getTarget ( edge ) ; result . add ( tgt ) ; } return result ; } | Converts a list of edges into a corresponding list of nodes . Note that the list of nodes is always one larger than the respective list of edges . |
5,686 | public static < N , E > APSPResult < N , E > findAPSP ( Graph < N , E > graph , EdgeWeights < E > edgeWeights ) { return FloydWarshallAPSP . findAPSP ( graph , edgeWeights ) ; } | Computes the shortest paths between all pairs of nodes in a graph using the Floyd - Warshall dynamic programming algorithm . Note that the result is only correct if the graph contains no cycles with negative edge weight sums . |
5,687 | public static < S , I , T > void findCharacterizingSet ( UniversalDeterministicAutomaton < S , I , T , ? , ? > automaton , Collection < ? extends I > inputs , Collection < ? super Word < I > > result ) { findIncrementalCharacterizingSet ( automaton , inputs , Collections . emptyList ( ) , result ) ; } | Computes a characterizing set for the given automaton . |
5,688 | public static < S , I , T > void findCharacterizingSet ( UniversalDeterministicAutomaton < S , I , T , ? , ? > automaton , Collection < ? extends I > inputs , S state , Collection < ? super Word < I > > result ) { Object prop = automaton . getStateProperty ( state ) ; List < S > currentBlock = new ArrayList < > ( ) ; boolean multipleStateProps = false ; for ( S s : automaton ) { if ( Objects . equals ( s , state ) ) { continue ; } Object sProp = automaton . getStateProperty ( s ) ; if ( ! Objects . equals ( sProp , prop ) ) { multipleStateProps = true ; } else { currentBlock . add ( s ) ; } } if ( multipleStateProps ) { result . add ( Word . epsilon ( ) ) ; } while ( ! currentBlock . isEmpty ( ) ) { Iterator < S > it = currentBlock . iterator ( ) ; Word < I > suffix = null ; while ( it . hasNext ( ) && suffix == null ) { S s = it . next ( ) ; suffix = Automata . findSeparatingWord ( automaton , state , s , inputs ) ; } if ( suffix == null ) { return ; } result . add ( suffix ) ; List < Object > trace = buildTrace ( automaton , state , suffix ) ; List < S > nextBlock = new ArrayList < > ( ) ; while ( it . hasNext ( ) ) { S s = it . next ( ) ; if ( checkTrace ( automaton , s , suffix , trace ) ) { nextBlock . add ( s ) ; } } currentBlock = nextBlock ; } } | Computes a characterizing set for a specified state in the given automaton . |
5,689 | private void downHeap ( int idx ) { Reference < E > e = entries . array [ idx ] ; int iter = idx ; while ( hasChildren ( iter ) ) { int cidx = leftChild ( iter ) ; Reference < E > c = entries . array [ cidx ] ; if ( hasRightChild ( iter ) ) { int rcidx = rightChild ( iter ) ; Reference < E > rc = entries . array [ rcidx ] ; if ( compare ( rc , c ) < 0 ) { cidx = rcidx ; c = rc ; } } if ( compare ( e , c ) <= 0 ) { break ; } entries . array [ cidx ] = e ; entries . array [ iter ] = c ; c . index = iter ; iter = cidx ; } e . index = iter ; } | Move an element downwards inside the heap until all of its children have a key greater or equal to its own . |
5,690 | private int compare ( Reference < E > e1 , Reference < E > e2 ) { return comparator . compare ( e1 . element , e2 . element ) ; } | Compares the referenced elements . |
5,691 | private void upHeap ( int idx ) { Reference < E > e = entries . array [ idx ] ; int iter = idx ; while ( hasParent ( iter ) ) { int pidx = parent ( iter ) ; Reference < E > p = entries . array [ pidx ] ; if ( compare ( e , p ) < 0 ) { entries . array [ pidx ] = e ; entries . array [ iter ] = p ; p . index = iter ; iter = parent ( iter ) ; } else { break ; } } e . index = iter ; } | Move an element upwards inside the heap until it has a parent with a key less or equal to its own . |
5,692 | public static < I > void cover ( DeterministicAutomaton < ? , I , ? > automaton , Collection < ? extends I > inputs , Collection < ? super Word < I > > states , Collection < ? super Word < I > > transitions ) { cover ( automaton , inputs , states :: add , transitions :: add ) ; } | Utility method that allows to compute a state and transition cover simultaneously . |
5,693 | public static < S , I > boolean incrementalStateCover ( DeterministicAutomaton < S , I , ? > automaton , Collection < ? extends I > inputs , Collection < ? extends Word < I > > oldStates , Collection < ? super Word < I > > newStates ) { MutableMapping < S , Record < S , I > > reach = automaton . createStaticStateMapping ( ) ; boolean augmented = false ; Queue < Record < S , I > > bfsQueue = new ArrayDeque < > ( ) ; buildReachFromStateCover ( reach , bfsQueue , automaton , oldStates , Record :: new ) ; S init = automaton . getInitialState ( ) ; if ( reach . get ( init ) == null ) { Record < S , I > rec = new Record < > ( init , Word . epsilon ( ) ) ; reach . put ( init , rec ) ; bfsQueue . add ( rec ) ; newStates . add ( Word . epsilon ( ) ) ; augmented = true ; } Record < S , I > curr ; while ( ( curr = bfsQueue . poll ( ) ) != null ) { S state = curr . state ; Word < I > as = curr . accessSequence ; for ( I in : inputs ) { S succ = automaton . getSuccessor ( state , in ) ; if ( succ == null ) { continue ; } if ( reach . get ( succ ) == null ) { Word < I > succAs = as . append ( in ) ; Record < S , I > succRec = new Record < > ( succ , succAs ) ; reach . put ( succ , succRec ) ; bfsQueue . add ( succRec ) ; newStates . add ( succAs ) ; augmented = true ; } } } return augmented ; } | Computes an incremental state cover for a given automaton i . e . a cover that only contains the missing sequences for obtaining a complete state cover . |
5,694 | public static < I > boolean incrementalTransitionCover ( DeterministicAutomaton < ? , I , ? > automaton , Collection < ? extends I > inputs , Collection < ? extends Word < I > > oldTransCover , Collection < ? super Word < I > > newTransCover ) { final int oldTransSize = newTransCover . size ( ) ; incrementalCover ( automaton , inputs , Collections . emptySet ( ) , oldTransCover , w -> { } , newTransCover :: add ) ; return oldTransSize < newTransCover . size ( ) ; } | Computes an incremental transition cover for a given automaton i . e . a cover that only contains the missing sequences for obtaining a complete transition cover . |
5,695 | public static < I > boolean incrementalStructuralCover ( DeterministicAutomaton < ? , I , ? > automaton , Collection < ? extends I > inputs , Collection < ? extends Word < I > > oldCover , Collection < ? super Word < I > > newCover ) { final int oldCoverSize = newCover . size ( ) ; incrementalCover ( automaton , inputs , oldCover , Collections . emptySet ( ) , newCover :: add , newCover :: add ) ; return oldCoverSize < newCover . size ( ) ; } | Computes an incremental structural cover for a given automaton i . e . a cover that only contains the missing sequences for obtaining a complete structural cover . |
5,696 | public static < I > boolean incrementalCover ( DeterministicAutomaton < ? , I , ? > automaton , Collection < ? extends I > inputs , Collection < ? extends Word < I > > oldStateCover , Collection < ? extends Word < I > > oldTransCover , Collection < ? super Word < I > > newStateCover , Collection < ? super Word < I > > newTransCover ) { final int oldStateSize = newStateCover . size ( ) ; final int oldTransSize = newTransCover . size ( ) ; incrementalCover ( automaton , inputs , oldStateCover , oldTransCover , newStateCover :: add , newTransCover :: add ) ; return oldStateSize < newStateCover . size ( ) || oldTransSize < newTransCover . size ( ) ; } | Utility method that allows to compute an incremental state and transition cover simultaneously . |
5,697 | public static < I > List < Word < I > > stateCover ( DeterministicAutomaton < ? , I , ? > automaton , Collection < ? extends I > inputs ) { final List < Word < I > > result = new ArrayList < > ( automaton . size ( ) ) ; Covers . stateCover ( automaton , inputs , result ) ; return result ; } | Convenient method for computing a state cover . |
5,698 | public SuffixTrieNode < I > add ( I symbol , SuffixTrieNode < I > parent ) { SuffixTrieNode < I > n = new SuffixTrieNode < > ( symbol , parent ) ; if ( nodes != null ) { nodes . add ( n ) ; } return n ; } | Adds a word to the trie . |
5,699 | private static String generateRandomAutomaton ( Random random ) throws IOException { final Automaton < ? , Integer , ? > automaton ; if ( random . nextBoolean ( ) ) { automaton = RandomAutomata . randomDFA ( random , AUTOMATON_SIZE , INPUT_ALPHABET ) ; } else { automaton = RandomAutomata . randomMealy ( random , AUTOMATON_SIZE , INPUT_ALPHABET , OUTPUT_ALPHABET ) ; } final StringWriter writer = new StringWriter ( ) ; GraphDOT . write ( automaton , INPUT_ALPHABET , writer ) ; return writer . toString ( ) ; } | Generate DOT code for some random automaton . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.