idx int64 0 165k | question stringlengths 73 5.81k | target stringlengths 5 918 |
|---|---|---|
7,900 | public Iterator < Class > getClassesToDeleteIterator ( ) { if ( fixture instanceof CleanableFixture ) { return cleanableFixture ( ) . getClassesToDeleteIterator ( ) ; } return Collections . < Class > emptyList ( ) . iterator ( ) ; } | Returns an ordered iterator of mapping classes to delete from database . |
7,901 | private String buildCsvValue ( final Field field , final String fieldValue ) { return ( areTextValuesWrapped && field . getType ( ) . equals ( String . class ) || isValueWrappable . test ( fieldValue ) ) ? wrapWithQuotes ( fieldValue ) : fieldValue ; } | Build correct final export field value in CSV format Check for wrap option for field value |
7,902 | private String generateCsvHeader ( final IClassContainer container ) { final String separatorAsStr = String . valueOf ( separator ) ; return container . getFormatSupported ( Format . CSV ) . entrySet ( ) . stream ( ) . map ( e -> e . getValue ( ) . getExportName ( ) ) . collect ( Collectors . joining ( separatorAsStr ) ) ; } | Generates header for CSV file |
7,903 | public SyntacticCategory assignFeatures ( Map < Integer , String > assignedFeatures , Map < Integer , Integer > relabeledFeatures ) { String newFeatureValue = featureValue ; int newFeatureVariable = featureVariable ; if ( assignedFeatures . containsKey ( featureVariable ) ) { newFeatureValue = assignedFeatures . get ( featureVariable ) ; newFeatureVariable = - 1 ; } else if ( relabeledFeatures . containsKey ( featureVariable ) ) { newFeatureVariable = relabeledFeatures . get ( newFeatureVariable ) ; } if ( isAtomic ( ) ) { return SyntacticCategory . createAtomic ( value , newFeatureValue , newFeatureVariable ) ; } else { SyntacticCategory assignedReturn = returnType . assignFeatures ( assignedFeatures , relabeledFeatures ) ; SyntacticCategory assignedArgument = argumentType . assignFeatures ( assignedFeatures , relabeledFeatures ) ; return SyntacticCategory . createFunctional ( direction , assignedReturn , assignedArgument , newFeatureValue , newFeatureVariable ) ; } } | Assigns values to or relabels feature variables in this category . |
7,904 | public SyntacticCategory assignAllFeatures ( String value ) { Set < Integer > featureVars = Sets . newHashSet ( ) ; getAllFeatureVariables ( featureVars ) ; Map < Integer , String > valueMap = Maps . newHashMap ( ) ; for ( Integer var : featureVars ) { valueMap . put ( var , value ) ; } return assignFeatures ( valueMap , Collections . < Integer , Integer > emptyMap ( ) ) ; } | Assigns value to all unfilled feature variables . |
7,905 | public SyntacticCategory getWithoutFeatures ( ) { if ( isAtomic ( ) ) { return createAtomic ( value , DEFAULT_FEATURE_VALUE , - 1 ) ; } else { return createFunctional ( getDirection ( ) , returnType . getWithoutFeatures ( ) , argumentType . getWithoutFeatures ( ) ) ; } } | Get a syntactic category identical to this one except with all feature values replaced by the default value . |
7,906 | public List < SyntacticCategory > getArgumentList ( ) { if ( isAtomic ( ) ) { return Lists . newArrayList ( ) ; } else { List < SyntacticCategory > args = getReturn ( ) . getArgumentList ( ) ; args . add ( getArgument ( ) ) ; return args ; } } | Gets the sequence of arguments that this category accepts . Note that the returned arguments themselves may be functional types . |
7,907 | private static MaxMarginalSet cliqueTreeToMaxMarginalSet ( CliqueTree cliqueTree , FactorGraph originalFactorGraph ) { for ( int i = 0 ; i < cliqueTree . numFactors ( ) ; i ++ ) { computeMarginal ( cliqueTree , i , false ) ; } return new FactorMaxMarginalSet ( cliqueTree , originalFactorGraph . getConditionedValues ( ) ) ; } | Retrieves max marginals from the given clique tree . |
7,908 | public String sign ( String content , PrivateKey privateKey ) { if ( content == null ) { return null ; } byte [ ] bytes = content . getBytes ( StandardCharsets . UTF_8 ) ; InputStream input = new ByteArrayInputStream ( bytes ) ; return sign ( input , privateKey ) ; } | Generates a digital signature for the given string . |
7,909 | public boolean verify ( String content , PublicKey publicKey , String signature ) { byte [ ] bytes = content . getBytes ( StandardCharsets . UTF_8 ) ; InputStream input = new ByteArrayInputStream ( bytes ) ; return verify ( input , publicKey , signature ) ; } | Verifies whether the given content matches the given signature . |
7,910 | public final Assignment union ( Assignment other ) { Preconditions . checkNotNull ( other ) ; if ( other . size ( ) == 0 ) { return this ; } if ( vars . length == 0 ) { return other ; } int [ ] otherNums = other . getVariableNumsArray ( ) ; int [ ] myNums = getVariableNumsArray ( ) ; Object [ ] otherVals = other . getValuesArray ( ) ; Object [ ] myVals = getValuesArray ( ) ; int [ ] mergedNums = new int [ otherNums . length + myNums . length ] ; Object [ ] mergedVals = new Object [ otherNums . length + myNums . length ] ; int i = 0 ; int j = 0 ; int numFilled = 0 ; while ( i < otherNums . length && j < myNums . length ) { if ( otherNums [ i ] < myNums [ j ] ) { mergedNums [ numFilled ] = otherNums [ i ] ; mergedVals [ numFilled ] = otherVals [ i ] ; i ++ ; numFilled ++ ; } else if ( otherNums [ i ] > myNums [ j ] ) { mergedNums [ numFilled ] = myNums [ j ] ; mergedVals [ numFilled ] = myVals [ j ] ; j ++ ; numFilled ++ ; } else { Preconditions . checkState ( false , "Cannot combine non-disjoint assignments: %s with %s" , this , other ) ; } } while ( i < otherNums . length ) { mergedNums [ numFilled ] = otherNums [ i ] ; mergedVals [ numFilled ] = otherVals [ i ] ; i ++ ; numFilled ++ ; } while ( j < myNums . length ) { mergedNums [ numFilled ] = myNums [ j ] ; mergedVals [ numFilled ] = myVals [ j ] ; j ++ ; numFilled ++ ; } Preconditions . checkState ( numFilled == mergedNums . length ) ; return Assignment . fromSortedArrays ( mergedNums , mergedVals ) ; } | Combines two assignments into a single joint assignment to all of the variables in each assignment . The two assignments must contain disjoint sets of variables . |
7,911 | public final Assignment removeAll ( Collection < Integer > varNumsToRemove ) { return removeAll ( Ints . toArray ( varNumsToRemove ) ) ; } | Returns a copy of this assignment without any assignments to the variable numbers in varNumsToRemove |
7,912 | public Assignment mapVariables ( Map < Integer , Integer > varMap ) { int [ ] newVarNums = new int [ vars . length ] ; Object [ ] newValues = new Object [ vars . length ] ; int numFilled = 0 ; for ( int i = 0 ; i < vars . length ; i ++ ) { if ( varMap . containsKey ( vars [ i ] ) ) { newVarNums [ numFilled ] = varMap . get ( vars [ i ] ) ; newValues [ numFilled ] = values [ i ] ; numFilled ++ ; } } if ( numFilled < newVarNums . length ) { newVarNums = Arrays . copyOf ( newVarNums , numFilled ) ; newValues = Arrays . copyOf ( newValues , numFilled ) ; } return Assignment . fromUnsortedArrays ( newVarNums , newValues ) ; } | Return a new assignment where each var num has been replaced by its value in varMap . |
7,913 | private String [ ] curateIgnoredColumns ( String [ ] ignoredColumns ) { if ( ignoredColumns == null ) { return null ; } else { String [ ] curated = new String [ ignoredColumns . length ] ; for ( int i = 0 ; i < ignoredColumns . length ; i ++ ) { if ( ignoredColumns [ i ] != null ) { String [ ] splinters = ignoredColumns [ i ] . split ( "\\." ) ; if ( splinters != null && splinters . length > 0 ) { String last = splinters [ splinters . length - 1 ] ; curated [ i ] = last ; } } } return curated ; } } | while ignoring specific table |
7,914 | private boolean hasFileChangedUnexpectedly ( ) { synchronized ( this ) { if ( mDiskWritesInFlight > 0 ) { if ( DEBUG ) System . out . println ( "disk write in flight, not unexpected." ) ; return false ; } } if ( ! mFile . canRead ( ) ) { return true ; } synchronized ( this ) { return mStatTimestamp != mFile . lastModified ( ) || mStatSize != mFile . length ( ) ; } } | we didn t instigate . |
7,915 | private void enqueueDiskWrite ( final MemoryCommitResult mcr , final Runnable postWriteRunnable ) { final Runnable writeToDiskRunnable = new Runnable ( ) { public void run ( ) { synchronized ( mWritingToDiskLock ) { writeToFile ( mcr ) ; } synchronized ( XmlStorage . this ) { mDiskWritesInFlight -- ; } if ( postWriteRunnable != null ) { postWriteRunnable . run ( ) ; } } } ; final boolean isFromSyncCommit = ( postWriteRunnable == null ) ; if ( isFromSyncCommit ) { boolean wasEmpty = false ; synchronized ( XmlStorage . this ) { wasEmpty = mDiskWritesInFlight == 1 ; } if ( wasEmpty ) { writeToDiskRunnable . run ( ) ; return ; } } QueuedWork . singleThreadExecutor ( ) . execute ( writeToDiskRunnable ) ; } | Enqueue an already - committed - to - memory result to be written to disk . |
7,916 | public static List < String > readLines ( String filename ) { List < String > lines = Lists . newArrayList ( ) ; try { BufferedReader in = new BufferedReader ( new FileReader ( filename ) ) ; String line ; while ( ( line = in . readLine ( ) ) != null ) { if ( line . trim ( ) . length ( ) > 0 ) { lines . add ( line ) ; } } in . close ( ) ; } catch ( IOException e ) { throw new RuntimeException ( e ) ; } return lines ; } | Read the lines of a file into a list of strings with each line represented as its own string . |
7,917 | public static < I , O > List < Example < DynamicAssignment , DynamicAssignment > > reformatTrainingData ( List < ? extends TaggedSequence < I , O > > sequences , FeatureVectorGenerator < LocalContext < I > > featureGen , Function < ? super LocalContext < I > , ? extends Object > inputGen , DynamicVariableSet modelVariables , I startInput , O startLabel ) { Preconditions . checkArgument ( ! ( startInput == null ^ startLabel == null ) ) ; DynamicVariableSet plate = modelVariables . getPlate ( PLATE_NAME ) ; VariableNumMap x = plate . getFixedVariables ( ) . getVariablesByName ( INPUT_FEATURES_NAME ) ; VariableNumMap xInput = plate . getFixedVariables ( ) . getVariablesByName ( INPUT_NAME ) ; VariableNumMap y = plate . getFixedVariables ( ) . getVariablesByName ( OUTPUT_NAME ) ; List < Example < DynamicAssignment , DynamicAssignment > > examples = Lists . newArrayList ( ) ; for ( TaggedSequence < I , O > sequence : sequences ) { List < Assignment > inputs = Lists . newArrayList ( ) ; if ( startInput != null ) { List < I > newItems = Lists . newArrayList ( ) ; newItems . add ( startInput ) ; newItems . addAll ( sequence . getItems ( ) ) ; LocalContext < I > startContext = new ListLocalContext < I > ( newItems , 0 ) ; Assignment inputFeatureVector = x . outcomeArrayToAssignment ( featureGen . apply ( startContext ) ) ; Assignment inputElement = xInput . outcomeArrayToAssignment ( inputGen . apply ( startContext ) ) ; Assignment firstLabel = y . outcomeArrayToAssignment ( startLabel ) ; inputs . add ( Assignment . unionAll ( inputFeatureVector , inputElement , firstLabel ) ) ; } List < LocalContext < I > > contexts = sequence . getLocalContexts ( ) ; for ( int i = 0 ; i < contexts . size ( ) ; i ++ ) { Assignment inputFeatureVector = x . outcomeArrayToAssignment ( featureGen . apply ( contexts . get ( i ) ) ) ; Assignment inputElement = xInput . outcomeArrayToAssignment ( inputGen . apply ( contexts . get ( i ) ) ) ; inputs . add ( inputFeatureVector . 
union ( inputElement ) ) ; } DynamicAssignment input = DynamicAssignment . createPlateAssignment ( PLATE_NAME , inputs ) ; DynamicAssignment output = DynamicAssignment . EMPTY ; if ( sequence . getLabels ( ) != null ) { List < Assignment > outputs = Lists . newArrayList ( ) ; if ( startInput != null ) { outputs . add ( Assignment . EMPTY ) ; } List < O > labels = sequence . getLabels ( ) ; for ( int i = 0 ; i < contexts . size ( ) ; i ++ ) { outputs . add ( y . outcomeArrayToAssignment ( labels . get ( i ) ) ) ; } output = DynamicAssignment . createPlateAssignment ( PLATE_NAME , outputs ) ; } examples . add ( Example . create ( input , output ) ) ; } return examples ; } | Converts training data as sequences into assignments that can be used for parameter estimation . |
7,918 | public static < I , O > List < Example < DynamicAssignment , DynamicAssignment > > reformatTrainingDataPerItem ( List < ? extends TaggedSequence < I , O > > sequences , FeatureVectorGenerator < LocalContext < I > > featureGen , Function < ? super LocalContext < I > , ? extends Object > inputGen , DynamicVariableSet modelVariables , I startInput , O startLabel ) { DynamicVariableSet plate = modelVariables . getPlate ( PLATE_NAME ) ; VariableNumMap x = plate . getFixedVariables ( ) . getVariablesByName ( INPUT_FEATURES_NAME ) ; VariableNumMap xInput = plate . getFixedVariables ( ) . getVariablesByName ( INPUT_NAME ) ; VariableNumMap y = plate . getFixedVariables ( ) . getVariablesByName ( OUTPUT_NAME ) ; ReformatPerItemMapper < I , O > mapper = new ReformatPerItemMapper < I , O > ( featureGen , inputGen , x , xInput , y , startInput , startLabel ) ; List < List < Example < DynamicAssignment , DynamicAssignment > > > exampleLists = MapReduceConfiguration . getMapReduceExecutor ( ) . map ( sequences , mapper ) ; List < Example < DynamicAssignment , DynamicAssignment > > examples = Lists . newArrayList ( ) ; for ( List < Example < DynamicAssignment , DynamicAssignment > > exampleList : exampleLists ) { examples . addAll ( exampleList ) ; } return examples ; } | Creates training examples from sequential data where each example involves predicting a single label given the current input and previous label . Such examples are suitable for training locally - normalized sequence models such as HMMs and MEMMs . |
7,919 | public static < I , O > FactorGraphSequenceTagger < I , O > trainSequenceModel ( ParametricFactorGraph sequenceModelFamily , List < Example < DynamicAssignment , DynamicAssignment > > examples , Class < O > outputClass , FeatureVectorGenerator < LocalContext < I > > featureGen , Function < ? super LocalContext < I > , ? extends Object > inputGen , I startInput , O startLabel , GradientOptimizer optimizer , boolean useMaxMargin ) { SufficientStatistics parameters = estimateParameters ( sequenceModelFamily , examples , optimizer , useMaxMargin ) ; DynamicFactorGraph factorGraph = sequenceModelFamily . getModelFromParameters ( parameters ) ; return new FactorGraphSequenceTagger < I , O > ( sequenceModelFamily , parameters , factorGraph , featureGen , inputGen , outputClass , new JunctionTree ( ) , new JunctionTree ( true ) , startInput , startLabel ) ; } | Trains a sequence model . |
7,920 | public static synchronized void onDestroy ( Context context ) { if ( sRetryReceiver != null ) { Log . v ( TAG , "Unregistering receiver" ) ; context . unregisterReceiver ( sRetryReceiver ) ; sRetryReceiver = null ; } } | Clear internal resources . |
7,921 | static String setRegistrationId ( Context context , String regId ) { final SharedPreferences prefs = getGCMPreferences ( context ) ; String oldRegistrationId = prefs . getString ( PROPERTY_REG_ID , "" ) ; int appVersion = getAppVersion ( context ) ; Log . v ( TAG , "Saving regId on app version " + appVersion ) ; Editor editor = prefs . edit ( ) ; editor . putString ( PROPERTY_REG_ID , regId ) ; editor . putInt ( PROPERTY_APP_VERSION , appVersion ) ; editor . commit ( ) ; return oldRegistrationId ; } | Sets the registration id in the persistence store . |
7,922 | public static void setRegisteredOnServer ( Context context , boolean flag ) { final SharedPreferences prefs = getGCMPreferences ( context ) ; Editor editor = prefs . edit ( ) ; editor . putBoolean ( PROPERTY_ON_SERVER , flag ) ; long lifespan = getRegisterOnServerLifespan ( context ) ; long expirationTime = System . currentTimeMillis ( ) + lifespan ; Log . v ( TAG , "Setting registeredOnServer status as " + flag + " until " + new Timestamp ( expirationTime ) ) ; editor . putLong ( PROPERTY_ON_SERVER_EXPIRATION_TIME , expirationTime ) ; editor . commit ( ) ; } | Sets whether the device was successfully registered in the server side . |
7,923 | private static int getAppVersion ( Context context ) { try { PackageInfo packageInfo = context . getPackageManager ( ) . getPackageInfo ( context . getPackageName ( ) , 0 ) ; return packageInfo . versionCode ; } catch ( NameNotFoundException e ) { throw new RuntimeException ( "Coult not get package name: " + e ) ; } } | Gets the application version . |
7,924 | static int getBackoff ( Context context ) { final SharedPreferences prefs = getGCMPreferences ( context ) ; return prefs . getInt ( BACKOFF_MS , DEFAULT_BACKOFF_MS ) ; } | Gets the current backoff counter . |
7,925 | private double denseTensorInnerProduct ( DenseTensor other ) { double [ ] otherValues = other . values ; int length = values . length ; Preconditions . checkArgument ( otherValues . length == length ) ; double innerProduct = 0.0 ; for ( int i = 0 ; i < length ; i ++ ) { innerProduct += values [ i ] * otherValues [ i ] ; } return innerProduct ; } | Implementation of inner product where both tensors are dense and have the same dimensionality . These properties enable the inner product to be computed extremely quickly by iterating over both dense arrays of values . |
7,926 | private List < Annotation > buildDeclaredAnnotationList ( final Annotation annotation ) { final List < Annotation > list = Arrays . stream ( annotation . annotationType ( ) . getDeclaredAnnotations ( ) ) . collect ( Collectors . toList ( ) ) ; list . add ( annotation ) ; return list ; } | Retrieve declared annotations from parent one and build set of them all |
7,927 | public static < I , O > CrossValidationEvaluation < I , O > kFold ( Collection < Example < I , O > > data , int k ) { Preconditions . checkNotNull ( data ) ; Preconditions . checkArgument ( k > 1 ) ; int numTrainingPoints = data . size ( ) ; List < Collection < Example < I , O > > > folds = Lists . newArrayList ( ) ; for ( List < Example < I , O > > fold : Iterables . partition ( data , ( int ) Math . ceil ( numTrainingPoints / k ) ) ) { folds . add ( fold ) ; } return new CrossValidationEvaluation < I , O > ( folds ) ; } | Construct a cross validation evaluation from a data set by partitioning it into k folds . The elements in data should be in a random order . |
7,928 | public String [ ] getLocalColumns ( String modelName ) { List < String > columnList = new ArrayList < String > ( ) ; for ( int i = 0 ; i < this . hasOne . length ; i ++ ) { if ( modelName . equalsIgnoreCase ( this . hasOne [ i ] ) ) { columnList . add ( hasOneLocalColumn [ i ] ) ; } } for ( int j = 0 ; j < this . hasMany . length ; j ++ ) { if ( modelName . equalsIgnoreCase ( hasMany [ j ] ) ) { columnList . add ( hasManyLocalColumn [ j ] ) ; } } if ( columnList . size ( ) == 0 ) { return null ; } return columnList . toArray ( new String [ columnList . size ( ) ] ) ; } | get the foreign key column of this table definition from this refered table |
7,929 | public String [ ] getPlainProperties ( ) { String [ ] commonColumns = { "created" , "createdby" , "updated" , "updatedby" , "isactive" } ; String [ ] referencedProperties = getReferencedColumns ( ) ; List < String > plainProperties = new ArrayList < String > ( ) ; for ( String att : attributes ) { if ( CStringUtils . indexOf ( referencedProperties , att ) >= 0 ) { } else if ( att . endsWith ( "_id" ) ) { ; } else if ( CStringUtils . indexOf ( commonColumns , att ) >= 0 ) { ; } else { plainProperties . add ( att ) ; } } return plainProperties . toArray ( new String [ plainProperties . size ( ) ] ) ; } | Get the properties that are pertaining to the model this does not include linker columns |
7,930 | public void add ( Object entity , String name ) { Result result = new Result ( entity , name ) ; results . add ( result ) ; } | Add a new entity to an extraction collection . |
7,931 | public List < Object > getEntities ( String name ) { List < Object > entitiesList = new LinkedList < Object > ( ) ; for ( Result result : results ) { if ( result . getResultName ( ) . equals ( name ) ) { entitiesList . add ( result . getObject ( ) ) ; } } return entitiesList ; } | Returns all entities stored in the given collection . |
7,932 | public List < Object > getEntities ( ) { List < Object > entitiesList = new LinkedList < Object > ( ) ; for ( Result result : results ) { entitiesList . add ( result . getObject ( ) ) ; } return entitiesList ; } | Returns all entities . |
7,933 | private void simpleIncrement ( TensorBase other , double multiplier ) { Preconditions . checkArgument ( Arrays . equals ( other . getDimensionNumbers ( ) , getDimensionNumbers ( ) ) ) ; if ( other instanceof DenseTensorBase ) { double [ ] otherTensorValues = ( ( DenseTensorBase ) other ) . values ; Preconditions . checkArgument ( otherTensorValues . length == values . length ) ; int length = values . length ; for ( int i = 0 ; i < length ; i ++ ) { values [ i ] += otherTensorValues [ i ] * multiplier ; } } else { int otherSize = other . size ( ) ; for ( int i = 0 ; i < otherSize ; i ++ ) { long keyNum = other . indexToKeyNum ( i ) ; double value = other . getByIndex ( i ) ; values [ keyNumToIndex ( keyNum ) ] += value * multiplier ; } } } | Increment algorithm for the case where both tensors have the same set of dimensions . |
7,934 | private ModelDef getOverrideModel ( ModelDef model , ModelMetaData explicitMeta2 ) { if ( explicitMeta2 == null ) { return model ; } List < ModelDef > explicitList = explicitMeta2 . getModelDefinitionList ( ) ; for ( ModelDef explicitModel : explicitList ) { if ( explicitModel . getModelName ( ) . equals ( model . getModelName ( ) ) ) { return explicitModel ; } } return model ; } | When the mode is in the explecit model use it instead else use what s the in database |
7,935 | private String chooseFirstOccurence ( String [ ] among_owners , String [ ] within_listgroup , String prior_tableName ) { int index = CStringUtils . indexOf ( within_listgroup , prior_tableName ) ; for ( int i = index - 1 ; i >= 0 ; i -- ) { String closest = within_listgroup [ i ] ; if ( CStringUtils . indexOf ( among_owners , closest ) >= 0 ) { return closest ; } } return null ; } | Choose among the owners that are present within the list group that occurs before the tableName and closest to it |
7,936 | private Map < String , Set < String [ ] > > transformGroup ( List < String [ ] > tableGroups ) { Map < String , Set < String [ ] > > model_tableGroup = new LinkedHashMap < String , Set < String [ ] > > ( ) ; for ( String [ ] list : tableGroups ) { for ( String table : list ) { if ( model_tableGroup . containsKey ( table ) ) { Set < String [ ] > tableGroupSet = model_tableGroup . get ( table ) ; tableGroupSet . add ( list ) ; } else { Set < String [ ] > tableGroupSet = new HashSet < String [ ] > ( ) ; tableGroupSet . add ( list ) ; model_tableGroup . put ( table , tableGroupSet ) ; } } } return model_tableGroup ; } | transform the listing of table groups according to table |
7,937 | public TrustGraphNodeId getNextHop ( final TrustGraphAdvertisement message ) { final TrustGraphNodeId prev = message . getSender ( ) ; return getNextHop ( prev ) ; } | Determine the next hop for a message . |
7,938 | public TrustGraphNodeId getNextHop ( final TrustGraphNodeId priorNeighbor ) { if ( priorNeighbor != null ) { return routingTable . get ( priorNeighbor ) ; } else { return null ; } } | Determine the next TrustGraphNodeId in a route containing a given neighbor as the prior node . The next hop is the TrustGraphNodeId paired with the given neighbor in the table . |
7,939 | public void addNeighbor ( final TrustGraphNodeId neighbor ) { if ( neighbor == null ) { return ; } synchronized ( this ) { if ( contains ( neighbor ) ) { return ; } if ( routingTable . isEmpty ( ) ) { routingTable . put ( neighbor , neighbor ) ; } else { Map . Entry < TrustGraphNodeId , TrustGraphNodeId > split = randomRoute ( ) ; TrustGraphNodeId splitKey = split . getKey ( ) ; TrustGraphNodeId splitVal = split . getValue ( ) ; routingTable . put ( neighbor , splitVal ) ; routingTable . replace ( splitKey , neighbor ) ; } addNeighborToOrdering ( neighbor ) ; } } | Add a single TrustGraphNodeId to the routing table . |
7,940 | protected Map . Entry < TrustGraphNodeId , TrustGraphNodeId > randomRoute ( ) { final int routeNumber = rng . nextInt ( routingTable . size ( ) ) ; final Iterator < Map . Entry < TrustGraphNodeId , TrustGraphNodeId > > routes = routingTable . entrySet ( ) . iterator ( ) ; for ( int i = 0 ; i < routeNumber ; i ++ ) { routes . next ( ) ; } return routes . next ( ) ; } | internal helper method to pick a random route from the table . |
7,941 | public void addNeighbors ( final Collection < TrustGraphNodeId > neighborsIn ) { if ( neighborsIn . isEmpty ( ) ) { return ; } synchronized ( this ) { final LinkedList < TrustGraphNodeId > newNeighbors = new LinkedList < TrustGraphNodeId > ( ) ; for ( TrustGraphNodeId n : neighborsIn ) { if ( ! contains ( n ) ) { newNeighbors . add ( n ) ; } } if ( newNeighbors . size ( ) == 0 ) { return ; } else if ( newNeighbors . size ( ) == 1 ) { addNeighbor ( newNeighbors . get ( 0 ) ) ; return ; } Map . Entry < TrustGraphNodeId , TrustGraphNodeId > split = null ; if ( ! routingTable . isEmpty ( ) ) { split = randomRoute ( ) ; } Collections . shuffle ( newNeighbors , rng ) ; final Iterator < TrustGraphNodeId > i = newNeighbors . iterator ( ) ; TrustGraphNodeId key = i . next ( ) ; while ( i . hasNext ( ) ) { final TrustGraphNodeId val = i . next ( ) ; routingTable . put ( key , val ) ; key = val ; } if ( split == null ) { routingTable . put ( newNeighbors . getLast ( ) , newNeighbors . getFirst ( ) ) ; } else { TrustGraphNodeId splitKey = split . getKey ( ) ; TrustGraphNodeId splitVal = split . getValue ( ) ; routingTable . put ( newNeighbors . getLast ( ) , splitVal ) ; routingTable . replace ( splitKey , newNeighbors . getFirst ( ) ) ; } addNeighborsToOrdering ( newNeighbors ) ; } } | Add a group of TrustGraphNeighbors to the routing table . |
7,942 | public void removeNeighbor ( final TrustGraphNodeId neighbor ) { synchronized ( this ) { if ( ! contains ( neighbor ) ) { return ; } removeNeighborFromOrdering ( neighbor ) ; removeNeighborFromRoutingTable ( neighbor ) ; } } | Remove a single TrustGraphNodeId from the routing table |
7,943 | public void removeNeighbors ( final Collection < TrustGraphNodeId > neighbors ) { synchronized ( this ) { removeNeighborsFromOrdering ( neighbors ) ; for ( TrustGraphNodeId n : neighbors ) { removeNeighborFromRoutingTable ( n ) ; } } } | Remove a set of TrustGraphNeighbors from the routing table . |
7,944 | protected void addNeighborToOrdering ( TrustGraphNodeId neighbor ) { int position = rng . nextInt ( orderedNeighbors . size ( ) + 1 ) ; if ( position == orderedNeighbors . size ( ) ) { orderedNeighbors . add ( neighbor ) ; } else { orderedNeighbors . add ( position , neighbor ) ; } } | Internal policy method . |
7,945 | public RandomRoutingTable . Snapshot snapshot ( ) { synchronized ( this ) { return new Snapshot ( new HashMap < TrustGraphNodeId , TrustGraphNodeId > ( routingTable ) , new ArrayList < TrustGraphNodeId > ( orderedNeighbors ) ) ; } } | Creates a snapshot of the current state of the routing table . A mapping X - > Y between two TrustGraphNeighbors represents that the next hop of a message received from the neighbor X is Y . |
7,946 | public List < IncEvalState > getStates ( ) { List < IncEvalState > states = Lists . newArrayList ( ) ; getStatesHelper ( states ) ; return states ; } | Gets the result of all evaluations anywhere in this tree . |
7,947 | protected RunnerResult actuallyExecute ( JobIdentityAttr identityProvider , String cronExp , VaryingCronOption cronOption , TaskExecutionContext context , OptionalThing < LaunchNowOption > nowOption ) { adjustThreadNameIfNeeds ( cronOption ) ; return runJob ( identityProvider , cronExp , cronOption , context , nowOption ) ; } | in execution lock cannot use varingCron here |
7,948 | protected boolean forwardAdvertisement ( TrustGraphAdvertisement message ) { if ( ! shouldForward ( message ) ) { return false ; } TrustGraphNodeId nextHop = getRoutingTable ( ) . getNextHop ( message ) ; if ( nextHop == null ) { return false ; } sendAdvertisement ( message , nextHop , message . getInboundTTL ( ) - 1 ) ; return true ; } | This method performs the forwarding behavior for a received message . The message is forwarded to the next hop on the route according to the routing table with ttl decreased by 1 . |
7,949 | public Observable < BackendUser > getUserFromAuthToken ( final String authToken ) { return Observable . create ( new Observable . OnSubscribe < BackendUser > ( ) { public void call ( Subscriber < ? super BackendUser > subscriber ) { try { setLoginState ( LOGGING_IN ) ; logger . debug ( "getWebService(): " + getWebService ( ) ) ; logger . debug ( "getuserFromAuthToken: " + authToken ) ; ValidCredentials validCredentials = getWebService ( ) . getUserFromAuthToken ( authToken ) . toBlocking ( ) . first ( ) ; if ( validCredentials == null ) throw new Exception ( "Null User Returned" ) ; subscriber . onNext ( setUser ( validCredentials ) ) ; } catch ( Exception e ) { setLoginState ( LOGGED_OUT ) ; subscriber . onError ( e ) ; } } } ) ; } | Login using user authentication token |
7,950 | public Observable < BackendUser > getUserFromRecoveryToken ( final String recoveryToken ) { return Observable . create ( new Observable . OnSubscribe < BackendUser > ( ) { public void call ( Subscriber < ? super BackendUser > subscriber ) { try { setLoginState ( LOGGING_IN ) ; ValidCredentials validCredentials = getWebService ( ) . getUserFromRecoveryToken ( recoveryToken ) . toBlockingObservable ( ) . first ( ) ; if ( validCredentials == null ) throw new Exception ( "Null User Returned" ) ; subscriber . onNext ( setUser ( validCredentials ) ) ; } catch ( Exception e ) { setLoginState ( LOGGED_OUT ) ; subscriber . onError ( e ) ; } } } ) ; } | Login using user recovery token |
7,951 | public void logout ( ) { List < LocalCredentials > accountList = accountStorage . getAccounts ( ) ; if ( accountList . size ( ) == 1 ) { String userName = accountList . get ( 0 ) . getName ( ) ; logger . debug ( "logout: " + userName ) ; accountStorage . removeAccount ( userName ) ; user = null ; } setLoginState ( LoginState . LOGGED_OUT ) ; } | Log out current user if logged in . |
7,952 | public PublicKey getServerKey ( ) { logger . debug ( "getServerKey()" ) ; try { if ( serverPublicKey != null ) return serverPublicKey ; byte [ ] pubKey = getWebService ( ) . getPublicKey ( ) ; logger . debug ( "pubKey: " + String . valueOf ( pubKey ) ) ; serverPublicKey = Crypto . pubKeyFromBytes ( pubKey ) ; return serverPublicKey ; } catch ( Exception e ) { logger . error ( "Failed to getServerKey()" , e ) ; } return null ; } | Returns server public key . Queries server or local copy . |
7,953 | public SignUpResponse signUp ( SignUpCredentials loginCreds ) { logger . debug ( "signUp(" + loginCreds + ")" ) ; try { setLoginState ( LOGGING_IN ) ; PublicKey key = Crypto . pubKeyFromBytes ( getWebService ( ) . getPublicKey ( ) ) ; loginCreds . encryptPassword ( key ) ; logger . debug ( "Login Creds: " + loginCreds ) ; ServerResponse < ValidCredentials > response = ServerResponse . from ( ValidCredentials . class , getWebService ( ) . userSignUp ( loginCreds ) ) ; logger . debug ( "Response: " + response . getStatus ( ) ) ; BackendUser user ; if ( response . getStatus ( ) . isSuccess ( ) ) { user = setUser ( response . get ( ) ) ; } else { return new SignUpResponse ( null , Status . SERVER_ERROR_INTERNAL , " null user returned" ) ; } return new SignUpResponse ( user , response . getStatus ( ) , response . getError ( ) ) ; } catch ( Exception e ) { logger . error ( "SignUp Failure(" + loginCreds . getEmailAddress ( ) + ")" , e ) ; return new SignUpResponse ( null , Status . SERVER_ERROR_INTERNAL , e . getLocalizedMessage ( ) ) ; } } | Syncronously attempt to create user account |
7,954 | public Observable < BackendUser > signUpASync ( final SignUpCredentials signInCreds ) { logger . debug ( "signUpASync(" + signInCreds + ")" ) ; try { setLoginState ( LOGGING_IN ) ; return getWebService ( ) . getPublicKeyA ( ) . flatMap ( new Func1 < byte [ ] , Observable < SignUpCredentials > > ( ) { public Observable < SignUpCredentials > call ( byte [ ] bytes ) { try { PublicKey key = Crypto . pubKeyFromBytes ( bytes ) ; signInCreds . encryptPassword ( key ) ; return Observable . from ( signInCreds ) ; } catch ( Exception e ) { return Observable . error ( e ) ; } } } ) . flatMap ( new Func1 < SignUpCredentials , Observable < ValidCredentials > > ( ) { public Observable < ValidCredentials > call ( SignUpCredentials o ) { return getWebService ( ) . userSignUpA ( signInCreds ) ; } } ) . map ( new Func1 < ValidCredentials , BackendUser > ( ) { public BackendUser call ( ValidCredentials validCredentials ) { return setUser ( validCredentials ) ; } } ) . subscribeOn ( Schedulers . io ( ) ) . observeOn ( config . observeOn ( ) ) ; } catch ( Exception e ) { logger . error ( "Failed to signUp(" + signInCreds . getEmailAddress ( ) + ")" , e ) ; return Observable . error ( e ) ; } } | Asyncronously attempt to create user account |
7,955 | public SignInResponse login ( final LoginCredentials loginCreds ) { logger . debug ( "login(" + loginCreds + ")" ) ; try { setLoginState ( LOGGING_IN ) ; if ( ! loginCreds . isEncrypted ( ) ) { PublicKey key = Crypto . pubKeyFromBytes ( getWebService ( ) . getPublicKey ( ) ) ; loginCreds . encryptPassword ( key ) ; } logger . debug ( "Login Creds: " + loginCreds ) ; ServerResponse < ValidCredentials > response = ServerResponse . from ( ValidCredentials . class , getWebService ( ) . login ( loginCreds ) ) ; BackendUser user ; if ( response . getStatus ( ) . isSuccess ( ) ) { user = setUser ( response . get ( ) ) ; } else { logger . error ( "Login Failure(" + loginCreds . getEmailAddress ( ) + "): " + response . getStatus ( ) . getCode ( ) + " " + response . getError ( ) ) ; setLoginState ( LOGGED_OUT ) ; return new SignInResponse ( null , Status . SERVER_ERROR_INTERNAL , "Login failed" ) ; } return new SignInResponse ( user , response . getStatus ( ) , response . getError ( ) ) ; } catch ( Exception e ) { logger . error ( "Login Failure(" + loginCreds . getEmailAddress ( ) + ")" , e ) ; setLoginState ( LOGGED_OUT ) ; return new SignInResponse ( null , Status . SERVER_ERROR_INTERNAL , e . getLocalizedMessage ( ) ) ; } } | Syncronously attempt to log into user account |
/**
 * Asynchronously attempt to log into a user account.
 *
 * <p>Pipeline: fetch the server public key, encrypt the password if it is not
 * already encrypted, submit the login request, then install the validated
 * credentials as the current user. Work runs on the I/O scheduler; results are
 * observed on the configured scheduler. On a synchronous setup failure the
 * login state is reset and the error is emitted.
 *
 * @param loginCreds login credentials; the password is encrypted in place if needed
 * @return an Observable emitting the logged-in user, or an error
 */
public Observable<BackendUser> loginASync(final LoginCredentials loginCreds) {
    logger.debug("loginASync(" + loginCreds + ")");
    try {
        setLoginState(LOGGING_IN);
        return getWebService().getPublicKeyA()
            .flatMap(new Func1<byte[], Observable<LoginCredentials>>() {
                public Observable<LoginCredentials> call(byte[] bytes) {
                    try {
                        // Only encrypt once; credentials may arrive pre-encrypted.
                        if (!loginCreds.isEncrypted()) {
                            PublicKey key = Crypto.pubKeyFromBytes(bytes);
                            loginCreds.encryptPassword(key);
                        }
                        return Observable.from(loginCreds);
                    } catch (Exception e) {
                        return Observable.error(e);
                    }
                }
            })
            .flatMap(new Func1<LoginCredentials, Observable<ValidCredentials>>() {
                public Observable<ValidCredentials> call(LoginCredentials credentials) {
                    return getWebService().loginA(credentials);
                }
            })
            .map(new Func1<ValidCredentials, BackendUser>() {
                public BackendUser call(ValidCredentials validCredentials) {
                    return setUser(validCredentials);
                }
            })
            .subscribeOn(Schedulers.io())
            .observeOn(config.observeOn());
    } catch (Exception e) {
        logger.error("Failed to SignIn(" + loginCreds.getEmailAddress() + ")", e);
        setLoginState(LOGGED_OUT);
        return Observable.error(e);
    }
}
7,957 | public Observable < Void > sendUserData ( BackendUser backendUser ) { return getWebService ( ) . sendUserData ( isLoggedIn ( ) , backendUser . getOwnerId ( ) + "" , backendUser . getUserData ( ) ) . subscribeOn ( config . subscribeOn ( ) ) . observeOn ( config . observeOn ( ) ) ; } | Update remote server with new user data . |
7,958 | public Observable < Map < String , Object > > getUserData ( BackendUser backendUser ) { return getWebService ( ) . getUserData ( isLoggedIn ( ) , backendUser . getOwnerId ( ) + "" ) . subscribeOn ( config . subscribeOn ( ) ) . observeOn ( config . observeOn ( ) ) ; } | Query server to return user data for the logged in user |
7,959 | public void addLoginListener ( LoginListener listener ) { logger . debug ( "addLoginListener" ) ; Subscription subscription = loginEventPublisher . subscribeOn ( config . subscribeOn ( ) ) . observeOn ( config . observeOn ( ) ) . subscribe ( listener ) ; listener . setSubscription ( subscription ) ; } | Add loginListener to listen to login events |
/**
 * Initializes program state using any options processable by this class:
 * the pseudorandom seed, the map-reduce executor, and the training log function.
 *
 * <p>NOTE(review): this method mixes the {@code options} parameter with the
 * {@code parsedOptions} field — confirm both refer to the same parsed option set.
 *
 * @param options the parsed command-line options
 */
private void processOptions(OptionSet options) {
    Pseudorandom.get().setSeed(options.valueOf(randomSeed));
    if (opts.contains(CommonOptions.MAP_REDUCE)) {
        MapReduceConfiguration.setMapReduceExecutor(new LocalMapReduceExecutor(
            options.valueOf(mrMaxThreads), options.valueOf(mrMaxBatchesPerThread)));
    }
    if (opts.contains(CommonOptions.STOCHASTIC_GRADIENT) || opts.contains(CommonOptions.LBFGS)) {
        // Brief logging suppresses per-iteration output entirely.
        LogFunction log = null;
        if (parsedOptions.has(logBrief)) {
            log = new NullLogFunction();
        } else {
            log = new DefaultLogFunction(parsedOptions.valueOf(logInterval), false,
                options.valueOf(logParametersInterval), options.valueOf(logParametersDir));
        }
        LogFunctions.setLogFunction(log);
    }
}
7,961 | public static String toBase64 ( byte [ ] byteArray ) { String result = null ; if ( byteArray != null ) { result = Base64 . encodeBase64String ( byteArray ) ; } return result ; } | Encodes the given byte array as a base - 64 String . |
7,962 | public static byte [ ] fromBase64 ( String base64String ) { byte [ ] result = null ; if ( base64String != null ) { result = Base64 . decodeBase64 ( base64String ) ; } return result ; } | Decodes the given base - 64 string to a byte array . |
7,963 | public static byte [ ] fromString ( String unicodeString ) { byte [ ] result = null ; if ( unicodeString != null ) { result = unicodeString . getBytes ( StandardCharsets . UTF_8 ) ; } return result ; } | Converts the given String to a byte array . |
/**
 * Run type inference over {@code expression}.
 *
 * <p>Builds the scope structure, allocates a fresh type variable for every
 * variable-binding location, accumulates constraints by walking the expression,
 * solves them against the type declaration, and finally substitutes the solved
 * bindings back into every recorded expression type. Results are stored in the
 * {@code scopes}, {@code expressionTypes}, {@code constraints}, and
 * {@code solved} fields.
 */
public void infer() {
    scopes = StaticAnalysis.getScopes(expression);
    expressionTypes = Maps.newHashMap();
    constraints = ConstraintSet.empty();
    // One fresh type variable per distinct binding location.
    for (Scope scope : scopes.getScopes()) {
        for (String variable : scope.getBoundVariables()) {
            int location = scope.getBindingIndex(variable);
            if (!expressionTypes.containsKey(location)) {
                expressionTypes.put(location, constraints.getFreshTypeVar());
            }
        }
    }
    populateExpressionTypes(0);
    solved = constraints.solve(typeDeclaration);
    // Replace type variables with their solved bindings in place.
    for (int k : expressionTypes.keySet()) {
        expressionTypes.put(k, expressionTypes.get(k).substitute(solved.getBindings()));
    }
}
7,965 | private static byte [ ] concat ( byte [ ] a , byte [ ] b ) { byte [ ] c = new byte [ a . length + b . length ] ; System . arraycopy ( a , 0 , c , 0 , a . length ) ; System . arraycopy ( b , 0 , c , a . length , b . length ) ; return c ; } | Concatenates two byte arrays and returns the resulting byte array . |
/**
 * Gets the syntactic categories assigned to the words in this parse.
 *
 * <p>Delegates the traversal to {@code getAllSpannedLexiconEntriesHelper},
 * which accumulates into the returned list.
 *
 * @return the lexicon entries spanned by this parse, in traversal order
 */
public List<SyntacticCategory> getAllSpannedLexiconEntries() {
    List<SyntacticCategory> categories = Lists.newArrayList();
    getAllSpannedLexiconEntriesHelper(categories);
    return categories;
}
/**
 * Base-64 encodes the given bytes.
 *
 * <p>NOTE(review): despite the original summary ("return a String"), this
 * method returns the encoded ASCII bytes, not a String. The method name also
 * contains a typo ("Enternal"); it is kept to avoid breaking unseen callers.
 *
 * @param d the bytes to encode, may be null
 * @return the base-64 encoded ASCII bytes (with '=' padding), or null if input was null
 */
private static byte[] encodeEnternal(byte[] d) {
    if (d == null) return null;
    // Pad the source with two zero bytes so 3-byte groups never read past the end.
    byte data[] = new byte[d.length + 2];
    System.arraycopy(d, 0, data, 0, d.length);
    byte dest[] = new byte[(data.length / 3) * 4];
    // Pack each 3-byte group into four 6-bit values (octal masks: 077 = 0x3F).
    for (int sidx = 0, didx = 0; sidx < d.length; sidx += 3, didx += 4) {
        dest[didx] = (byte) ((data[sidx] >>> 2) & 077);
        dest[didx + 1] = (byte) ((data[sidx + 1] >>> 4) & 017 | (data[sidx] << 4) & 077);
        dest[didx + 2] = (byte) ((data[sidx + 2] >>> 6) & 003 | (data[sidx + 1] << 2) & 077);
        dest[didx + 3] = (byte) (data[sidx + 2] & 077);
    }
    // Map 6-bit values to the base-64 alphabet A-Z, a-z, 0-9, '+', '/'.
    for (int idx = 0; idx < dest.length; idx++) {
        if (dest[idx] < 26) dest[idx] = (byte) (dest[idx] + 'A');
        else if (dest[idx] < 52) dest[idx] = (byte) (dest[idx] + 'a' - 26);
        else if (dest[idx] < 62) dest[idx] = (byte) (dest[idx] + '0' - 52);
        else if (dest[idx] < 63) dest[idx] = (byte) '+';
        else dest[idx] = (byte) '/';
    }
    // Overwrite trailing positions beyond the true encoded length with '=' padding.
    for (int idx = dest.length - 1; idx > (d.length * 4) / 3; idx--) {
        dest[idx] = (byte) '=';
    }
    return dest;
}
/**
 * Decodes base-64 data and returns the raw bytes. Assumes the data passed in
 * is ASCII text.
 *
 * <p>NOTE(review): this method mutates its input array in place while mapping
 * characters back to 6-bit values — callers must not reuse {@code data} afterwards.
 *
 * @param data base-64 encoded ASCII bytes, possibly '='-padded
 * @return the decoded bytes
 */
private static byte[] decodeInternal(byte[] data) {
    // Strip '=' padding to find the true encoded length.
    int tail = data.length;
    while (data[tail - 1] == '=') tail--;
    byte dest[] = new byte[tail - data.length / 4];
    // Map each base-64 character back to its 6-bit value (in place).
    for (int idx = 0; idx < data.length; idx++) {
        if (data[idx] == '=') data[idx] = 0;
        else if (data[idx] == '/') data[idx] = 63;
        else if (data[idx] == '+') data[idx] = 62;
        else if (data[idx] >= '0' && data[idx] <= '9') data[idx] = (byte) (data[idx] - ('0' - 52));
        else if (data[idx] >= 'a' && data[idx] <= 'z') data[idx] = (byte) (data[idx] - ('a' - 26));
        else if (data[idx] >= 'A' && data[idx] <= 'Z') data[idx] = (byte) (data[idx] - 'A');
    }
    // Unpack each group of four 6-bit values into three output bytes.
    int sidx, didx;
    for (sidx = 0, didx = 0; didx < dest.length - 2; sidx += 4, didx += 3) {
        dest[didx] = (byte) (((data[sidx] << 2) & 255) | ((data[sidx + 1] >>> 4) & 3));
        dest[didx + 1] = (byte) (((data[sidx + 1] << 4) & 255) | ((data[sidx + 2] >>> 2) & 017));
        dest[didx + 2] = (byte) (((data[sidx + 2] << 6) & 255) | (data[sidx + 3] & 077));
    }
    // Handle the final partial group (1 or 2 remaining output bytes).
    if (didx < dest.length) {
        dest[didx] = (byte) (((data[sidx] << 2) & 255) | ((data[sidx + 1] >>> 4) & 3));
    }
    if (++didx < dest.length) {
        dest[didx] = (byte) (((data[sidx + 1] << 4) & 255) | ((data[sidx + 2] >>> 2) & 017));
    }
    return dest;
}
7,969 | private Object jsonToObject ( String recordValue ) throws JsonParseException , JsonMappingException , IOException { ObjectMapper mapper = new ObjectMapper ( ) ; Object json = mapper . readValue ( recordValue , Object . class ) ; return json ; } | Remapping json directly to object as opposed to traversing the tree |
7,970 | public void correctDataTypes ( DAO [ ] daoList , ModelDef model ) { for ( DAO dao : daoList ) { correctDataTypes ( dao , model ) ; } } | Most of postgresql database datatype already mapped to the correct data type by the JDBC |
7,971 | private Object correctDataType ( Object value , String dataType ) { if ( value == null ) { return null ; } return value ; } | add logic here if PostgreSQL JDBC didn t map DB data type to their correct Java Data type |
7,972 | public boolean filter ( Object entity ) { if ( fixture instanceof FilterableFixture ) { return filterableFixture ( ) . filter ( entity ) ; } return true ; } | Determines whether the entity must be inserted in database or not . |
7,973 | public static boolean isUnicode ( String str ) { if ( str == null ) { return false ; } return ( ! IDN . toASCII ( str ) . equals ( str ) ) ; } | Determinates if a given string can be converted into Punycode . |
7,974 | public static boolean isPunycode ( String str ) { if ( str == null ) { return false ; } return ( ! IDN . toUnicode ( str ) . equals ( str ) ) ; } | Determinates if a given string is in Punycode format . |
/**
 * Gets an assignment for the named set of variables.
 *
 * <p>NOTE(review): the size check is a bare {@code assert}, which is disabled
 * unless the JVM runs with {@code -ea}; consider an explicit argument check if
 * mismatched lengths are a realistic caller error.
 *
 * @param factorVariables names of the variables to assign, parallel to {@code outcome}
 * @param outcome the value for each named variable
 * @return the assignment mapping each named variable to its outcome value
 */
public Assignment outcomeToAssignment(List<String> factorVariables, List<? extends Object> outcome) {
    assert factorVariables.size() == outcome.size();
    int[] varNums = new int[factorVariables.size()];
    Object[] values = new Object[factorVariables.size()];
    // Resolve each variable name to its numeric index, pairing it with its value.
    for (int i = 0; i < factorVariables.size(); i++) {
        int varInd = getVariables().getVariableByName(factorVariables.get(i));
        varNums[i] = varInd;
        values[i] = outcome.get(i);
    }
    return Assignment.fromUnsortedArrays(varNums, values);
}
7,976 | public Set < Integer > getSharedVariables ( int factor1 , int factor2 ) { Set < Integer > varNums = new HashSet < Integer > ( factorVariableMap . get ( factor1 ) ) ; varNums . retainAll ( factorVariableMap . get ( factor2 ) ) ; return varNums ; } | Get all of the variables that the two factors have in common . |
/**
 * Gets a feature generator that first converts the input data with
 * {@code converter}, then applies {@code generator} to the converted value.
 *
 * @param generator the generator applied after conversion
 * @param converter maps raw inputs of type A to the generator's input type B
 * @return a feature generator over the raw input type A
 */
public static <A, B, C> FeatureGenerator<A, C> convertingFeatureGenerator(FeatureGenerator<B, C> generator, Function<A, B> converter) {
    return new ConvertingFeatureGenerator<A, B, C>(generator, converter);
}
/**
 * Gets a feature generator that applies {@code generator}, then applies
 * {@code converter} to the generated feature names.
 *
 * @param generator the generator producing features of type B
 * @param converter maps generated feature names of type B to type C
 * @return a feature generator whose feature names have type C
 */
public static <A, B, C> FeatureGenerator<A, C> postConvertingFeatureGenerator(FeatureGenerator<A, B> generator, Function<B, C> converter) {
    return new PostConvertingFeatureGenerator<A, B, C>(generator, converter);
}
/**
 * Gets the tuples which are in this assignment.
 *
 * <p>Iterates the sparse indicator entries and keeps only those with a
 * nonzero value, decoding each key back into its variable values.
 *
 * @return the list of value tuples with nonzero indicators
 */
public List<List<Object>> getTuples() {
    Iterator<KeyValue> keyValueIter = indicators.keyValueIterator();
    List<List<Object>> tuples = Lists.newArrayList();
    while (keyValueIter.hasNext()) {
        KeyValue keyValue = keyValueIter.next();
        // Zero-valued entries are absent tuples in the sparse representation.
        if (keyValue.getValue() != 0.0) {
            tuples.add(vars.intArrayToAssignment(keyValue.getKey()).getValues());
        }
    }
    return tuples;
}
7,980 | public DAO [ ] executeSelect ( String sql , Object [ ] parameters ) throws DatabaseException { ResultSet rs = executeSelectSQL ( sql , parameters , null , false ) ; return resultSetToDAO ( rs , null ) ; } | Execute generic SQL statement |
/**
 * Builds a Statement for the given SQL, handling databases that do not
 * support prepared statements and databases that cannot return generated keys.
 *
 * <p>NOTE(review): on SQLException this both prints the stack trace and
 * rethrows, and the DataEntryException drops the SQLException cause (only the
 * message survives) — consider chaining the cause if the constructor allows it.
 *
 * @param sql the SQL text
 * @param parameters positional parameters, bound only when prepared statements are supported
 * @param returnValues whether generated keys should be requested
 * @return a PreparedStatement with parameters bound, or a plain Statement as a fallback
 * @throws DatabaseException if statement creation or parameter binding fails
 */
public Statement getPreparedStatement(String sql, Object[] parameters, boolean returnValues) throws DatabaseException {
    Statement stmt = null;
    try {
        if (supportPreparedStatement()) {
            // Request generated keys only when both the DB supports it and the caller asked.
            if (appendReturningColumnClause() && returnValues) {
                stmt = connection.prepareStatement(sql, PreparedStatement.RETURN_GENERATED_KEYS);
            } else {
                stmt = connection.prepareStatement(sql);
            }
            if (parameters != null) {
                // JDBC parameters are 1-indexed; values pass through DB-type conversion.
                for (int i = 0; i < parameters.length; i++) {
                    ((PreparedStatement) stmt).setObject(i + 1, getEquivalentDBObject(parameters[i]));
                }
            }
        } else {
            // Fallback: plain Statement, parameters are NOT bound here.
            stmt = connection.createStatement();
            return stmt;
        }
    } catch (SQLException e) {
        logSQL(stmt, sql, parameters, true);
        e.printStackTrace();
        throw new DataEntryException(e.getMessage());
    }
    return stmt;
}
/**
 * Gets a canonical representation of this category that treats each variable
 * number as an equivalence class: all categories with the same variable
 * equivalence relations share the same canonical form.
 *
 * @param relabeling an initially-empty map, populated with the applied variable relabeling
 * @return the canonicalized category
 */
public HeadedSyntacticCategory getCanonicalForm(Map<Integer, Integer> relabeling) {
    // The relabeling map must start empty; it is filled in as a side effect.
    Preconditions.checkArgument(relabeling.size() == 0);
    int[] relabeledVariables = canonicalizeVariableArray(semanticVariables, relabeling);
    return new HeadedSyntacticCategory(syntacticCategory.getCanonicalForm(), relabeledVariables, rootIndex);
}
/**
 * Gets the syntactic type and semantic variable assignments of the argument
 * type of this (functional) category.
 *
 * @return the argument portion of this category with its semantic variables
 */
public HeadedSyntacticCategory getArgumentType() {
    SyntacticCategory argumentSyntax = syntacticCategory.getArgument();
    // The argument's semantic variables follow the root index in the flat array.
    int[] argumentSemantics = ArrayUtils.copyOfRange(semanticVariables, rootIndex + 1, semanticVariables.length);
    int argumentRoot = argumentSyntax.getNumReturnSubcategories();
    return new HeadedSyntacticCategory(argumentSyntax, argumentSemantics, argumentRoot);
}
7,984 | public List < HeadedSyntacticCategory > getArgumentTypes ( ) { List < HeadedSyntacticCategory > arguments = Lists . newArrayList ( ) ; HeadedSyntacticCategory cat = this ; while ( ! cat . isAtomic ( ) ) { arguments . add ( cat . getArgumentType ( ) ) ; cat = cat . getReturnType ( ) ; } return arguments ; } | Returns a list of all arguments to this category until an atomic return category is reached . The first element of the returned list is the argument that must be given first etc . |
/**
 * Gets the syntactic type and semantic variable assignments of the return
 * type of this (functional) category.
 *
 * @return the return portion of this category with its semantic variables
 */
public HeadedSyntacticCategory getReturnType() {
    SyntacticCategory returnSyntax = syntacticCategory.getReturn();
    // The return type's semantic variables precede the root index in the flat array.
    int[] returnSemantics = ArrayUtils.copyOf(semanticVariables, rootIndex);
    int returnRoot = returnSyntax.getNumReturnSubcategories();
    return new HeadedSyntacticCategory(returnSyntax, returnSemantics, returnRoot);
}
7,986 | public Credentials getSafe ( ) { Credentials safeCreds = new Credentials ( this ) ; safeCreds . meta_remove ( PASSWORD_KEY ) ; safeCreds . meta_remove ( AUTH_TOKEN_KEY ) ; safeCreds . meta_remove ( AUTH_TOKEN_EXPIRE_KEY ) ; safeCreds . meta_remove ( VALIDATION_KEY ) ; safeCreds . meta_remove ( PUSH_MESSAGING_KEY ) ; safeCreds . meta_remove ( RECOVERY_TOKEN_KEY ) ; return safeCreds ; } | New Credentials object which contains no sensitive information removing Password auth token auth token expiration date validation token push messaging token recovery token |
/**
 * Computes the marginal distribution over all grammar entries conditioned on
 * the given sequence of terminals, with the parse root fixed to {@code root}.
 *
 * @param terminals the terminal sequence to parse
 * @param root the required root symbol
 * @param useSumProduct true for sum-product (marginals), false for max-product
 * @return the completed parse chart
 */
public CfgParseChart parseMarginal(List<?> terminals, Object root, boolean useSumProduct) {
    CfgParseChart chart = createParseChart(terminals, useSumProduct);
    // A point distribution on the root variable conditions the parse on `root`.
    Factor rootFactor = TableFactor.pointDistribution(parentVar, parentVar.outcomeArrayToAssignment(root));
    return marginal(chart, terminals, rootFactor);
}
/**
 * Computes the marginal distribution over CFG parses given the terminals,
 * using a uniform (unity) distribution over root symbols.
 *
 * @param terminals the terminal sequence to parse
 * @param useSumProduct true for sum-product (marginals), false for max-product
 * @return the completed parse chart
 */
public CfgParseChart parseMarginal(List<?> terminals, boolean useSumProduct) {
    Factor rootDist = TableFactor.unity(parentVar);
    return parseMarginal(terminals, rootDist, useSumProduct);
}
/**
 * Computes the distribution over CFG entries, the parse root, and the children,
 * conditioned on the provided terminals and the given root distribution.
 *
 * @param terminals the terminal sequence to parse
 * @param rootDist the distribution over root symbols
 * @param useSumProduct true for sum-product (marginals), false for max-product
 * @return the completed parse chart
 */
public CfgParseChart parseMarginal(List<?> terminals, Factor rootDist, boolean useSumProduct) {
    return marginal(createParseChart(terminals, useSumProduct), terminals, rootDist);
}
/**
 * Initializes a parse chart with this parser's variable configuration.
 *
 * @param terminals the terminal sequence the chart spans
 * @param useSumProduct true for sum-product (marginals), false for max-product
 * @return a fresh, empty parse chart
 */
private CfgParseChart createParseChart(List<?> terminals, boolean useSumProduct) {
    return new CfgParseChart(terminals, parentVar, leftVar, rightVar, terminalVar, ruleTypeVar, binaryDistribution, useSumProduct);
}
/**
 * Initializes a beam search chart using the terminal production distribution.
 *
 * <p>For every span of the terminal sequence that forms a valid terminal list,
 * each matching terminal rule is encoded into a packed long key (root symbol
 * and rule type, offset by the chart-size signal) and added to the chart with
 * its probability.
 *
 * @param terminals the terminal sequence being parsed
 * @param chart the chart to populate with initial span entries
 * @param treeEncodingOffsets multipliers used to pack tree components into a long key
 */
private void initializeBeamSearchChart(List<Object> terminals, BeamSearchCfgParseChart chart, long[] treeEncodingOffsets) {
    Variable terminalListValue = terminalVar.getOnlyVariable();
    // Marker added to every key to signal a terminal (leaf) entry.
    long terminalSignal = ((long) chart.chartSize()) * (treeEncodingOffsets[3] + treeEncodingOffsets[2]);
    for (int i = 0; i < terminals.size(); i++) {
        for (int j = i; j < terminals.size(); j++) {
            // Only spans forming a known terminal list can be matched by a rule.
            if (terminalListValue.canTakeValue(terminals.subList(i, j + 1))) {
                Assignment assignment = terminalVar.outcomeArrayToAssignment(terminals.subList(i, j + 1));
                Iterator<Outcome> iterator = terminalDistribution.outcomePrefixIterator(assignment);
                while (iterator.hasNext()) {
                    Outcome bestOutcome = iterator.next();
                    int root = nonterminalVariableType.getValueIndex(bestOutcome.getAssignment().getValue(parentVar.getOnlyVariableNum()));
                    int ruleType = ruleVariableType.getValueIndex(bestOutcome.getAssignment().getValue(ruleTypeVar.getOnlyVariableNum()));
                    // Pack root symbol and rule type into the key using their offsets.
                    long partialKeyNum = (root * treeEncodingOffsets[4]) + (ruleType * treeEncodingOffsets[5]);
                    chart.addParseTreeKeyForSpan(i, j, terminalSignal + partialKeyNum, bestOutcome.getProbability());
                }
            }
        }
    }
}
7,992 | public boolean isExportable ( ) { return fieldContainerMap . entrySet ( ) . stream ( ) . anyMatch ( e -> format . isTypeSupported ( e . getValue ( ) . getType ( ) ) ) ; } | If empty then no export values are present and export is pointless |
7,993 | public String toTableString ( boolean probs ) { Set < B > key2s = Sets . newHashSet ( ) ; for ( A key : counts . keySet ( ) ) { key2s . addAll ( counts . get ( key ) . keySet ( ) ) ; } List < B > key2List = Lists . newArrayList ( key2s ) ; StringBuilder sb = new StringBuilder ( ) ; sb . append ( "\t" ) ; for ( B key2 : key2List ) { sb . append ( key2 ) ; sb . append ( "\t" ) ; } sb . append ( "\n" ) ; for ( A key1 : counts . keySet ( ) ) { sb . append ( key1 ) ; sb . append ( "\t" ) ; for ( B key2 : key2List ) { if ( probs ) { sb . append ( String . format ( "%.3f" , getProbability ( key1 , key2 ) ) ) ; } else { sb . append ( getCount ( key1 , key2 ) ) ; } sb . append ( "\t" ) ; } sb . append ( "\n" ) ; } return sb . toString ( ) ; } | Generates a 2D table displaying the contents of this accumulator . |
/**
 * Moves all elements from the unsorted portion of this map into the sorted
 * portion.
 *
 * <p>The sorted arrays are grown by the number of pending unsorted items, the
 * unsorted items are appended, the combined key/value pairs are re-sorted, and
 * the key set is rebuilt. A no-op when there are no pending items.
 */
public void reindexItems() {
    if (numUnsortedItems > 0) {
        // Grow the sorted arrays to hold the pending unsorted items.
        int[] newSortedKeys = Arrays.copyOf(sortedKeys, sortedKeys.length + numUnsortedItems);
        int[] newSortedValues = Arrays.copyOf(sortedValues, sortedValues.length + numUnsortedItems);
        int oldLength = sortedKeys.length;
        for (int i = 0; i < numUnsortedItems; i++) {
            newSortedKeys[i + oldLength] = unsortedKeys[i];
            newSortedValues[i + oldLength] = unsortedValues[i];
        }
        // Re-sort keys and values together over the full combined range.
        ArrayUtils.sortKeyValuePairs(newSortedKeys, newSortedValues, 0, newSortedKeys.length);
        sortedKeys = newSortedKeys;
        sortedValues = newSortedValues;
        numUnsortedItems = 0;
        rebuildKeySet();
    }
}
7,995 | private void resizeMap ( ) { int [ ] newUnsortedKeys = Arrays . copyOf ( unsortedKeys , unsortedKeys . length * 2 ) ; int [ ] newUnsortedValues = Arrays . copyOf ( unsortedValues , unsortedValues . length * 2 ) ; unsortedKeys = newUnsortedKeys ; unsortedValues = newUnsortedValues ; } | Doubles the size of the unsorted portion of the map . |
7,996 | public static BackendUser from ( ValidCredentials credentials ) { BackendUser beu = new BackendUser ( ) ; beu . initFrom ( credentials ) ; return beu ; } | Convience method to initialize BackendUser from ValidCredentials |
7,997 | public static SignInResponse signIn ( String email , String password ) { return signIn ( new LoginCredentials ( email , password ) ) ; } | Perform syncronously login attempt . |
7,998 | public static SignUpResponse signUp ( String username , String email , String password ) { return signUp ( new SignUpCredentials ( username , email , password ) ) ; } | Perform syncronously sign up attempt . |
7,999 | public static Observable < BackendUser > signInInBackground ( String email , String password ) { return getAM ( ) . loginASync ( new LoginCredentials ( email , password ) ) ; } | Perform asyncronously login attempt . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.