idx int64 0 41.2k | question stringlengths 74 4.21k | target stringlengths 5 888 |
|---|---|---|
39,500 | protected PreparedStatement executeSql ( String sql , MapSqlParameterSource args ) { Map < String , Object > parameters = args != null ? args . getValues ( ) : new HashMap < String , Object > ( ) ; for ( Map . Entry < String , Object > placeHolderEntry : parameters . entrySet ( ) ) { String key = placeHolderEntry . getKey ( ) ; String value = placeHolderEntry . getValue ( ) . toString ( ) ; log . debug ( "substitution for parameter '" + key + "' in SQL script: " + value ) ; sql = sql . replaceAll ( key , Matcher . quoteReplacement ( value ) ) ; } log . debug ( "Executing SQL\n{}" , sql ) ; CancelableStatements cancelableStats = new CancelableStatements ( sql , statementController ) ; if ( statementController != null ) { statementController . registerStatement ( cancelableStats . statement ) ; } else { log . debug ( "statement controller is not initialized" ) ; } getJdbcTemplate ( ) . execute ( cancelableStats , cancelableStats ) ; return cancelableStats . statement ; } | executes an SQL string substituting the parameters found in args |
39,501 | private String readSqlFromResource ( Resource resource ) { try ( BufferedReader reader = new BufferedReader ( new InputStreamReader ( new FileInputStream ( resource . getFile ( ) ) , "UTF-8" ) ) ; ) { StringBuilder sqlBuf = new StringBuilder ( ) ; for ( String line = reader . readLine ( ) ; line != null ; line = reader . readLine ( ) ) { sqlBuf . append ( line ) . append ( "\n" ) ; } return sqlBuf . toString ( ) ; } catch ( IOException e ) { log . error ( "Couldn't read SQL script from resource file." , e ) ; throw new FileAccessException ( "Couldn't read SQL script from resource file." , e ) ; } } | Reads the content from a resource into a string . |
39,502 | public static void toDNF ( LogicClause topNode ) { while ( makeDNF ( topNode ) == false ) { } cleanEmptyLeafs ( topNode ) ; flattenDNF ( topNode ) ; } | Transforms an AQL query to the Disjunctive Normal Form . |
39,503 | private static void flattenDNF ( LogicClause top ) { if ( top . getOp ( ) == LogicClause . Operator . LEAF || top . getOp ( ) == LogicClause . Operator . AND ) { List < LogicClause > children = new ArrayList < > ( ) ; findAllChildrenForOp ( top , children , LogicClause . Operator . AND ) ; List < ? extends Token > orginalAndContent = top . getContent ( ) ; top . setOp ( LogicClause . Operator . OR ) ; top . clearChildren ( ) ; top . setContent ( null ) ; LogicClause andClause = new LogicClause ( LogicClause . Operator . AND ) ; andClause . setContent ( orginalAndContent ) ; top . addChild ( andClause ) ; andClause . addAllChildren ( children ) ; } else if ( top . getOp ( ) == LogicClause . Operator . OR ) { List < LogicClause > allOrNodes = new ArrayList < > ( ) ; findAllChildrenForOp ( top , allOrNodes , LogicClause . Operator . OR , true ) ; top . clearChildren ( ) ; top . addAllChildren ( allOrNodes ) ; for ( LogicClause subclause : top . getChildren ( ) ) { if ( subclause . getOp ( ) == LogicClause . Operator . LEAF ) { List < ? extends Token > content = subclause . getContent ( ) ; subclause . clearChildren ( ) ; subclause . setOp ( LogicClause . Operator . AND ) ; subclause . setContent ( null ) ; LogicClause newLeaf = new LogicClause ( LogicClause . Operator . LEAF ) ; newLeaf . setContent ( content ) ; subclause . addChild ( newLeaf ) ; } else if ( subclause . getOp ( ) == LogicClause . Operator . AND ) { List < LogicClause > children = new ArrayList < > ( ) ; findAllChildrenForOp ( subclause , children , LogicClause . Operator . AND ) ; subclause . clearChildren ( ) ; for ( LogicClause c : children ) { subclause . addChild ( c ) ; } } else { Preconditions . checkArgument ( false , "input is not in DNF" ) ; } } } } | Flatten the clause in the sense that there is only one toplevel OR layer and one layer of AND - clauses . |
39,504 | public List < Long > getDocumentsForMetadata ( QueryData queryData ) { List < Long > corpusList = queryData . getCorpusList ( ) ; if ( ! corpusList . isEmpty ( ) ) { List < QueryAnnotation > metaData = queryData . getMetaData ( ) ; if ( ! metaData . isEmpty ( ) ) { String documentsWithMetaDataSql = subQueryCorpusSelectionStrategy . buildSubQuery ( corpusList , metaData ) ; List < Long > documents = getJdbcTemplate ( ) . query ( documentsWithMetaDataSql , SingleColumnRowMapper . newInstance ( Long . class ) ) ; return documents ; } } return null ; } | Will query the database which documents are matching according to the given metadata |
39,505 | public void setQueryResultQueue ( BlockingQueue < SaltProject > queue , PagedResultQuery q , ArrayList < Match > allMatches ) { this . projectQueue = queue ; this . currentQuery = q ; this . numberOfResults = allMatches . size ( ) ; this . allMatches = allMatches ; paging . setPageSize ( q . getLimit ( ) , false ) ; paging . setInfo ( q . getQuery ( ) ) ; resultLayout . removeAllComponents ( ) ; resultPanelList . clear ( ) ; SaltProject first = queue . poll ( ) ; Preconditions . checkState ( first != null , "There must be already an element in the queue" ) ; addQueryResult ( q , Arrays . asList ( first ) ) ; } | Sets a new query in the result panel . |
39,506 | public static Client createRESTClient ( String userName , String password ) { DefaultApacheHttpClient4Config rc = new DefaultApacheHttpClient4Config ( ) ; rc . getClasses ( ) . add ( SaltProjectProvider . class ) ; ThreadSafeClientConnManager clientConnMgr = new ThreadSafeClientConnManager ( ) ; clientConnMgr . setDefaultMaxPerRoute ( 10 ) ; rc . getProperties ( ) . put ( ApacheHttpClient4Config . PROPERTY_CONNECTION_MANAGER , clientConnMgr ) ; if ( userName != null && password != null ) { CredentialsProvider credentialsProvider = new BasicCredentialsProvider ( ) ; credentialsProvider . setCredentials ( AuthScope . ANY , new UsernamePasswordCredentials ( userName , password ) ) ; rc . getProperties ( ) . put ( ApacheHttpClient4Config . PROPERTY_CREDENTIALS_PROVIDER , credentialsProvider ) ; rc . getProperties ( ) . put ( ApacheHttpClient4Config . PROPERTY_PREEMPTIVE_BASIC_AUTHENTICATION , true ) ; } Client c = ApacheHttpClient4 . create ( rc ) ; return c ; } | Creates an authenticated REST client |
39,507 | public static List < Annotation > getMetaDataDoc ( String toplevelCorpusName , String documentName ) { List < Annotation > result = new ArrayList < Annotation > ( ) ; WebResource res = Helper . getAnnisWebResource ( ) ; try { res = res . path ( "meta" ) . path ( "doc" ) . path ( urlPathEscape . escape ( toplevelCorpusName ) ) ; res = res . path ( urlPathEscape . escape ( documentName ) ) ; result = res . get ( new GenericType < List < Annotation > > ( ) { } ) ; } catch ( UniformInterfaceException | ClientHandlerException ex ) { log . error ( null , ex ) ; if ( ! AnnisBaseUI . handleCommonError ( ex , "retrieve metadata" ) ) { Notification . show ( "Remote exception: " + ex . getLocalizedMessage ( ) , Notification . Type . WARNING_MESSAGE ) ; } } return result ; } | Retrieve the meta data for a given document of a corpus . |
39,508 | public static CorpusConfig getCorpusConfig ( String corpus ) { if ( corpus == null || corpus . isEmpty ( ) ) { Notification . show ( "no corpus is selected" , "please select at leas one corpus and execute query again" , Notification . Type . WARNING_MESSAGE ) ; return null ; } CorpusConfig corpusConfig = new CorpusConfig ( ) ; try { corpusConfig = Helper . getAnnisWebResource ( ) . path ( "query" ) . path ( "corpora" ) . path ( urlPathEscape . escape ( corpus ) ) . path ( "config" ) . get ( CorpusConfig . class ) ; } catch ( UniformInterfaceException | ClientHandlerException ex ) { if ( ! AnnisBaseUI . handleCommonError ( ex , "get corpus configuration" ) ) { new Notification ( ERROR_MESSAGE_CORPUS_PROPS_HEADER , ERROR_MESSAGE_CORPUS_PROPS , Notification . Type . WARNING_MESSAGE , true ) . show ( Page . getCurrent ( ) ) ; } } return corpusConfig ; } | Loads the corpus config of a specific corpus . |
39,509 | public static CorpusConfigMap getCorpusConfigs ( ) { CorpusConfigMap corpusConfigurations = null ; try { corpusConfigurations = Helper . getAnnisWebResource ( ) . path ( "query" ) . path ( "corpora" ) . path ( "config" ) . get ( CorpusConfigMap . class ) ; } catch ( UniformInterfaceException | ClientHandlerException ex ) { UI . getCurrent ( ) . access ( new Runnable ( ) { public void run ( ) { if ( ! AnnisBaseUI . handleCommonError ( ex , "get corpus configurations" ) ) { new Notification ( ERROR_MESSAGE_CORPUS_PROPS_HEADER , ERROR_MESSAGE_CORPUS_PROPS , Notification . Type . WARNING_MESSAGE , true ) . show ( Page . getCurrent ( ) ) ; } } } ) ; } if ( corpusConfigurations == null ) { corpusConfigurations = new CorpusConfigMap ( ) ; } corpusConfigurations . put ( DEFAULT_CONFIG , getDefaultCorpusConfig ( ) ) ; return corpusConfigurations ; } | Loads the all available corpus configurations . |
39,510 | public static CorpusConfigMap getCorpusConfigs ( Set < String > corpora ) { CorpusConfigMap corpusConfigurations = new CorpusConfigMap ( ) ; for ( String corpus : corpora ) { corpusConfigurations . put ( corpus , getCorpusConfig ( corpus ) ) ; } corpusConfigurations . put ( DEFAULT_CONFIG , getDefaultCorpusConfig ( ) ) ; return corpusConfigurations ; } | Loads the available corpus configurations for a list of specific corpora . |
39,511 | public static Map < String , String > parseFragment ( String fragment ) { Map < String , String > result = new TreeMap < String , String > ( ) ; fragment = StringUtils . removeStart ( fragment , "!" ) ; String [ ] split = StringUtils . split ( fragment , "&" ) ; for ( String s : split ) { String [ ] parts = s . split ( "=" , 2 ) ; String name = parts [ 0 ] . trim ( ) ; String value = "" ; if ( parts . length == 2 ) { try { if ( name . startsWith ( "_" ) ) { value = new String ( Base64 . decodeBase64 ( parts [ 1 ] ) , "UTF-8" ) ; } else { value = URLDecoder . decode ( parts [ 1 ] , "UTF-8" ) ; } } catch ( UnsupportedEncodingException ex ) { log . error ( ex . getMessage ( ) , ex ) ; } } name = StringUtils . removeStart ( name , "_" ) ; result . put ( name , value ) ; } return result ; } | Parses the fragment . |
39,512 | public static String convertExceptionToMessage ( Throwable ex ) { StringBuilder sb = new StringBuilder ( ) ; if ( ex != null ) { sb . append ( "Exception type: " ) . append ( ex . getClass ( ) . getName ( ) ) . append ( "\n" ) ; sb . append ( "Message: " ) . append ( ex . getLocalizedMessage ( ) ) . append ( "\n" ) ; sb . append ( "Stacktrace: \n" ) ; StackTraceElement [ ] st = ex . getStackTrace ( ) ; for ( int i = 0 ; i < st . length ; i ++ ) { sb . append ( st [ i ] . toString ( ) ) ; sb . append ( "\n" ) ; } } return sb . toString ( ) ; } | Returns a formatted string containing the type of the exception the message and the stacktrace . |
39,513 | public static String encodePath ( String v ) { String encoded = urlPathEscape . escape ( v ) ; return encoded ; } | Encodes a String so it can be used as path param . |
39,514 | public static String encodeQueryParam ( String v ) { String encoded = UrlEscapers . urlFormParameterEscaper ( ) . escape ( v ) ; return encoded ; } | Encodes a String so it can be used as query param . |
39,515 | public void writeOutput ( VisualizerInput input , OutputStream outstream ) { try { OutputStreamWriter writer = new OutputStreamWriter ( outstream , getCharacterEncoding ( ) ) ; writeOutput ( input , writer ) ; writer . flush ( ) ; } catch ( IOException ex ) { log . error ( "Exception when writing visualizer output." , ex ) ; StringWriter strWriter = new StringWriter ( ) ; ex . printStackTrace ( new PrintWriter ( strWriter ) ) ; try { outstream . write ( strWriter . toString ( ) . getBytes ( "UTF-8" ) ) ; } catch ( IOException ex1 ) { log . error ( null , ex ) ; } } } | Will create a Writer of the outstream . |
39,516 | private static Set < String > getRelationLevelSet ( SDocumentGraph graph , String namespace , Class < ? extends SRelation > type ) { Set < String > result = new TreeSet < > ( ) ; if ( graph != null ) { List < ? extends SRelation > edges = null ; if ( type == SDominanceRelation . class ) { edges = graph . getDominanceRelations ( ) ; } else if ( type == SPointingRelation . class ) { edges = graph . getPointingRelations ( ) ; } else if ( type == SSpanningRelation . class ) { edges = graph . getSpanningRelations ( ) ; } if ( edges != null ) { for ( SRelation < ? , ? > edge : edges ) { Set < SLayer > layers = edge . getLayers ( ) ; for ( SLayer layer : layers ) { if ( namespace == null || namespace . equals ( layer . getName ( ) ) ) { for ( SAnnotation anno : edge . getAnnotations ( ) ) { result . add ( anno . getQName ( ) ) ; } break ; } } } } } return result ; } | Get the qualified name of all annotations belonging to relations having a specific namespace . |
39,517 | public boolean includeNode ( SNode node ) { if ( node instanceof SToken || configurations . get ( 0 ) == null ) { return true ; } Set < SAnnotation > nodeAnnotations = node . getAnnotations ( ) ; return includeObject ( nodeAnnotations , displayedNodeAnnotations ) ; } | Implements the includeNode method of the org . corpus_tools . salt . util . ExportFilter interface . |
39,518 | public boolean includeRelation ( SRelation relation ) { Map < String , Set < String > > displayedRelAnnotations = new HashMap < String , Set < String > > ( ) ; if ( relation instanceof SPointingRelation ) { if ( configurations . get ( 1 ) == null ) { return true ; } else { displayedRelAnnotations = displayedPointingRelAnnotations ; } } if ( relation instanceof SSpanningRelation ) { if ( configurations . get ( 2 ) == null ) { return true ; } else { displayedRelAnnotations = displayedSpanningRelAnnotations ; } } if ( relation instanceof SDominanceRelation ) { if ( configurations . get ( 3 ) == null ) { return true ; } else { displayedRelAnnotations = displayedDominanceRelAnnotations ; } } Set < SAnnotation > relAnnotations = relation . getAnnotations ( ) ; return includeObject ( relAnnotations , displayedRelAnnotations ) ; } | Implements the includeRelation method of the org . corpus_tools . salt . util . ExportFilter interface . |
39,519 | public static boolean containsRTLText ( String str ) { if ( str != null ) { for ( int i = 0 ; i < str . length ( ) ; i ++ ) { char cc = str . charAt ( i ) ; if ( cc >= 1425 && cc <= 1785 ) { return true ; } else if ( cc >= 64286 && cc <= 65019 ) { return true ; } else if ( cc >= 65136 && cc <= 65276 ) { return true ; } } } return false ; } | Detects arabic characters in a string . |
39,520 | public static String getSafeFileName ( String orig ) { if ( orig != null ) { return orig . replaceAll ( "[^0-9A-Za-z-]" , "_" ) ; } else { return UUID . randomUUID ( ) . toString ( ) ; } } | Returns a file name that is safe to use and does not have any invalid characters . |
39,521 | public static Set < String > getToplevelCorpusNames ( SaltProject p ) { Set < String > names = new HashSet < > ( ) ; if ( p != null && p . getCorpusGraphs ( ) != null ) { for ( SCorpusGraph g : p . getCorpusGraphs ( ) ) { if ( g . getRoots ( ) != null ) { for ( SNode c : g . getRoots ( ) ) { names . add ( c . getName ( ) ) ; } } } } return names ; } | Gets all names of a corpus from a salt project . |
39,522 | public static < V > Map < SNode , V > createSNodeMapFromIDs ( Map < String , V > map , SDocumentGraph graph ) { HashMap < SNode , V > result = new LinkedHashMap < > ( ) ; if ( map != null && graph != null ) { for ( Map . Entry < String , V > e : map . entrySet ( ) ) { SNode n = graph . getNode ( e . getKey ( ) ) ; if ( n != null ) { result . put ( n , e . getValue ( ) ) ; } } } return result ; } | Takes a map of salt node IDs to a value and return a new map that uses the SNodes as keys instead of the IDs . |
39,523 | public static LinkedHashMap < String , ArrayList < Row > > parseSalt ( VisualizerInput input , boolean showSpanAnnos , boolean showTokenAnnos , List < String > annotationNames , Set < String > mediaLayer , boolean replaceValueWithMediaIcon , long startTokenIndex , long endTokenIndex , PDFController pdfController , STextualDS text ) { SDocumentGraph graph = input . getDocument ( ) . getDocumentGraph ( ) ; LinkedHashMap < String , ArrayList < Row > > rowsByAnnotation = new LinkedHashMap < > ( ) ; for ( String anno : annotationNames ) { rowsByAnnotation . put ( anno , new ArrayList < Row > ( ) ) ; } AtomicInteger eventCounter = new AtomicInteger ( ) ; PDFPageHelper pageNumberHelper = new PDFPageHelper ( input ) ; if ( showSpanAnnos ) { for ( SSpan span : graph . getSpans ( ) ) { if ( text == null || text == CommonHelper . getTextualDSForNode ( span , graph ) ) { addAnnotationsForNode ( span , graph , startTokenIndex , endTokenIndex , pdfController , pageNumberHelper , eventCounter , rowsByAnnotation , true , mediaLayer , replaceValueWithMediaIcon ) ; } } } if ( showTokenAnnos ) { for ( SToken tok : graph . getTokens ( ) ) { if ( text == null || text == CommonHelper . getTextualDSForNode ( tok , graph ) ) { addAnnotationsForNode ( tok , graph , startTokenIndex , endTokenIndex , pdfController , pageNumberHelper , eventCounter , rowsByAnnotation , false , mediaLayer , replaceValueWithMediaIcon ) ; } } } for ( Map . Entry < String , ArrayList < Row > > e : rowsByAnnotation . entrySet ( ) ) { mergeAllRowsIfPossible ( e . getValue ( ) ) ; } for ( Map . Entry < String , ArrayList < Row > > e : rowsByAnnotation . entrySet ( ) ) { for ( Row r : e . getValue ( ) ) { sortEventsByTokenIndex ( r ) ; } } for ( Map . Entry < String , ArrayList < Row > > e : rowsByAnnotation . entrySet ( ) ) { for ( Row r : e . getValue ( ) ) { splitRowsOnIslands ( r , graph , text , startTokenIndex , endTokenIndex ) ; } } for ( Map . 
Entry < String , ArrayList < Row > > e : rowsByAnnotation . entrySet ( ) ) { for ( Row r : e . getValue ( ) ) { splitRowsOnGaps ( r , graph , startTokenIndex , endTokenIndex ) ; } } return rowsByAnnotation ; } | Converts Salt document graph to rows . |
39,524 | public static Set < String > computeDisplayedNamespace ( VisualizerInput input , List < Class < ? extends SNode > > types ) { if ( input == null ) { return new HashSet < > ( ) ; } String showNamespaceConfig = input . getMappings ( ) . getProperty ( GridComponent . MAPPING_SHOW_NAMESPACE ) ; if ( showNamespaceConfig != null ) { SDocumentGraph graph = input . getDocument ( ) . getDocumentGraph ( ) ; Set < String > annoPool = new LinkedHashSet < > ( ) ; for ( Class < ? extends SNode > t : types ) { annoPool . addAll ( SToken . class . isAssignableFrom ( t ) ? getAnnotationLevelSet ( graph , null , t ) : getAnnotationLevelSet ( graph , input . getNamespace ( ) , t ) ) ; } if ( "true" . equalsIgnoreCase ( showNamespaceConfig ) ) { return annoPool ; } else if ( "false" . equalsIgnoreCase ( showNamespaceConfig ) ) { return new LinkedHashSet < > ( ) ; } else { Set < String > annos = new LinkedHashSet < > ( ) ; List < String > defs = Splitter . on ( ',' ) . omitEmptyStrings ( ) . trimResults ( ) . splitToList ( showNamespaceConfig ) ; for ( String s : defs ) { if ( s . startsWith ( "/" ) && s . endsWith ( "/" ) ) { Pattern regex = Pattern . compile ( StringUtils . strip ( s , "/" ) ) ; LinkedList < String > matchingAnnos = new LinkedList < > ( ) ; for ( String a : annoPool ) { if ( regex . matcher ( a ) . matches ( ) ) { matchingAnnos . add ( a ) ; } } annos . addAll ( matchingAnnos ) ; annoPool . removeAll ( matchingAnnos ) ; } else { annos . add ( s ) ; annoPool . remove ( s ) ; } } return annos ; } } return new LinkedHashSet < > ( ) ; } | Returns the annotations to which should be displayed together with their namespace . |
39,525 | private static Set < String > getAnnotationLevelSet ( SDocumentGraph graph , String namespace , Class < ? extends SNode > type ) { Set < String > result = new TreeSet < > ( ) ; if ( graph != null ) { List < ? extends SNode > nodes ; if ( SSpan . class == type ) { nodes = graph . getSpans ( ) ; } else if ( SToken . class == type ) { nodes = graph . getTokens ( ) ; } else { nodes = graph . getNodes ( ) ; } if ( nodes != null ) { for ( SNode n : nodes ) { if ( type . isAssignableFrom ( n . getClass ( ) ) ) { for ( SLayer layer : n . getLayers ( ) ) { if ( namespace == null || namespace . equals ( layer . getName ( ) ) ) { for ( SAnnotation anno : n . getAnnotations ( ) ) { result . add ( anno . getQName ( ) ) ; } break ; } } } } } } return result ; } | Get the qualified name of all annotations belonging to spans having a specific namespace . |
39,526 | private static void sortEventsByTokenIndex ( Row row ) { Collections . sort ( row . getEvents ( ) , new Comparator < GridEvent > ( ) { public int compare ( GridEvent o1 , GridEvent o2 ) { if ( o1 == o2 ) { return 0 ; } if ( o1 == null ) { return - 1 ; } if ( o2 == null ) { return + 1 ; } return Integer . compare ( o1 . getLeft ( ) , o2 . getLeft ( ) ) ; } } ) ; } | Sort events of a row . The sorting is depending on the left value of the event |
39,527 | private static void splitRowsOnIslands ( Row row , final SDocumentGraph graph , STextualDS text , long startTokenIndex , long endTokenIndex ) { BitSet tokenCoverage = new BitSet ( ) ; List < SToken > sortedTokenList = graph . getSortedTokenByText ( ) ; ListIterator < SToken > itToken = sortedTokenList . listIterator ( ) ; while ( itToken . hasNext ( ) ) { SToken t = itToken . next ( ) ; if ( text == null || text == CommonHelper . getTextualDSForNode ( t , graph ) ) { RelannisNodeFeature feat = ( RelannisNodeFeature ) t . getFeature ( ANNIS_NS , FEAT_RELANNIS_NODE ) . getValue ( ) ; long tokenIndexRaw = feat . getTokenIndex ( ) ; tokenIndexRaw = clip ( tokenIndexRaw , startTokenIndex , endTokenIndex ) ; int tokenIndex = ( int ) ( tokenIndexRaw - startTokenIndex ) ; tokenCoverage . set ( tokenIndex ) ; } } ListIterator < GridEvent > itEvents = row . getEvents ( ) . listIterator ( ) ; while ( itEvents . hasNext ( ) ) { GridEvent event = itEvents . next ( ) ; BitSet eventBitSet = new BitSet ( ) ; eventBitSet . set ( event . getLeft ( ) , event . getRight ( ) + 1 ) ; eventBitSet . and ( tokenCoverage ) ; if ( eventBitSet . nextClearBit ( event . getLeft ( ) ) <= event . getRight ( ) ) { row . removeEvent ( itEvents ) ; int subElement = 0 ; int offset = eventBitSet . nextSetBit ( 0 ) ; while ( offset >= 0 ) { int end = eventBitSet . nextClearBit ( offset ) - 1 ; if ( offset < end ) { GridEvent newEvent = new GridEvent ( event ) ; newEvent . setId ( event . getId ( ) + "_islandsplit_" + subElement ++ ) ; newEvent . setLeft ( offset ) ; newEvent . setRight ( end ) ; row . addEvent ( itEvents , newEvent ) ; } offset = eventBitSet . nextSetBit ( end + 1 ) ; } } } } | Splits events of a row if they overlap an island . Islands are areas between the token which are included in the result . |
39,528 | private String getText ( SNode node , VisualizerInput input ) { SDocumentGraph sDocumentGraph = input . getSResult ( ) . getDocumentGraph ( ) ; List < DataSourceSequence > sequences = sDocumentGraph . getOverlappedDataSourceSequence ( node , SALT_TYPE . STEXT_OVERLAPPING_RELATION ) ; if ( sequences != null && sequences . size ( ) > 0 ) { return ( ( STextualDS ) sequences . get ( 0 ) . getDataSource ( ) ) . getText ( ) . substring ( sequences . get ( 0 ) . getStart ( ) . intValue ( ) , sequences . get ( 0 ) . getEnd ( ) . intValue ( ) ) ; } return "" ; } | Get the text which is overlapped by the SNode . |
39,529 | private void addLoginButton ( ) { VaadinSession session = VaadinSession . getCurrent ( ) ; if ( session != null ) { boolean kickstarter = Helper . isKickstarter ( session ) ; if ( ! kickstarter ) { addComponent ( lblUserName ) ; setComponentAlignment ( lblUserName , Alignment . MIDDLE_RIGHT ) ; addComponent ( btLogin ) ; setComponentAlignment ( btLogin , Alignment . MIDDLE_RIGHT ) ; } } } | Adds the login button + login text to the toolbar . This only happens when the GUI is not started via the kickstarter . |
39,530 | public void addAnnotationConditions ( Collection < String > conditions , int index , QueryAnnotation annotation , String table , TableAccessStrategy tas ) { TextMatching tm = annotation . getTextMatching ( ) ; String column = annotation . getNamespace ( ) == null ? "annotext" : "qannotext" ; Escaper escaper = tm != null && tm . isRegex ( ) ? regexEscaper : likeEscaper ; String val ; if ( tm == null ) { val = "%" ; } else { val = escaper . escape ( annotation . getValue ( ) ) ; } String prefix ; if ( annotation . getNamespace ( ) == null ) { prefix = escaper . escape ( annotation . getName ( ) ) + ":" ; } else { prefix = escaper . escape ( annotation . getNamespace ( ) ) + ":" + escaper . escape ( annotation . getName ( ) ) + ":" ; } if ( tm == null || tm == TextMatching . EXACT_EQUAL ) { conditions . add ( tas . aliasedColumn ( table , column , index ) + " LIKE '" + prefix + val + "'" ) ; } else if ( tm == TextMatching . EXACT_NOT_EQUAL ) { conditions . add ( tas . aliasedColumn ( table , column , index ) + " LIKE '" + prefix + "%'" ) ; conditions . add ( tas . aliasedColumn ( table , column , index ) + " NOT LIKE '" + prefix + val + "'" ) ; } else if ( tm == TextMatching . REGEXP_EQUAL ) { conditions . add ( tas . aliasedColumn ( table , column , index ) + " ~ '^(" + prefix + "(" + val + "))$'" ) ; } else if ( tm == TextMatching . REGEXP_NOT_EQUAL ) { conditions . add ( tas . aliasedColumn ( table , column , index ) + " LIKE '" + prefix + "%'" ) ; conditions . add ( tas . aliasedColumn ( table , column , index ) + " !~ '^(" + prefix + "(" + val + "))$'" ) ; } } | Adds annotation conditions for a single node . |
39,531 | public static FrequencyTableEntry parse ( String definition ) { List < String > splitted = Splitter . on ( ':' ) . trimResults ( ) . omitEmptyStrings ( ) . limit ( 2 ) . splitToList ( definition ) ; if ( splitted . size ( ) == 2 ) { FrequencyTableEntry entry = new FrequencyTableEntry ( ) ; if ( "meta" . equals ( splitted . get ( 0 ) ) ) { entry . setReferencedNode ( null ) ; entry . setType ( FrequencyTableEntryType . meta ) ; entry . setKey ( splitted . get ( 1 ) ) ; } else { entry . setReferencedNode ( splitted . get ( 0 ) ) ; if ( "tok" . equals ( splitted . get ( 1 ) ) ) { entry . setType ( FrequencyTableEntryType . span ) ; } else { entry . setType ( FrequencyTableEntryType . annotation ) ; entry . setKey ( splitted . get ( 1 ) ) ; } } return entry ; } return null ; } | A constructor that takes the raw definition as argument . |
39,532 | private void updateAutoGeneratedQueriesPanel ( ) { Set < String > corpora = new HashSet < > ( ui . getQueryState ( ) . getSelectedCorpora ( ) . getValue ( ) ) ; if ( corpora . isEmpty ( ) ) { corpora . addAll ( ui . getQueryState ( ) . getAvailableCorpora ( ) . getItemIds ( ) ) ; } autoGenQueries . setSelectedCorpusInBackground ( corpora ) ; } | Updates or initializes the panel which holds the automatic generated queries . |
39,533 | protected boolean existConflictingTopLevelCorpus ( String topLevelCorpusName ) { String sql = "SELECT count(name) as amount FROM corpus WHERE top_level=true AND name='" + topLevelCorpusName + "'" ; Integer numberOfCorpora = getJdbcTemplate ( ) . query ( sql , new ResultSetExtractor < Integer > ( ) { public Integer extractData ( ResultSet rs ) throws SQLException , DataAccessException { if ( rs . next ( ) ) { return rs . getInt ( "amount" ) ; } else { return 0 ; } } } ) ; return numberOfCorpora > 0 ; } | Checks if there already exists a top level corpus . |
39,534 | protected void closeAllConnections ( String databasename ) { String sql = "SELECT pg_terminate_backend(pg_stat_activity.pid)\n" + "FROM pg_stat_activity\n" + "WHERE pg_stat_activity.datname = ?\n" + " AND pid <> pg_backend_pid();" ; try ( Connection conn = getDataSource ( ) . getConnection ( ) ) { DatabaseMetaData meta = conn . getMetaData ( ) ; if ( meta . getDatabaseMajorVersion ( ) == 9 && meta . getDatabaseMinorVersion ( ) <= 1 ) { sql = "SELECT pg_terminate_backend(pg_stat_activity.procpid)\n" + "FROM pg_stat_activity\n" + "WHERE pg_stat_activity.datname = ?\n" + " AND procpid <> pg_backend_pid();" ; } } catch ( SQLException ex ) { log . warn ( "Could not get the PostgreSQL version" , ex ) ; } getJdbcTemplate ( ) . queryForRowSet ( sql , databasename ) ; } | Closes all open idle connections . The current data source must have superuser rights . |
39,535 | private String getText ( SToken currNode ) { List < DataSourceSequence > sSequences = ( ( SDocumentGraph ) currNode . getGraph ( ) ) . getOverlappedDataSourceSequence ( currNode , SALT_TYPE . STEXT_OVERLAPPING_RELATION ) ; if ( sSequences == null || sSequences . size ( ) != 1 ) { log . error ( "rst supports only one text and only text level" ) ; return null ; } log . debug ( "sSequences {}" , sSequences . toString ( ) ) ; if ( sSequences . get ( 0 ) . getDataSource ( ) instanceof STextualDS ) { STextualDS text = ( ( STextualDS ) sSequences . get ( 0 ) . getDataSource ( ) ) ; int start = sSequences . get ( 0 ) . getStart ( ) . intValue ( ) ; int end = sSequences . get ( 0 ) . getEnd ( ) . intValue ( ) ; return text . getText ( ) . substring ( start , end ) ; } log . error ( "{} instead of {}" , sSequences . get ( 0 ) . getDataSource ( ) . getClass ( ) . getName ( ) , STextualDS . class . getName ( ) ) ; return null ; } | Gets the overlapping token as string from a node which are direct dominated by this node . |
39,536 | private String getHTMLColor ( SToken token ) { if ( ! markedAndCovered . containsKey ( token ) ) { return null ; } int color = ( int ) ( long ) markedAndCovered . get ( token ) ; color = Math . min ( color > 0 ? color - 1 : color , MatchedNodeColors . values ( ) . length - 1 ) ; return MatchedNodeColors . values ( ) [ color ] . getHTMLColor ( ) ; } | Checks if a specific token is marked as matching token and returns a HTML color string . |
39,537 | private boolean isSegment ( SNode currNode ) { List < SRelation < SNode , SNode > > edges = currNode . getGraph ( ) . getOutRelations ( currNode . getId ( ) ) ; if ( edges != null && edges . size ( ) > 0 ) { for ( SRelation < SNode , SNode > edge : edges ) { if ( edge . getTarget ( ) instanceof SToken ) { return true ; } } } return false ; } | Checks if there exists an SRelation which targets a SToken . |
39,538 | private void sortChildren ( JSONObject root ) throws JSONException { JSONArray children = root . getJSONArray ( "children" ) ; List < JSONObject > childrenSorted = new ArrayList < JSONObject > ( children . length ( ) ) ; for ( int i = 0 ; i < children . length ( ) ; i ++ ) { childrenSorted . add ( children . getJSONObject ( i ) ) ; } Collections . sort ( childrenSorted , new Comparator < Object > ( ) { public int compare ( Object o1 , Object o2 ) { int o1IdxLeft = 0 ; int o1IdxRight = 0 ; int o2IdxLeft = 0 ; int o2IdxRight = 0 ; try { o1IdxLeft = ( ( JSONObject ) o1 ) . getJSONObject ( "data" ) . getInt ( SENTENCE_LEFT ) ; o1IdxRight = ( ( JSONObject ) o1 ) . getJSONObject ( "data" ) . getInt ( SENTENCE_RIGHT ) ; o2IdxLeft = ( ( JSONObject ) o2 ) . getJSONObject ( "data" ) . getInt ( SENTENCE_LEFT ) ; o2IdxRight = ( ( JSONObject ) o2 ) . getJSONObject ( "data" ) . getInt ( SENTENCE_RIGHT ) ; } catch ( JSONException ex ) { log . error ( "Could not compare sentence indizes." , ex ) ; } if ( o1IdxLeft + o1IdxRight > o2IdxLeft + o2IdxRight ) { return 1 ; } if ( o1IdxLeft + o1IdxRight == o2IdxLeft + o2IdxRight ) { return 0 ; } else { return - 1 ; } } } ) ; children = new JSONArray ( childrenSorted ) ; root . put ( "children" , children ) ; } | Sorts the children of root by the the sentence indizes . Since the sentence indizes are based on the token indizes some sentences have no sentences indizes because sometimes token nodes are out of context . |
39,539 | public void shutdown ( ) { log . info ( "Shutting down..." ) ; isShutdownRequested = true ; try { mainThread . join ( ) ; } catch ( InterruptedException e ) { log . error ( "Interrupted which waiting on main daemon thread to complete." ) ; } } | shutdown the AnnisService - ensure that current work load finishes |
/**
 * Creates and starts the REST web server.
 *
 * @param rethrowExceptions if true a startup failure is rethrown to the
 *        caller (preferring a wrapped AnnisException cause) in addition to
 *        being logged
 * @throws Exception if the server could not be started and
 *         rethrowExceptions is set
 */
public void start(boolean rethrowExceptions) throws Exception {
  log.info("Starting up REST...");
  try {
    createWebServer();
    if (server == null) {
      // server creation failed: request shutdown and record an error code
      isShutdownRequested = true;
      errorCode = 100;
    } else {
      server.start();
    }
  } catch (Exception ex) {
    log.error("could not start ANNIS REST service", ex);
    isShutdownRequested = true;
    errorCode = 100;
    if (rethrowExceptions) {
      // unwrap and prefer a more specific AnnisException if it is the cause
      if (!(ex instanceof AnnisException) && ex.getCause() instanceof AnnisException) {
        throw ((AnnisException) ex.getCause());
      } else {
        throw (ex);
      }
    }
  }
}
39,541 | public void setTimeout ( int milliseconds ) { if ( ctx != null ) { QueryDao dao = ( QueryDao ) ctx . getBean ( "queryDao" ) ; if ( dao != null ) { dao . setTimeout ( milliseconds ) ; } } } | Set the timeout in milliseconds |
39,542 | public static Map < String , ZipEntry > corporaInZipfile ( ZipFile zip ) throws IOException { Map < String , ZipEntry > result = new HashMap < > ( ) ; for ( ZipEntry e : getANNISEntry ( zip , "corpus" ) ) { String name = extractToplevelCorpusNames ( zip . getInputStream ( e ) ) ; result . put ( name , e ) ; } return result ; } | List all corpora of a ZIP file and their paths . |
39,543 | public static String extractToplevelCorpusNames ( InputStream corpusTabContent ) { String result = null ; try ( CSVReader csv = new CSVReader ( new InputStreamReader ( corpusTabContent , "UTF-8" ) , '\t' ) ) { String [ ] line ; int maxPost = Integer . MIN_VALUE ; int minPre = Integer . MAX_VALUE ; while ( ( line = csv . readNext ( ) ) != null ) { if ( line . length >= 6 && "CORPUS" . equalsIgnoreCase ( line [ 2 ] ) ) { int pre = Integer . parseInt ( line [ 4 ] ) ; int post = Integer . parseInt ( line [ 5 ] ) ; if ( pre <= minPre && post >= maxPost ) { minPre = pre ; maxPost = post ; result = line [ 1 ] ; } } } } catch ( UnsupportedEncodingException ex ) { log . error ( null , ex ) ; } catch ( IOException ex ) { log . error ( null , ex ) ; } return result ; } | Extract the name of the toplevel corpus from the content of the corpus . tab file . |
39,544 | public static List < ZipEntry > getANNISEntry ( ZipFile file , String table , String ... fileEndings ) { List < ZipEntry > allMatchingEntries = new ArrayList < > ( ) ; if ( fileEndings == null || fileEndings . length == 0 ) { fileEndings = new String [ ] { "tab" , "annis" } ; } final List < String > fullNames = new LinkedList < > ( ) ; for ( String e : fileEndings ) { fullNames . add ( table + "." + e ) ; } Enumeration < ? extends ZipEntry > entries = file . entries ( ) ; while ( entries . hasMoreElements ( ) ) { ZipEntry entry = entries . nextElement ( ) ; if ( ! entry . isDirectory ( ) ) { String name = entry . getName ( ) ; if ( name != null ) { name = name . replaceAll ( "\\\\" , "/" ) ; for ( String n : fullNames ) { if ( n . equalsIgnoreCase ( name ) || entry . getName ( ) . endsWith ( "/" + n ) ) { allMatchingEntries . add ( entry ) ; } } } } } return allMatchingEntries ; } | Find the directories containing the real ANNIS tab files for a zip file . |
39,545 | private void join ( ParserRuleContext ctx , Class < ? extends Join > type ) { QueryNode left = relationChain . get ( relationIdx ) ; QueryNode right = relationChain . get ( relationIdx + 1 ) ; try { Constructor < ? extends Join > c = type . getConstructor ( QueryNode . class ) ; Join newJoin = c . newInstance ( right ) ; left . addOutgoingJoin ( addParsedLocation ( ctx , newJoin ) ) ; } catch ( NoSuchMethodException ex ) { log . error ( null , ex ) ; } catch ( InstantiationException ex ) { log . error ( null , ex ) ; } catch ( IllegalAccessException ex ) { log . error ( null , ex ) ; } catch ( InvocationTargetException ex ) { log . error ( null , ex ) ; } } | Automatically create a join from a node and a join class . |
39,546 | private void analyzeTextTable ( String toplevelCorpusName ) { List < String > rawTexts = getQueryDao ( ) . getRawText ( toplevelCorpusName ) ; final Pattern WHITESPACE_MATCHER = Pattern . compile ( "^\\s+$" ) ; for ( String s : rawTexts ) { if ( s != null && WHITESPACE_MATCHER . matcher ( s ) . matches ( ) ) { if ( getQueryDao ( ) . getDocBrowserConfiguration ( toplevelCorpusName ) == null ) { Properties corpusConf ; try { corpusConf = getQueryDao ( ) . getCorpusConfiguration ( toplevelCorpusName ) ; } catch ( FileNotFoundException ex ) { log . error ( "not found a corpus configuration, so skip analyzing the text table" , ex ) ; return ; } boolean hasKey = corpusConf . containsKey ( "browse-documents" ) ; boolean isActive = Boolean . parseBoolean ( corpusConf . getProperty ( "browse-documents" ) ) ; if ( ! ( hasKey && isActive ) ) { log . info ( "disable document browser" ) ; corpusConf . put ( "browse-documents" , "false" ) ; getQueryDao ( ) . setCorpusConfiguration ( toplevelCorpusName , corpusConf ) ; } return ; } } } } | Searches for textes which are empty or only contains whitespaces . If that is the case the visualizer and no document visualizer are defined in the corpus properties file a new file is created and stores a new config which disables document browsing . |
39,547 | public void init ( ) { AnnotationIntrospector introspector = new JaxbAnnotationIntrospector ( ) ; jsonMapper . setAnnotationIntrospector ( introspector ) ; jsonMapper . configure ( SerializationConfig . Feature . INDENT_OUTPUT , false ) ; } | Called when Spring configuration finished |
39,548 | @ Transactional ( readOnly = true , propagation = Propagation . REQUIRED ) public String getDatabaseSchemaVersion ( ) { try { List < Map < String , Object > > result = getJdbcTemplate ( ) . queryForList ( "SELECT \"value\" FROM repository_metadata WHERE \"name\"='schema-version'" ) ; String schema = result . size ( ) > 0 ? ( String ) result . get ( 0 ) . get ( "value" ) : "" ; return schema ; } catch ( DataAccessException ex ) { String error = "Wrong database schema (too old to get the exact number), " + "please initialize the database." ; log . error ( error ) ; } return "" ; } | Get the real schema name and version as used by the database . |
/**
 * Reads ANNIS files from a directory and imports them as a corpus.
 *
 * @param path directory containing the ANNIS files
 * @param aliasName optional alias to register for the corpus
 * @param overwrite whether an existing corpus of the same name may be replaced
 * @param waitForOtherTasks whether to wait for a concurrently running import
 *        instead of failing immediately
 * @return true on success, false if another import holds the lock or the
 *         format version is unknown
 */
@Transactional(readOnly = false, propagation = Propagation.REQUIRES_NEW, isolation = Isolation.READ_COMMITTED)
public boolean importCorpus(String path, String aliasName, boolean overwrite, boolean waitForOtherTasks) {
  checkDatabaseSchemaVersion();
  // the repository metadata table acts as a lock: only one import at a time
  if (!lockRepositoryMetadataTable(waitForOtherTasks)) {
    log.error("Another import is currently running");
    return false;
  }
  // imports can run very long, so disable the statement timeout
  getJdbcTemplate().update("SET statement_timeout TO 0");
  ANNISFormatVersion annisFormatVersion = getANNISFormatVersion(path);
  if (annisFormatVersion == ANNISFormatVersion.V3_3) {
    return importVersion4(path, aliasName, overwrite, annisFormatVersion);
  } else if (annisFormatVersion == ANNISFormatVersion.V3_1 || annisFormatVersion == ANNISFormatVersion.V3_2) {
    return importVersion3(path, aliasName, overwrite, annisFormatVersion);
  }
  log.error("Unknown ANNIS import format version");
  return false;
}
39,550 | private void importSingleFile ( String file , String toplevelCorpusName , long corpusRef ) { BinaryImportHelper preStat = new BinaryImportHelper ( file , getRealDataDir ( ) , toplevelCorpusName , corpusRef , mimeTypeMapping ) ; getJdbcTemplate ( ) . execute ( BinaryImportHelper . SQL , preStat ) ; } | Imports a single binary file . |
39,551 | @ Transactional ( readOnly = true ) public void cleanupData ( ) { List < String > allFilesInDatabaseList = getJdbcTemplate ( ) . queryForList ( "SELECT filename FROM media_files AS m" , String . class ) ; File dataDir = getRealDataDir ( ) ; Set < File > allFilesInDatabase = new HashSet < > ( ) ; for ( String singleFileName : allFilesInDatabaseList ) { allFilesInDatabase . add ( new File ( dataDir , singleFileName ) ) ; } log . info ( "Cleaning up the data directory" ) ; File [ ] childFiles = dataDir . listFiles ( ) ; if ( childFiles != null ) { for ( File f : childFiles ) { if ( f . isFile ( ) && ! allFilesInDatabase . contains ( f ) ) { if ( ! f . delete ( ) ) { log . warn ( "Could not delete {}" , f . getAbsolutePath ( ) ) ; } } } } } | Delete files not used by this instance in the data directory . |
/**
 * Lists the corpora using the connection information of a given
 * database.properties file, or of the current data source if the file is
 * null.
 *
 * @param databaseProperties external connection configuration, may be null
 * @return one map per corpus_info row; empty on failure
 */
public List<Map<String, Object>> listCorpusStats(File databaseProperties) {
  List<Map<String, Object>> result = new LinkedList<>();
  DataSource origDataSource = getDataSource().getInnerDataSource();
  try {
    if (databaseProperties != null) {
      // temporarily swap the inner data source for the external database
      getDataSource().setInnerDataSource(createDataSource(databaseProperties));
    }
    result = getJdbcTemplate().queryForList("SELECT * FROM corpus_info ORDER BY name");
  } catch (IOException | URISyntaxException | DataAccessException ex) {
    if (databaseProperties == null) {
      log.error("Could not query corpus list", ex);
    } else {
      log.error("Could not query corpus list for the file "
        + databaseProperties.getAbsolutePath(), ex);
    }
  } finally {
    // always restore the original data source, even on failure
    getDataSource().setInnerDataSource(origDataSource);
  }
  return result;
}
/**
 * Provides a multimap where the keys are the aliases and the values are the
 * corpus names, read either from the current database or from the one
 * described by the given database.properties file.
 *
 * @param databaseProperties external connection configuration, may be null
 * @return alias-to-corpus-name multimap; empty on failure
 */
public Multimap<String, String> listCorpusAlias(File databaseProperties) {
  Multimap<String, String> result = TreeMultimap.create();
  DataSource origDataSource = getDataSource().getInnerDataSource();
  try {
    if (databaseProperties != null) {
      // temporarily swap the inner data source for the external database
      getDataSource().setInnerDataSource(createDataSource(databaseProperties));
    }
    result = getJdbcTemplate().query(
      "SELECT a.alias AS alias, c.name AS corpus\n"
      + "FROM corpus_alias AS a, corpus AS c\n"
      + "WHERE\n"
      + " a.corpus_ref = c.id",
      new ResultSetExtractor<Multimap<String, String>>() {
        public Multimap<String, String> extractData(ResultSet rs)
          throws SQLException, DataAccessException {
          Multimap<String, String> data = TreeMultimap.create();
          while (rs.next()) {
            // column 1: alias, column 2: corpus name
            data.put(rs.getString(1), rs.getString(2));
          }
          return data;
        }
      });
  } catch (IOException | URISyntaxException | DataAccessException ex) {
    if (databaseProperties == null) {
      log.error("Could not query corpus list", ex);
    } else {
      log.error("Could not query corpus list for the file "
        + databaseProperties.getAbsolutePath(), ex);
    }
  } finally {
    // always restore the original data source, even on failure
    getDataSource().setInnerDataSource(origDataSource);
  }
  return result;
}
39,554 | private void bulkloadTableFromResource ( String table , Resource resource ) { log . debug ( "bulk-loading data from '" + resource . getFilename ( ) + "' into table '" + table + "'" ) ; String sql = "COPY \"" + table + "\" FROM STDIN WITH DELIMITER E'\t' NULL AS 'NULL'" ; try { Connection originalCon = DataSourceUtils . getConnection ( getDataSource ( ) ) ; Connection con = originalCon ; if ( con instanceof DelegatingConnection ) { DelegatingConnection < ? > delCon = ( DelegatingConnection < ? > ) con ; con = delCon . getInnermostDelegate ( ) ; } Preconditions . checkState ( con instanceof PGConnection , "bulk-loading only works with a PostgreSQL JDBC connection" ) ; PGConnection pgCon = ( PGConnection ) con ; pgCon . getCopyAPI ( ) . copyIn ( sql , resource . getInputStream ( ) ) ; DataSourceUtils . releaseConnection ( originalCon , getDataSource ( ) ) ; } catch ( SQLException e ) { throw new DatabaseAccessException ( e ) ; } catch ( IOException e ) { throw new FileAccessException ( e ) ; } } | bulk - loads a table from a resource |
39,555 | private List < String > listIndexesOnTables ( List < String > tables ) { String sql = "" + "SELECT indexname " + "FROM pg_indexes " + "WHERE tablename IN (" + StringUtils . repeat ( "?" , "," , tables . size ( ) ) + ") " + "AND lower(indexname) NOT IN " + " (SELECT lower(conname) FROM pg_constraint WHERE contype in ('p', 'u'))" ; return getJdbcTemplate ( ) . query ( sql , tables . toArray ( ) , stringRowMapper ( ) ) ; } | exploits the fact that the index has the same name as the constraint |
39,556 | private String getTopLevelCorpusFromTmpArea ( ) { String sql = "SELECT name FROM " + tableInStagingArea ( "corpus" ) + " WHERE type='CORPUS'\n" + "AND pre = (SELECT min(pre) FROM " + tableInStagingArea ( "corpus" ) + ")\n" + "AND post = (SELECT max(post) FROM " + tableInStagingArea ( "corpus" ) + ")" ; return getJdbcTemplate ( ) . query ( sql , new ResultSetExtractor < String > ( ) { public String extractData ( ResultSet rs ) throws SQLException , DataAccessException { if ( rs . next ( ) ) { return rs . getString ( "name" ) ; } else { return null ; } } } ) ; } | Retrieves the name of the top level corpus in the corpus . tab file . |
39,557 | private void importResolverVisMapTable ( String path , String table , String annisFileSuffix ) { try { File resolver_vis_tab = new File ( path , table + annisFileSuffix ) ; if ( ! resolver_vis_tab . isFile ( ) ) { return ; } String firstLine ; try ( BufferedReader bReader = new BufferedReader ( new InputStreamReader ( new FileInputStream ( resolver_vis_tab ) , "UTF-8" ) ) ) { firstLine = bReader . readLine ( ) ; } int cols = 9 ; if ( firstLine != null ) { String [ ] entries = firstLine . split ( "\t" ) ; cols = entries . length ; log . debug ( "the first row: {} amount of cols: {}" , entries , cols ) ; } switch ( cols ) { case 8 : readOldResolverVisMapFormat ( resolver_vis_tab ) ; break ; case 9 : bulkloadTableFromResource ( tableInStagingArea ( table ) , new FileSystemResource ( new File ( path , table + annisFileSuffix ) ) ) ; break ; default : log . error ( "invalid amount of cols" ) ; throw new RuntimeException ( ) ; } } catch ( IOException | FileAccessException e ) { log . error ( "could not read {}" , table , e ) ; } } | Imported the old and the new version of the resolver_vis_map . tab . The new version has an additional column for visibility status of the visualization . |
39,558 | private void fixResolverVisMapTable ( String toplevelCorpus , String table ) { log . info ( "checking resolver_vis_map for errors" ) ; int invalidRows = getJdbcTemplate ( ) . update ( "DELETE FROM " + table + " WHERE corpus <> ?" , toplevelCorpus ) ; if ( invalidRows > 0 ) { log . warn ( "there were " + invalidRows + " rows in the resolver_vis_map that referenced the wrong corpus" ) ; } } | Removes any unwanted entries from the resolver_vis_map table |
39,559 | private void analyzeAutoGeneratedQueries ( long corpusID ) { List < ExampleQuery > exampleQueries = getJdbcTemplate ( ) . query ( "SELECT * FROM _" + EXAMPLE_QUERIES_TAB , new RowMapper < ExampleQuery > ( ) { public ExampleQuery mapRow ( ResultSet rs , int i ) throws SQLException { ExampleQuery eQ = new ExampleQuery ( ) ; eQ . setExampleQuery ( rs . getString ( "example_query" ) ) ; return eQ ; } } ) ; countExampleQueryNodes ( exampleQueries ) ; getOperators ( exampleQueries , "\\.(\\*)?|\\>|\\>\\*|_i_" ) ; writeAmountOfNodesBack ( exampleQueries ) ; } | Counts nodes and operators of the AQL example query and writes it back to the staging area . |
39,560 | private void countExampleQueryNodes ( List < ExampleQuery > exampleQueries ) { for ( ExampleQuery eQ : exampleQueries ) { QueryData query = getQueryDao ( ) . parseAQL ( eQ . getExampleQuery ( ) , null ) ; int count = 0 ; for ( List < QueryNode > qNodes : query . getAlternatives ( ) ) { count += qNodes . size ( ) ; } eQ . setNodes ( count ) ; } } | Maps example queries to integer which represents the amount of nodes of the aql query . |
39,561 | private void writeAmountOfNodesBack ( List < ExampleQuery > exampleQueries ) { String sqlTemplate = "UPDATE _" + EXAMPLE_QUERIES_TAB + " SET nodes=?, used_ops=CAST(? AS text[]) WHERE example_query=?;" ; for ( ExampleQuery eQ : exampleQueries ) { getJdbcTemplate ( ) . update ( sqlTemplate , eQ . getNodes ( ) , eQ . getUsedOperators ( ) , eQ . getExampleQuery ( ) ) ; } } | Writes the counted nodes and the used operators back to the staging area . |
39,562 | private void checkTopLevelCorpus ( ) throws ConflictingCorpusException { String corpusName = getTopLevelCorpusFromTmpArea ( ) ; if ( existConflictingTopLevelCorpus ( corpusName ) ) { String msg = "There already exists a top level corpus with the name: " + corpusName ; throw new ConflictingCorpusException ( msg ) ; } } | Checks if a already exists a corpus with the same name of the top level corpus in the corpus . tab file . If this is the case an Exception is thrown and the import is aborted . |
39,563 | public void doClearExampleQueries ( String unused ) { for ( Long corpusId : corpusList ) { System . out . println ( "delete example queries for " + corpusId ) ; queriesGenerator . delExampleQueries ( corpusId ) ; } } | Clears all example queries . |
39,564 | public void doGenerateExampleQueries ( String args ) { Boolean del = false ; if ( args != null && "overwrite" . equals ( args ) ) { del = true ; } if ( corpusList != null ) { for ( Long corpusId : corpusList ) { System . out . println ( "generate example queries " + corpusId ) ; queriesGenerator . generateQueries ( corpusId , del ) ; } } } | Enables the auto generating of example queries for the annis shell . |
/**
 * Does the setup for the QueryData object of a query function: parses the
 * query, attaches corpus configuration, metadata document filters and the
 * extensions the given function needs, and records a benchmark entry.
 *
 * @param annisQuery the AQL query (or a match-group string for subgraph)
 * @param queryFunction name such as "find", "annotate", "subgraph",
 *        "frequency", optionally prefixed with "sql_"
 * @return the prepared QueryData
 */
private QueryData analyzeQuery(String annisQuery, String queryFunction) {
  QueryData queryData;
  log.debug("analyze query for " + queryFunction + " function");
  if (queryFunction != null && !queryFunction.matches("(sql_)?subgraph")) {
    // normal AQL query
    queryData = queryDao.parseAQL(annisQuery, corpusList);
  } else {
    // subgraph queries get a match-group string instead of AQL
    queryData = GraphHelper.createQueryData(MatchGroup.parseString(annisQuery), queryDao);
  }
  queryData.setCorpusConfiguration(queryDao.getCorpusConfiguration());
  queryData.setDocuments(metaDataFilter.getDocumentsForMetadata(queryData));
  // attach the extensions required by the specific query function
  if (queryFunction != null && queryFunction.matches("(sql_)?(annotate|find)")) {
    queryData.addExtension(new AnnotateQueryData(left, right, segmentationLayer, filter));
    queryData.addExtension(new LimitOffsetQueryData(offset, limit, order));
  } else if (queryFunction != null && queryFunction.matches("(sql_)?subgraph")) {
    queryData.addExtension(new AnnotateQueryData(left, right, segmentationLayer, filter));
  } else if (queryFunction != null && queryFunction.matches("(sql_)?frequency")) {
    if (frequencyDef == null) {
      out.println("You have to set the 'freq-def' property first");
    } else {
      queryData.addExtension(frequencyDef);
    }
  }
  if (annisQuery != null) {
    // record a benchmark entry; benchmarkName is consumed and reset here
    if (benchmarkName == null) {
      benchmarkName = "auto_" + benchmarks.size();
    }
    Benchmark b = new AnnisRunner.Benchmark(queryFunction + " " + annisQuery, queryData);
    b.name = benchmarkName;
    benchmarks.add(b);
    benchmarkName = null;
  }
  return queryData;
}
39,566 | public static String calculateSHAHash ( String s ) throws NoSuchAlgorithmException , UnsupportedEncodingException { MessageDigest md = MessageDigest . getInstance ( "SHA-256" ) ; md . update ( s . getBytes ( "UTF-8" ) ) ; byte [ ] digest = md . digest ( ) ; StringBuilder hashVal = new StringBuilder ( ) ; for ( byte b : digest ) { hashVal . append ( String . format ( "%02x" , b ) ) ; } return hashVal . toString ( ) ; } | Hashes a string using SHA - 256 . |
39,567 | public String setHighlightingColor ( SNode node ) { String color = null ; SFeature featMatched = node . getFeature ( ANNIS_NS , FEAT_MATCHEDNODE ) ; Long matchRaw = featMatched == null ? null : featMatched . getValue_SNUMERIC ( ) ; if ( matchRaw != null ) { color = MatchedNodeColors . getHTMLColorByMatch ( matchRaw ) ; return color ; } return color ; } | Implements the getHighlightingColor method of the org . corpus_tools . salt . util . StyleImporter interface . |
39,568 | public void injectUniqueCSS ( String cssContent , String wrapperClass ) { if ( alreadyAddedCSS == null ) { alreadyAddedCSS = new TreeSet < String > ( ) ; } if ( wrapperClass != null ) { cssContent = wrapCSS ( cssContent , wrapperClass ) ; } String hashForCssContent = Hashing . md5 ( ) . hashString ( cssContent , Charsets . UTF_8 ) . toString ( ) ; if ( ! alreadyAddedCSS . contains ( hashForCssContent ) ) { Page . getCurrent ( ) . getStyles ( ) . add ( cssContent ) ; alreadyAddedCSS . add ( hashForCssContent ) ; } } | Inject CSS into the UI . This function will not add multiple style - elements if the exact CSS string was already added . |
/**
 * Frequency analysis endpoint: parses the query, checks the matrix
 * permission per corpus, runs the frequency computation and logs its
 * duration.
 *
 * @param query the AnnisQL query
 * @param rawCorpusNames comma separated list of corpus names
 * @param rawFields comma separated list of result vector elements
 * @return the computed frequency table as XML
 */
@Path("search/frequency")
@Produces("application/xml")
public FrequencyTable frequency(@QueryParam("q") String query,
  @QueryParam("corpora") String rawCorpusNames,
  @QueryParam("fields") String rawFields) {
  requiredParameter(query, "q", "AnnisQL query");
  requiredParameter(rawCorpusNames, "corpora", "comma separated list of corpus names");
  requiredParameter(rawFields, "fields", "Comma seperated list of result vector elements.");
  Subject user = SecurityUtils.getSubject();
  List<String> corpusNames = splitCorpusNamesFromRaw(rawCorpusNames);
  // every requested corpus needs the matrix permission
  for (String c : corpusNames) {
    user.checkPermission("query:matrix:" + c);
  }
  QueryData data = queryDataFromParameters(query, rawCorpusNames);
  FrequencyTableQuery ext = FrequencyTableQuery.parse(rawFields);
  data.addExtension(ext);
  long start = new Date().getTime();
  FrequencyTable freqTable = queryDao.frequency(data);
  long end = new Date().getTime();
  // record the wall-clock duration of the frequency query
  logQuery("FREQUENCY", query, splitCorpusNamesFromRaw(rawCorpusNames), end - start);
  return freqTable;
}
39,570 | @ Produces ( "text/plain" ) @ Path ( "check" ) public String check ( @ QueryParam ( "q" ) String query , @ DefaultValue ( "" ) @ QueryParam ( "corpora" ) String rawCorpusNames ) { Subject user = SecurityUtils . getSubject ( ) ; List < String > corpusNames = splitCorpusNamesFromRaw ( rawCorpusNames ) ; for ( String c : corpusNames ) { user . checkPermission ( "query:parse:" + c ) ; } Collections . sort ( corpusNames ) ; List < Long > corpusIDs = queryDao . mapCorpusNamesToIds ( corpusNames ) ; queryDao . parseAQL ( query , corpusIDs ) ; return "ok" ; } | Return true if this is a valid query or throw exception when invalid |
39,571 | @ Path ( "parse/nodes" ) @ Produces ( "application/xml" ) public Response parseNodes ( @ QueryParam ( "q" ) String query , @ DefaultValue ( "" ) @ QueryParam ( "corpora" ) String rawCorpusNames ) { Subject user = SecurityUtils . getSubject ( ) ; List < String > corpusNames = splitCorpusNamesFromRaw ( rawCorpusNames ) ; for ( String c : corpusNames ) { user . checkPermission ( "query:parse:" + c ) ; } Collections . sort ( corpusNames ) ; List < Long > corpusIDs = queryDao . mapCorpusNamesToIds ( corpusNames ) ; QueryData data = queryDao . parseAQL ( query , corpusIDs ) ; List < QueryNode > nodes = new LinkedList < > ( ) ; int i = 0 ; for ( List < QueryNode > alternative : data . getAlternatives ( ) ) { for ( QueryNode n : alternative ) { n . setAlternativeNumber ( i ) ; nodes . add ( n ) ; } i ++ ; } return Response . ok ( new GenericEntity < List < QueryNode > > ( nodes ) { } ) . build ( ) ; } | Return the list of the query nodes if this is a valid query or throw exception when invalid |
/**
 * Fetches the example queries for specific corpora (or all corpora when no
 * names are given), restricted to the corpora the current user may query.
 *
 * @param rawCorpusNames comma separated corpus names; null means all corpora
 * @return the example queries of all permitted corpora
 * @throws WebApplicationException with status 500 on any internal failure
 */
@Path("corpora/example-queries/")
@Produces(MediaType.APPLICATION_XML)
public List<ExampleQuery> getExampleQueries(@QueryParam("corpora") String rawCorpusNames) throws WebApplicationException {
  Subject user = SecurityUtils.getSubject();
  try {
    String[] corpusNames;
    if (rawCorpusNames != null) {
      corpusNames = rawCorpusNames.split(",");
    } else {
      // no corpora given: default to every known corpus
      List<AnnisCorpus> allCorpora = queryDao.listCorpora();
      corpusNames = new String[allCorpora.size()];
      for (int i = 0; i < corpusNames.length; i++) {
        corpusNames[i] = allCorpora.get(i).getName();
      }
    }
    // silently drop corpora the user has no query permission for
    List<String> allowedCorpora = new ArrayList<>();
    for (String c : corpusNames) {
      if (user.isPermitted("query:*:" + c)) {
        allowedCorpora.add(c);
      }
    }
    List<Long> corpusIDs = queryDao.mapCorpusNamesToIds(allowedCorpora);
    return queryDao.getExampleQueries(corpusIDs);
  } catch (Exception ex) {
    // boundary catch-all: translate to an HTTP 500 for the client
    log.error("Problem accessing example queries", ex);
    throw new WebApplicationException(ex, 500);
  }
}
39,573 | private void requiredParameter ( String value , String name , String description ) throws WebApplicationException { if ( value == null ) { throw new WebApplicationException ( Response . status ( Response . Status . BAD_REQUEST ) . type ( MediaType . TEXT_PLAIN ) . entity ( "missing required parameter '" + name + "' (" + description + ")" ) . build ( ) ) ; } } | Throw an exception if the parameter is missing . |
39,574 | private List < String > splitCorpusNamesFromRaw ( String rawCorpusNames ) { return new ArrayList < > ( Splitter . on ( "," ) . omitEmptyStrings ( ) . trimResults ( ) . splitToList ( rawCorpusNames ) ) ; } | Splits a list of corpus names into a proper java list . |
39,575 | private int getCorpusConfigIntValues ( String context ) { int value = Integer . parseInt ( defaultCorpusConfig . getConfig ( ) . getProperty ( context ) ) ; if ( value < 0 ) { throw new IllegalStateException ( "the value must be > 0" ) ; } return value ; } | Extract corpus configurations values with numeric values . |
39,576 | @ Path ( "rawtext/{top}/{docname}" ) @ Produces ( MediaType . APPLICATION_XML ) public RawTextWrapper getRawText ( @ PathParam ( "top" ) String top , @ PathParam ( "docname" ) String docname ) { Subject user = SecurityUtils . getSubject ( ) ; user . checkPermission ( "query:raw_text:" + top ) ; RawTextWrapper result = new RawTextWrapper ( ) ; result . setTexts ( queryDao . getRawText ( top , docname ) ) ; return result ; } | Fetches the raw text from the text . tab file . |
39,577 | private Map < Integer , List < Annotation > > splitListAnnotations ( ) { List < Annotation > metadata = Helper . getMetaData ( toplevelCorpusName , documentName ) ; Map < Integer , List < Annotation > > hashMetaData = new HashMap < > ( ) ; if ( metadata != null && ! metadata . isEmpty ( ) ) { if ( documentName != null ) { hashMetaData = new TreeMap < > ( Collections . reverseOrder ( ) ) ; } else { hashMetaData = new TreeMap < > ( ) ; } for ( Annotation metaDatum : metadata ) { int pre = metaDatum . getPre ( ) ; if ( ! hashMetaData . containsKey ( pre ) ) { hashMetaData . put ( pre , new ArrayList < Annotation > ( ) ) ; hashMetaData . get ( pre ) . add ( metaDatum ) ; } else { hashMetaData . get ( pre ) . add ( metaDatum ) ; } } } return hashMetaData ; } | Returns empty map if no metadata are available . |
/**
 * Places a "none" label in the middle center of the corpus browser panel,
 * replacing the annotation table if it is currently shown.
 */
private void addEmptyLabel() {
  if (emptyLabel == null) {
    // lazily created and reused across invocations
    emptyLabel = new Label("none");
  }
  if (corpusAnnotationTable != null) {
    layout.removeComponent(corpusAnnotationTable);
  }
  layout.addComponent(emptyLabel);
  emptyLabel.setSizeUndefined();
  // center the placeholder and let it take up the remaining space
  layout.setComponentAlignment(emptyLabel, Alignment.MIDDLE_CENTER);
  layout.setExpandRatio(emptyLabel, 1.0f);
}
/**
 * Updates the table with document names and generates the additional
 * (metadata) columns defined by the user: fills an IndexedContainer with one
 * row per document, sets the visible columns and finally applies the
 * configured metadata sorting.
 *
 * @param docs the document annotations to show
 */
void setDocNames(List<Annotation> docs) {
  container = new IndexedContainer();
  container.addContainerProperty(PROP_DOC_NAME, String.class, "n/a");
  MetaColumns metaCols = generateMetaColumns();
  // register a property for every user-defined visible and sort column
  for (MetaDataCol metaDatum : metaCols.visibleColumns) {
    container.addContainerProperty(metaDatum.getColName(), String.class, "n/a");
  }
  for (MetaDataCol metaDatum : metaCols.sortColumns) {
    container.addContainerProperty(metaDatum.getColName(), String.class, "n/a");
  }
  container.addContainerProperty("corpus path", String.class, "n/a");
  container.addContainerProperty("info", Button.class, null);
  container.addContainerProperty("visualizer", Panel.class, null);
  for (Annotation a : docs) {
    String doc = a.getName();
    // build the "top > sub > doc" corpus path used as the row id
    List<String> pathList = a.getAnnotationPath();
    if (pathList == null) {
      pathList = new LinkedList<>();
    }
    Collections.reverse(pathList);
    String path = StringUtils.join(pathList, " > ");
    Item row = container.addItem(path);
    // addItem returns null for duplicate ids; such rows are skipped
    if (row != null) {
      row.getItemProperty(PROP_DOC_NAME).setValue(doc);
      for (MetaDataCol metaDataCol : metaCols.visibleColumns) {
        String value = generateCell(a.getAnnotationPath(), metaDataCol);
        row.getItemProperty(metaDataCol.getColName()).setValue(value);
      }
      // sort-only columns still need values, unless already filled above
      for (MetaDataCol metaDataCol : metaCols.sortColumns) {
        if (!metaCols.visibleColumns.contains(metaDataCol)) {
          String value = generateCell(a.getAnnotationPath(), metaDataCol);
          row.getItemProperty(metaDataCol.getColName()).setValue(value);
        }
      }
      row.getItemProperty("corpus path").setValue(path);
      row.getItemProperty("visualizer").setValue(generateVisualizerLinks(doc));
      row.getItemProperty("info").setValue(generateInfoButtonCell(doc));
    }
  }
  setContainerDataSource(container);
  // assemble the visible column order: name, metadata, path, links
  Object[] metaDataColNames = new Object[metaCols.visibleColumns.size()];
  for (int i = 0; i < metaDataColNames.length; i++) {
    metaDataColNames[i] = metaCols.visibleColumns.get(i).getColName();
  }
  Object[] columnNames = ArrayUtils.addAll(
    ArrayUtils.addAll(new Object[] { "document name" }, metaDataColNames),
    new Object[] { "corpus path", "visualizer", "info" });
  setVisibleColumns(columnNames);
  for (Object colName : columnNames) {
    setColumnHeader((String) colName, (String) colName);
  }
  sortByMetaData(metaCols.sortColumns);
}
39,580 | private void sortByMetaData ( List < MetaDataCol > sortColumns ) { if ( sortColumns == null || sortColumns . isEmpty ( ) ) { sort ( new Object [ ] { PROP_DOC_NAME } , new boolean [ ] { true } ) ; return ; } Object [ ] sortByColumns = new Object [ sortColumns . size ( ) ] ; boolean [ ] ascendingOrDescending = new boolean [ sortColumns . size ( ) ] ; for ( int i = 0 ; i < sortColumns . size ( ) ; i ++ ) { sortByColumns [ i ] = sortColumns . get ( i ) . getColName ( ) ; ascendingOrDescending [ i ] = sortColumns . get ( i ) . ascending ; } sort ( sortByColumns , ascendingOrDescending ) ; } | Sort the table by a given config . The config includes metadata keys and the table is sorted lexicographically by their values . If not config for sorting is determined the document name is used for sorting . |
39,581 | private List < Annotation > getDocMetaData ( String document ) { if ( ! docMetaDataCache . containsKey ( docBrowserPanel . getCorpus ( ) ) ) { WebResource res = Helper . getAnnisWebResource ( ) ; res = res . path ( "meta/corpus/" ) . path ( urlPathEscape . escape ( docBrowserPanel . getCorpus ( ) ) ) . path ( "closure" ) ; Map < String , List < Annotation > > metaDataMap = new HashMap < > ( ) ; for ( Annotation a : res . get ( new Helper . AnnotationListType ( ) ) ) { if ( a . getAnnotationPath ( ) != null && ! a . getAnnotationPath ( ) . isEmpty ( ) && a . getType ( ) . equals ( "DOCUMENT" ) ) { String docName = a . getAnnotationPath ( ) . get ( 0 ) ; if ( ! metaDataMap . containsKey ( docName ) ) { metaDataMap . put ( docName , new ArrayList < Annotation > ( ) ) ; } metaDataMap . get ( docName ) . add ( a ) ; } } docMetaDataCache . put ( docBrowserPanel . getCorpus ( ) , metaDataMap ) ; } if ( docMetaDataCache . get ( docBrowserPanel . getCorpus ( ) ) . containsKey ( document ) ) { return docMetaDataCache . get ( docBrowserPanel . getCorpus ( ) ) . get ( document ) ; } else { return new ArrayList < Annotation > ( ) ; } } | Retrieves date from the cache or from the annis rest service for a specific document . |
39,582 | public boolean addEvent ( GridEvent e ) { BitSet eventOccupance = new BitSet ( e . getRight ( ) ) ; eventOccupance . set ( e . getLeft ( ) , e . getRight ( ) + 1 , true ) ; if ( occupancySet . intersects ( eventOccupance ) ) { return false ; } occupancySet . or ( eventOccupance ) ; events . add ( e ) ; if ( e . getTextID ( ) != null && ! e . getTextID ( ) . isEmpty ( ) ) { textIDs . add ( e . getTextID ( ) ) ; } return true ; } | Adds an event to this row |
39,583 | public void updateFragment ( DisplayedResultQuery q ) { List < String > args = Helper . citationFragment ( q . getQuery ( ) , q . getCorpora ( ) , q . getLeftContext ( ) , q . getRightContext ( ) , q . getSegmentation ( ) , q . getBaseText ( ) , q . getOffset ( ) , q . getLimit ( ) , q . getOrder ( ) , q . getSelectedMatches ( ) ) ; lastEvaluatedFragment = StringUtils . join ( args , "&" ) ; UI . getCurrent ( ) . getPage ( ) . setUriFragment ( lastEvaluatedFragment , false ) ; Page . getCurrent ( ) . setTitle ( ui . getInstanceConfig ( ) . getInstanceDisplayName ( ) + " (ANNIS Corpus Search)" ) ; } | Updates the browser address bar with the current query parameters and the query itself . |
39,584 | public void updateFragementWithSelectedCorpus ( Set < String > corpora ) { if ( corpora != null && ! corpora . isEmpty ( ) ) { String fragment = "_c=" + Helper . encodeBase64URL ( StringUtils . join ( corpora , "," ) ) ; UI . getCurrent ( ) . getPage ( ) . setUriFragment ( fragment ) ; } else { UI . getCurrent ( ) . getPage ( ) . setUriFragment ( "" ) ; } } | Adds the _c fragment to the URL in the browser address bar when a corpus is selected .
39,585 | public void reset ( ) { try { if ( rs . getType ( ) == ResultSet . TYPE_FORWARD_ONLY ) { throw new UnsupportedOperationException ( "Can not reset iterator for a ResultSet that is of type \"forward only\"" ) ; } hasNext = rs . first ( ) ; } catch ( SQLException ex ) { log . error ( null , ex ) ; } } | Returns to the beginning of the iteration . |
39,586 | public FrequencyTableEntry toFrequencyTableEntry ( ) { FrequencyTableEntry result = new FrequencyTableEntry ( ) ; result . setReferencedNode ( nr ) ; if ( annotation != null && "tok" . equals ( annotation ) ) { result . setType ( FrequencyTableEntryType . span ) ; } else { result . setType ( FrequencyTableEntryType . annotation ) ; result . setKey ( annotation ) ; } return result ; } | Converts this object to a proper definition . |
39,587 | public static Response map ( SQLException sqlEx ) { if ( null != sqlEx . getSQLState ( ) ) { switch ( sqlEx . getSQLState ( ) ) { case "57014" : return Response . status ( 504 ) . entity ( sqlEx . getMessage ( ) ) . build ( ) ; case "2201B" : AqlParseError error = new AqlParseError ( sqlEx . getMessage ( ) ) ; return Response . status ( Response . Status . BAD_REQUEST ) . entity ( new GenericEntity < List < AqlParseError > > ( Arrays . asList ( error ) ) { } ) . type ( "application/xml" ) . build ( ) ; } } return null ; } | Maps an exception to a response or returns null if it wasn't handled
39,588 | private void convertSaltProject ( SaltProject p , List < String > annoKeys , Map < String , String > args , boolean alignmc , int offset , Map < String , CorpusConfig > corpusConfigs , Writer out , Integer nodeCount ) throws IOException , IllegalArgumentException { int recordNumber = offset ; if ( p != null && p . getCorpusGraphs ( ) != null ) { Map < String , String > spanAnno2order = null ; boolean virtualTokenizationFromNamespace = false ; Set < String > corpusNames = CommonHelper . getToplevelCorpusNames ( p ) ; if ( ! corpusNames . isEmpty ( ) ) { CorpusConfig config = corpusConfigs . get ( corpusNames . iterator ( ) . next ( ) ) ; if ( config != null ) { if ( "true" . equalsIgnoreCase ( config . getConfig ( "virtual_tokenization_from_namespace" ) ) ) { virtualTokenizationFromNamespace = true ; } else { String mappingRaw = config . getConfig ( "virtual_tokenization_mapping" ) ; if ( mappingRaw != null ) { spanAnno2order = new HashMap < > ( ) ; for ( String singleMapping : Splitter . on ( ',' ) . split ( mappingRaw ) ) { List < String > mappingParts = Splitter . on ( '=' ) . splitToList ( singleMapping ) ; if ( mappingParts . size ( ) >= 2 ) { spanAnno2order . put ( mappingParts . get ( 0 ) , mappingParts . get ( 1 ) ) ; } } } } } } for ( SCorpusGraph corpusGraph : p . getCorpusGraphs ( ) ) { if ( corpusGraph . getDocuments ( ) != null ) { for ( SDocument doc : corpusGraph . getDocuments ( ) ) { if ( virtualTokenizationFromNamespace ) { TimelineReconstructor . removeVirtualTokenizationUsingNamespace ( doc . getDocumentGraph ( ) ) ; } else if ( spanAnno2order != null ) { TimelineReconstructor . removeVirtualTokenization ( doc . getDocumentGraph ( ) , spanAnno2order ) ; } if ( nodeCount != null ) { createAdjacencyMatrix ( doc . getDocumentGraph ( ) , args , recordNumber ++ , nodeCount ) ; } else { outputText ( doc . getDocumentGraph ( ) , alignmc , recordNumber ++ , out ) ; } } } } } } | invokes the createAdjacencyMatrix method if nodeCount != null or outputText otherwise
39,589 | public void reportServiceException ( UniformInterfaceException ex , boolean showNotification ) { QueryPanel qp = searchView . getControlPanel ( ) . getQueryPanel ( ) ; String caption = null ; String description = null ; if ( ! AnnisBaseUI . handleCommonError ( ex , "execute query" ) ) { switch ( ex . getResponse ( ) . getStatus ( ) ) { case 400 : List < AqlParseError > errors = ex . getResponse ( ) . getEntity ( new GenericType < List < AqlParseError > > ( ) { } ) ; caption = "Parsing error" ; description = Joiner . on ( "\n" ) . join ( errors ) ; qp . setStatus ( description ) ; qp . setErrors ( errors ) ; break ; case 504 : caption = "Timeout" ; description = "Query execution took too long." ; qp . setStatus ( caption + ": " + description ) ; break ; case 403 : if ( Helper . getUser ( ) == null ) { qp . setStatus ( "You don't have the access rights to query this corpus. " + "You might want to login to access more corpora." ) ; searchView . getMainToolbar ( ) . showLoginWindow ( true ) ; } else { caption = "You don't have the access rights to query this corpus. " + "You might want to login as another user to access more corpora." ; qp . setStatus ( caption ) ; } break ; default : log . error ( "Exception when communicating with service" , ex ) ; qp . setStatus ( "Unexpected exception: " + ex . getMessage ( ) ) ; ExceptionDialog . show ( ex , "Exception when communicating with service." ) ; break ; } if ( showNotification && caption != null ) { Notification . show ( caption , description , Notification . Type . WARNING_MESSAGE ) ; } } } | Show errors that occurred during the execution of a query to the user .
39,590 | private static < T > void setIfNew ( Property < T > prop , T newValue ) { if ( ! Objects . equals ( prop . getValue ( ) , newValue ) ) { prop . setValue ( newValue ) ; } } | Only changes the value of the property if it is not equals to the old one . |
39,591 | public ExportQuery getExportQuery ( ) { return QueryGenerator . export ( ) . query ( state . getAql ( ) . getValue ( ) ) . corpora ( state . getSelectedCorpora ( ) . getValue ( ) ) . left ( state . getLeftContext ( ) . getValue ( ) ) . right ( state . getRightContext ( ) . getValue ( ) ) . segmentation ( state . getVisibleBaseText ( ) . getValue ( ) ) . exporter ( state . getExporter ( ) . getValue ( ) ) . annotations ( state . getExportAnnotationKeys ( ) . getValue ( ) ) . param ( state . getExportParameters ( ) . getValue ( ) ) . alignmc ( state . getAlignmc ( ) . getValue ( ) ) . build ( ) ; } | Get the current query as it is defined by the UI controls . |
39,592 | private void cancelSearch ( ) { searchView . getControlPanel ( ) . getQueryPanel ( ) . setCountIndicatorEnabled ( false ) ; Map < QueryUIState . QueryType , Future < ? > > exec = state . getExecutedTasks ( ) ; if ( exec . containsKey ( QueryUIState . QueryType . COUNT ) && ! exec . get ( QueryUIState . QueryType . COUNT ) . isDone ( ) ) { exec . get ( QueryUIState . QueryType . COUNT ) . cancel ( true ) ; } if ( exec . containsKey ( QueryUIState . QueryType . FIND ) && ! exec . get ( QueryUIState . QueryType . FIND ) . isDone ( ) ) { exec . get ( QueryUIState . QueryType . FIND ) . cancel ( true ) ; } exec . remove ( QueryUIState . QueryType . COUNT ) ; exec . remove ( QueryUIState . QueryType . FIND ) ; } | Cancel queries from the client side . |
39,593 | public void addHistoryEntry ( Query q ) { try { Query queryCopy = q . clone ( ) ; state . getHistory ( ) . removeItem ( queryCopy ) ; state . getHistory ( ) . addItemAt ( 0 , queryCopy ) ; searchView . getControlPanel ( ) . getQueryPanel ( ) . updateShortHistory ( ) ; } catch ( CloneNotSupportedException ex ) { log . error ( "Can't clone the query" , ex ) ; } } | Adds a history entry to the history panel . |
39,594 | private void setReferent ( SNode n , long index , int value ) { if ( n instanceof SToken ) { SToken tok = ( SToken ) n ; if ( ! referentOfToken . containsKey ( tok . getId ( ) ) ) { HashMap < Long , Integer > newlist = new HashMap < Long , Integer > ( ) ; newlist . put ( index , value ) ; referentOfToken . put ( tok . getId ( ) , newlist ) ; } else { referentOfToken . get ( tok . getId ( ) ) . put ( globalIndex , value ) ; } } else { List < SRelation < SNode , SNode > > outEdges = n . getGraph ( ) . getOutRelations ( n . getId ( ) ) ; if ( outEdges != null ) { for ( SRelation < ? extends SNode , ? extends SNode > edge : outEdges ) { if ( ! ( edge instanceof SPointingRelation ) ) { if ( edge . getSource ( ) != null && edge . getTarget ( ) != null ) { setReferent ( edge . getTarget ( ) , index , value ) ; } } } } } } | adds a Referent for all Nodes dominated or covered by outgoing Edges of AnnisNode a |
39,595 | private List < String > searchTokens ( SNode n , long cnr ) { List < String > result = new LinkedList < String > ( ) ; if ( n instanceof SToken ) { result . add ( n . getId ( ) ) ; if ( componentOfToken . get ( n . getId ( ) ) == null ) { List < Long > newlist = new LinkedList < Long > ( ) ; newlist . add ( cnr ) ; componentOfToken . put ( n . getId ( ) , newlist ) ; } else { List < Long > newlist = componentOfToken . get ( n . getId ( ) ) ; if ( ! newlist . contains ( cnr ) ) { newlist . add ( cnr ) ; } } } else { List < SRelation < SNode , SNode > > outgoing = n . getGraph ( ) . getOutRelations ( n . getId ( ) ) ; if ( outgoing != null ) { for ( SRelation < ? extends SNode , ? extends SNode > e : outgoing ) { if ( ! ( e instanceof SPointingRelation ) && e . getSource ( ) instanceof SNode && e . getTarget ( ) instanceof SNode ) { List < String > Med = searchTokens ( ( SNode ) e . getTarget ( ) , cnr ) ; for ( String s : Med ) { if ( ! result . contains ( s ) ) { result . add ( s ) ; } } } } } } return result ; } | Collects all Token dominated or covered by all outgoing Edges of AnnisNode a |
39,596 | private String getAnnotations ( String id , long component ) { String result = "" ; String incoming = "" , outgoing = "" ; int nri = 1 , nro = 1 ; if ( referentOfToken . get ( id ) != null ) { for ( long l : referentOfToken . get ( id ) . keySet ( ) ) { if ( referentList . get ( ( int ) l ) != null && referentList . get ( ( int ) l ) . component == component && referentList . get ( ( int ) l ) . annotations != null && referentList . get ( ( int ) l ) . annotations . size ( ) > 0 ) { int num = referentOfToken . get ( id ) . get ( l ) ; if ( num == 0 || num == 2 ) { for ( SerializableAnnotation an : referentList . get ( ( int ) l ) . annotations ) { if ( nri == 1 ) { incoming = ", <b>incoming Annotations</b>: " + an . getName ( ) + "=" + an . getValue ( ) ; nri -- ; } else { incoming += ", " + an . getName ( ) + "=" + an . getValue ( ) ; } } } if ( num == 1 || num == 2 ) { for ( SerializableAnnotation an : referentList . get ( ( int ) ( long ) l ) . annotations ) { if ( nro == 1 ) { outgoing = ", <b>outgoing Annotations</b>: " + an . getName ( ) + "=" + an . getValue ( ) ; nro -- ; } else { outgoing += ", " + an . getName ( ) + "=" + an . getValue ( ) ; } } } } } } if ( nro < 1 ) { result += outgoing ; } if ( nri < 1 ) { result += incoming ; } return result ; } | Collects fitting annotations of an Token |
39,597 | private boolean connectionOf ( String pre , String now , long currentComponent ) { List < Long > prel = new LinkedList < Long > ( ) , nowl = new LinkedList < Long > ( ) ; if ( ! pre . equals ( now ) && referentOfToken . get ( pre ) != null && referentOfToken . get ( now ) != null ) { for ( long l : referentOfToken . get ( pre ) . keySet ( ) ) { if ( referentList . get ( ( int ) l ) != null && referentList . get ( ( int ) l ) . component == currentComponent && referentOfToken . get ( pre ) . get ( l ) . equals ( 0 ) ) { prel . add ( l ) ; } } for ( long l : referentOfToken . get ( now ) . keySet ( ) ) { if ( referentList . get ( ( int ) l ) != null && referentList . get ( ( int ) l ) . component == currentComponent && referentOfToken . get ( now ) . get ( l ) . equals ( 0 ) ) { nowl . add ( l ) ; } } for ( long l : nowl ) { if ( prel . contains ( l ) ) { return true ; } } } prel = new LinkedList < Long > ( ) ; nowl = new LinkedList < Long > ( ) ; if ( ! pre . equals ( now ) && referentOfToken . get ( pre ) != null && referentOfToken . get ( now ) != null ) { for ( long l : referentOfToken . get ( pre ) . keySet ( ) ) { if ( referentList . get ( ( int ) l ) != null && referentList . get ( ( int ) l ) . component == currentComponent && referentOfToken . get ( pre ) . get ( l ) . equals ( 1 ) ) { prel . add ( l ) ; } } for ( long l : referentOfToken . get ( now ) . keySet ( ) ) { if ( referentList . get ( ( int ) l ) != null && referentList . get ( ( int ) l ) . component == currentComponent && referentOfToken . get ( now ) . get ( l ) . equals ( 1 ) ) { nowl . add ( l ) ; } } for ( long l : nowl ) { if ( prel . contains ( l ) ) { return true ; } } } return false ; } | Calculates whether a line determined by its component should be discontinuous
39,598 | private int getNewColor ( int i ) { int r = ( ( ( i ) * 224 ) % 255 ) ; int g = ( ( ( i + 197 ) * 1034345 ) % 255 ) ; int b = ( ( ( i + 23 ) * 74353 ) % 255 ) ; if ( ( ( r + b + g ) / 3 ) < 100 ) { r = 255 - r ; g = 255 - g ; b = 255 - b ; } else if ( ( ( r + b + g ) / 3 ) > 192 ) { r = 1 * ( r / 2 ) ; g = 1 * ( g / 2 ) ; b = 1 * ( b / 2 ) ; } if ( r == 255 && g == 255 && b == 255 ) { r = 255 ; g = 255 ; b = 0 ; } return ( r * 65536 + g * 256 + b ) ; } | Returns a unique color - value for a given number |
39,599 | @ Produces ( value = "text/plain" ) public String addNewID ( String str ) { Subject user = SecurityUtils . getSubject ( ) ; String remoteIP = request . getRemoteAddr ( ) . replaceAll ( "[.:]" , "_" ) ; user . checkPermission ( "shortener:create:" + remoteIP ) ; return shortenerDao . shorten ( str , "" + user . getPrincipal ( ) ) . toString ( ) ; } | Takes a URI and returns an ID . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.