code
stringlengths
73
34.1k
label
stringclasses
1 value
/** Adds {@code count} to the main-snak usage counter of {@code property}, after making sure all per-property counters exist. */
private void countPropertyMain(UsageStatistics usageStatistics, PropertyIdValue property, int count) { addPropertyCounters(usageStatistics, property); usageStatistics.propertyCountsMain.put(property, usageStatistics.propertyCountsMain.get(property) + count); }
java
/** Initializes the main/qualifier/reference counters of {@code property} to zero on first sight; presence in propertyCountsMain serves as the "already initialized" marker for all three maps. */
private void addPropertyCounters(UsageStatistics usageStatistics, PropertyIdValue property) { if (!usageStatistics.propertyCountsMain.containsKey(property)) { usageStatistics.propertyCountsMain.put(property, 0); usageStatistics.propertyCountsQualifier.put(property, 0); usageStatistics.propertyCountsReferences.put(property, 0); } }
java
/**
 * Adds {@code count} to the entry for {@code key} in {@code map},
 * creating the entry if it does not exist yet.
 *
 * @param map   map from keys to accumulated counts
 * @param key   key whose count should be increased
 * @param count amount to add
 */
private void countKey(Map<String, Integer> map, String key, int count) {
	// merge() covers both the "new key" and the "existing key" case,
	// replacing the manual containsKey/get/put sequence.
	map.merge(key, count, Integer::sum);
}
java
/**
 * Registers a value map for the given site key. Adding a site key that
 * is already registered has no effect.
 */
public void addSite(String siteKey) {
	ValueMap siteValueMap = new ValueMap(siteKey);
	boolean alreadyRegistered = this.valueMaps.contains(siteValueMap);
	if (!alreadyRegistered) {
		this.valueMaps.add(siteValueMap);
	}
}
java
/** Projects the globe coordinates of the given statement onto the map raster and counts the item there. Statements whose value is not a GlobeCoordinatesValue, or that refer to a different globe, are ignored. Every 100000 counted items, progress is reported and the images are rewritten. */
// NOTE(review): the modulo already wraps non-negative results into range, so the bounds check mainly catches negative values from out-of-range input — confirm intended.
private void countCoordinateStatement(Statement statement, ItemDocument itemDocument) { Value value = statement.getValue(); if (!(value instanceof GlobeCoordinatesValue)) { return; } GlobeCoordinatesValue coordsValue = (GlobeCoordinatesValue) value; if (!this.globe.equals((coordsValue.getGlobe()))) { return; } int xCoord = (int) (((coordsValue.getLongitude() + 180.0) / 360.0) * this.width) % this.width; int yCoord = (int) (((coordsValue.getLatitude() + 90.0) / 180.0) * this.height) % this.height; if (xCoord < 0 || yCoord < 0 || xCoord >= this.width || yCoord >= this.height) { System.out.println("Dropping out-of-range coordinate: " + coordsValue); return; } countCoordinates(xCoord, yCoord, itemDocument); this.count += 1; if (this.count % 100000 == 0) { reportProgress(); writeImages(); } }
java
/**
 * Counts the given item once for each site it has a link on, and
 * forwards the raster coordinates to every registered value map.
 */
private void countCoordinates(int xCoord, int yCoord, ItemDocument itemDocument) {
	for (String siteKey : itemDocument.getSiteLinks().keySet()) {
		// merge() replaces the manual null check on get() followed by put().
		this.siteCounts.merge(siteKey, 1, Integer::sum);
	}
	for (ValueMap vm : this.valueMaps) {
		vm.countCoordinates(xCoord, yCoord, itemDocument);
	}
}
java
/** Writes one image per registered value map, plus a CSV file listing the number of geo items found per site (and the overall total). I/O errors are printed and swallowed. */
private void writeImages() { for (ValueMap gv : this.valueMaps) { gv.writeImage(); } try (PrintStream out = new PrintStream( ExampleHelpers.openExampleFileOuputStream("map-site-count.csv"))) { out.println("Site key,Number of geo items"); out.println("wikidata total," + this.count); for (Entry<String, Integer> entry : this.siteCounts.entrySet()) { out.println(entry.getKey() + "," + entry.getValue()); } } catch (IOException e) { e.printStackTrace(); } }
java
/** Maps a count to a packed 0xRRGGBB color using a logarithmic scale relative to this.topValue, interpolating linearly between adjacent stops of the colors gradient; zero maps to black. Channels are clamped to 255. */
private int getColor(int value) { if (value == 0) { return 0; } double scale = Math.log10(value) / Math.log10(this.topValue); double lengthScale = Math.min(1.0, scale) * (colors.length - 1); int index = 1 + (int) lengthScale; if (index == colors.length) { index--; } double partScale = lengthScale - (index - 1); int r = (int) (colors[index - 1][0] + partScale * (colors[index][0] - colors[index - 1][0])); int g = (int) (colors[index - 1][1] + partScale * (colors[index][1] - colors[index - 1][1])); int b = (int) (colors[index - 1][2] + partScale * (colors[index][2] - colors[index - 1][2])); r = Math.min(255, r); b = Math.min(255, b); g = Math.min(255, g); return (r << 16) | (g << 8) | b; }
java
/** Adds a value snak for the given property and value to the reference under construction, and returns this builder for chaining. */
public ReferenceBuilder withPropertyValue(PropertyIdValue propertyIdValue, Value value) { getSnakList(propertyIdValue).add( factory.getValueSnak(propertyIdValue, value)); return getThis(); }
java
/** Closes the given resource if it is non-null; a failure to close is logged but deliberately not propagated. */
private static void close(Closeable closeable) { if (closeable != null) { try { closeable.close(); } catch (IOException ignored) { logger.error("Failed to close output stream: " + ignored.getMessage()); } } }
java
/** Configures log4j 1.x to print messages of level INFO and above to the console, using a timestamped message pattern. */
public static void configureLogging() { // Create the appender that will write log messages to the console. ConsoleAppender consoleAppender = new ConsoleAppender(); // Define the pattern of log messages. // Insert the string "%c{1}:%L" to also show class name and line. String pattern = "%d{yyyy-MM-dd HH:mm:ss} %-5p - %m%n"; consoleAppender.setLayout(new PatternLayout(pattern)); // Change to Level.ERROR for fewer messages: consoleAppender.setThreshold(Level.INFO); consoleAppender.activateOptions(); Logger.getRootLogger().addAppender(consoleAppender); }
java
/** Downloads (unless OFFLINE_MODE) and processes the most recent Wikidata dump(s) selected by DUMP_FILE_MODE, feeding entity documents to the given processor. A timer processor reports progress and enforces TIMEOUT_SEC; its TimeoutException ends processing gracefully. The processed dump's name is recorded in lastDumpFileName. */
public static void processEntitiesFromWikidataDump( EntityDocumentProcessor entityDocumentProcessor) { // Controller object for processing dumps: DumpProcessingController dumpProcessingController = new DumpProcessingController( "wikidatawiki"); dumpProcessingController.setOfflineMode(OFFLINE_MODE); // // Optional: Use another download directory: // dumpProcessingController.setDownloadDirectory(System.getProperty("user.dir")); // Should we process historic revisions or only current ones? boolean onlyCurrentRevisions; switch (DUMP_FILE_MODE) { case ALL_REVS: case ALL_REVS_WITH_DAILIES: onlyCurrentRevisions = false; break; case CURRENT_REVS: case CURRENT_REVS_WITH_DAILIES: case JSON: case JUST_ONE_DAILY_FOR_TEST: default: onlyCurrentRevisions = true; } // Subscribe to the most recent entity documents of type wikibase item: dumpProcessingController.registerEntityDocumentProcessor( entityDocumentProcessor, null, onlyCurrentRevisions); // Also add a timer that reports some basic progress information: EntityTimerProcessor entityTimerProcessor = new EntityTimerProcessor( TIMEOUT_SEC); dumpProcessingController.registerEntityDocumentProcessor( entityTimerProcessor, null, onlyCurrentRevisions); MwDumpFile dumpFile = null; try { // Start processing (may trigger downloads where needed): switch (DUMP_FILE_MODE) { case ALL_REVS: case CURRENT_REVS: dumpFile = dumpProcessingController .getMostRecentDump(DumpContentType.FULL); break; case ALL_REVS_WITH_DAILIES: case CURRENT_REVS_WITH_DAILIES: MwDumpFile fullDumpFile = dumpProcessingController .getMostRecentDump(DumpContentType.FULL); MwDumpFile incrDumpFile = dumpProcessingController .getMostRecentDump(DumpContentType.DAILY); lastDumpFileName = fullDumpFile.getProjectName() + "-" + incrDumpFile.getDateStamp() + "." 
+ fullDumpFile.getDateStamp(); dumpProcessingController.processAllRecentRevisionDumps(); break; case JSON: dumpFile = dumpProcessingController .getMostRecentDump(DumpContentType.JSON); break; case JUST_ONE_DAILY_FOR_TEST: dumpFile = dumpProcessingController .getMostRecentDump(DumpContentType.DAILY); break; default: throw new RuntimeException("Unsupported dump processing type " + DUMP_FILE_MODE); } if (dumpFile != null) { lastDumpFileName = dumpFile.getProjectName() + "-" + dumpFile.getDateStamp(); dumpProcessingController.processDump(dumpFile); } } catch (TimeoutException e) { // The timer caused a time out. Continue and finish normally. } // Print final timer results: entityTimerProcessor.close(); }
java
/**
 * Queues a value together with the RDF resource that stands for it, so
 * that both can be serialized later in matching order.
 */
void addValue(V value, Resource resource) {
	// Element i of valueQueue corresponds to element i of valueSubjectQueue.
	this.valueSubjectQueue.add(resource);
	this.valueQueue.add(value);
}
java
/** Applies the requested alias additions first, then the requested alias deletions, delegating per-alias handling to addAlias/deleteAlias. */
protected void processAliases(List<MonolingualTextValue> addAliases, List<MonolingualTextValue> deleteAliases) { for(MonolingualTextValue val : addAliases) { addAlias(val); } for(MonolingualTextValue val : deleteAliases) { deleteAlias(val); } }
java
/** Marks the given alias as deleted for its language: it is removed from the pending alias list, recorded in the deleted set, and the language is flagged for writing. Does nothing if no aliases are tracked for that language. */
protected void deleteAlias(MonolingualTextValue alias) { String lang = alias.getLanguageCode(); AliasesWithUpdate currentAliases = newAliases.get(lang); if (currentAliases != null) { currentAliases.aliases.remove(alias); currentAliases.deleted.add(alias); currentAliases.write = true; } }
java
/** Adds an alias for its language. If that language has no label yet, the alias is promoted to a label instead; if the alias equals the current label, or is already present, it is skipped. */
protected void addAlias(MonolingualTextValue alias) { String lang = alias.getLanguageCode(); AliasesWithUpdate currentAliasesUpdate = newAliases.get(lang); NameWithUpdate currentLabel = newLabels.get(lang); // If there isn't any label for that language, put the alias there if (currentLabel == null) { newLabels.put(lang, new NameWithUpdate(alias, true)); // If the new alias is equal to the current label, skip it } else if (!currentLabel.value.equals(alias)) { if (currentAliasesUpdate == null) { currentAliasesUpdate = new AliasesWithUpdate(new ArrayList<MonolingualTextValue>(), true); } List<MonolingualTextValue> currentAliases = currentAliasesUpdate.aliases; if(!currentAliases.contains(alias)) { currentAliases.add(alias); currentAliasesUpdate.added.add(alias); currentAliasesUpdate.write = true; } newAliases.put(lang, currentAliasesUpdate); } }
java
/** Records the given descriptions as pending updates, skipping any whose value is identical to the description already staged for that language. */
protected void processDescriptions(List<MonolingualTextValue> descriptions) { for(MonolingualTextValue description : descriptions) { NameWithUpdate currentValue = newDescriptions.get(description.getLanguageCode()); // only mark the description as added if the value we are writing is different from the current one if (currentValue == null || !currentValue.value.equals(description)) { newDescriptions.put(description.getLanguageCode(), new NameWithUpdate(description, true)); } } }
java
/** Records the given labels as pending updates (skipping unchanged values), and deletes any staged alias of the same language that matches the new label, since a term cannot be both label and alias. */
protected void processLabels(List<MonolingualTextValue> labels) { for(MonolingualTextValue label : labels) { String lang = label.getLanguageCode(); NameWithUpdate currentValue = newLabels.get(lang); if (currentValue == null || !currentValue.value.equals(label)) { newLabels.put(lang, new NameWithUpdate(label, true)); // Delete any alias that matches the new label AliasesWithUpdate currentAliases = newAliases.get(lang); if (currentAliases != null && currentAliases.aliases.contains(label)) { deleteAlias(label); } } } }
java
/** Returns, for JSON serialization, the labels that were actually changed (flagged for writing), keyed by language code. */
@JsonProperty("labels") @JsonInclude(Include.NON_EMPTY) public Map<String, TermImpl> getLabelUpdates() { return getMonolingualUpdatedValues(newLabels); }
java
/** Returns, for JSON serialization, the descriptions that were actually changed (flagged for writing), keyed by language code. */
@JsonProperty("descriptions") @JsonInclude(Include.NON_EMPTY) public Map<String, TermImpl> getDescriptionUpdates() { return getMonolingualUpdatedValues(newDescriptions); }
java
/** Returns, for JSON serialization, the full alias list of every language whose aliases changed; languages without pending changes are omitted entirely. */
@JsonProperty("aliases") @JsonInclude(Include.NON_EMPTY) public Map<String, List<TermImpl>> getAliasUpdates() { Map<String, List<TermImpl>> updatedValues = new HashMap<>(); for(Map.Entry<String,AliasesWithUpdate> entry : newAliases.entrySet()) { AliasesWithUpdate update = entry.getValue(); if (!update.write) { continue; } List<TermImpl> convertedAliases = new ArrayList<>(); for(MonolingualTextValue alias : update.aliases) { convertedAliases.add(monolingualToJackson(alias)); } updatedValues.put(entry.getKey(), convertedAliases); } return updatedValues; }
java
/** Converts the subset of the given updates that is flagged for writing into Jackson terms, keyed by language code. */
protected Map<String, TermImpl> getMonolingualUpdatedValues(Map<String, NameWithUpdate> updates) { Map<String, TermImpl> updatedValues = new HashMap<>(); for(NameWithUpdate update : updates.values()) { if (!update.write) { continue; } updatedValues.put(update.value.getLanguageCode(), monolingualToJackson(update.value)); } return updatedValues; }
java
/** Creates an N-Triples RDF serializer writing to the configured output destination — or, when none is set, to a generated "{PROJECT}&lt;task&gt;{DATE}.nt" file name — with the configured compression and serialization tasks. */
protected RdfSerializer createRdfSerializer() throws IOException { String outputDestinationFinal; if (this.outputDestination != null) { outputDestinationFinal = this.outputDestination; } else { outputDestinationFinal = "{PROJECT}" + this.taskName + "{DATE}" + ".nt"; } OutputStream exportOutputStream = getOutputStream(this.useStdOut, insertDumpInformation(outputDestinationFinal), this.compressionType); RdfSerializer serializer = new RdfSerializer(RDFFormat.NTRIPLES, exportOutputStream, this.sites, PropertyRegister.getWikidataPropertyRegister()); serializer.setTasks(this.tasks); return serializer; }
java
/** Parses a comma-separated task list, OR-ing each known task's flag into this.tasks and building a hyphen-separated task name; unknown task names are logged and skipped. */
private void setTasks(String tasks) { for (String task : tasks.split(",")) { if (KNOWN_TASKS.containsKey(task)) { this.tasks |= KNOWN_TASKS.get(task); this.taskName += (this.taskName.isEmpty() ? "" : "-") + task; } else { logger.warn("Unsupported RDF serialization task \"" + task + "\". Run without specifying any tasks for help."); } } }
java
/**
 * Resizes the internal bit-storage array to the given number of longs.
 * Existing contents within the new bounds are preserved; when growing,
 * the new tail is zero-filled.
 */
void resizeArray(int newArraySize) {
	// Arrays.copyOf truncates or zero-pads as needed, replacing the
	// manual allocation + arraycopy with an explicit min-length.
	this.arrayOfBits = java.util.Arrays.copyOf(this.arrayOfBits, newArraySize);
}
java
/** Fetches the current revision of the given item and applies the statement additions and deletions to it, returning the updated document. */
public ItemDocument updateStatements(ItemIdValue itemIdValue, List<Statement> addStatements, List<Statement> deleteStatements, String summary) throws MediaWikiApiErrorException, IOException { ItemDocument currentDocument = (ItemDocument) this.wikibaseDataFetcher .getEntityDocument(itemIdValue.getId()); return updateStatements(currentDocument, addStatements, deleteStatements, summary); }
java
/** Fetches the current revision of the given item and applies the term (label/description/alias) and statement changes to it, returning the updated document. */
public ItemDocument updateTermsStatements(ItemIdValue itemIdValue, List<MonolingualTextValue> addLabels, List<MonolingualTextValue> addDescriptions, List<MonolingualTextValue> addAliases, List<MonolingualTextValue> deleteAliases, List<Statement> addStatements, List<Statement> deleteStatements, String summary) throws MediaWikiApiErrorException, IOException { ItemDocument currentDocument = (ItemDocument) this.wikibaseDataFetcher .getEntityDocument(itemIdValue.getId()); return updateTermsStatements(currentDocument, addLabels, addDescriptions, addAliases, deleteAliases, addStatements, deleteStatements, summary); }
java
/** Builds a combined term-and-statement update for the given document and performs the edit, returning the document as edited by the server. The unchecked cast is safe because performEdit returns a document of the same entity type. */
@SuppressWarnings("unchecked") public <T extends TermedStatementDocument> T updateTermsStatements(T currentDocument, List<MonolingualTextValue> addLabels, List<MonolingualTextValue> addDescriptions, List<MonolingualTextValue> addAliases, List<MonolingualTextValue> deleteAliases, List<Statement> addStatements, List<Statement> deleteStatements, String summary) throws MediaWikiApiErrorException, IOException { TermStatementUpdate termStatementUpdate = new TermStatementUpdate( currentDocument, addStatements, deleteStatements, addLabels, addDescriptions, addAliases, deleteAliases); termStatementUpdate.setGuidGenerator(guidGenerator); return (T) termStatementUpdate.performEdit(wbEditingAction, editAsBot, summary); }
java
/** Fetches the given item's current revision and performs a null edit on it. NOTE(review): the type parameter T is unused in this overload — consider removing it. */
public <T extends StatementDocument> void nullEdit(ItemIdValue itemId) throws IOException, MediaWikiApiErrorException { ItemDocument currentDocument = (ItemDocument) this.wikibaseDataFetcher .getEntityDocument(itemId.getId()); nullEdit(currentDocument); }
java
/** Fetches the given property's current revision and performs a null edit on it. NOTE(review): the type parameter T is unused in this overload — consider removing it. */
public <T extends StatementDocument> void nullEdit(PropertyIdValue propertyId) throws IOException, MediaWikiApiErrorException { PropertyDocument currentDocument = (PropertyDocument) this.wikibaseDataFetcher .getEntityDocument(propertyId.getId()); nullEdit(currentDocument); }
java
/** Writes the current document back without changes (a "null edit", built from an empty StatementUpdate), which can trigger server-side side effects; the revision id guards against edit conflicts. */
@SuppressWarnings("unchecked") public <T extends StatementDocument> T nullEdit(T currentDocument) throws IOException, MediaWikiApiErrorException { StatementUpdate statementUpdate = new StatementUpdate(currentDocument, Collections.<Statement>emptyList(), Collections.<Statement>emptyList()); statementUpdate.setGuidGenerator(guidGenerator); return (T) this.wbEditingAction.wbEditEntity(currentDocument .getEntityId().getId(), null, null, null, statementUpdate .getJsonUpdateString(), false, this.editAsBot, currentDocument .getRevisionId(), null); }
java
/** Entry point: configures logging, prints usage documentation, streams a Wikidata dump through the JSON serialization processor, and closes the output. */
public static void main(String[] args) throws IOException { ExampleHelpers.configureLogging(); JsonSerializationProcessor.printDocumentation(); JsonSerializationProcessor jsonSerializationProcessor = new JsonSerializationProcessor(); ExampleHelpers.processEntitiesFromWikidataDump(jsonSerializationProcessor); jsonSerializationProcessor.close(); }
java
/** Reports how many item documents were serialized and closes the underlying JSON serializer (flushing the output file). */
public void close() throws IOException { System.out.println("Serialized " + this.jsonSerializer.getEntityDocumentCount() + " item documents to JSON file " + OUTPUT_FILE_NAME + "."); this.jsonSerializer.close(); }
java
/** Returns true if the item has a "place of birth" (P19) statement whose value is Dresden (Q1731); used to filter which documents get serialized. */
private boolean includeDocument(ItemDocument itemDocument) { for (StatementGroup sg : itemDocument.getStatementGroups()) { // "P19" is "place of birth" on Wikidata if (!"P19".equals(sg.getProperty().getId())) { continue; } for (Statement s : sg) { if (s.getMainSnak() instanceof ValueSnak) { Value v = s.getValue(); // "Q1731" is "Dresden" on Wikidata if (v instanceof ItemIdValue && "Q1731".equals(((ItemIdValue) v).getId())) { return true; } } } } return false; }
java
/**
 * Replaces the placeholders {DATE} and {PROJECT} in the given pattern
 * with the given date stamp and project name.
 *
 * @param pattern   the pattern string, possibly null
 * @param dateStamp replacement for {DATE}
 * @param project   replacement for {PROJECT}
 * @return the expanded string, or null if pattern was null
 */
public static String insertDumpInformation(String pattern, String dateStamp, String project) {
	if (pattern == null) {
		return null;
	}
	String withDate = pattern.replace("{DATE}", dateStamp);
	return withDate.replace("{PROJECT}", project);
}
java
/** Parses the command line into a list of dump-processing actions. Returns an empty list on parse failure, when help is requested, or when no arguments were given. Global options are applied as a side effect; a config file, if given, can contribute further actions (I/O errors are logged). */
private List<DumpProcessingAction> handleArguments(String[] args) { CommandLine cmd; CommandLineParser parser = new GnuParser(); try { cmd = parser.parse(options, args); } catch (ParseException e) { logger.error("Failed to parse arguments: " + e.getMessage()); return Collections.emptyList(); } // Stop processing if a help text is to be printed: if ((cmd.hasOption(CMD_OPTION_HELP)) || (args.length == 0)) { return Collections.emptyList(); } List<DumpProcessingAction> configuration = new ArrayList<>(); handleGlobalArguments(cmd); if (cmd.hasOption(CMD_OPTION_ACTION)) { DumpProcessingAction action = handleActionArguments(cmd); if (action != null) { configuration.add(action); } } if (cmd.hasOption(CMD_OPTION_CONFIG_FILE)) { try { List<DumpProcessingAction> configFile = readConfigFile(cmd .getOptionValue(CMD_OPTION_CONFIG_FILE)); configuration.addAll(configFile); } catch (IOException e) { logger.error("Failed to read configuration file \"" + cmd.getOptionValue(CMD_OPTION_CONFIG_FILE) + "\": " + e.toString()); } } return configuration; }
java
/**
 * Applies the global command-line options (dump location, offline/quiet
 * flags, report file, language/site/property filters, local dump file)
 * to this object's configuration.
 */
private void handleGlobalArguments(CommandLine cmd) {
	if (cmd.hasOption(CMD_OPTION_DUMP_LOCATION)) {
		this.dumpDirectoryLocation = cmd
				.getOptionValue(CMD_OPTION_DUMP_LOCATION);
	}
	if (cmd.hasOption(CMD_OPTION_OFFLINE_MODE)) {
		this.offlineMode = true;
	}
	if (cmd.hasOption(CMD_OPTION_QUIET)) {
		this.quiet = true;
	}
	if (cmd.hasOption(CMD_OPTION_CREATE_REPORT)) {
		this.reportFilename = cmd.getOptionValue(CMD_OPTION_CREATE_REPORT);
	}
	if (cmd.hasOption(OPTION_FILTER_LANGUAGES)) {
		setLanguageFilters(cmd.getOptionValue(OPTION_FILTER_LANGUAGES));
	}
	if (cmd.hasOption(OPTION_FILTER_SITES)) {
		setSiteFilters(cmd.getOptionValue(OPTION_FILTER_SITES));
	}
	if (cmd.hasOption(OPTION_FILTER_PROPERTIES)) {
		setPropertyFilters(cmd.getOptionValue(OPTION_FILTER_PROPERTIES));
	}
	if (cmd.hasOption(CMD_OPTION_LOCAL_DUMPFILE)) {
		// Bug fix: the value was read with OPTION_LOCAL_DUMPFILE although
		// presence was checked with CMD_OPTION_LOCAL_DUMPFILE; read the
		// same option that was checked.
		this.inputDumpLocation = cmd
				.getOptionValue(CMD_OPTION_LOCAL_DUMPFILE);
	}
}
java
/**
 * Applies the global options found in an INI configuration section.
 * Boolean options are enabled when their value is "true"
 * (case-insensitive); unknown keys are logged and ignored.
 */
private void handleGlobalArguments(Section section) {
	for (String key : section.keySet()) {
		switch (key) {
		case OPTION_OFFLINE_MODE:
			// equalsIgnoreCase avoids locale-sensitive toLowerCase()
			// (e.g. the Turkish dotless-i problem) and tolerates null.
			if ("true".equalsIgnoreCase(section.get(key))) {
				this.offlineMode = true;
			}
			break;
		case OPTION_QUIET:
			if ("true".equalsIgnoreCase(section.get(key))) {
				this.quiet = true;
			}
			break;
		case OPTION_CREATE_REPORT:
			this.reportFilename = section.get(key);
			break;
		case OPTION_DUMP_LOCATION:
			this.dumpDirectoryLocation = section.get(key);
			break;
		case OPTION_FILTER_LANGUAGES:
			setLanguageFilters(section.get(key));
			break;
		case OPTION_FILTER_SITES:
			setSiteFilters(section.get(key));
			break;
		case OPTION_FILTER_PROPERTIES:
			setPropertyFilters(section.get(key));
			break;
		case OPTION_LOCAL_DUMPFILE:
			this.inputDumpLocation = section.get(key);
			break;
		default:
			logger.warn("Unrecognized option: " + key);
		}
	}
}
java
/** Forces quiet mode whenever an action writes to stdout, so log output cannot corrupt the data stream. NOTE(review): the "multiple actions" warning fires whenever quiet is already set — including when quiet came from the --quiet option with only one stdout action — confirm this is intended. */
private void checkDuplicateStdOutOutput(DumpProcessingAction newAction) { if (newAction.useStdOut()) { if (this.quiet) { logger.warn("Multiple actions are using stdout as output destination."); } this.quiet = true; } }
java
/**
 * Parses a comma-separated list of language codes into the language
 * filter set. The special value "-" selects the empty filter
 * (i.e., no language passes).
 */
private void setLanguageFilters(String filters) {
	HashSet<String> languages = new HashSet<>();
	if (!"-".equals(filters)) {
		for (String language : filters.split(",")) {
			languages.add(language);
		}
	}
	this.filterLanguages = languages;
}
java
/**
 * Parses a comma-separated list of site keys into the site filter set.
 * The special value "-" selects the empty filter (i.e., no site passes).
 */
private void setSiteFilters(String filters) {
	HashSet<String> sites = new HashSet<>();
	if (!"-".equals(filters)) {
		for (String site : filters.split(",")) {
			sites.add(site);
		}
	}
	this.filterSites = sites;
}
java
/** Parses a comma-separated list of property ids (e.g. "P31,P279") into the property filter set; the special value "-" selects the empty filter (no property passes). */
private void setPropertyFilters(String filters) { this.filterProperties = new HashSet<>(); if (!"-".equals(filters)) { for (String pid : filters.split(",")) { this.filterProperties.add(Datamodel .makeWikidataPropertyIdValue(pid)); } } }
java
/** Writes the final gender-ratio statistics to "gender-ratios.csv": one row per site (sorted by SiteRecordComparator) with total/human/gendered page counts and one column per known gender. I/O errors are printed and swallowed. */
public void writeFinalResults() { printStatus(); try (PrintStream out = new PrintStream( ExampleHelpers.openExampleFileOuputStream("gender-ratios.csv"))) { out.print("Site key,pages total,pages on humans,pages on humans with gender"); for (EntityIdValue gender : this.genderNamesList) { out.print("," + this.genderNames.get(gender) + " (" + gender.getId() + ")"); } out.println(); List<SiteRecord> siteRecords = new ArrayList<>( this.siteRecords.values()); Collections.sort(siteRecords, new SiteRecordComparator()); for (SiteRecord siteRecord : siteRecords) { out.print(siteRecord.siteKey + "," + siteRecord.pageCount + "," + siteRecord.humanPageCount + "," + siteRecord.humanGenderPageCount); for (EntityIdValue gender : this.genderNamesList) { if (siteRecord.genderCounts.containsKey(gender)) { out.print("," + siteRecord.genderCounts.get(gender)); } else { out.print(",0"); } } out.println(); } } catch (IOException e) { e.printStackTrace(); } }
java
/** Prints a banner to stdout describing what the GenderRatioProcessor example does. */
public static void printDocumentation() { System.out .println("********************************************************************"); System.out.println("*** Wikidata Toolkit: GenderRatioProcessor"); System.out.println("*** "); System.out .println("*** This program will download and process dumps from Wikidata."); System.out .println("*** It will compute the numbers of articles about humans across"); System.out .println("*** Wikimedia projects, and in particular it will count the articles"); System.out .println("*** for each sex/gender. Results will be stored in a CSV file."); System.out.println("*** See source code for further details."); System.out .println("********************************************************************"); }
java
/**
 * Checks whether any statement in the given group has the given value.
 *
 * @return true if at least one statement's value equals {@code value}
 */
private boolean containsValue(StatementGroup statementGroup, Value value) {
	for (Statement statement : statementGroup) {
		if (value.equals(statement.getValue())) {
			return true;
		}
	}
	return false;
}
java
/**
 * Records a human-readable name for a gender entity, remembering the
 * order in which genders were first seen.
 */
private void addNewGenderName(EntityIdValue entityIdValue, String name) {
	// List and map are kept in sync; the list preserves first-seen
	// order, which fixes the CSV column ordering.
	this.genderNamesList.add(entityIdValue);
	this.genderNames.put(entityIdValue, name);
}
java
/**
 * Returns the site record for the given site key, creating and
 * registering a fresh record on first access.
 */
private SiteRecord getSiteRecord(String siteKey) {
	// computeIfAbsent replaces the get/null-check/put sequence.
	return this.siteRecords.computeIfAbsent(siteKey, SiteRecord::new);
}
java
/**
 * Increments the given site's counter for the given gender entity.
 */
private void countGender(EntityIdValue gender, SiteRecord siteRecord) {
	// merge() covers both the first occurrence and the increment case.
	siteRecord.genderCounts.merge(gender, 1, Integer::sum);
}
java
/**
 * Runs a wbsearchentities API call and returns the parsed results.
 *
 * @param search         text to search for (required)
 * @param language       language to search in (required)
 * @param strictLanguage whether to disable language fallback (optional)
 * @param type           entity type to restrict the search to (optional)
 * @param limit          maximal number of results (optional)
 * @param offset         offset of the first result; sent as "continue" (optional)
 * @return the search results; empty on transport errors (which are logged)
 * @throws MediaWikiApiErrorException if the API reports an error
 * @throws IllegalArgumentException  if search or language is missing
 */
public List<WbSearchEntitiesResult> wbSearchEntities(String search, String language,
		Boolean strictLanguage, String type, Long limit, Long offset)
		throws MediaWikiApiErrorException {
	Map<String, String> parameters = new HashMap<>();
	parameters.put(ApiConnection.PARAM_ACTION, "wbsearchentities");
	if (search == null) {
		throw new IllegalArgumentException(
				"Search parameter must be specified for this action.");
	}
	parameters.put("search", search);
	if (language == null) {
		throw new IllegalArgumentException(
				"Language parameter must be specified for this action.");
	}
	parameters.put("language", language);
	if (strictLanguage != null) {
		parameters.put("strictlanguage", Boolean.toString(strictLanguage));
	}
	if (type != null) {
		parameters.put("type", type);
	}
	if (limit != null) {
		parameters.put("limit", Long.toString(limit));
	}
	if (offset != null) {
		parameters.put("continue", Long.toString(offset));
	}
	List<WbSearchEntitiesResult> results = new ArrayList<>();
	try {
		JsonNode root = this.connection.sendJsonRequest("POST", parameters);
		for (JsonNode entityNode : root.path("search")) {
			try {
				results.add(mapper.treeToValue(entityNode,
						JacksonWbSearchEntitiesResult.class));
			} catch (JsonProcessingException e) {
				LOGGER.error("Error when reading JSON for entity "
						+ entityNode.path("id").asText("UNKNOWN") + ": "
						+ e.toString());
			}
		}
	} catch (IOException e) {
		// Typo fix in log message: "retrive" -> "retrieve".
		LOGGER.error("Could not retrieve data: " + e.toString());
	}
	return results;
}
java
/** Adds a site link with the given badges to the item under construction and returns this builder for chaining. */
public ItemDocumentBuilder withSiteLink(String title, String siteKey, ItemIdValue... badges) { withSiteLink(factory.getSiteLink(title, siteKey, Arrays.asList(badges))); return this; }
java
/**
 * Queues a reference for later serialization and returns the RDF
 * resource that will represent it.
 */
public Resource addReference(Reference reference) {
	String referenceUri = Vocabulary.getReferenceUri(reference);
	Resource subject = this.rdfWriter.getUri(referenceUri);
	// Queues are kept in matching order for writeReferences().
	this.referenceQueue.add(reference);
	this.referenceSubjectQueue.add(subject);
	return subject;
}
java
/** Writes RDF for all queued references that have not been declared yet (the two queues are iterated in lock step), then clears both queues and flushes any auxiliary snak triples. */
public void writeReferences() throws RDFHandlerException { Iterator<Reference> referenceIterator = this.referenceQueue.iterator(); for (Resource resource : this.referenceSubjectQueue) { final Reference reference = referenceIterator.next(); if (this.declaredReferences.add(resource)) { writeReference(reference, resource); } } this.referenceSubjectQueue.clear(); this.referenceQueue.clear(); this.snakRdfConverter.writeAuxiliaryTriples(); }
java
/** Downloads (if necessary) and parses the most recent sites-table dump, returning the resulting Sites object, or null if no sites dump is available. */
public Sites getSitesInformation() throws IOException { MwDumpFile sitesTableDump = getMostRecentDump(DumpContentType.SITES); if (sitesTableDump == null) { return null; } // Create a suitable processor for such dumps and process the file: MwSitesDumpFileProcessor sitesDumpFileProcessor = new MwSitesDumpFileProcessor(); sitesDumpFileProcessor.processDumpFileContents( sitesTableDump.getDumpFileStream(), sitesTableDump); return sitesDumpFileProcessor.getSites(); }
java
/**
 * Processes the most recent dump of the given type with the given
 * processor; does nothing when no such dump exists.
 *
 * @deprecated retained for backwards compatibility
 */
@Deprecated
public void processMostRecentDump(DumpContentType dumpContentType, MwDumpFileProcessor dumpFileProcessor) {
	MwDumpFile mostRecentDump = getMostRecentDump(dumpContentType);
	if (mostRecentDump == null) {
		return; // nothing available to process
	}
	processDumpFile(mostRecentDump, dumpFileProcessor);
}
java
/** Processes a single dump file with the given processor, closing the input stream afterwards. A pre-existing partial download aborts processing with advice to delete it; other I/O errors are logged. */
void processDumpFile(MwDumpFile dumpFile, MwDumpFileProcessor dumpFileProcessor) { try (InputStream inputStream = dumpFile.getDumpFileStream()) { dumpFileProcessor.processDumpFileContents(inputStream, dumpFile); } catch (FileAlreadyExistsException e) { logger.error("Dump file " + dumpFile.toString() + " could not be processed since file " + e.getFile() + " already exists. Try deleting the file or dumpfile directory to attempt a new download."); } catch (IOException e) { logger.error("Dump file " + dumpFile.toString() + " could not be processed: " + e.toString()); } }
java
/**
 * Returns the most recent available dump of the given type, or null if
 * none of the known dumps is available.
 */
public MwDumpFile findMostRecentDump(DumpContentType dumpContentType) {
	// findAllDumps returns dumps newest-first, so the first available
	// one is the most recent.
	for (MwDumpFile candidate : findAllDumps(dumpContentType)) {
		if (candidate.isAvailable()) {
			return candidate;
		}
	}
	return null;
}
java
/** Merges local and online dump lists, preferring local dumps: online dumps whose date stamp already exists locally are skipped. The result is sorted most-recent-first. */
List<MwDumpFile> mergeDumpLists(List<MwDumpFile> localDumps, List<MwDumpFile> onlineDumps) { List<MwDumpFile> result = new ArrayList<>(localDumps); HashSet<String> localDateStamps = new HashSet<>(); for (MwDumpFile dumpFile : localDumps) { localDateStamps.add(dumpFile.getDateStamp()); } for (MwDumpFile dumpFile : onlineDumps) { if (!localDateStamps.contains(dumpFile.getDateStamp())) { result.add(dumpFile); } } result.sort(Collections.reverseOrder(new MwDumpFile.DateComparator())); return result; }
java
/** Scans the local dump directory for complete dumps of the given type, returning them sorted most-recent-first. Directories with non-matching date stamps are silently skipped; incomplete downloads are logged with a hint to delete them. Returns an empty list if the directory cannot be accessed. */
List<MwDumpFile> findDumpsLocally(DumpContentType dumpContentType) { String directoryPattern = WmfDumpFile.getDumpFileDirectoryName( dumpContentType, "*"); List<String> dumpFileDirectories; try { dumpFileDirectories = this.dumpfileDirectoryManager .getSubdirectories(directoryPattern); } catch (IOException e) { logger.error("Unable to access dump directory: " + e.toString()); return Collections.emptyList(); } List<MwDumpFile> result = new ArrayList<>(); for (String directory : dumpFileDirectories) { String dateStamp = WmfDumpFile .getDateStampFromDumpFileDirectoryName(dumpContentType, directory); if (dateStamp.matches(WmfDumpFileManager.DATE_STAMP_PATTERN)) { WmfLocalDumpFile dumpFile = new WmfLocalDumpFile(dateStamp, this.projectName, dumpfileDirectoryManager, dumpContentType); if (dumpFile.isAvailable()) { result.add(dumpFile); } else { logger.error("Incomplete local dump file data. Maybe delete " + dumpFile.getDumpfileDirectory() + " to attempt fresh download."); } } // else: silently ignore directories that don't match } result.sort(Collections.reverseOrder(new MwDumpFile.DateComparator())); logger.info("Found " + result.size() + " local dumps of type " + dumpContentType + ": " + result); return result; }
java
/** Builds dump-file handles for every dump date found online, choosing the handle class (daily, JSON, or standard) by dump content type. */
List<MwDumpFile> findDumpsOnline(DumpContentType dumpContentType) { List<String> dumpFileDates = findDumpDatesOnline(dumpContentType); List<MwDumpFile> result = new ArrayList<>(); for (String dateStamp : dumpFileDates) { if (dumpContentType == DumpContentType.DAILY) { result.add(new WmfOnlineDailyDumpFile(dateStamp, this.projectName, this.webResourceFetcher, this.dumpfileDirectoryManager)); } else if (dumpContentType == DumpContentType.JSON) { result.add(new JsonOnlineDumpFile(dateStamp, this.projectName, this.webResourceFetcher, this.dumpfileDirectoryManager)); } else { result.add(new WmfOnlineStandardDumpFile(dateStamp, this.projectName, this.webResourceFetcher, this.dumpfileDirectoryManager, dumpContentType)); } } logger.info("Found " + result.size() + " online dumps of type " + dumpContentType + ": " + result); return result; }
java
/** Counts processed items; for demonstration, prints the first nine documents verbatim and a one-time notice afterwards. */
@Override public void processItemDocument(ItemDocument itemDocument) { this.countItems++; // Do some printing for demonstration/debugging. // Only print at most 50 items (or it would get too slow). if (this.countItems < 10) { System.out.println(itemDocument); } else if (this.countItems == 10) { System.out.println("*** I won't print any further items.\n" + "*** We will never finish if we print all the items.\n" + "*** Maybe remove this debug output altogether."); } }
java
/** Constructs the concrete EntityIdValue implementation matching the id's entity type (item, property, lexeme, form, or sense); throws IllegalArgumentException for unsupported ids. */
static EntityIdValue fromId(String id, String siteIri) { switch (guessEntityTypeFromId(id)) { case EntityIdValueImpl.JSON_ENTITY_TYPE_ITEM: return new ItemIdValueImpl(id, siteIri); case EntityIdValueImpl.JSON_ENTITY_TYPE_PROPERTY: return new PropertyIdValueImpl(id, siteIri); case EntityIdValueImpl.JSON_ENTITY_TYPE_LEXEME: return new LexemeIdValueImpl(id, siteIri); case EntityIdValueImpl.JSON_ENTITY_TYPE_FORM: return new FormIdValueImpl(id, siteIri); case EntityIdValueImpl.JSON_ENTITY_TYPE_SENSE: return new SenseIdValueImpl(id, siteIri); default: throw new IllegalArgumentException("Entity id \"" + id + "\" is not supported."); } }
java
/** Infers the entity type from an id's leading character ('Q' item, 'P' property, 'L' lexeme — with "-F"/"-S" infixes marking forms/senses); throws IllegalArgumentException for empty or unsupported ids. */
static String guessEntityTypeFromId(String id) { if(id.isEmpty()) { throw new IllegalArgumentException("Entity ids should not be empty."); } switch (id.charAt(0)) { case 'L': if(id.contains("-F")) { return JSON_ENTITY_TYPE_FORM; } else if(id.contains("-S")) { return JSON_ENTITY_TYPE_SENSE; } else { return JSON_ENTITY_TYPE_LEXEME; } case 'P': return JSON_ENTITY_TYPE_PROPERTY; case 'Q': return JSON_ENTITY_TYPE_ITEM; default: throw new IllegalArgumentException("Entity id \"" + id + "\" is not supported."); } }
java
/**
 * Finishes processing: stops the timer, records the total elapsed
 * seconds, and prints a final status report.
 */
@Override
public void close() {
	logger.info("Finished processing.");
	this.timer.stop();
	// Convert total wall time from nanoseconds to whole seconds.
	this.lastSeconds = (int) (timer.getTotalWallTime() / 1000000000);
	printStatus();
}
java
/** Counts one processed entity. Every 100 entities the wall clock is sampled; once the report interval has elapsed a status line is printed, and if the configured timeout is exceeded a TimeoutException aborts processing. */
private void countEntity() { if (!this.timer.isRunning()) { startTimer(); } this.entityCount++; if (this.entityCount % 100 == 0) { timer.stop(); int seconds = (int) (timer.getTotalWallTime() / 1000000000); if (seconds >= this.lastSeconds + this.reportInterval) { this.lastSeconds = seconds; printStatus(); if (this.timeout > 0 && seconds > this.timeout) { logger.info("Timeout. Aborting processing."); throw new TimeoutException(); } } timer.start(); } }
java
/**
 * Concatenates the string representations of the given objects,
 * separated by "|".
 *
 * @param objects the objects to join
 * @return the joined string; empty for an empty iterable
 */
public static String implodeObjects(Iterable<?> objects) {
	StringBuilder result = new StringBuilder();
	String separator = "";
	for (Object object : objects) {
		// toString() is called explicitly, matching the original's
		// behavior for each element.
		result.append(separator).append(object.toString());
		separator = "|";
	}
	return result.toString();
}
java
/** Logs out of the wiki if currently logged in and clears the cached credentials; API errors are converted to IOException. Does nothing if not logged in. */
public void logout() throws IOException { if (this.loggedIn) { Map<String, String> params = new HashMap<>(); params.put("action", "logout"); params.put("format", "json"); // reduce the output try { sendJsonRequest("POST", params); } catch (MediaWikiApiErrorException e) { throw new IOException(e.getMessage(), e); //TODO: we should throw a better exception } this.loggedIn = false; this.username = ""; this.password = ""; } }
java
/** Fetches a token of the given type (e.g. "csrf", "login") via the query/tokens API. Returns null if the request fails (the error is logged); note the declared exceptions are caught internally and never actually thrown. */
String fetchToken(String tokenType) throws IOException, MediaWikiApiErrorException { Map<String, String> params = new HashMap<>(); params.put(ApiConnection.PARAM_ACTION, "query"); params.put("meta", "tokens"); params.put("type", tokenType); try { JsonNode root = this.sendJsonRequest("POST", params); return root.path("query").path("tokens").path(tokenType + "token").textValue(); } catch (IOException | MediaWikiApiErrorException e) { logger.error("Error when trying to fetch token: " + e.toString()); } return null; }
java
/**
 * Sends a request to the API with the given parameters and request method,
 * returning the raw response stream. Response cookies are recorded.
 * <p>
 * Fix: the request writer is now closed via try-with-resources, so the
 * connection's output stream is released even when the write fails (the
 * original leaked it on exception). Closing also flushes, so the explicit
 * flush() is no longer needed.
 *
 * @param requestMethod the HTTP method, e.g. "POST"
 * @param parameters    the request parameters (un-encoded)
 * @return the response body stream (caller is responsible for closing it)
 * @throws IOException on connection problems
 */
public InputStream sendRequest(String requestMethod, Map<String, String> parameters) throws IOException {
	String queryString = getQueryString(parameters);
	URL url = new URL(this.apiBaseUrl);
	HttpURLConnection connection = (HttpURLConnection) WebResourceFetcherImpl
			.getUrlConnection(url);
	setupConnection(requestMethod, queryString, connection);
	try (OutputStreamWriter writer = new OutputStreamWriter(connection.getOutputStream())) {
		writer.write(queryString);
	}
	int rc = connection.getResponseCode();
	if (rc != 200) {
		logger.warn("Error: API request returned response code " + rc);
	}
	InputStream iStream = connection.getInputStream();
	// harvest Set-Cookie headers before handing the stream to the caller
	fillCookies(connection.getHeaderFields());
	return iStream;
}
java
/**
 * Extracts API warnings from a parsed MediaWiki JSON response into
 * human-readable strings, each prefixed with "[module]: " naming the API
 * module that produced it.
 *
 * @param root the root node of the parsed JSON response
 * @return the list of warning strings; empty if the response has none
 */
List<String> getWarnings(JsonNode root) {
	ArrayList<String> warnings = new ArrayList<>();
	if (root.has("warnings")) {
		JsonNode warningNode = root.path("warnings");
		// one child object per API module that raised warnings
		Iterator<Map.Entry<String, JsonNode>> moduleIterator = warningNode
				.fields();
		while (moduleIterator.hasNext()) {
			Map.Entry<String, JsonNode> moduleNode = moduleIterator.next();
			Iterator<JsonNode> moduleOutputIterator = moduleNode.getValue()
					.elements();
			while (moduleOutputIterator.hasNext()) {
				JsonNode moduleOutputNode = moduleOutputIterator.next();
				if (moduleOutputNode.isTextual()) {
					// plain-text warning
					warnings.add("[" + moduleNode.getKey() + "]: "
							+ moduleOutputNode.textValue());
				} else if (moduleOutputNode.isArray()) {
					// array of warning objects: prefer the rendered HTML at
					// "html"/"*", falling back to the raw JSON text
					Iterator<JsonNode> messageIterator = moduleOutputNode
							.elements();
					while (messageIterator.hasNext()) {
						JsonNode messageNode = messageIterator.next();
						warnings.add("[" + moduleNode.getKey() + "]: "
								+ messageNode.path("html").path("*")
										.asText(messageNode.toString()));
					}
				} else {
					// unknown structure: surface it verbatim for a bug report
					warnings.add("[" + moduleNode.getKey() + "]: "
							+ "Warning was not understood. Please report this to Wikidata Toolkit. JSON source: "
							+ moduleOutputNode.toString());
				}
			}
		}
	}
	return warnings;
}
java
/**
 * Builds an application/x-www-form-urlencoded query string from the given
 * parameters, e.g. {@code "action=query&format=json"}. Keys and values are
 * UTF-8 percent-encoded; pairs appear in the map's iteration order.
 *
 * @param params the parameter map (un-encoded keys and values)
 * @return the encoded query string
 */
String getQueryString(Map<String, String> params) {
	StringBuilder queryString = new StringBuilder();
	boolean needSeparator = false;
	try {
		for (Map.Entry<String, String> entry : params.entrySet()) {
			if (needSeparator) {
				queryString.append("&");
			}
			needSeparator = true;
			queryString.append(URLEncoder.encode(entry.getKey(), "UTF-8"))
					.append("=")
					.append(URLEncoder.encode(entry.getValue(), "UTF-8"));
		}
	} catch (UnsupportedEncodingException e) {
		// UTF-8 is mandatory on all JVMs, so this is effectively unreachable
		throw new RuntimeException(
				"Your Java version does not support UTF-8 encoding.");
	}
	return queryString.toString();
}
java
/**
 * Loads the resource at the given path into the type described by the spec,
 * delegating to the three-argument overload with its boolean flag set to
 * {@code false} (see that overload for the flag's meaning).
 *
 * @param resourcePath the path of the resource to load
 * @param spec         the spec of the target type
 * @param <T>          the target type
 * @return the loaded resource
 */
public static <T> T load(String resourcePath, BeanSpec spec) {
	return load(resourcePath, spec, false);
}
java
/**
 * Formats the message and prints it centred on an 80-column line.
 *
 * @param format the message format string
 * @param args   format arguments
 */
protected void printCenter(String format, Object... args) {
	// render first, then centre the rendered text in 80 columns
	info(S.center(S.fmt(format, args), 80));
}
java
/**
 * Formats the message and prints it centred in the columns remaining on an
 * 80-column line after the given lead prefix.
 *
 * @param lead   the prefix printed at the start of the line
 * @param format the message format string
 * @param args   format arguments
 */
protected void printCenterWithLead(String lead, String format, Object... args) {
	String message = S.fmt(format, args);
	// centre within the width left over after the lead
	int width = 80 - lead.length();
	info(S.concat(lead, S.center(message, width)));
}
java
public String convert(BufferedImage image, boolean favicon) { // Reset statistics before anything statsArray = new int[12]; // Begin the timer dStart = System.nanoTime(); // Scale the image image = scale(image, favicon); // The +1 is for the newline characters StringBuilder sb = new StringBuilder((image.getWidth() + 1) * image.getHeight()); for (int y = 0; y < image.getHeight(); y++) { // At the end of each line, add a newline character if (sb.length() != 0) sb.append("\n"); for (int x = 0; x < image.getWidth(); x++) { // Color pixelColor = new Color(image.getRGB(x, y), true); int alpha = pixelColor.getAlpha(); boolean isTransient = alpha < 0.1; double gValue = isTransient ? 250 : ((double) pixelColor.getRed() * 0.2989 + (double) pixelColor.getBlue() * 0.5870 + (double) pixelColor.getGreen() * 0.1140) / ((double)alpha / (double)250); final char s = gValue < 130 ? darkGrayScaleMap(gValue) : lightGrayScaleMap(gValue); sb.append(s); } } imgArray = sb.toString().toCharArray(); dEnd = System.nanoTime(); return sb.toString(); }
java
private static BufferedImage scale(BufferedImage imageToScale, int dWidth, int dHeight, double fWidth, double fHeight) { BufferedImage dbi = null; // Needed to create a new BufferedImage object int imageType = imageToScale.getType(); if (imageToScale != null) { dbi = new BufferedImage(dWidth, dHeight, imageType); Graphics2D g = dbi.createGraphics(); AffineTransform at = AffineTransform.getScaleInstance(fWidth, fHeight); g.drawRenderedImage(imageToScale, at); } return dbi; }
java
/**
 * Emits an event identified by an enum constant: builds a (default-mode)
 * event context from the event and arguments and dispatches it through
 * {@code _emitWithOnceBus}.
 *
 * @param event the event identifier
 * @param args  arguments delivered to the event listeners
 * @return this event bus, for chaining
 */
public EventBus emit(Enum<?> event, Object... args) {
	return _emitWithOnceBus(eventContext(event, args));
}
java
/**
 * Emits an event identified by a string key: builds a (default-mode) event
 * context from the event and arguments and dispatches it through
 * {@code _emitWithOnceBus}.
 *
 * @param event the event key
 * @param args  arguments delivered to the event listeners
 * @return this event bus, for chaining
 */
public EventBus emit(String event, Object... args) {
	return _emitWithOnceBus(eventContext(event, args));
}
java
/**
 * Emits an {@code EventObject}: builds a (default-mode) event context from
 * the event and arguments and dispatches it through {@code _emitWithOnceBus}.
 *
 * @param event the event object
 * @param args  arguments delivered to the event listeners
 * @return this event bus, for chaining
 */
public EventBus emit(EventObject event, Object... args) {
	return _emitWithOnceBus(eventContext(event, args));
}
java
/**
 * Emits an event identified by an enum constant using an asynchronous event
 * context (see {@code eventContextAsync}), dispatched through
 * {@code _emitWithOnceBus}.
 *
 * @param event the event identifier
 * @param args  arguments delivered to the event listeners
 * @return this event bus, for chaining
 */
public EventBus emitAsync(Enum<?> event, Object... args) {
	return _emitWithOnceBus(eventContextAsync(event, args));
}
java
/**
 * Emits an event identified by a string key using an asynchronous event
 * context (see {@code eventContextAsync}), dispatched through
 * {@code _emitWithOnceBus}.
 *
 * @param event the event key
 * @param args  arguments delivered to the event listeners
 * @return this event bus, for chaining
 */
public EventBus emitAsync(String event, Object... args) {
	return _emitWithOnceBus(eventContextAsync(event, args));
}
java
/**
 * Emits an {@code EventObject} using an asynchronous event context (see
 * {@code eventContextAsync}), dispatched through {@code _emitWithOnceBus}.
 *
 * @param event the event object
 * @param args  arguments delivered to the event listeners
 * @return this event bus, for chaining
 */
public EventBus emitAsync(EventObject event, Object... args) {
	return _emitWithOnceBus(eventContextAsync(event, args));
}
java
/**
 * Emits an event identified by an enum constant using a synchronous event
 * context (see {@code eventContextSync}), dispatched through
 * {@code _emitWithOnceBus}.
 *
 * @param event the event identifier
 * @param args  arguments delivered to the event listeners
 * @return this event bus, for chaining
 */
public EventBus emitSync(Enum<?> event, Object... args) {
	return _emitWithOnceBus(eventContextSync(event, args));
}
java
/**
 * Emits an event identified by a string key using a synchronous event
 * context (see {@code eventContextSync}), dispatched through
 * {@code _emitWithOnceBus}.
 *
 * @param event the event key
 * @param args  arguments delivered to the event listeners
 * @return this event bus, for chaining
 */
public EventBus emitSync(String event, Object... args) {
	return _emitWithOnceBus(eventContextSync(event, args));
}
java
/**
 * Emits an {@code EventObject} using a synchronous event context (see
 * {@code eventContextSync}), dispatched through {@code _emitWithOnceBus}.
 *
 * @param event the event object
 * @param args  arguments delivered to the event listeners
 * @return this event bus, for chaining
 */
public EventBus emitSync(EventObject event, Object... args) {
	return _emitWithOnceBus(eventContextSync(event, args));
}
java
/**
 * Returns the common-configuration tag: the value of the
 * {@code KEY_COMMON_CONF_TAG} system property, or the literal
 * {@code "common"} when that property is blank or unset.
 *
 * @return the common configuration tag
 */
public static String common() {
	String tag = SysProps.get(KEY_COMMON_CONF_TAG);
	return S.blank(tag) ? "common" : tag;
}
java
/**
 * Returns the active configuration-set name: the profile system property if
 * set, otherwise the lower-cased name of the current Act mode.
 *
 * @return the configuration set name
 */
public static String confSetName() {
	String profile = SysProps.get(AppConfigKey.PROFILE.key());
	// fall back to the runtime mode when no explicit profile is configured
	return S.blank(profile) ? Act.mode().name().toLowerCase() : profile;
}
java
/**
 * Normalises a raw configuration map: strips the {@code "act."} namespace
 * prefix from keys and additionally indexes every value under the canonical
 * spelling of its key.
 * <p>
 * Improvements: iterates {@code entrySet()} instead of
 * {@code keySet()} + {@code get()} (one lookup per entry instead of two),
 * and presizes the result for the two entries each source key can produce.
 *
 * @param conf the raw configuration map
 * @return a new map with prefix-stripped and canonical keys
 */
private static Map<String, Object> processConf(Map<String, ?> conf) {
	Map<String, Object> m = new HashMap<String, Object>(conf.size() * 2);
	for (Map.Entry<String, ?> entry : conf.entrySet()) {
		String s = entry.getKey();
		Object o = entry.getValue();
		// strip the "act." prefix so keys match internal configuration names
		if (s.startsWith("act.")) s = s.substring(4);
		m.put(s, o);
		m.put(Config.canonical(s), o);
	}
	return m;
}
java
/**
 * Returns the session's current working directory, lazily initialising it
 * from the JVM's {@code user.dir} system property on first access and
 * caching it as a session attribute.
 *
 * @return the current working directory
 */
public File curDir() {
	File dir = session().attribute(ATTR_PWD);
	if (null == dir) {
		// first access in this session: start at the process working directory
		dir = new File(System.getProperty("user.dir"));
		session().attribute(ATTR_PWD, dir);
	}
	return dir;
}
java
private String getRowLineBuf(int colCount, List<Integer> colMaxLenList, String[][] data) { S.Buffer rowBuilder = S.buffer(); int colWidth; for (int i = 0 ; i < colCount ; i ++) { colWidth = colMaxLenList.get(i) + 3; for (int j = 0; j < colWidth ; j ++) { if (j==0) { rowBuilder.append("+"); } else if ((i+1 == colCount && j+1 == colWidth)) {//for last column close the border rowBuilder.append("-+"); } else { rowBuilder.append("-"); } } } return rowBuilder.append("\n").toString(); }
java
/**
 * Serializes this object to a byte array using Java native serialization.
 * <p>
 * Fix: the {@code ObjectOutputStream} is now closed (try-with-resources)
 * before the buffer is snapshotted. The previous version called
 * {@code baos.toByteArray()} without flushing or closing the object stream,
 * whose internal block-data buffering could leave the serialized form
 * truncated.
 *
 * @return the serialized bytes
 */
public byte[] toByteArray() {
	ByteArrayOutputStream baos = new ByteArrayOutputStream();
	try (ObjectOutputStream oos = new ObjectOutputStream(baos)) {
		oos.writeObject(this);
	} catch (IOException e) {
		throw E.ioException(e);
	}
	// safe to read now: the stream has been flushed and closed
	return baos.toByteArray();
}
java
/**
 * Creates an {@code AppDescriptor} for the given entry class, resolving the
 * version via {@code Version.of(entryClass)}.
 *
 * @param appName    the application name
 * @param entryClass the application entry class
 * @return a new descriptor
 */
public static AppDescriptor of(String appName, Class<?> entryClass) {
	// silence osgl-version's "var found" warning emitted during version lookup
	System.setProperty("osgl.version.suppress-var-found-warning", "true");
	return of(appName, entryClass, Version.of(entryClass));
}
java
/**
 * Creates an {@code AppDescriptor} from a package specification. When the
 * spec lists several packages (separated by {@code S.COMMON_SEP}), only the
 * first is used for version resolution; the full spec string is retained.
 *
 * @param appName     the application name
 * @param packageName one or more package names
 * @return a new descriptor
 */
public static AppDescriptor of(String appName, String packageName) {
	String[] packages = packageName.split(S.COMMON_SEP);
	// version comes from the first listed package only
	return of(appName, packageName, Version.ofPackage(packages[0]));
}
java
/**
 * Deserializes an {@code AppDescriptor} from bytes produced by
 * {@code toByteArray()}.
 * <p>
 * Improvement: the {@code ObjectInputStream} is closed via
 * try-with-resources. NOTE(review): Java native deserialization must only be
 * applied to trusted bytes; do not feed this method externally supplied data.
 *
 * @param bytes the serialized descriptor
 * @return the deserialized descriptor
 */
public static AppDescriptor deserializeFrom(byte[] bytes) {
	try (ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(bytes))) {
		return (AppDescriptor) ois.readObject();
	} catch (IOException e) {
		throw E.ioException(e);
	} catch (ClassNotFoundException e) {
		throw E.unexpected(e);
	}
}
java
/**
 * Derives a name from an entry class by tokenising the class (via
 * {@code tokenOf}) and assembling the tokens with {@code fromTokens}.
 *
 * @param entryClass the class to derive the name from
 * @return the derived name
 */
static String from(Class<?> entryClass) {
	List<String> tokens = tokenOf(entryClass);
	return fromTokens(tokens);
}
java
/**
 * Derives a name from a package name by tokenising it (via {@code tokenOf})
 * and assembling the tokens with {@code fromTokens}.
 *
 * @param packageName the package name to derive the name from
 * @return the derived name
 */
static String fromPackageName(String packageName) {
	List<String> tokens = tokenOf(packageName);
	return fromTokens(tokens);
}
java
/**
 * Returns the leading run of non-whitespace characters of the given string:
 * the first "word". A string that is empty or starts with whitespace yields
 * the empty string.
 *
 * @param string the string to scan
 * @return the substring before the first whitespace character
 */
public static String nextWord(String string) {
	int end = 0;
	// advance until we hit whitespace or run off the end
	while (end < string.length()) {
		if (Character.isWhitespace(string.charAt(end))) {
			break;
		}
		end++;
	}
	return string.substring(0, end);
}
java
/**
 * Looks up a header value by case-insensitive name; header names are assumed
 * to be stored lower-cased in the map.
 * <p>
 * Fix: lower-casing now uses {@code Locale.ROOT} so the lookup cannot break
 * under locale-sensitive casing rules (e.g. the Turkish dotless i turning
 * "I" into "\u0131" under the default locale).
 *
 * @param headers the header map (String -> String), lower-cased keys
 * @param name    the header name, any case
 * @return the header value, or null if absent
 */
private final String getHeader(Map /* String, String */ headers, String name) {
	return (String) headers.get(name.toLowerCase(java.util.Locale.ROOT));
}
java
/**
 * Stops the progress bar: signals the render target to terminate, waits for
 * the render thread to finish, and moves the console to a fresh line.
 * <p>
 * Fix: the previous version swallowed {@code InterruptedException} with an
 * empty catch; the interrupt status is now restored so callers can still
 * observe the interruption.
 *
 * @return this progress bar, for chaining
 */
public ProgressBar stop() {
	target.kill();
	try {
		thread.join();
		target.consoleStream.print("\n");
		target.consoleStream.flush();
	} catch (InterruptedException ex) {
		// re-assert the interrupt flag instead of silently discarding it
		Thread.currentThread().interrupt();
	}
	return this;
}
java
/**
 * Returns the (cached) property list of the given class. On a cache miss the
 * list is computed via the recursive overload — with a fresh set used to
 * detect circular references — and stored under the class name.
 * <p>
 * Cleanup: reuses the already-computed {@code cn} for the cache store instead
 * of calling {@code c.getName()} a second time.
 *
 * @param c the class to inspect
 * @return the property list for {@code c}
 */
public synchronized List<String> propertyListOf(Class<?> c) {
	String cn = c.getName();
	List<String> ls = repo.get(cn);
	if (ls != null) {
		return ls;
	}
	// tracks classes on the current traversal path to break reference cycles
	Set<Class<?>> circularReferenceDetector = new HashSet<>();
	ls = propertyListOf(c, circularReferenceDetector, null);
	repo.put(cn, ls);
	return ls;
}
java
/**
 * Sets this node's parent to the node registered under the given name in
 * {@code infoBase}, registers this node as a child of that parent, and adds
 * every interface of the parent to this node.
 *
 * @param name the parent class name (as keyed in {@code infoBase})
 * @return this node, for chaining
 */
public ClassNode parent(String name) {
	this.parent = infoBase.node(name);
	this.parent.addChild(this);
	// a class also implements every interface its superclass implements
	for (ClassNode intf : parent.interfaces.values()) {
		addInterface(intf);
	}
	return this;
}
java