answer
stringlengths
17
10.2M
package org.commcare.util;

/**
 * Defines different possible LogTypes: string tags used to categorize
 * log entries emitted throughout CommCare.
 */
public class LogTypes {
    //Log Types:

    /**
     * Fatal problem with one of CommCare's cryptography libraries
     */
    public static final String TYPE_ERROR_CRYPTO = "error-crypto";

    /**
     * Some invariant application assumption has been violated
     */
    public static final String TYPE_ERROR_ASSERTION = "error-state";

    /**
     * Some invariant application assumption has been violated
     *
     * NOTE(review): this javadoc duplicates TYPE_ERROR_ASSERTION's; presumably it
     * should describe workflow-specific errors instead — confirm against upstream.
     */
    public static final String TYPE_ERROR_WORKFLOW = "error-workflow";

    /**
     * There is a problem with the underlying storage layer which is preventing
     * the app from working correctly
     */
    public static final String TYPE_ERROR_STORAGE = "error-storage";

    /**
     * One of the config files (suite, profile, xform, locale, etc) contains something
     * which is invalid and prevented the app from working properly
     */
    public static final String TYPE_ERROR_CONFIG_STRUCTURE = "error-config";

    /**
     * Something bad happened which the app should not have allowed to happen. This
     * category of error should be aggressively caught and addressed by the software team
     */
    public static final String TYPE_ERROR_DESIGN = "error-design";

    /**
     * Something bad happened because of network connectivity
     */
    public static final String TYPE_WARNING_NETWORK = "warning-network";

    /**
     * We were incapable of processing or understanding something that the server sent down
     */
    public static final String TYPE_ERROR_SERVER_COMMS = "error-server-comms";

    /**
     * Logs relating to user events (login/logout/restore, etc)
     */
    public static final String TYPE_USER = "user";

    /**
     * Logs relating to the external files and resources which make up an app
     */
    public static final String TYPE_RESOURCES = "resources";

    /**
     * Maintenance events (autopurging, cleanups, etc)
     */
    public static final String TYPE_MAINTENANCE = "maintenance";

    /**
     * Form Entry workflow messages
     */
    public static final String TYPE_FORM_ENTRY = "form-entry";

    /**
     * Form submission messages
     */
    public static final String TYPE_FORM_SUBMISSION = "form-submission";

    /**
     * Used to track when we knowingly delete a form record
     */
    public static final String TYPE_FORM_DELETION = "form-deletion";

    /**
     * Problem reported via report activity at home screen
     */
    public static final String USER_REPORTED_PROBLEM = "user-report";

    /**
     * Used for internal checking of whether or not certain sections of code ever get called
     */
    public static final String SOFT_ASSERT = "soft-assert";

    /**
     * Used for tracking the behavior of the form dump activity
     */
    public static final String TYPE_FORM_DUMP = "form-dump";

    // Event tags for force-close, graphing, and printing log entries.
    public static final String TYPE_FORCECLOSE = "forceclose";

    public static final String TYPE_GRAPHING = "graphing";

    public static final String TYPE_PRINTING = "printing";
}
package org.dasein.cloud.dc;

import java.util.List;

/**
 * Mutable data holder describing a folder node in a hierarchy: an id/name pair
 * plus a {@link FolderType}, a parent folder, and a list of child folders.
 *
 * NOTE(review): accessors return internal references directly (no defensive
 * copies), and no field is validated — callers share mutable state.
 */
public class Folder {

    // Unique identifier of this folder
    private String id;
    // Display name of this folder
    private String name;
    // Category of this folder; semantics defined by FolderType (declared elsewhere)
    private FolderType type;
    // Enclosing folder; presumably null for a root folder — confirm with callers
    private Folder parent;
    // Direct children of this folder; may be null if never set
    private List<Folder> children;

    /** @return the unique identifier of this folder */
    public String getId() {
        return id;
    }

    /** @param id the unique identifier to assign */
    public void setId(String id) {
        this.id = id;
    }

    /** @return the display name of this folder */
    public String getName() {
        return name;
    }

    /** @param name the display name to assign */
    public void setName(String name) {
        this.name = name;
    }

    /** @return the folder's type */
    public FolderType getType() {
        return type;
    }

    /** @param type the folder type to assign */
    public void setType(FolderType type) {
        this.type = type;
    }

    /** @return the parent folder (shared reference) */
    public Folder getParent() {
        return parent;
    }

    /** @param parent the parent folder to assign */
    public void setParent(Folder parent) {
        this.parent = parent;
    }

    /** @return the live list of child folders (not a copy) */
    public List<Folder> getChildren() {
        return children;
    }

    /** @param children the child-folder list to assign (stored as-is, not copied) */
    public void setChildren(List<Folder> children) {
        this.children = children;
    }
}
package org.gitlab4j.api;

import java.util.List;
import java.util.stream.Stream;

import org.gitlab4j.api.models.Commit;
import org.gitlab4j.api.models.Issue;
import org.gitlab4j.api.models.MergeRequest;
import org.gitlab4j.api.models.Milestone;
import org.gitlab4j.api.models.Note;
import org.gitlab4j.api.models.Project;
import org.gitlab4j.api.models.SearchBlob;
import org.gitlab4j.api.models.Snippet;
import org.gitlab4j.api.models.User;

/**
 * This class provides an entry point to the GitLab Search API, supporting
 * global, group-scoped, and project-scoped searches. Each scope's search is
 * offered in three flavors: a List of all results, a lazy Stream, and a Pager
 * for explicit page-by-page access (the List and Stream variants delegate to
 * the Pager variant).
 */
public class SearchApi extends AbstractApi {

    public SearchApi(GitLabApi gitLabApi) {
        super(gitLabApi);
    }

    /**
     * Search globally across the GitLab instance.
     *
     * <pre><code>GitLab Endpoint: POST /search?scope=:scope&amp;search=:search-query</code></pre>
     *
     * @param scope search the expression within the specified scope. Currently these scopes are supported:
     *              blobs, commits, projects, issues, merge_requests, milestones,
     *              snippet_titles, snippet_blobs, users, wiki_blobs
     * @param search the search query
     * @return a List containing the object type specified by the scope
     * @throws GitLabApiException if any exception occurs
     * @since GitLab 10.5
     */
    public List<?> globalSearch(SearchScope scope, String search) throws GitLabApiException {
        return (globalSearch(scope, search, this.getDefaultPerPage()).all());
    }

    /**
     * Search globally across the GitLab instance.
     *
     * <pre><code>GitLab Endpoint: POST /search?scope=:scope&amp;search=:search-query</code></pre>
     *
     * @param scope search the expression within the specified scope. Currently these scopes are supported:
     *              blobs, commits, projects, issues, merge_requests, milestones,
     *              snippet_titles, snippet_blobs, users, wiki_blobs
     * @param search the search query
     * @return a Stream containing the object type specified by the scope
     * @throws GitLabApiException if any exception occurs
     * @since GitLab 10.5
     */
    public Stream<?> globalSearchStream(SearchScope scope, String search) throws GitLabApiException {
        return (globalSearch(scope, search, getDefaultPerPage()).stream());
    }

    /**
     * Search globally across the GitLab instance.
     *
     * <pre><code>GitLab Endpoint: POST /search?scope=:scope&amp;search=:search-query</code></pre>
     *
     * @param scope search the expression within the specified scope. Currently these scopes are supported:
     *              blobs, commits, projects, issues, merge_requests, milestones,
     *              snippet_titles, snippet_blobs, users, wiki_blobs
     * @param search the search query
     * @param itemsPerPage the number of items that will be fetched per page
     * @return a Pager containing the object type specified by the scope
     * @throws GitLabApiException if any exception occurs
     * @since GitLab 10.5
     */
    public Pager<?> globalSearch(SearchScope scope, String search, int itemsPerPage) throws GitLabApiException {

        GitLabApiForm formData = new GitLabApiForm()
                .withParam("scope", scope, true)
                .withParam("search", search, true);

        // The result element type depends entirely on the requested scope, so a
        // differently-parameterized Pager is built per case.
        switch (scope) {
            case BLOBS:
                return (new Pager<SearchBlob>(this, SearchBlob.class, itemsPerPage, formData.asMap(), "search"));

            case COMMITS:
                return (new Pager<Commit>(this, Commit.class, itemsPerPage, formData.asMap(), "search"));

            case PROJECTS:
                return (new Pager<Project>(this, Project.class, itemsPerPage, formData.asMap(), "search"));

            case ISSUES:
                return (new Pager<Issue>(this, Issue.class, itemsPerPage, formData.asMap(), "search"));

            case MERGE_REQUESTS:
                return (new Pager<MergeRequest>(this, MergeRequest.class, itemsPerPage, formData.asMap(), "search"));

            case MILESTONES:
                return (new Pager<Milestone>(this, Milestone.class, itemsPerPage, formData.asMap(), "search"));

            case SNIPPET_TITLES:
                return (new Pager<Snippet>(this, Snippet.class, itemsPerPage, formData.asMap(), "search"));

            case SNIPPET_BLOBS:
                return (new Pager<Snippet>(this, Snippet.class, itemsPerPage, formData.asMap(), "search"));

            case USERS:
                return (new Pager<User>(this, User.class, itemsPerPage, formData.asMap(), "search"));

            case WIKI_BLOBS:
                return (new Pager<SearchBlob>(this, SearchBlob.class, itemsPerPage, formData.asMap(), "search"));

            default:
                throw new GitLabApiException("Invalid SearchScope [" + scope + "]");
        }
    }

    /**
     * Search within the specified group.  If a user is not a member of a group and the group is private,
     * a request on that group will result to a 404 status code.
     *
     * <pre><code>GitLab Endpoint: POST /groups/:groupId/search?scope=:scope&amp;search=:search-query</code></pre>
     *
     * @param groupIdOrPath the group ID, path of the group, or a Group instance holding the group ID or path, required
     * @param scope search the expression within the specified scope. Currently these scopes are supported:
     *              projects, issues, merge_requests, milestones, users
     * @param search the search query
     * @return a List containing the object type specified by the scope
     * @throws GitLabApiException if any exception occurs
     * @since GitLab 10.5
     */
    public List<?> groupSearch(Object groupIdOrPath, GroupSearchScope scope, String search) throws GitLabApiException {
        return (groupSearch(groupIdOrPath, scope, search, this.getDefaultPerPage()).all());
    }

    /**
     * Search within the specified group.  If a user is not a member of a group and the group is private,
     * a request on that group will result to a 404 status code.
     *
     * <pre><code>GitLab Endpoint: POST /groups/:groupId/search?scope=:scope&amp;search=:search-query</code></pre>
     *
     * @param groupIdOrPath the group ID, path of the group, or a Group instance holding the group ID or path, required
     * @param scope search the expression within the specified scope. Currently these scopes are supported:
     *              projects, issues, merge_requests, milestones, users
     * @param search the search query
     * @return a Stream containing the object type specified by the scope
     * @throws GitLabApiException if any exception occurs
     * @since GitLab 10.5
     */
    public Stream<?> groupSearchStream(Object groupIdOrPath, GroupSearchScope scope, String search) throws GitLabApiException {
        return (groupSearch(groupIdOrPath, scope, search, getDefaultPerPage()).stream());
    }

    /**
     * Search within the specified group.  If a user is not a member of a group and the group is private,
     * a request on that group will result to a 404 status code.
     *
     * <pre><code>GitLab Endpoint: POST /groups/:groupId/search?scope=:scope&amp;search=:search-query</code></pre>
     *
     * @param groupIdOrPath the group ID, path of the group, or a Group instance holding the group ID or path, required
     * @param scope search the expression within the specified scope. Currently these scopes are supported:
     *              projects, issues, merge_requests, milestones, users
     * @param search the search query
     * @param itemsPerPage the number of items that will be fetched per page
     * @return a Pager containing the object type specified by the scope
     * @throws GitLabApiException if any exception occurs
     * @since GitLab 10.5
     */
    public Pager<?> groupSearch(Object groupIdOrPath, GroupSearchScope scope, String search, int itemsPerPage) throws GitLabApiException {

        GitLabApiForm formData = new GitLabApiForm()
                .withParam("scope", scope, true)
                .withParam("search", search, true);

        switch (scope) {
            case PROJECTS:
                return (new Pager<Project>(this, Project.class, itemsPerPage, formData.asMap(),
                        "groups", getGroupIdOrPath(groupIdOrPath), "search"));

            case ISSUES:
                return (new Pager<Issue>(this, Issue.class, itemsPerPage, formData.asMap(),
                        "groups", getGroupIdOrPath(groupIdOrPath), "search"));

            case MERGE_REQUESTS:
                return (new Pager<MergeRequest>(this, MergeRequest.class, itemsPerPage, formData.asMap(),
                        "groups", getGroupIdOrPath(groupIdOrPath), "search"));

            case MILESTONES:
                return (new Pager<Milestone>(this, Milestone.class, itemsPerPage, formData.asMap(),
                        "groups", getGroupIdOrPath(groupIdOrPath), "search"));

            case USERS:
                return (new Pager<User>(this, User.class, itemsPerPage, formData.asMap(),
                        "groups", getGroupIdOrPath(groupIdOrPath), "search"));

            default:
                throw new GitLabApiException("Invalid GroupSearchScope [" + scope + "]");
        }
    }

    /**
     * Search within the specified project.  If a user is not a member of a project and the project is private,
     * a request on that project will result to a 404 status code.
     *
     * <pre><code>GitLab Endpoint: POST /projects/:projectId/search?scope=:scope&amp;search=:search-query</code></pre>
     *
     * @param projectIdOrPath the project in the form of an Integer(ID), String(path), or Project instance, required
     * @param scope search the expression within the specified scope. Currently these scopes are supported:
     *              issues, merge_requests, milestones, notes, wiki_blobs, commits, blobs, users
     * @param search the search query
     * @return a List containing the object type specified by the scope
     * @throws GitLabApiException if any exception occurs
     * @since GitLab 10.5
     */
    public List<?> projectSearch(Object projectIdOrPath, ProjectSearchScope scope, String search) throws GitLabApiException {
        return (projectSearch(projectIdOrPath, scope, search, this.getDefaultPerPage()).all());
    }

    /**
     * Search within the specified project.  If a user is not a member of a project and the project is private,
     * a request on that project will result to a 404 status code.
     *
     * <pre><code>GitLab Endpoint: POST /projects/:projectId/search?scope=:scope&amp;search=:search-query</code></pre>
     *
     * @param projectIdOrPath the project in the form of an Integer(ID), String(path), or Project instance, required
     * @param scope search the expression within the specified scope. Currently these scopes are supported:
     *              issues, merge_requests, milestones, notes, wiki_blobs, commits, blobs, users
     * @param search the search query
     * @return a Stream containing the object type specified by the scope
     * @throws GitLabApiException if any exception occurs
     * @since GitLab 10.5
     */
    public Stream<?> projectSearchStream(Object projectIdOrPath, ProjectSearchScope scope, String search) throws GitLabApiException {
        return (projectSearch(projectIdOrPath, scope, search, getDefaultPerPage()).stream());
    }

    /**
     * Search within the specified project.  If a user is not a member of a project and the project is private,
     * a request on that project will result to a 404 status code.
     *
     * <pre><code>GitLab Endpoint: POST /projects/:projectId/search?scope=:scope&amp;search=:search-query</code></pre>
     *
     * @param projectIdOrPath the project in the form of an Integer(ID), String(path), or Project instance, required
     * @param scope search the expression within the specified scope. Currently these scopes are supported:
     *              issues, merge_requests, milestones, notes, wiki_blobs, commits, blobs, users
     * @param search the search query
     * @param itemsPerPage the number of items that will be fetched per page
     * @return a Pager containing the object type specified by the scope
     * @throws GitLabApiException if any exception occurs
     * @since GitLab 10.5
     */
    public Pager<?> projectSearch(Object projectIdOrPath, ProjectSearchScope scope, String search, int itemsPerPage) throws GitLabApiException {

        GitLabApiForm formData = new GitLabApiForm()
                .withParam("scope", scope, true)
                .withParam("search", search, true);

        switch (scope) {
            case BLOBS:
                return (new Pager<SearchBlob>(this, SearchBlob.class, itemsPerPage, formData.asMap(), "projects",
                        getProjectIdOrPath(projectIdOrPath), "search"));

            case COMMITS:
                return (new Pager<Commit>(this, Commit.class, itemsPerPage, formData.asMap(), "projects",
                        getProjectIdOrPath(projectIdOrPath), "search"));

            case ISSUES:
                return (new Pager<Issue>(this, Issue.class, itemsPerPage, formData.asMap(), "projects",
                        getProjectIdOrPath(projectIdOrPath), "search"));

            case MERGE_REQUESTS:
                return (new Pager<MergeRequest>(this, MergeRequest.class, itemsPerPage, formData.asMap(), "projects",
                        getProjectIdOrPath(projectIdOrPath), "search"));

            case MILESTONES:
                return (new Pager<Milestone>(this, Milestone.class, itemsPerPage, formData.asMap(), "projects",
                        getProjectIdOrPath(projectIdOrPath), "search"));

            case NOTES:
                return (new Pager<Note>(this, Note.class, itemsPerPage, formData.asMap(), "projects",
                        getProjectIdOrPath(projectIdOrPath), "search"));

            case WIKI_BLOBS:
                return (new Pager<SearchBlob>(this, SearchBlob.class, itemsPerPage, formData.asMap(), "projects",
                        getProjectIdOrPath(projectIdOrPath), "search"));

            case USERS:
                return (new Pager<User>(this, User.class, itemsPerPage, formData.asMap(), "projects",
                        getProjectIdOrPath(projectIdOrPath), "search"));

            default:
                throw new GitLabApiException("Invalid ProjectSearchScope [" + scope + "]");
        }
    }
}
package com.github.ferstl.jarscan;

import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import org.adoptopenjdk.jitwatch.jarscan.JarScan;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.resolver.filter.AndArtifactFilter;
import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.Component;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.apache.maven.project.MavenProject;
import org.apache.maven.shared.artifact.filter.ScopeArtifactFilter;
import org.apache.maven.shared.artifact.filter.StrictPatternExcludesArtifactFilter;
import org.apache.maven.shared.artifact.filter.StrictPatternIncludesArtifactFilter;

import static java.nio.file.StandardOpenOption.CREATE;
import static java.nio.file.StandardOpenOption.TRUNCATE_EXISTING;

/**
 * Scans the project's artifact and, if enabled, it's dependencies for large methods using JitWatch's JarScan utility.
 */
@Mojo(
    name = "scan",
    aggregator = false,
    defaultPhase = LifecyclePhase.VERIFY,
    requiresDependencyCollection = ResolutionScope.TEST,
    requiresDependencyResolution = ResolutionScope.TEST,
    requiresDirectInvocation = false,
    threadSafe = true)
public class JarScanMojo extends AbstractMojo {

  @Component
  private MavenProject project;

  /**
   * The value of {@code XX:FreqInlineSize} option. The default is 325.
   *
   * @since 1.0.0
   */
  @Parameter(property = "freqInlineSize", defaultValue = "325")
  private int freqInlineSize;

  /**
   * The path of the report file. If not set the report is written to the console.
   *
   * @since 1.0.0
   */
  @Parameter(property = "reportFile")
  private File reportFile;

  /**
   * Analyze dependency of the project.
   *
   * @since 1.0.0
   */
  @Parameter(property = "analyzeDependencies", defaultValue = "false")
  private boolean analyzeDependencies;

  /**
   * The scope of the artifacts that should be included. Only relevant when {@code analyzeDependencies=true}.
   *
   * @since 1.0.0
   */
  @Parameter(property = "scope")
  private String scope;

  /**
   * Comma-separated list of artifacts to be included in the form of {@code groupId:artifactId:type:classifier}. Only
   * relevant when {@code analyzeDependencies=true}.
   *
   * @since 1.0.0
   */
  @Parameter(property = "includes", defaultValue = "")
  private List<String> includes;

  /**
   * Comma-separated list of artifacts to be excluded in the form of {@code groupId:artifactId:type:classifier}. Only
   * relevant when {@code analyzeDependencies=true}.
   *
   * @since 1.0.0
   */
  @Parameter(property = "excludes", defaultValue = "")
  private List<String> excludes;

  /**
   * Entry point: scans the project's own JAR and, when {@code analyzeDependencies}
   * is set, each dependency artifact matching the configured filters.
   *
   * @throws MojoExecutionException if a report cannot be written
   */
  @Override
  public void execute() throws MojoExecutionException {
    analyzeOwnArtifact();

    if (this.analyzeDependencies) {
      analyzeDependencies();
    }
  }

  /**
   * Scans the JAR produced by this project's build, if it exists.
   * Parent ("pom" packaging) projects are skipped because they produce no JAR.
   */
  private void analyzeOwnArtifact() throws MojoExecutionException {
    // Ignore parent projects
    if (!"pom".equals(this.project.getPackaging())) {
      String buildDirectory = this.project.getBuild().getDirectory();
      String finalName = this.project.getBuild().getFinalName();
      Path jarFile = Paths.get(buildDirectory, finalName + ".jar");

      if (Files.exists(jarFile)) {
        printReport(this.project.getArtifact().toString(), jarFile.toFile());
      } else {
        getLog().warn("JAR file not found: " + jarFile);
      }
    }
  }

  /**
   * Scans each direct dependency artifact that passes the scope/includes/excludes filter.
   */
  private void analyzeDependencies() throws MojoExecutionException {
    Set<Artifact> dependencies = this.project.getDependencyArtifacts();
    // getDependencyArtifacts() may return null when no dependency set was resolved;
    // guard to avoid an NPE and simply scan nothing in that case.
    if (dependencies == null) {
      return;
    }

    ArtifactFilter filter = createArtifactFilter();
    for (Artifact dependency : dependencies) {
      if (filter.include(dependency)) {
        printReport(dependency.toString(), dependency.getFile());
      }
    }
  }

  /**
   * Runs JarScan on the given JAR file and writes the result to the configured
   * report target (file or console).
   *
   * @param name display name of the artifact being scanned
   * @param file the JAR file to scan
   * @throws MojoExecutionException if writing the report fails
   */
  private void printReport(String name, File file) throws MojoExecutionException {
    try (PrintWriter writer = createReportWriter()) {
      System.out.println("Artifact: " + name);
      JarScan.iterateJar(file, this.freqInlineSize, writer);
      System.out.println();
    } catch (IOException e) {
      // Preserve the cause so the original stack trace shows up in Maven's error output
      // (previously only e.getMessage() was propagated, losing the IOException entirely).
      throw new MojoExecutionException(e.getMessage(), e);
    }
  }

  /**
   * Combines the configured scope, includes, and excludes into a single AND filter.
   * An empty filter list yields an all-accepting AndArtifactFilter.
   */
  private ArtifactFilter createArtifactFilter() {
    List<ArtifactFilter> filters = new ArrayList<>(3);

    if (this.scope != null) {
      filters.add(new ScopeArtifactFilter(this.scope));
    }

    if (!this.includes.isEmpty()) {
      filters.add(new StrictPatternIncludesArtifactFilter(this.includes));
    }

    if (!this.excludes.isEmpty()) {
      filters.add(new StrictPatternExcludesArtifactFilter(this.excludes));
    }

    return new AndArtifactFilter(filters);
  }

  /**
   * Creates the report writer: a truncating UTF-8 file writer when {@code reportFile}
   * is set, otherwise a console writer whose close() is a no-op so that System.out
   * stays usable after each report.
   */
  private PrintWriter createReportWriter() throws IOException {
    if (this.reportFile != null) {
      BufferedWriter bw = Files.newBufferedWriter(this.reportFile.toPath(),
          StandardCharsets.UTF_8, CREATE, TRUNCATE_EXISTING);
      return new PrintWriter(bw);
    }

    return new PrintWriter(new OutputStreamWriter(System.out)) {
      @Override
      public void close() { /* NOP */ }
    };
  }
}
package org.jabref.gui; import java.awt.Component; import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import java.awt.Insets; import java.awt.Window; import java.awt.event.ActionEvent; import java.io.File; import java.io.IOException; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.TimerTask; import javax.swing.AbstractAction; import javax.swing.Action; import javax.swing.Icon; import javax.swing.JComponent; import javax.swing.JLabel; import javax.swing.JMenu; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JProgressBar; import javax.swing.KeyStroke; import javax.swing.SwingConstants; import javax.swing.SwingUtilities; import javax.swing.UIManager; import javafx.application.Platform; import javafx.beans.value.ChangeListener; import javafx.beans.value.ObservableValue; import javafx.collections.ListChangeListener; import javafx.scene.Node; import javafx.scene.control.Button; import javafx.scene.control.Menu; import javafx.scene.control.MenuBar; import javafx.scene.control.SeparatorMenuItem; import javafx.scene.control.SplitPane; import javafx.scene.control.Tab; import javafx.scene.control.TabPane; import javafx.scene.control.ToolBar; import javafx.scene.control.Tooltip; import javafx.scene.input.KeyEvent; import javafx.scene.layout.BorderPane; import javafx.scene.layout.HBox; import javafx.scene.layout.Pane; import javafx.scene.layout.Priority; import javafx.stage.Stage; import org.jabref.Globals; import org.jabref.JabRefExecutorService; import org.jabref.gui.actions.ActionFactory; import org.jabref.gui.actions.Actions; import org.jabref.gui.actions.AutoLinkFilesAction; import org.jabref.gui.actions.BibtexKeyPatternAction; import org.jabref.gui.actions.ConnectToSharedDatabaseCommand; import 
org.jabref.gui.actions.CopyFilesAction; import org.jabref.gui.actions.CustomizeEntryAction; import org.jabref.gui.actions.CustomizeKeyBindingAction; import org.jabref.gui.actions.EditExternalFileTypesAction; import org.jabref.gui.actions.ErrorConsoleAction; import org.jabref.gui.actions.FindUnlinkedFilesAction; import org.jabref.gui.actions.IntegrityCheckAction; import org.jabref.gui.actions.LibraryPropertiesAction; import org.jabref.gui.actions.LookupIdentifierAction; import org.jabref.gui.actions.ManageCustomExportsAction; import org.jabref.gui.actions.ManageCustomImportsAction; import org.jabref.gui.actions.ManageJournalsAction; import org.jabref.gui.actions.ManageKeywordsAction; import org.jabref.gui.actions.ManageProtectedTermsAction; import org.jabref.gui.actions.MassSetFieldAction; import org.jabref.gui.actions.MergeEntriesAction; import org.jabref.gui.actions.MnemonicAwareAction; import org.jabref.gui.actions.NewDatabaseAction; import org.jabref.gui.actions.NewEntryAction; import org.jabref.gui.actions.NewEntryFromPlainTextAction; import org.jabref.gui.actions.NewSubLibraryAction; import org.jabref.gui.actions.OldDatabaseCommandWrapper; import org.jabref.gui.actions.OpenBrowserAction; import org.jabref.gui.actions.SearchForUpdateAction; import org.jabref.gui.actions.SetupGeneralFieldsAction; import org.jabref.gui.actions.ShowDocumentViewerAction; import org.jabref.gui.actions.ShowPreferencesAction; import org.jabref.gui.actions.SimpleCommand; import org.jabref.gui.actions.StandardActions; import org.jabref.gui.dialogs.AutosaveUIManager; import org.jabref.gui.exporter.ExportCommand; import org.jabref.gui.exporter.SaveAllAction; import org.jabref.gui.exporter.SaveDatabaseAction; import org.jabref.gui.externalfiletype.ExternalFileTypes; import org.jabref.gui.help.AboutAction; import org.jabref.gui.help.HelpAction; import org.jabref.gui.importer.ImportCommand; import org.jabref.gui.importer.ImportInspectionDialog; import 
org.jabref.gui.importer.actions.OpenDatabaseAction; import org.jabref.gui.keyboard.KeyBinding; import org.jabref.gui.menus.FileHistoryMenu; import org.jabref.gui.push.PushToApplicationButton; import org.jabref.gui.push.PushToApplications; import org.jabref.gui.search.GlobalSearchBar; import org.jabref.gui.specialfields.SpecialFieldValueViewModel; import org.jabref.gui.undo.CountingUndoManager; import org.jabref.gui.util.DefaultTaskExecutor; import org.jabref.logic.autosaveandbackup.AutosaveManager; import org.jabref.logic.autosaveandbackup.BackupManager; import org.jabref.logic.importer.IdFetcher; import org.jabref.logic.importer.OutputPrinter; import org.jabref.logic.importer.ParserResult; import org.jabref.logic.importer.WebFetchers; import org.jabref.logic.l10n.Localization; import org.jabref.logic.search.SearchQuery; import org.jabref.logic.undo.AddUndoableActionEvent; import org.jabref.logic.undo.UndoChangeEvent; import org.jabref.logic.undo.UndoRedoEvent; import org.jabref.logic.util.OS; import org.jabref.logic.util.io.FileUtil; import org.jabref.model.database.BibDatabaseContext; import org.jabref.model.database.BibDatabaseMode; import org.jabref.model.database.shared.DatabaseLocation; import org.jabref.model.entry.BibEntry; import org.jabref.model.entry.BiblatexEntryTypes; import org.jabref.model.entry.BibtexEntryTypes; import org.jabref.model.entry.FieldName; import org.jabref.model.entry.specialfields.SpecialField; import org.jabref.preferences.JabRefPreferences; import org.jabref.preferences.LastFocusedTabPreferences; import org.jabref.preferences.SearchPreferences; import com.google.common.eventbus.Subscribe; import org.eclipse.fx.ui.controls.tabpane.DndTabPane; import org.eclipse.fx.ui.controls.tabpane.DndTabPaneFactory; import org.fxmisc.easybind.EasyBind; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import osx.macadapter.MacAdapter; /** * The main window of the application. 
*/
public class JabRefFrame extends BorderPane implements OutputPrinter {

    // Frame titles.
    public static final String FRAME_TITLE = "JabRef";

    private static final Logger LOGGER = LoggerFactory.getLogger(JabRefFrame.class);

    // Main horizontal split: side pane (left) and the tab pane with the open libraries (right).
    private final SplitPane splitPane = new SplitPane();
    private final JabRefPreferences prefs = Globals.prefs;
    private final GlobalSearchBar globalSearchBar = new GlobalSearchBar(this);

    // Status-bar widgets; these are still Swing components (Swing-to-JavaFX migration in progress).
    private final JLabel statusLine = new JLabel("", SwingConstants.LEFT);
    private final JLabel statusLabel = new JLabel(Localization.lang("Status") + ':', SwingConstants.LEFT);
    private final JProgressBar progressBar = new JProgressBar();
    private final FileHistoryMenu fileHistory = new FileHistoryMenu(prefs, this);

    // Here we instantiate menu/toolbar actions. Actions regarding
    // the currently open database are defined as a GeneralAction
    // with a unique command string. This causes the appropriate
    // BasePanel's runCommand() method to be called with that command.
    // Note: GeneralAction's constructor automatically gets translations
    // for the name and message strings.

    // NOTE(review): each of the three special-field actions below constructs the same
    // SpecialFieldValueViewModel three times; a small private factory could build it once.
    private final AbstractAction toggleRelevance = new GeneralAction(
            new SpecialFieldValueViewModel(SpecialField.RELEVANCE.getValues().get(0)).getCommand(),
            new SpecialFieldValueViewModel(SpecialField.RELEVANCE.getValues().get(0)).getMenuString(),
            new SpecialFieldValueViewModel(SpecialField.RELEVANCE.getValues().get(0)).getToolTipText(),
            IconTheme.JabRefIcons.RELEVANCE.getIcon());
    private final AbstractAction toggleQualityAssured = new GeneralAction(
            new SpecialFieldValueViewModel(SpecialField.QUALITY.getValues().get(0)).getCommand(),
            new SpecialFieldValueViewModel(SpecialField.QUALITY.getValues().get(0)).getMenuString(),
            new SpecialFieldValueViewModel(SpecialField.QUALITY.getValues().get(0)).getToolTipText(),
            IconTheme.JabRefIcons.QUALITY_ASSURED.getIcon());
    private final AbstractAction togglePrinted = new GeneralAction(
            new SpecialFieldValueViewModel(SpecialField.PRINTED.getValues().get(0)).getCommand(),
            new SpecialFieldValueViewModel(SpecialField.PRINTED.getValues().get(0)).getMenuString(),
            new SpecialFieldValueViewModel(SpecialField.PRINTED.getValues().get(0)).getToolTipText(),
            IconTheme.JabRefIcons.PRINTED.getIcon());

    // Lists containing different subsets of actions for different purposes
    // (enabled/disabled together depending on application state; see initActions()).
    private final List<Object> specialFieldButtons = new LinkedList<>();
    private final List<Object> openDatabaseOnlyActions = new LinkedList<>();
    private final List<Object> severalDatabasesOnlyActions = new LinkedList<>();
    private final List<Object> openAndSavedDatabasesOnlyActions = new LinkedList<>();
    private final List<Object> sharedDatabaseOnlyActions = new LinkedList<>();
    private final List<Object> noSharedDatabaseActions = new LinkedList<>();
    private final List<Object> oneEntryOnlyActions = new LinkedList<>();
    private final List<Object> oneEntryWithFileOnlyActions = new LinkedList<>();
    private final List<Object> oneEntryWithURLorDOIOnlyActions = new LinkedList<>();
    private final List<Object> twoEntriesOnlyActions = new LinkedList<>();
    private final List<Object> atLeastOneEntryActions = new LinkedList<>();

    private final Stage mainStage;

    // The sidepane manager takes care of populating the sidepane.
    private SidePaneManager sidePaneManager;
    private TabPane tabbedPane;
    private PushToApplications pushApplications;
    private final CountingUndoManager undoManager = new CountingUndoManager();
    private final DialogService dialogService;
    private SidePane sidePane;

    /**
     * Creates the main JabRef window on the given JavaFX stage and runs the
     * full initialization (layout, actions, key bindings).
     */
    public JabRefFrame(Stage mainStage) {
        this.mainStage = mainStage;
        this.dialogService = new FXDialogService(mainStage);
        init();
    }

    /**
     * Creates a Swing menu from a display name. A single '&' in the name marks
     * the mnemonic: the '&' is stripped and the following character (upper-cased)
     * becomes the menu mnemonic.
     */
    private static JMenu subMenu(String name) {
        int i = name.indexOf('&');
        JMenu res;
        if (i >= 0) {
            res = new JMenu(name.substring(0, i) + name.substring(i + 1));
            char mnemonic = Character.toUpperCase(name.charAt(i + 1));
            res.setMnemonic((int) mnemonic);
        } else {
            res = new JMenu(name);
        }
        return res;
    }

    /**
     * Takes a list of Object and calls the method setEnabled on them, depending on whether it is an Action or a
     * Component.
     *
     * @param list List that should contain Actions and Components.
     */
    private static void setEnabled(List<Object> list, boolean enabled) {
        for (Object actionOrComponent : list) {
            if (actionOrComponent instanceof Action) {
                ((Action) actionOrComponent).setEnabled(enabled);
            }
            if (actionOrComponent instanceof Component) {
                ((Component) actionOrComponent).setEnabled(enabled);
                // A JPanel does not propagate the enabled state to its children; do it manually.
                if (actionOrComponent instanceof JPanel) {
                    JPanel root = (JPanel) actionOrComponent;
                    for (int index = 0; index < root.getComponentCount(); index++) {
                        root.getComponent(index).setEnabled(enabled);
                    }
                }
            }
        }
    }

    // Builds the side pane and tab pane, sets up layout/actions/key bindings,
    // and wires the tab-selection listener that keeps global state in sync.
    private void init() {
        sidePaneManager = new SidePaneManager(Globals.prefs, this);
        sidePane = sidePaneManager.getPane();

        // The DnD factory wraps the actual tab pane in a container pane; unwrap it.
        Pane containerPane = DndTabPaneFactory.createDefaultDnDPane(DndTabPaneFactory.FeedbackType.MARKER, null);
        tabbedPane = (DndTabPane) containerPane.getChildren().get(0);

        initLayout();
        initActions();
        initKeyBindings();

        //setBounds(GraphicsEnvironment.getLocalGraphicsEnvironment().getMaximumWindowBounds());
        //WindowLocation pw = new WindowLocation(this, JabRefPreferences.POS_X, JabRefPreferences.POS_Y, JabRefPreferences.SIZE_X,
        //        JabRefPreferences.SIZE_Y);
        //pw.displayWindowAtStoredLocation();

        /*
         * The following state listener makes sure focus is registered with the
         * correct database when the user switches tabs. Without this,
         * cut/paste/copy operations would some times occur in the wrong tab.
         */
        EasyBind.subscribe(tabbedPane.getSelectionModel().selectedItemProperty(), e -> {
            if (e == null) {
                Globals.stateManager.activeDatabaseProperty().setValue(Optional.empty());
                return;
            }

            BasePanel currentBasePanel = getCurrentBasePanel();
            if (currentBasePanel == null) {
                return;
            }

            // Poor-mans binding to global state
            // We need to invoke this in the JavaFX thread as all the listeners sit there
            Platform.runLater(() -> Globals.stateManager.activeDatabaseProperty().setValue(Optional.of(currentBasePanel.getBibDatabaseContext())));

            // Restore the search bar content for the newly selected tab (or re-run a global search).
            if (new SearchPreferences(Globals.prefs).isGlobalSearch()) {
                globalSearchBar.performSearch();
            } else {
                String content = "";
                Optional<SearchQuery> currentSearchQuery = currentBasePanel.getCurrentSearchQuery();
                if (currentSearchQuery.isPresent()) {
                    content = currentSearchQuery.get().getQuery();
                }
                globalSearchBar.setSearchTerm(content);
            }

            currentBasePanel.getPreviewPanel().updateLayout(Globals.prefs.getPreviewPreferences());

            // groupSidePane.getToggleCommand().setSelected(sidePaneManager.isComponentVisible(GroupSidePane.class));
            //previewToggle.setSelected(Globals.prefs.getPreviewPreferences().isPreviewPanelEnabled());
            //generalFetcher.getToggleCommand().setSelected(sidePaneManager.isComponentVisible(GeneralFetcher.class));
            //openOfficePanel.getToggleCommand().setSelected(sidePaneManager.isComponentVisible(OpenOfficeSidePanel.class));
            // TODO: Can't notify focus listener since it is expecting a swing component
            //Globals.getFocusListener().setFocused(currentBasePanel.getMainTable());
            setWindowTitle();
            // Update search autocompleter with information for the correct database:
            currentBasePanel.updateSearchManager();

            currentBasePanel.getUndoManager().postUndoRedoEvent();
            currentBasePanel.getMainTable().requestFocus();
        });

        //Note: The registration of Apple event is at the end of initialization, because
        //if the events happen too early (ie when the window is not initialized yet), the
        //opened (double-clicked) documents are not displayed.
        if (OS.OS_X) {
            try {
                new MacAdapter().registerMacEvents(this);
            } catch (Exception e) {
                LOGGER.error("Could not interface with Mac OS X methods.", e);
            }
        }

        initShowTrackingNotification();
    }

    // Application-wide keyboard shortcuts, handled before any focused control sees the event.
    private void initKeyBindings() {
        addEventFilter(KeyEvent.KEY_PRESSED, event -> {
            Optional<KeyBinding> keyBinding = Globals.getKeyPrefs().mapToKeyBinding(event);
            if (keyBinding.isPresent()) {
                switch (keyBinding.get()) {
                    case FOCUS_ENTRY_TABLE:
                        getCurrentBasePanel().getMainTable().requestFocus();
                        event.consume();
                        break;
                    case NEXT_LIBRARY:
                        tabbedPane.getSelectionModel().selectNext();
                        event.consume();
                        break;
                    case PREVIOUS_LIBRARY:
                        tabbedPane.getSelectionModel().selectPrevious();
                        event.consume();
                        break;
                    case INCREASE_TABLE_FONT_SIZE:
                        increaseTableFontSize();
                        event.consume();
                        break;
                    case DECREASE_TABLE_FONT_SIZE:
                        decreaseTableFontSize();
                        event.consume();
                        break;
                    case DEFAULT_TABLE_FONT_SIZE:
                        setDefaultTableFontSize();
                        event.consume();
                        break;
                    default:
                        // unbound keys fall through untouched
                }
            }
        });
    }

    // Schedules the telemetry opt-in dialog one minute after startup.
    // NOTE(review): the guard reads inverted relative to the method name
    // (dialog is scheduled when shouldAskToCollectTelemetry() is false) —
    // confirm the semantics of shouldAskToCollectTelemetry against JabRefPreferences.
    private void initShowTrackingNotification() {
        if (!Globals.prefs.shouldAskToCollectTelemetry()) {
            JabRefExecutorService.INSTANCE.submit(new TimerTask() {

                @Override
                public void run() {
                    // Hop Swing EDT -> JavaFX thread before touching the dialog service.
                    SwingUtilities.invokeLater(() -> {
                        DefaultTaskExecutor.runInJavaFXThread(JabRefFrame.this::showTrackingNotification);
                    });
                }
            }, 60000); // run in one minute
        }
    }

    // Shows the telemetry opt-in dialog (if not already opted in), stores the choice,
    // and records that the user has been asked. Returns null so it fits a Callable-style reference.
    private Void showTrackingNotification() {
        if (!Globals.prefs.shouldCollectTelemetry()) {
            boolean shouldCollect = dialogService.showConfirmationDialogAndWait(
                    Localization.lang("Telemetry: Help make JabRef better"),
                    Localization.lang("To improve the user experience, we would like to collect anonymous statistics on the features you use. We will only record what features you access and how often you do it. We will neither collect any personal data nor the content of bibliographic items. If you choose to allow data collection, you can later disable it via Options -> Preferences -> General."),
                    Localization.lang("Share anonymous statistics"),
                    Localization.lang("Don't share"));
            Globals.prefs.setShouldCollectTelemetry(shouldCollect);
        }

        Globals.prefs.askedToCollectTelemetry();
        return null;
    }

    // Re-renders the window title and all tab titles on the JavaFX thread.
    public void refreshTitleAndTabs() {
        DefaultTaskExecutor.runInJavaFXThread(() -> {
            setWindowTitle();
            updateAllTabTitles();
        });
    }

    /**
     * Sets the title of the main window.
     */
    public void setWindowTitle() {
        BasePanel panel = getCurrentBasePanel();

        // no database open
        if (panel == null) {
            //setTitle(FRAME_TITLE);
            return;
        }

        String mode = panel.getBibDatabaseContext().getMode().getFormattedName();
        String modeInfo = String.format(" (%s)", Localization.lang("%0 mode", mode));
        boolean isAutosaveEnabled = Globals.prefs.getBoolean(JabRefPreferences.LOCAL_AUTO_SAVE);

        if (panel.getBibDatabaseContext().getLocation() == DatabaseLocation.LOCAL) {
            // '*' marks unsaved changes, but only when autosave will not persist them anyway.
            String changeFlag = panel.isModified() && !isAutosaveEnabled ? "*" : "";
            String databaseFile = panel.getBibDatabaseContext()
                    .getDatabaseFile()
                    .map(File::getPath)
                    .orElse(GUIGlobals.UNTITLED_TITLE);
            //setTitle(FRAME_TITLE + " - " + databaseFile + changeFlag + modeInfo);
        } else if (panel.getBibDatabaseContext().getLocation() == DatabaseLocation.SHARED) {
            //setTitle(FRAME_TITLE + " - " + panel.getBibDatabaseContext().getDBMSSynchronizer().getDBName() + " ["
            //        + Localization.lang("shared") + "]" + modeInfo);
        }
    }

    /**
     * The MacAdapter calls this method when a "BIB" file has been double-clicked from the Finder.
     */
    public void openAction(String filePath) {
        Path file = Paths.get(filePath);
        // all the logic is done in openIt. Even raising an existing panel
        getOpenDatabaseAction().openFile(file, true);
    }

    /**
     * The MacAdapter calls this method when "About" is selected from the application menu.
     */
    public void about() {
        HelpAction.getCommand().execute();
    }

    // Accessor for the application preferences this frame was built with.
    public JabRefPreferences prefs() {
        return prefs;
    }

    /**
     * Tears down all things started by JabRef
     * <p>
     * FIXME: Currently some threads remain and therefore hinder JabRef to be closed properly
     *
     * @param filenames the filenames of all currently opened files - used for storing them if prefs openLastEdited is set to true
     */
    private void tearDownJabRef(List<String> filenames) {
        Globals.stopBackgroundTasks();
        Globals.shutdownThreadPools();

        //dispose();

        //prefs.putBoolean(JabRefPreferences.WINDOW_MAXIMISED, getExtendedState() == Frame.MAXIMIZED_BOTH);

        if (prefs.getBoolean(JabRefPreferences.OPEN_LAST_EDITED)) {
            // Here we store the names of all current files. If
            // there is no current file, we remove any
            // previously stored filename.
            if (filenames.isEmpty()) {
                prefs.remove(JabRefPreferences.LAST_EDITED);
            } else {
                prefs.putStringList(JabRefPreferences.LAST_EDITED, filenames);
                File focusedDatabase = getCurrentBasePanel().getBibDatabaseContext().getDatabaseFile().orElse(null);
                new LastFocusedTabPreferences(prefs).setLastFocusedTab(focusedDatabase);
            }
        }

        // Persist user state before shutting down.
        fileHistory.storeHistory();
        prefs.customExports.store(Globals.prefs);
        prefs.customImports.store();
        prefs.flush();

        // dispose all windows, even if they are not displayed anymore
        for (Window window : Window.getWindows()) {
            window.dispose();
        }
    }

    /**
     * General info dialog. The MacAdapter calls this method when "Quit"
     * is selected from the application menu, Cmd-Q is pressed, or "Quit" is selected from the Dock.
     * The function returns a boolean indicating if quitting is ok or not.
     * <p>
     * Non-OSX JabRef calls this when choosing "Quit" from the menu
     * <p>
     * SIDE EFFECT: tears down JabRef
     *
     * @return true if the user chose to quit; false otherwise
     */
    public boolean quit() {
        // Ask here if the user really wants to close, if the base
        // has not been saved since last save.
        boolean close = true;
        List<String> filenames = new ArrayList<>();
        for (int i = 0; i < tabbedPane.getTabs().size(); i++) {
            BibDatabaseContext context = getBasePanelAt(i).getBibDatabaseContext();

            if (getBasePanelAt(i).isModified() && (context.getLocation() == DatabaseLocation.LOCAL)) {
                // Bring the modified tab to front so the user sees what the dialog refers to.
                tabbedPane.getSelectionModel().select(i);
                String filename = context.getDatabaseFile().map(File::getAbsolutePath).orElse(GUIGlobals.UNTITLED_TITLE);
                int answer = showSaveDialog(filename);

                if ((answer == JOptionPane.CANCEL_OPTION) || (answer == JOptionPane.CLOSED_OPTION)) {
                    return false;
                }
                if (answer == JOptionPane.YES_OPTION) {
                    // The user wants to save.
                    try {
                        //getCurrentBasePanel().runCommand("save");
                        SaveDatabaseAction saveAction = new SaveDatabaseAction(getCurrentBasePanel());
                        saveAction.runCommand();
                        if (saveAction.isCanceled() || !saveAction.isSuccess()) {
                            // The action was either canceled or unsuccessful.
                            // Break!
                            output(Localization.lang("Unable to save library"));
                            close = false;
                        }
                    } catch (Throwable ex) {
                        // Something prevented the file
                        // from being saved. Break!!!
                        close = false;
                        break;
                    }
                }
            } else if (context.getLocation() == DatabaseLocation.SHARED) {
                // Detach the shared database cleanly before shutdown.
                context.convertToLocalDatabase();
                context.getDBMSSynchronizer().closeSharedDatabase();
                context.clearDBMSSynchronizer();
            }
            AutosaveManager.shutdown(context);
            BackupManager.shutdown(context);
            context.getDatabaseFile().map(File::getAbsolutePath).ifPresent(filenames::add);
        }

        if (close) {
            // Wait for any in-flight save operations before tearing everything down.
            for (int i = 0; i < tabbedPane.getTabs().size(); i++) {
                if (getBasePanelAt(i).isSaving()) {
                    // There is a database still being saved, so we need to wait.
                    WaitForSaveOperation w = new WaitForSaveOperation(this);
                    w.show(); // This method won't return until canceled or the save operation is done.
                    if (w.canceled()) {
                        return false; // The user clicked cancel.
                    }
                }
            }

            tearDownJabRef(filenames);
            return true;
        }

        return false;
    }

    // Assembles the overall window layout: menu + toolbar on top, side pane / tab pane split
    // in the center, and the (Swing) status bar widgets.
    private void initLayout() {
        setProgressBarVisible(false);

        pushApplications = new PushToApplications(this.getDialogService());

        BorderPane head = new BorderPane();
        head.setTop(createMenu());
        head.setCenter(createToolbar());
        setTop(head);

        SplitPane.setResizableWithParent(sidePane, Boolean.FALSE);
        splitPane.getItems().addAll(sidePane, tabbedPane);

        // We need to wait with setting the divider since it gets reset a few times during the initial set-up
        mainStage.showingProperty().addListener(new ChangeListener<Boolean>() {

            @Override
            public void changed(ObservableValue<? extends Boolean> observable, Boolean oldValue, Boolean showing) {
                if (showing) {
                    setDividerPosition();

                    // Keep the side pane in/out of the split depending on its visibility,
                    // restoring the divider each time it is re-added.
                    EasyBind.subscribe(sidePane.visibleProperty(), visible -> {
                        if (visible) {
                            if (!splitPane.getItems().contains(sidePane)) {
                                splitPane.getItems().add(0, sidePane);
                                setDividerPosition();
                            }
                        } else {
                            splitPane.getItems().remove(sidePane);
                        }
                    });

                    // NOTE(review): both calls remove the same listener — 'observable' is
                    // mainStage.showingProperty() here, so the second removal is redundant.
                    mainStage.showingProperty().removeListener(this);
                    observable.removeListener(this);
                }
            }
        });

        setCenter(splitPane);

        UIManager.put("TabbedPane.contentBorderInsets", new Insets(0, 0, 0, 0));

        // Swing status bar: label, status line (stretches), progress bar.
        GridBagLayout gbl = new GridBagLayout();
        GridBagConstraints con = new GridBagConstraints();
        con.fill = GridBagConstraints.BOTH;
        con.anchor = GridBagConstraints.WEST;
        JPanel status = new JPanel();
        status.setLayout(gbl);
        con.weighty = 0;
        con.weightx = 0;
        con.gridwidth = 1;
        con.insets = new Insets(0, 2, 0, 0);
        gbl.setConstraints(statusLabel, con);
        status.add(statusLabel);
        con.weightx = 1;
        con.insets = new Insets(0, 4, 0, 0);
        con.gridwidth = 1;
        gbl.setConstraints(statusLine, con);
        status.add(statusLine);
        con.weightx = 0;
        con.gridwidth = GridBagConstraints.REMAINDER;
        con.insets = new Insets(2, 4, 2, 2);
        gbl.setConstraints(progressBar, con);
        status.add(progressBar);
        statusLabel.setForeground(GUIGlobals.ENTRY_EDITOR_LABEL_COLOR.darker());
    }

    // Restores the persisted divider position and persists subsequent user adjustments.
    private void setDividerPosition() {
splitPane.setDividerPositions(prefs.getDouble(JabRefPreferences.SIDE_PANE_WIDTH)); if (!splitPane.getDividers().isEmpty()) { EasyBind.subscribe(splitPane.getDividers().get(0).positionProperty(), position -> prefs.putDouble(JabRefPreferences.SIDE_PANE_WIDTH, position.doubleValue())); } } private Node createToolbar() { Pane leftSpacer = new Pane(); HBox.setHgrow(leftSpacer, Priority.SOMETIMES); Pane rightSpacer = new Pane(); HBox.setHgrow(rightSpacer, Priority.SOMETIMES); ActionFactory factory = new ActionFactory(Globals.getKeyPrefs()); Button newLibrary; if (Globals.prefs.getBoolean(JabRefPreferences.BIBLATEX_DEFAULT_MODE)) { newLibrary = factory.createIconButton(StandardActions.NEW_LIBRARY_BIBLATEX, new NewDatabaseAction(this, BibDatabaseMode.BIBLATEX)); } else { newLibrary = factory.createIconButton(StandardActions.NEW_LIBRARY_BIBTEX, new NewDatabaseAction(this, BibDatabaseMode.BIBTEX)); } HBox leftSide = new HBox( newLibrary, factory.createIconButton(StandardActions.OPEN_LIBRARY, new OpenDatabaseAction(this)), factory.createIconButton(StandardActions.SAVE_LIBRARY, new OldDatabaseCommandWrapper(Actions.SAVE, this, Globals.stateManager)), leftSpacer); leftSide.setMinWidth(100); leftSide.prefWidthProperty().bind(sidePane.widthProperty()); leftSide.maxWidthProperty().bind(sidePane.widthProperty()); PushToApplicationButton pushToExternal = new PushToApplicationButton(this, pushApplications.getApplications()); HBox rightSide = new HBox ( factory.createIconButton(StandardActions.NEW_ENTRY, new NewEntryAction(this, BiblatexEntryTypes.ARTICLE)), factory.createIconButton(StandardActions.DELETE_ENTRY, new OldDatabaseCommandWrapper(Actions.DELETE, this, Globals.stateManager)), factory.createIconButton(StandardActions.UNDO, new OldDatabaseCommandWrapper(Actions.UNDO, this, Globals.stateManager)), factory.createIconButton(StandardActions.REDO, new OldDatabaseCommandWrapper(Actions.REDO, this, Globals.stateManager)), factory.createIconButton(StandardActions.CUT, new 
OldDatabaseCommandWrapper(Actions.CUT, this, Globals.stateManager)), factory.createIconButton(StandardActions.COPY, new OldDatabaseCommandWrapper(Actions.COPY, this, Globals.stateManager)), factory.createIconButton(StandardActions.PASTE, new OldDatabaseCommandWrapper(Actions.PASTE, this, Globals.stateManager)), factory.createIconButton(StandardActions.CLEANUP_ENTRIES, new OldDatabaseCommandWrapper(Actions.CLEANUP, this, Globals.stateManager)), factory.createIconButton(pushToExternal.getMenuAction(), pushToExternal), factory.createIconButton(StandardActions.FORK_ME, new OpenBrowserAction("https://github.com/JabRef/jabref")), factory.createIconButton(StandardActions.OPEN_FACEBOOK, new OpenBrowserAction("https: factory.createIconButton(StandardActions.OPEN_TWITTER, new OpenBrowserAction("https://twitter.com/jabref_org")) ); HBox.setHgrow(globalSearchBar, Priority.ALWAYS); ToolBar toolBar = new ToolBar( leftSide, globalSearchBar, rightSpacer, rightSide ); toolBar.getStyleClass().add("mainToolbar"); return toolBar; } /** * Returns the indexed BasePanel. * * @param i Index of base */ public BasePanel getBasePanelAt(int i) { return (BasePanel) tabbedPane.getTabs().get(i).getContent(); } /** * Returns a list of BasePanel. * */ public List<BasePanel> getBasePanelList() { List<BasePanel> returnList = new ArrayList<>(); for (int i = 0; i < getBasePanelCount(); i++) { returnList.add(getBasePanelAt(i)); } return returnList; } public void showBasePanelAt(int i) { tabbedPane.getSelectionModel().select(i); } public void showBasePanel(BasePanel bp) { tabbedPane.getSelectionModel().select(getTab(bp)); } /** * Returns the currently viewed BasePanel. */ public BasePanel getCurrentBasePanel() { if ((tabbedPane == null) || (tabbedPane.getSelectionModel().getSelectedItem() == null)) { return null; } return (BasePanel) tabbedPane.getSelectionModel().getSelectedItem().getContent(); } /** * @return the BasePanel count. 
*/ public int getBasePanelCount() { return tabbedPane.getTabs().size(); } private Tab getTab(BasePanel comp) { for (Tab tab : tabbedPane.getTabs()) { if (tab.getContent() == comp) { return tab; } } return null; } /** * @deprecated do not operate on tabs but on BibDatabaseContexts */ @Deprecated public TabPane getTabbedPane() { return tabbedPane; } public void setTabTitle(BasePanel comp, String title, String toolTip) { DefaultTaskExecutor.runInJavaFXThread(() -> { Tab tab = getTab(comp); tab.setText(title); tab.setTooltip(new Tooltip(toolTip)); }); } /** * JavaFX Menus * @return Menubar */ private MenuBar createMenu() { ActionFactory factory = new ActionFactory(Globals.getKeyPrefs()); Menu file = new Menu(Localization.menuTitle("File")); Menu edit = new Menu(Localization.menuTitle("Edit")); Menu library = new Menu(Localization.lang("Library")); Menu quality = new Menu(Localization.menuTitle("Quality")); Menu view = new Menu(Localization.menuTitle("View")); Menu tools = new Menu(Localization.menuTitle("Tools")); Menu options = new Menu(Localization.menuTitle("Options")); Menu help = new Menu(Localization.menuTitle("Help")); file.getItems().addAll( factory.createMenuItem(StandardActions.NEW_LIBRARY_BIBTEX, new NewDatabaseAction(this, BibDatabaseMode.BIBTEX)), factory.createMenuItem(StandardActions.NEW_LIBRARY_BIBLATEX, new NewDatabaseAction(this, BibDatabaseMode.BIBLATEX)), factory.createMenuItem(StandardActions.OPEN_LIBRARY, getOpenDatabaseAction()), factory.createMenuItem(StandardActions.SAVE_LIBRARY, new OldDatabaseCommandWrapper(Actions.SAVE, this, Globals.stateManager)), factory.createMenuItem(StandardActions.SAVE_LIBRARY_AS, new OldDatabaseCommandWrapper(Actions.SAVE_AS, this, Globals.stateManager)), factory.createMenuItem(StandardActions.SAVE_ALL, new SaveAllAction(this)), factory.createSubMenu(StandardActions.IMPORT_EXPORT, factory.createMenuItem(StandardActions.MERGE_DATABASE, new OldDatabaseCommandWrapper(Actions.MERGE_DATABASE, this, Globals.stateManager)), 
// TODO: merge with import factory.createMenuItem(StandardActions.IMPORT_INTO_CURRENT_LIBRARY, new ImportCommand(this, true)), factory.createMenuItem(StandardActions.IMPORT_INTO_NEW_LIBRARY, new ImportCommand(this, false)), factory.createMenuItem(StandardActions.EXPORT_ALL, new ExportCommand(this, false)), factory.createMenuItem(StandardActions.EXPORT_SELECTED, new ExportCommand(this, true)), factory.createMenuItem(StandardActions.SAVE_SELECTED_AS_PLAIN_BIBTEX, new OldDatabaseCommandWrapper(Actions.SAVE_SELECTED_AS_PLAIN, this, Globals.stateManager)) ), new SeparatorMenuItem(), factory.createMenuItem(StandardActions.CONNECT_TO_SHARED_DB, new ConnectToSharedDatabaseCommand(this)), factory.createMenuItem(StandardActions.PULL_CHANGES_FROM_SHARED_DB, new OldDatabaseCommandWrapper(Actions.PULL_CHANGES_FROM_SHARED_DATABASE, this, Globals.stateManager)), new SeparatorMenuItem(), fileHistory, new SeparatorMenuItem(), factory.createMenuItem(StandardActions.CLOSE_LIBRARY, new CloseDatabaseAction()), factory.createMenuItem(StandardActions.QUIT, new CloseAction()) ); edit.getItems().addAll( factory.createMenuItem(StandardActions.UNDO, new OldDatabaseCommandWrapper(Actions.UNDO, this, Globals.stateManager)), factory.createMenuItem(StandardActions.REDO, new OldDatabaseCommandWrapper(Actions.REDO, this, Globals.stateManager)), new SeparatorMenuItem(), factory.createMenuItem(StandardActions.CUT, new EditAction(Actions.CUT)), factory.createMenuItem(StandardActions.COPY, new EditAction(Actions.COPY)), factory.createSubMenu(StandardActions.COPY_MORE, factory.createMenuItem(StandardActions.COPY_TITLE, new OldDatabaseCommandWrapper(Actions.COPY_TITLE, this, Globals.stateManager)), factory.createMenuItem(StandardActions.COPY_KEY, new OldDatabaseCommandWrapper(Actions.COPY_KEY, this, Globals.stateManager)), factory.createMenuItem(StandardActions.COPY_CITE_KEY, new OldDatabaseCommandWrapper(Actions.COPY_CITE_KEY, this, Globals.stateManager)), 
factory.createMenuItem(StandardActions.COPY_KEY_AND_TITLE, new OldDatabaseCommandWrapper(Actions.COPY_KEY_AND_TITLE, this, Globals.stateManager)), factory.createMenuItem(StandardActions.COPY_KEY_AND_LINK, new OldDatabaseCommandWrapper(Actions.COPY_KEY_AND_LINK, this, Globals.stateManager)), factory.createMenuItem(StandardActions.COPY_CITATION_PREVIEW, new OldDatabaseCommandWrapper(Actions.COPY_CITATION_HTML, this, Globals.stateManager)), factory.createMenuItem(StandardActions.EXPORT_SELECTED_TO_CLIPBOARD, new OldDatabaseCommandWrapper(Actions.EXPORT_TO_CLIPBOARD, this, Globals.stateManager))), factory.createMenuItem(StandardActions.PASTE, new EditAction(Actions.PASTE)), new SeparatorMenuItem(), factory.createMenuItem(StandardActions.SEND_AS_EMAIL, new OldDatabaseCommandWrapper(Actions.SEND_AS_EMAIL, this, Globals.stateManager)), new SeparatorMenuItem() ); /* TODO if (Globals.prefs.getBoolean(JabRefPreferences.SPECIALFIELDSENABLED)) { boolean menuitem = false; if (Globals.prefs.getBoolean(JabRefPreferences.SHOWCOLUMN_RANKING)) { rankSubMenu = new JMenu(); // TODO RightClickMenu.createSpecialFieldMenu(rankSubMenu, SpecialField.RANKING, this); edit.add(rankSubMenu); menuitem = true; } if (Globals.prefs.getBoolean(JabRefPreferences.SHOWCOLUMN_RELEVANCE)) { edit.add(toggleRelevance); menuitem = true; } if (Globals.prefs.getBoolean(JabRefPreferences.SHOWCOLUMN_QUALITY)) { edit.add(toggleQualityAssured); menuitem = true; } if (Globals.prefs.getBoolean(JabRefPreferences.SHOWCOLUMN_PRIORITY)) { rankSubMenu = new JMenu(); // TODO RightClickMenu.createSpecialFieldMenu(rankSubMenu, SpecialField.PRIORITY, this); edit.add(rankSubMenu); menuitem = true; } if (Globals.prefs.getBoolean(JabRefPreferences.SHOWCOLUMN_PRINTED)) { edit.add(togglePrinted); menuitem = true; } if (Globals.prefs.getBoolean(JabRefPreferences.SHOWCOLUMN_READ)) { rankSubMenu = new JMenu(); // TODO RightClickMenu.createSpecialFieldMenu(rankSubMenu, SpecialField.READ_STATUS, this); edit.add(rankSubMenu); 
menuitem = true; } if (menuitem) { edit.addSeparator(); } } */ edit.getItems().addAll( factory.createMenuItem(StandardActions.MANAGE_KEYWORDS, new ManageKeywordsAction(this)), factory.createMenuItem(StandardActions.REPLACE_ALL, new OldDatabaseCommandWrapper(Actions.REPLACE_ALL, this, Globals.stateManager)), factory.createMenuItem(StandardActions.MASS_SET_FIELDS, new MassSetFieldAction(this)) ); library.getItems().addAll( factory.createMenuItem(StandardActions.NEW_ARTICLE, new NewEntryAction(this, BibtexEntryTypes.ARTICLE)), factory.createMenuItem(StandardActions.NEW_ENTRY, new NewEntryAction(this)), factory.createMenuItem(StandardActions.NEW_ENTRY_FROM_PLAINTEX, new NewEntryFromPlainTextAction(this, Globals.prefs.getUpdateFieldPreferences())), new SeparatorMenuItem(), factory.createMenuItem(StandardActions.DELETE_ENTRY, new OldDatabaseCommandWrapper(Actions.DELETE, this, Globals.stateManager)), new SeparatorMenuItem(), factory.createMenuItem(StandardActions.LIBRARY_PROPERTIES, new LibraryPropertiesAction(this)), factory.createMenuItem(StandardActions.EDIT_PREAMBLE, new OldDatabaseCommandWrapper(Actions.EDIT_PREAMBLE, this, Globals.stateManager)), factory.createMenuItem(StandardActions.EDIT_STRINGS, new OldDatabaseCommandWrapper(Actions.EDIT_STRINGS, this, Globals.stateManager)) ); Menu lookupIdentifiers = factory.createSubMenu(StandardActions.LOOKUP_DOC_IDENTIFIER); for (IdFetcher<?> fetcher : WebFetchers.getIdFetchers(Globals.prefs.getImportFormatPreferences())) { LookupIdentifierAction<?> identifierAction = new LookupIdentifierAction<>(this, fetcher); lookupIdentifiers.getItems().add(factory.createMenuItem(identifierAction.getAction(), identifierAction)); } quality.getItems().addAll( factory.createMenuItem(StandardActions.FIND_DUPLICATES, new DuplicateSearch(this)), factory.createMenuItem(StandardActions.MERGE_ENTRIES, new MergeEntriesAction(this)), new SeparatorMenuItem(), factory.createMenuItem(StandardActions.RESOLVE_DUPLICATE_KEYS, new 
OldDatabaseCommandWrapper(Actions.RESOLVE_DUPLICATE_KEYS, this, Globals.stateManager)), factory.createMenuItem(StandardActions.CHECK_INTEGRITY, new IntegrityCheckAction(this)), factory.createMenuItem(StandardActions.CLEANUP_ENTRIES, new OldDatabaseCommandWrapper(Actions.CLEANUP, this, Globals.stateManager)), factory.createMenuItem(StandardActions.GENERATE_CITE_KEY, new OldDatabaseCommandWrapper(Actions.MAKE_KEY, this, Globals.stateManager)), new SeparatorMenuItem(), factory.createMenuItem(StandardActions.SET_FILE_LINKS, new AutoLinkFilesAction()), factory.createMenuItem(StandardActions.FIND_UNLINKED_FILES, new FindUnlinkedFilesAction(this)), lookupIdentifiers, factory.createMenuItem(StandardActions.DOWNLOAD_FULL_TEXT, new OldDatabaseCommandWrapper(Actions.DOWNLOAD_FULL_TEXT, this, Globals.stateManager)) ); SidePaneComponent webSearch = sidePaneManager.getComponent(SidePaneType.WEB_SEARCH); SidePaneComponent groups = sidePaneManager.getComponent(SidePaneType.GROUPS); SidePaneComponent openOffice = sidePaneManager.getComponent(SidePaneType.OPEN_OFFICE); view.getItems().addAll( factory.createMenuItem(webSearch.getToggleAction(), webSearch.getToggleCommand()), factory.createMenuItem(groups.getToggleAction(), groups.getToggleCommand()), factory.createMenuItem(StandardActions.TOGGLE_PREVIEW, new OldDatabaseCommandWrapper(Actions.TOGGLE_PREVIEW, this, Globals.stateManager)), factory.createMenuItem(StandardActions.EDIT_ENTRY, new OldDatabaseCommandWrapper(Actions.EDIT, this, Globals.stateManager)), factory.createMenuItem(StandardActions.SHOW_PDV_VIEWER, new ShowDocumentViewerAction()), new SeparatorMenuItem(), factory.createMenuItem(StandardActions.SELECT_ALL, new OldDatabaseCommandWrapper(Actions.SELECT_ALL, this, Globals.stateManager)), new SeparatorMenuItem(), factory.createMenuItem(StandardActions.NEXT_PREVIEW_STYLE, new OldDatabaseCommandWrapper(Actions.NEXT_PREVIEW_STYLE, this, Globals.stateManager)), factory.createMenuItem(StandardActions.PREVIOUS_PREVIEW_STYLE, new 
OldDatabaseCommandWrapper(Actions.PREVIOUS_PREVIEW_STYLE, this, Globals.stateManager)) ); PushToApplicationButton pushToExternal = new PushToApplicationButton(this, pushApplications.getApplications()); tools.getItems().addAll( factory.createMenuItem(StandardActions.NEW_SUB_LIBRARY_FROM_AUX, new NewSubLibraryAction(this)), factory.createMenuItem(StandardActions.WRITE_XMP, new OldDatabaseCommandWrapper(Actions.WRITE_XMP, this, Globals.stateManager)), new SeparatorMenuItem(), factory.createMenuItem(openOffice.getToggleAction(), openOffice.getToggleCommand()), factory.createMenuItem(pushToExternal.getMenuAction(), pushToExternal), new SeparatorMenuItem(), factory.createMenuItem(StandardActions.OPEN_FOLDER, new OldDatabaseCommandWrapper(Actions.OPEN_FOLDER, this, Globals.stateManager)), factory.createMenuItem(StandardActions.OPEN_FILE, new OldDatabaseCommandWrapper(Actions.OPEN_EXTERNAL_FILE, this, Globals.stateManager)), factory.createMenuItem(StandardActions.OPEN_URL, new OldDatabaseCommandWrapper(Actions.OPEN_URL, this, Globals.stateManager)), factory.createMenuItem(StandardActions.OPEN_CONSOLE, new OldDatabaseCommandWrapper(Actions.OPEN_CONSOLE, this, Globals.stateManager)), factory.createMenuItem(StandardActions.COPY_LINKED_FILES, new CopyFilesAction(this)), new SeparatorMenuItem(), factory.createMenuItem(StandardActions.ABBREVIATE_ISO, new OldDatabaseCommandWrapper(Actions.ABBREVIATE_ISO, this, Globals.stateManager)), factory.createMenuItem(StandardActions.ABBREVIATE_MEDLINE, new OldDatabaseCommandWrapper(Actions.ABBREVIATE_MEDLINE, this, Globals.stateManager)), factory.createMenuItem(StandardActions.UNABBREVIATE, new OldDatabaseCommandWrapper(Actions.UNABBREVIATE, this, Globals.stateManager)) ); options.getItems().addAll( factory.createMenuItem(StandardActions.SHOW_PREFS, new ShowPreferencesAction(this)), new SeparatorMenuItem(), factory.createMenuItem(StandardActions.SETUP_GENERAL_FIELDS, new SetupGeneralFieldsAction(this)), 
factory.createMenuItem(StandardActions.MANAGE_CUSTOM_IMPORTS, new ManageCustomImportsAction(this)), factory.createMenuItem(StandardActions.MANAGE_CUSTOM_EXPORTS, new ManageCustomExportsAction(this)), factory.createMenuItem(StandardActions.MANAGE_EXTERNAL_FILETYPES, new EditExternalFileTypesAction()), factory.createMenuItem(StandardActions.MANAGE_JOURNALS, new ManageJournalsAction()), factory.createMenuItem(StandardActions.CUSTOMIZE_KEYBINDING, new CustomizeKeyBindingAction()), factory.createMenuItem(StandardActions.MANAGE_PROTECTED_TERMS, new ManageProtectedTermsAction(this, Globals.protectedTermsLoader)), new SeparatorMenuItem(), factory.createMenuItem(StandardActions.MANAGE_CONTENT_SELECTORS, new OldDatabaseCommandWrapper(Actions.MANAGE_SELECTORS, this, Globals.stateManager)), factory.createMenuItem(StandardActions.CUSTOMIZE_ENTRY_TYPES, new CustomizeEntryAction(this)), factory.createMenuItem(StandardActions.MANAGE_CITE_KEY_PATTERNS, new BibtexKeyPatternAction(this)) ); help.getItems().addAll( factory.createMenuItem(StandardActions.HELP, HelpAction.getCommand()), factory.createMenuItem(StandardActions.OPEN_FORUM, new OpenBrowserAction("http://discourse.jabref.org/")), new SeparatorMenuItem(), factory.createMenuItem(StandardActions.ERROR_CONSOLE, new ErrorConsoleAction()), new SeparatorMenuItem(), factory.createMenuItem(StandardActions.SEARCH_FOR_UPDATES, new SearchForUpdateAction()), factory.createSubMenu(StandardActions.WEB_MENU, factory.createMenuItem(StandardActions.OPEN_WEBPAGE, new OpenBrowserAction("https://jabref.org/")), factory.createMenuItem(StandardActions.OPEN_BLOG, new OpenBrowserAction("https://blog.jabref.org/")), factory.createMenuItem(StandardActions.OPEN_FACEBOOK, new OpenBrowserAction("https: factory.createMenuItem(StandardActions.OPEN_TWITTER, new OpenBrowserAction("https://twitter.com/jabref_org")), new SeparatorMenuItem(), factory.createMenuItem(StandardActions.FORK_ME, new OpenBrowserAction("https://github.com/JabRef/jabref")), 
factory.createMenuItem(StandardActions.OPEN_DEV_VERSION_LINK, new OpenBrowserAction("https://builds.jabref.org/master/")), factory.createMenuItem(StandardActions.OPEN_CHANGELOG, new OpenBrowserAction("https://github.com/JabRef/jabref/blob/master/CHANGELOG.md")), new SeparatorMenuItem(), factory.createMenuItem(StandardActions.DONATE, new OpenBrowserAction("https://donations.jabref.org")) ), factory.createMenuItem(StandardActions.ABOUT, new AboutAction()) ); MenuBar menu = new MenuBar(); menu.getStyleClass().add("mainMenu"); menu.getMenus().addAll( file, edit, library, quality, tools, view, options, help); menu.setUseSystemMenuBar(true); return menu; } public void addParserResult(ParserResult pr, boolean focusPanel) { if (pr.toOpenTab()) { // Add the entries to the open tab. BasePanel panel = getCurrentBasePanel(); if (panel == null) { // There is no open tab to add to, so we create a new tab: addTab(pr.getDatabaseContext(), focusPanel); } else { List<BibEntry> entries = new ArrayList<>(pr.getDatabase().getEntries()); addImportedEntries(panel, entries); } } else { // only add tab if DB is not already open Optional<BasePanel> panel = getBasePanelList().stream() .filter(p -> p.getBibDatabaseContext().getDatabaseFile().equals(pr.getFile())) .findFirst(); if (panel.isPresent()) { tabbedPane.getSelectionModel().select(getTab(panel.get())); } else { addTab(pr.getDatabaseContext(), focusPanel); } } } /** * displays the String on the Status Line visible on the bottom of the JabRef mainframe */ public void output(final String s) { SwingUtilities.invokeLater(() -> { statusLine.setText(s); statusLine.repaint(); }); } private void initActions() { /* openDatabaseOnlyActions.clear(); openDatabaseOnlyActions.addAll(Arrays.asList(manageSelectors, mergeDatabaseAction, newSubDatabaseAction, save, copyPreview, saveAs, saveSelectedAs, saveSelectedAsPlain, undo, redo, cut, deleteEntry, copy, paste, mark, markSpecific, unmark, unmarkAll, rankSubMenu, editEntry, selectAll, copyKey, 
copyCiteKey, copyKeyAndTitle, copyKeyAndLink, editPreamble, editStrings, groupSidePane.getToggleCommand(), makeKeyAction, normalSearch, generalFetcher.getToggleCommand(), mergeEntries, cleanupEntries, exportToClipboard, replaceAll, sendAsEmail, downloadFullText, lookupIdentifiers, writeXmpAction, openOfficePanel.getToggleCommand(), findUnlinkedFiles, addToGroup, removeFromGroup, moveToGroup, autoLinkFile, resolveDuplicateKeys, openUrl, openFolder, openFile, togglePreview, dupliCheck, autoSetFile, newEntryAction, newSpec, customizeAction, plainTextImport, getMassSetField(), getManageKeywords(), pushExternalButton.getMenuAction(), closeDatabaseAction, getNextPreviewStyleAction(), getPreviousPreviewStyleAction(), checkIntegrity, databaseProperties, abbreviateIso, abbreviateMedline, unabbreviate, exportAll, exportSelected, importCurrent, saveAll, focusTable, increaseFontSize, decreseFontSize, defaultFontSize, toggleRelevance, toggleQualityAssured, togglePrinted, pushExternalButton.getComponent())); openDatabaseOnlyActions.addAll(newSpecificEntryAction); openDatabaseOnlyActions.addAll(specialFieldButtons); severalDatabasesOnlyActions.clear(); severalDatabasesOnlyActions.addAll(Arrays .asList(nextTab, prevTab, sortTabs)); openAndSavedDatabasesOnlyActions.addAll(Collections.singletonList(openConsole)); sharedDatabaseOnlyActions.addAll(Collections.singletonList(pullChangesFromSharedDatabase)); noSharedDatabaseActions.addAll(Arrays.asList(save, saveAll)); oneEntryOnlyActions.clear(); oneEntryOnlyActions.addAll(Arrays.asList(editEntry)); oneEntryWithFileOnlyActions.clear(); oneEntryWithFileOnlyActions.addAll(Arrays.asList(openFolder, openFile)); oneEntryWithURLorDOIOnlyActions.clear(); oneEntryWithURLorDOIOnlyActions.addAll(Arrays.asList(openUrl)); twoEntriesOnlyActions.clear(); twoEntriesOnlyActions.addAll(Arrays.asList(mergeEntries)); atLeastOneEntryActions.clear(); atLeastOneEntryActions.addAll(Arrays.asList(downloadFullText, lookupIdentifiers, exportLinkedFiles)); 
tabbedPane.getTabs().addListener(this::updateEnabledState); */ } /** * Enable or Disable all actions based on the number of open tabs. * <p> * The action that are affected are set in initActions. */ public void updateEnabledState(ListChangeListener.Change<? extends Tab> change) { int tabCount = tabbedPane.getTabs().size(); if (!change.next()) { return; } if (change.wasAdded() || change.wasRemoved()) { setEnabled(openDatabaseOnlyActions, tabCount > 0); setEnabled(severalDatabasesOnlyActions, tabCount > 1); } if (tabCount == 0) { setEnabled(openAndSavedDatabasesOnlyActions, false); setEnabled(sharedDatabaseOnlyActions, false); setEnabled(oneEntryOnlyActions, false); } if (tabCount > 0) { BasePanel current = getCurrentBasePanel(); boolean saved = current.getBibDatabaseContext().getDatabasePath().isPresent(); setEnabled(openAndSavedDatabasesOnlyActions, saved); boolean isShared = current.getBibDatabaseContext().getLocation() == DatabaseLocation.SHARED; setEnabled(sharedDatabaseOnlyActions, isShared); setEnabled(noSharedDatabaseActions, !isShared); boolean oneEntrySelected = current.getSelectedEntries().size() == 1; setEnabled(oneEntryOnlyActions, oneEntrySelected); setEnabled(oneEntryWithFileOnlyActions, isExistFile(current.getSelectedEntries())); setEnabled(oneEntryWithURLorDOIOnlyActions, isExistURLorDOI(current.getSelectedEntries())); boolean twoEntriesSelected = current.getSelectedEntries().size() == 2; setEnabled(twoEntriesOnlyActions, twoEntriesSelected); boolean atLeastOneEntrySelected = !current.getSelectedEntries().isEmpty(); setEnabled(atLeastOneEntryActions, atLeastOneEntrySelected); } } /** * This method causes all open BasePanels to set up their tables * anew. When called from PrefsDialog3, this updates to the new * settings. */ public void setupAllTables() { // This action can be invoked without an open database, so // we have to check if we have one before trying to invoke // methods to execute changes in the preferences. 
// We want to notify all tabs about the changes to // avoid problems when changing the column set. for (int i = 0; i < tabbedPane.getTabs().size(); i++) { BasePanel bf = getBasePanelAt(i); // Update tables: if (bf.getDatabase() != null) { DefaultTaskExecutor.runInJavaFXThread(bf::setupMainPanel); } } } private List<String> collectDatabaseFilePaths() { List<String> dbPaths = new ArrayList<>(getBasePanelCount()); for (BasePanel basePanel : getBasePanelList()) { try { // db file exists if (basePanel.getBibDatabaseContext().getDatabaseFile().isPresent()) { dbPaths.add(basePanel.getBibDatabaseContext().getDatabaseFile().get().getCanonicalPath()); } else { dbPaths.add(""); } } catch (IOException ex) { LOGGER.error("Invalid database file path: " + ex.getMessage()); } } return dbPaths; } private List<String> getUniquePathParts() { List<String> dbPaths = collectDatabaseFilePaths(); return FileUtil.uniquePathSubstrings(dbPaths); } public void updateAllTabTitles() { List<String> paths = getUniquePathParts(); for (int i = 0; i < getBasePanelCount(); i++) { String uniqPath = paths.get(i); Optional<File> file = getBasePanelAt(i).getBibDatabaseContext().getDatabaseFile(); if (file.isPresent()) { if (!uniqPath.equals(file.get().getName()) && uniqPath.contains(File.separator)) { // remove filename uniqPath = uniqPath.substring(0, uniqPath.lastIndexOf(File.separator)); tabbedPane.getTabs().get(i).setText(getBasePanelAt(i).getTabTitle() + " \u2014 " + uniqPath); } else { // set original filename (again) tabbedPane.getTabs().get(i).setText(getBasePanelAt(i).getTabTitle()); } } else { tabbedPane.getTabs().get(i).setText(getBasePanelAt(i).getTabTitle()); } tabbedPane.getTabs().get(i).setTooltip(new Tooltip(file.map(File::getAbsolutePath).orElse(null))); } } public void addTab(BasePanel basePanel, boolean raisePanel) { DefaultTaskExecutor.runInJavaFXThread(() -> { // add tab Tab newTab = new Tab(basePanel.getTabTitle(), basePanel); tabbedPane.getTabs().add(newTab); // update all tab 
titles updateAllTabTitles(); if (raisePanel) { tabbedPane.getSelectionModel().select(newTab); } // Register undo/redo listener basePanel.getUndoManager().registerListener(new UndoRedoEventManager()); BibDatabaseContext context = basePanel.getBibDatabaseContext(); if (readyForAutosave(context)) { AutosaveManager autosaver = AutosaveManager.start(context); autosaver.registerListener(new AutosaveUIManager(basePanel)); } BackupManager.start(context); // Track opening trackOpenNewDatabase(basePanel); }); } private void trackOpenNewDatabase(BasePanel basePanel) { Map<String, String> properties = new HashMap<>(); Map<String, Double> measurements = new HashMap<>(); measurements.put("NumberOfEntries", (double) basePanel.getDatabaseContext().getDatabase().getEntryCount()); Globals.getTelemetryClient().ifPresent(client -> client.trackEvent("OpenNewDatabase", properties, measurements)); } public BasePanel addTab(BibDatabaseContext databaseContext, boolean raisePanel) { Objects.requireNonNull(databaseContext); BasePanel bp = new BasePanel(this, BasePanelPreferences.from(Globals.prefs), databaseContext, ExternalFileTypes.getInstance()); addTab(bp, raisePanel); return bp; } private boolean readyForAutosave(BibDatabaseContext context) { return ((context.getLocation() == DatabaseLocation.SHARED) || ((context.getLocation() == DatabaseLocation.LOCAL) && Globals.prefs.getBoolean(JabRefPreferences.LOCAL_AUTO_SAVE))) && context.getDatabaseFile().isPresent(); } /** * This method does the job of adding imported entries into the active * database, or into a new one. It shows the ImportInspectionDialog if * preferences indicate it should be used. Otherwise it imports directly. * @param panel The BasePanel to add to. * @param entries The entries to add. 
*/ private void addImportedEntries(final BasePanel panel, final List<BibEntry> entries) { SwingUtilities.invokeLater(() -> { ImportInspectionDialog diag = new ImportInspectionDialog(JabRefFrame.this, panel, Localization.lang("Import"), false); diag.addEntries(entries); diag.entryListComplete(); diag.setVisible(true); diag.toFront(); }); } public FileHistoryMenu getFileHistory() { return fileHistory; } /** * Set the visibility of the progress bar in the right end of the * status line at the bottom of the frame. * <p> * If not called on the event dispatch thread, this method uses * SwingUtilities.invokeLater() to do the actual operation on the EDT. */ public void setProgressBarVisible(final boolean visible) { if (SwingUtilities.isEventDispatchThread()) { progressBar.setVisible(visible); } else { SwingUtilities.invokeLater(() -> progressBar.setVisible(visible)); } } /** * Sets the current value of the progress bar. * <p> * If not called on the event dispatch thread, this method uses * SwingUtilities.invokeLater() to do the actual operation on the EDT. */ public void setProgressBarValue(final int value) { if (SwingUtilities.isEventDispatchThread()) { progressBar.setValue(value); } else { SwingUtilities.invokeLater(() -> progressBar.setValue(value)); } } /** * Sets the indeterminate status of the progress bar. * <p> * If not called on the event dispatch thread, this method uses * SwingUtilities.invokeLater() to do the actual operation on the EDT. */ public void setProgressBarIndeterminate(final boolean value) { if (SwingUtilities.isEventDispatchThread()) { progressBar.setIndeterminate(value); } else { SwingUtilities.invokeLater(() -> progressBar.setIndeterminate(value)); } } /** * Sets the maximum value of the progress bar. Always call this method * before using the progress bar, to set a maximum value appropriate to * the task at hand. 
* <p> * If not called on the event dispatch thread, this method uses * SwingUtilities.invokeLater() to do the actual operation on the EDT. */ public void setProgressBarMaximum(final int value) { if (SwingUtilities.isEventDispatchThread()) { progressBar.setMaximum(value); } else { SwingUtilities.invokeLater(() -> progressBar.setMaximum(value)); } } /** * Return a boolean, if the selected entry have file * @param selectEntryList A selected entries list of the current base pane * @return true, if the selected entry contains file. * false, if multiple entries are selected or the selected entry doesn't contains file */ private boolean isExistFile(List<BibEntry> selectEntryList) { if (selectEntryList.size() == 1) { BibEntry selectedEntry = selectEntryList.get(0); return selectedEntry.getField(FieldName.FILE).isPresent(); } return false; } /** * Return a boolean, if the selected entry have url or doi * @param selectEntryList A selected entries list of the current base pane * @return true, if the selected entry contains url or doi. 
* false, if multiple entries are selected or the selected entry doesn't contains url or doi */ private boolean isExistURLorDOI(List<BibEntry> selectEntryList) { if (selectEntryList.size() == 1) { BibEntry selectedEntry = selectEntryList.get(0); return (selectedEntry.getField(FieldName.URL).isPresent() || selectedEntry.getField(FieldName.DOI).isPresent()); } return false; } @Override public void showMessage(String message, String title, int msgType) { JOptionPane.showMessageDialog(null, message, title, msgType); } @Override public void setStatus(String s) { output(s); } @Override public void showMessage(String message) { JOptionPane.showMessageDialog(null, message); } private int showSaveDialog(String filename) { Object[] options = {Localization.lang("Save changes"), Localization.lang("Discard changes"), Localization.lang("Return to JabRef")}; return JOptionPane.showOptionDialog(null, Localization.lang("Library '%0' has changed.", filename), Localization.lang("Save before closing"), JOptionPane.YES_NO_CANCEL_OPTION, JOptionPane.WARNING_MESSAGE, null, options, options[2]); } private void closeTab(Tab tab) { closeTab(getBasePanel(tab)); } private BasePanel getBasePanel(Tab tab) { return (BasePanel) tab.getContent(); } private void closeTab(BasePanel panel) { // empty tab without database if (panel == null) { return; } BibDatabaseContext context = panel.getBibDatabaseContext(); if (panel.isModified() && (context.getLocation() == DatabaseLocation.LOCAL)) { if (confirmClose(panel)) { removeTab(panel); } } else if (context.getLocation() == DatabaseLocation.SHARED) { context.convertToLocalDatabase(); context.getDBMSSynchronizer().closeSharedDatabase(); context.clearDBMSSynchronizer(); removeTab(panel); } else { removeTab(panel); } AutosaveManager.shutdown(context); BackupManager.shutdown(context); } // Ask if the user really wants to close, if the base has not been saved private boolean confirmClose(BasePanel panel) { boolean close = false; String filename; filename = 
panel.getBibDatabaseContext() .getDatabaseFile() .map(File::getAbsolutePath) .orElse(GUIGlobals.UNTITLED_TITLE); int answer = showSaveDialog(filename); if (answer == JOptionPane.YES_OPTION) { // The user wants to save. try { SaveDatabaseAction saveAction = new SaveDatabaseAction(panel); saveAction.runCommand(); if (saveAction.isSuccess()) { close = true; } } catch (Throwable ex) { // do not close } } else if (answer == JOptionPane.NO_OPTION) { // discard changes close = true; } return close; } private void removeTab(BasePanel panel) { DefaultTaskExecutor.runInJavaFXThread(() -> { panel.cleanUp(); tabbedPane.getTabs().remove(getTab(panel)); setWindowTitle(); output(Localization.lang("Closed library") + '.'); // update tab titles updateAllTabTitles(); }); } public void closeCurrentTab() { removeTab(getCurrentBasePanel()); } public OpenDatabaseAction getOpenDatabaseAction() { return new OpenDatabaseAction(this); } public String getStatusLineText() { return statusLine.getText(); } public SidePaneManager getSidePaneManager() { return sidePaneManager; } public PushToApplications getPushApplications() { return pushApplications; } public GlobalSearchBar getGlobalSearchBar() { return globalSearchBar; } public CountingUndoManager getUndoManager() { return undoManager; } public DialogService getDialogService() { return dialogService; } private class GeneralAction extends MnemonicAwareAction { private final Actions command; public GeneralAction(Actions command, String text) { this.command = command; putValue(Action.NAME, text); } public GeneralAction(Actions command, String text, String description) { this.command = command; putValue(Action.NAME, text); putValue(Action.SHORT_DESCRIPTION, description); } public GeneralAction(Actions command, String text, Icon icon) { super(icon); this.command = command; putValue(Action.NAME, text); } public GeneralAction(Actions command, String text, String description, Icon icon) { super(icon); this.command = command; putValue(Action.NAME, 
text); putValue(Action.SHORT_DESCRIPTION, description); } public GeneralAction(Actions command, String text, KeyStroke key) { this.command = command; putValue(Action.NAME, text); putValue(Action.ACCELERATOR_KEY, key); } public GeneralAction(Actions command, String text, String description, KeyStroke key) { this.command = command; putValue(Action.NAME, text); putValue(Action.SHORT_DESCRIPTION, description); putValue(Action.ACCELERATOR_KEY, key); } public GeneralAction(Actions command, String text, String description, KeyStroke key, Icon icon) { super(icon); this.command = command; putValue(Action.NAME, text); putValue(Action.SHORT_DESCRIPTION, description); putValue(Action.ACCELERATOR_KEY, key); } @Override public void actionPerformed(ActionEvent e) { if (tabbedPane.getTabs().size() > 0) { try { getCurrentBasePanel().runCommand(command); } catch (Throwable ex) { LOGGER.error("Problem with executing command: " + command, ex); } } else { LOGGER.info("Action '" + command + "' must be disabled when no database is open."); } } } /** * The action concerned with closing the window. */ private class CloseAction extends SimpleCommand { @Override public void execute() { quit(); Platform.exit(); } } /** * Class for handling general actions; cut, copy and paste. The focused component is * kept track of by Globals.focusListener, and we call the action stored under the * relevant name in its action map. 
*/ private class EditAction extends SimpleCommand { private final Actions command; public EditAction(Actions command) { this.command = command; } @Override public void execute() { LOGGER.debug(Globals.getFocusListener().getFocused().toString()); JComponent source = Globals.getFocusListener().getFocused(); Action action = source.getActionMap().get(command); if (action != null) { action.actionPerformed(new ActionEvent(source, 0, command.name())); } } } private void setDefaultTableFontSize() { GUIGlobals.setFont(Globals.prefs.getIntDefault(JabRefPreferences.FONT_SIZE)); for (BasePanel basePanel : getBasePanelList()) { basePanel.updateTableFont(); } setStatus(Localization.lang("Table font size is %0", String.valueOf(GUIGlobals.currentFont.getSize()))); } private void increaseTableFontSize() { GUIGlobals.setFont(GUIGlobals.currentFont.getSize() + 1); for (BasePanel basePanel : getBasePanelList()) { basePanel.updateTableFont(); } setStatus(Localization.lang("Table font size is %0", String.valueOf(GUIGlobals.currentFont.getSize()))); } private void decreaseTableFontSize() { int currentSize = GUIGlobals.currentFont.getSize(); if (currentSize < 2) { return; } GUIGlobals.setFont(currentSize - 1); for (BasePanel basePanel : getBasePanelList()) { basePanel.updateTableFont(); } setStatus(Localization.lang("Table font size is %0", String.valueOf(GUIGlobals.currentFont.getSize()))); } private class CloseDatabaseAction extends SimpleCommand { @Override public void execute() { closeTab(getCurrentBasePanel()); } } private class UndoRedoEventManager { @Subscribe public void listen(UndoRedoEvent event) { updateTexts(event); JabRefFrame.this.getCurrentBasePanel().updateEntryEditorIfShowing(); } @Subscribe public void listen(AddUndoableActionEvent event) { updateTexts(event); } private void updateTexts(UndoChangeEvent event) { /* TODO SwingUtilities.invokeLater(() -> { undo.putValue(Action.SHORT_DESCRIPTION, event.getUndoDescription()); undo.setEnabled(event.isCanUndo()); 
redo.putValue(Action.SHORT_DESCRIPTION, event.getRedoDescription()); redo.setEnabled(event.isCanRedo()); }); */ } } }
package com.github.rschmitt.collider;

import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.stream.Collector;

import clojure.lang.IPersistentMap;
import clojure.lang.IPersistentSet;
import clojure.lang.IPersistentVector;

/**
 * Static factory methods for creating immutable (persistent) collections backed by
 * Clojure's data structures, plus conversions from ordinary Java collections.
 * <p>
 * This class is a stateless utility and cannot be instantiated.
 */
public final class Collider {

    private Collider() {
        // Utility class: no instances (Effective Java, Item 4).
    }

    /** Returns an empty persistent map. */
    public static <K, V> ClojureMap<K, V> clojureMap() {
        return ClojureMap.create();
    }

    /** Returns a persistent map containing a single entry. */
    public static <K, V> ClojureMap<K, V> clojureMap(K key, V value) {
        return ClojureMap.create(key, value);
    }

    /** Returns a persistent map containing the two given entries. */
    public static <K, V> ClojureMap<K, V> clojureMap(K key1, V val1, K key2, V val2) {
        return ClojureMap.create(key1, val1, key2, val2);
    }

    /** Returns a persistent map containing the three given entries. */
    public static <K, V> ClojureMap<K, V> clojureMap(K key1, V val1, K key2, V val2, K key3, V val3) {
        return ClojureMap.create(key1, val1, key2, val2, key3, val3);
    }

    /** Returns a persistent map containing the four given entries. */
    public static <K, V> ClojureMap<K, V> clojureMap(K key1, V val1, K key2, V val2, K key3, V val3,
                                                     K key4, V val4) {
        return ClojureMap.create(key1, val1, key2, val2, key3, val3, key4, val4);
    }

    /** Returns a persistent map containing the five given entries. */
    public static <K, V> ClojureMap<K, V> clojureMap(K key1, V val1, K key2, V val2, K key3, V val3,
                                                     K key4, V val4, K key5, V val5) {
        return ClojureMap.create(key1, val1, key2, val2, key3, val3, key4, val4, key5, val5);
    }

    /** Returns a persistent list of the given elements, in order. */
    @SafeVarargs
    public static <T> ClojureList<T> clojureList(T... elements) {
        return ClojureList.create(elements);
    }

    /** Returns a persistent set of the given elements. */
    @SafeVarargs
    public static <T> ClojureSet<T> clojureSet(T... elements) {
        return ClojureSet.create(elements);
    }

    /** Returns an empty transient (temporarily mutable) map, for efficient batch construction. */
    public static <K, V> TransientMap<K, V> transientMap() {
        ClojureMap<K, V> emptyMap = clojureMap();
        return emptyMap.asTransient();
    }

    /** Returns an empty transient (temporarily mutable) list, for efficient batch construction. */
    public static <T> TransientList<T> transientList() {
        ClojureList<T> emptyList = clojureList();
        return emptyList.asTransient();
    }

    /** Returns an empty transient (temporarily mutable) set, for efficient batch construction. */
    public static <T> TransientSet<T> transientSet() {
        ClojureSet<T> emptySet = clojureSet();
        return emptySet.asTransient();
    }

    /**
     * Converts an arbitrary {@link Map} into a {@link ClojureMap}.
     * Inputs that are already persistent are returned or wrapped without copying.
     */
    @SuppressWarnings("unchecked")
    public static <K, V> ClojureMap<K, V> intoClojureMap(Map<? extends K, ? extends V> map) {
        if (map instanceof ClojureMap) return (ClojureMap<K, V>) map;
        if (map instanceof IPersistentMap) return ClojureMap.wrap((IPersistentMap) map);
        return map.entrySet().stream().collect(ClojureMap.toClojureMap(Entry::getKey, Entry::getValue));
    }

    /**
     * Converts an arbitrary {@link List} into a {@link ClojureList}.
     * Inputs that are already persistent are returned or wrapped without copying.
     */
    @SuppressWarnings("unchecked")
    public static <T> ClojureList<T> intoClojureList(List<? extends T> list) {
        if (list instanceof ClojureList) return (ClojureList<T>) list;
        if (list instanceof IPersistentVector) return (ClojureList<T>) ClojureList.wrap((IPersistentVector) list);
        // Work around an inference bug in some older JDKs
        Collector<T, TransientList<T>, ClojureList<T>> collector = ClojureList.toClojureList();
        return list.stream().collect(collector);
    }

    /**
     * Converts an arbitrary {@link Set} into a {@link ClojureSet}.
     * Inputs that are already persistent are returned or wrapped without copying.
     */
    @SuppressWarnings("unchecked")
    public static <T> ClojureSet<T> intoClojureSet(Set<? extends T> set) {
        if (set instanceof ClojureSet) return (ClojureSet<T>) set;
        if (set instanceof IPersistentSet) return (ClojureSet<T>) ClojureSet.wrap((IPersistentSet) set);
        // Work around an inference bug in some older JDKs
        Collector<T, TransientSet<T>, ClojureSet<T>> collector = ClojureSet.toClojureSet();
        return set.stream().collect(collector);
    }
}
package org.jbake.app;

import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentPool;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.metadata.schema.OClass;
import com.orientechnologies.orient.core.metadata.schema.OSchema;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.sql.OCommandSQL;
import com.orientechnologies.orient.core.sql.query.OSQLSynchQuery;

import java.util.List;

import org.jbake.model.DocumentTypes;

/**
 * Data-access layer around the embedded OrientDB document database that stores
 * JBake's parsed content (one OrientDB class per document type) plus a
 * "Signatures" class holding the SHA-1 of the templates, used to detect when
 * cached content must be re-rendered.
 * <p>
 * NOTE(review): document-type names are concatenated directly into SQL below.
 * They appear to originate from the {@code DocumentTypes} registry rather than
 * user input — confirm before exposing this class to untrusted type names.
 *
 * @author jdlee
 */
public class ContentStore {

    private ODatabaseDocumentTx db;

    /**
     * Opens the database {@code type + ":" + name}, creating it first if it does
     * not exist, then re-acquires the connection through the global pool and binds
     * it to the current thread. The schema is initialised only on first creation.
     *
     * @param type OrientDB storage type (e.g. "plocal" or "memory")
     * @param name database name/path
     */
    public ContentStore(final String type, String name) {
        db = new ODatabaseDocumentTx(type + ":" + name);
        boolean exists = db.exists();
        if (!exists) {
            db.create();
        }
        // Re-acquire via the global pool so the connection participates in pooling.
        db = ODatabaseDocumentPool.global().acquire(type + ":" + name, "admin", "admin");
        ODatabaseRecordThreadLocal.INSTANCE.set(db);
        if (!exists) {
            updateSchema();
        }
    }

    /**
     * Ensures an OrientDB class exists for every registered document type, and
     * creates the "Signatures" class (key/sha1) if missing.
     */
    public final void updateSchema() {
        OSchema schema = db.getMetadata().getSchema();
        for (String docType : DocumentTypes.getDocumentTypes()) {
            if (schema.getClass(docType) == null) {
                createDocType(schema, docType);
            }
        }
        if (schema.getClass("Signatures") == null) {
            // create the sha1 signatures class
            OClass signatures = schema.createClass("Signatures");
            signatures.createProperty("key", OType.STRING).setNotNull(true);
            signatures.createProperty("sha1", OType.STRING).setNotNull(true);
        }
    }

    /** Closes the underlying database connection. */
    public void close() {
        db.close();
    }

    /** Drops the entire database. Irreversible. */
    public void drop() {
        db.drop();
    }

    /** Returns the number of records of the given OrientDB class. */
    public long countClass(String iClassName) {
        return db.countClass(iClassName);
    }

    /** Returns the stored sha1/rendered status for the document at {@code uri}. */
    public List<ODocument> getDocumentStatus(String docType, String uri) {
        return query("select sha1,rendered from " + docType + " where sourceuri=?", uri);
    }

    /** Returns all published posts, newest first. */
    public List<ODocument> getPublishedPosts() {
        return getPublishedContent("post");
    }

    /** Returns all published posts carrying the given tag, newest first. */
    public List<ODocument> getPublishedPostsByTag(String tag) {
        return query("select * from post where status='published' and ? in tags order by date desc", tag);
    }

    /** Returns all published pages, newest first. */
    public List<ODocument> getPublishedPages() {
        return getPublishedContent("page");
    }

    /** Returns all published documents of {@code docType}, newest first. */
    public List<ODocument> getPublishedContent(String docType) {
        return query("select * from " + docType + " where status='published' order by date desc");
    }

    /** Returns every document of {@code docType}, newest first, regardless of status. */
    public List<ODocument> getAllContent(String docType) {
        return query("select * from " + docType + " order by date desc");
    }

    /** Returns the tag lists of all published posts. */
    public List<ODocument> getAllTagsFromPublishedPosts() {
        return query("select tags from post where status='published'");
    }

    /** Returns the stored SHA-1 signature of the templates, if any. */
    public List<ODocument> getSignaturesForTemplates() {
        return query("select sha1 from Signatures where key='templates'");
    }

    /** Returns documents of {@code docType} not yet rendered. */
    public List<ODocument> getUnrenderedContent(String docType) {
        return query("select * from " + docType + " where rendered=false");
    }

    /** Deletes the document of {@code docType} identified by its source URI. */
    public void deleteContent(String docType, String uri) {
        executeCommand("delete from " + docType + " where sourceuri=?", uri);
    }

    /** Marks all cached-but-unrendered documents of {@code docType} as rendered. */
    public void markContentAsRendered(String docType) {
        executeCommand("update " + docType + " set rendered=true where rendered=false and cached=true");
    }

    /**
     * @deprecated misspelled; use {@link #markContentAsRendered(String)} instead.
     *             Kept as a delegating alias for source compatibility.
     */
    @Deprecated
    public void markConentAsRendered(String docType) {
        markContentAsRendered(docType);
    }

    /** Overwrites the stored template signature with {@code currentTemplatesSignature}. */
    public void updateSignatures(String currentTemplatesSignature) {
        executeCommand("update Signatures set sha1=? where key='templates'", currentTemplatesSignature);
    }

    /** Deletes every document of {@code docType}. */
    public void deleteAllByDocType(String docType) {
        executeCommand("delete from " + docType);
    }

    /** Stores the template signature for the first time. */
    public void insertSignature(String currentTemplatesSignature) {
        executeCommand("insert into Signatures(key,sha1) values('templates',?)", currentTemplatesSignature);
    }

    private List<ODocument> query(String sql) {
        return db.query(new OSQLSynchQuery<ODocument>(sql));
    }

    private List<ODocument> query(String sql, Object... args) {
        return db.command(new OSQLSynchQuery<ODocument>(sql)).execute(args);
    }

    private void executeCommand(String query, Object... args) {
        db.command(new OCommandSQL(query)).execute(args);
    }

    /** Creates the OrientDB class and required properties for one document type. */
    private static void createDocType(final OSchema schema, final String doctype) {
        OClass page = schema.createClass(doctype);
        page.createProperty("sha1", OType.STRING).setNotNull(true);
        page.createProperty("sourceuri", OType.STRING).setNotNull(true);
        page.createProperty("rendered", OType.BOOLEAN).setNotNull(true);
        page.createProperty("cached", OType.BOOLEAN).setNotNull(true);
        // commented out because for some reason index seems to be written
        // after the database is closed to this triggers an exception
        //page.createIndex("uriIdx", OClass.INDEX_TYPE.UNIQUE, "uri");
        //page.createIndex("renderedIdx", OClass.INDEX_TYPE.NOTUNIQUE, "rendered");
    }
}
// NOTE(review): the file was collapsed onto very long physical lines; the layout
// below is unchanged apart from inserted comment lines and a split after the
// "// Users" section marker (which, on one physical line, commented out the code
// that followed it).
// Hyperwallet REST v4 client. username/password are the API credentials, an
// optional HyperwalletEncryption enables encrypted payloads, and a null/empty
// server argument falls back to the public sandbox host.
package com.hyperwallet.clientsdk; import com.fasterxml.jackson.core.type.TypeReference; import com.hyperwallet.clientsdk.model.*; import com.hyperwallet.clientsdk.util.HyperwalletApiClient; import com.hyperwallet.clientsdk.util.HyperwalletEncryption; import com.hyperwallet.clientsdk.util.HyperwalletJsonUtil; import org.apache.commons.lang3.StringUtils; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.Date; import java.util.HashMap; import java.util.Locale; import java.util.TimeZone; /** * The Hyperwallet Client */ public class Hyperwallet { public static final String VERSION = "1.4.2"; private final HyperwalletApiClient apiClient; private final String programToken; private final String url; /** * Create Hyperwallet SDK instance * * @param username API key assigned * @param password API Password assigned * @param programToken API program token * @param server API server url * @param hyperwalletEncryption API encryption data */ public Hyperwallet(final String username, final String password, final String programToken, final String server, final HyperwalletEncryption hyperwalletEncryption) { apiClient = new HyperwalletApiClient(username, password, VERSION, hyperwalletEncryption); this.programToken = programToken; this.url = StringUtils.isEmpty(server) ?
// Ternary continues from the previous line: sandbox default vs caller-supplied host.
"https://api.sandbox.hyperwallet.com/rest/v4" : server + "/rest/v4"; } /** * Create Hyperwallet SDK instance * * @param username API key assigned * @param password API Password assigned * @param programToken API program token * @param server API server url */ public Hyperwallet(final String username, final String password, final String programToken, final String server) { this(username, password, programToken, server, null); } /** * Create Hyperwallet SDK instance * * @param username API key assigned * @param password API password * @param programToken API program token assigned */ public Hyperwallet(final String username, final String password, final String programToken) { this(username, password, programToken, null, null); } /** * Create Hyperwallet SDK instance * * @param username API key assigned * @param password API password * @param programToken API program token assigned * @param hyperwalletEncryption API encryption data */ public Hyperwallet(final String username, final String password, final String programToken, final HyperwalletEncryption hyperwalletEncryption) { this(username, password, programToken, null, hyperwalletEncryption); } /** * Create Hyperwallet SDK instance * * @param username API key assigned * @param password API password */ public Hyperwallet(final String username, final String password) { this(username, password, null); } // Users
// Create never sends server-managed fields: a caller-set token is rejected,
// and status/createdOn are cleared on a defensive copy before the POST.
/** * Create a User * * @param user Hyperwallet user representation * @return HyperwalletUser created User */ public HyperwalletUser createUser(HyperwalletUser user) { if (user == null) { throw new HyperwalletException("User is required"); } if (!StringUtils.isEmpty(user.getToken())) { throw new HyperwalletException("User token may not be present"); } user = copy(user); user.setStatus(null); user.setCreatedOn(null); return apiClient.post(url + "/users", user, HyperwalletUser.class); } /** * Get User * * @param token user account token * @return HyperwalletUser retrieved user */ public HyperwalletUser
getUser(String token) { if (StringUtils.isEmpty(token)) { throw new HyperwalletException("User token is required"); } return apiClient.get(url + "/users/" + token, HyperwalletUser.class); } /** * Update User * * @param user Hyperwallet User representation object * @return HyperwalletUser updated user object */ public HyperwalletUser updateUser(HyperwalletUser user) { if (user == null) { throw new HyperwalletException("User is required"); } if (StringUtils.isEmpty(user.getToken())) { throw new HyperwalletException("User token is required"); } return apiClient.put(url + "/users/" + user.getToken(), user, HyperwalletUser.class); } /** * List Users * * @return HyperwalletList of HyperwalletUser */ public HyperwalletList<HyperwalletUser> listUsers() { return listUsers(null); } /** * List Users * * @param options List filter option * @return HyperwalletList of HyperwalletUser */ public HyperwalletList<HyperwalletUser> listUsers(HyperwalletPaginationOptions options) { String url = paginate(this.url + "/users", options); return apiClient.get(url, new TypeReference<HyperwalletList<HyperwalletUser>>() { }); } /** * Get User Status Transition * * @param userToken User token * @param statusTransitionToken Status transition token * @return HyperwalletStatusTransition */ public HyperwalletStatusTransition listgetUserStatusTransition(String userToken, String statusTransitionToken) { if (StringUtils.isEmpty(userToken)) { throw new HyperwalletException("User token is required"); } if (StringUtils.isEmpty(statusTransitionToken)) { throw new HyperwalletException("Transition token is required"); } return apiClient.get(url + "/users/" + userToken + "/status-transitions/" + statusTransitionToken, HyperwalletStatusTransition.class); } /** * Create Business Stake Holder * * @param stakeholder Hyperwallet Stakeholder representation * @param userToken String * @return HyperwalletBusinessStakeholder created Stakeholder */ public HyperwalletBusinessStakeholder createBusinessStakeholder(String 
userToken, HyperwalletBusinessStakeholder stakeholder) { System.out.println("--Business Stakeholder - create"); if (stakeholder == null) { throw new HyperwalletException("Stakeholder is required"); } if (userToken == null) { throw new HyperwalletException("User token may not be present"); } stakeholder = copy(stakeholder); stakeholder.setStatus(null); stakxeholder.setCreatedOn(null); return apiClient.post(url + "/users/"+ userToken + "/business-stakeholders", stakeholder, HyperwalletBusinessStakeholder.class); } /** * Update User * * @param user Hyperwallet User representation object * @return HyperwalletUser updated user object */ public HyperwalletUser updateBusinessStakeholder(HyperwalletBusinessStakeholder stakeholder) { if (user == null) { throw new HyperwalletException("User is required"); } if (StringUtils.isEmpty(user.getToken())) { throw new HyperwalletException("User token is required"); } return apiClient.put(url + "/users/" + user.getToken(), user, HyperwalletUser.class); } /** * Get Authentication Token * * @param token user account token * @return HyperwalletAuthenticationToken retreived authentication token */ public HyperwalletAuthenticationToken getAuthenticationToken(String token) { if (StringUtils.isEmpty(token)) { throw new HyperwalletException("User token is required"); } String urlString = url + "/users/" + token + "/authentication-token"; return apiClient.post(urlString, null, HyperwalletAuthenticationToken.class); } /** * Get User Status Transition * * @param userToken User token * @param statusTransitionToken Status transition token * @return HyperwalletStatusTransition */ public HyperwalletStatusTransition getUserStatusTransition(String userToken, String statusTransitionToken) { if (StringUtils.isEmpty(userToken)) { throw new HyperwalletException("User token is required"); } if (StringUtils.isEmpty(statusTransitionToken)) { throw new HyperwalletException("Transition token is required"); } return apiClient.get(url + "/users/" + userToken + 
"/status-transitions/" + statusTransitionToken, HyperwalletStatusTransition.class); } /** * List All User Status Transition information * * @param userToken User token * @return HyperwalletList of HyperwalletStatusTransition */ public HyperwalletList<HyperwalletStatusTransition> listUserStatusTransitions(String userToken) { return listUserStatusTransitions(userToken, null); } /** * List Prepaid Card Status Transition information * * @param userToken User token * @param options List filter option * @return HyperwalletList of HyperwalletStatusTransition */ public HyperwalletList<HyperwalletStatusTransition> listUserStatusTransitions(String userToken, HyperwalletPaginationOptions options) { if (StringUtils.isEmpty(userToken)) { throw new HyperwalletException("User token is required"); } String url = paginate(this.url + "/users/" + userToken + "/status-transitions", options); return apiClient.get(url, new TypeReference<HyperwalletList<HyperwalletStatusTransition>>() { }); } // Prepaid Cards /** * Create Prepaid Card * * @param prepaidCard Prepaid Card object to create * @return HyperwalletPrepaidCard Prepaid Card object created */ public HyperwalletPrepaidCard createPrepaidCard(HyperwalletPrepaidCard prepaidCard) { if (prepaidCard == null) { throw new HyperwalletException("Prepaid Card is required"); } if (StringUtils.isEmpty(prepaidCard.getUserToken())) { throw new HyperwalletException("User token is required"); } if (!StringUtils.isEmpty(prepaidCard.getToken())) { throw new HyperwalletException("Prepaid Card token may not be present"); } if (prepaidCard.getType() == null) { prepaidCard.setType(HyperwalletTransferMethod.Type.PREPAID_CARD); } prepaidCard = copy(prepaidCard); prepaidCard.setStatus(null); prepaidCard.setCardType(null); prepaidCard.setCreatedOn(null); prepaidCard.setTransferMethodCountry(null); prepaidCard.setTransferMethodCurrency(null); prepaidCard.setCardNumber(null); prepaidCard.setCardBrand(null); prepaidCard.setDateOfExpiry(null); return 
apiClient.post(url + "/users/" + prepaidCard.getUserToken() + "/prepaid-cards", prepaidCard, HyperwalletPrepaidCard.class); } /** * Get Prepaid Card * * @param userToken User token assigned * @param prepaidCardToken Prepaid Card token * @return HyperwalletPrepaidCard Prepaid Card */ public HyperwalletPrepaidCard getPrepaidCard(String userToken, String prepaidCardToken) { if (StringUtils.isEmpty(userToken)) { throw new HyperwalletException("User token is required"); } if (StringUtils.isEmpty(prepaidCardToken)) { throw new HyperwalletException("Prepaid Card token is required"); } return apiClient.get(url + "/users/" + userToken + "/prepaid-cards/" + prepaidCardToken, HyperwalletPrepaidCard.class); } /** * Update Prepaid Card * * @param prepaidCard Prepaid Card object to create * @return HyperwalletPrepaidCard Prepaid Card object created */ public HyperwalletPrepaidCard updatePrepaidCard(HyperwalletPrepaidCard prepaidCard) { if (prepaidCard == null) { throw new HyperwalletException("Prepaid Card is required"); } if (StringUtils.isEmpty(prepaidCard.getUserToken())) { throw new HyperwalletException("User token is required"); } if (StringUtils.isEmpty(prepaidCard.getToken())) { throw new HyperwalletException("Prepaid Card token is required"); } return apiClient.put(url + "/users/" + prepaidCard.getUserToken() + "/prepaid-cards/" + prepaidCard.getToken(), prepaidCard, HyperwalletPrepaidCard.class); } /** * List User's Prepaid Card * * @param userToken User token assigned * @return HyperwalletList of HyperwalletPrepaidCard */ public HyperwalletList<HyperwalletPrepaidCard> listPrepaidCards(String userToken) { return listPrepaidCards(userToken, null); } /** * List User's Prepaid Card * * @param userToken User token assigned * @param options List filter option * @return HyperwalletList of HyperwalletPrepaidCard */ public HyperwalletList<HyperwalletPrepaidCard> listPrepaidCards(String userToken, HyperwalletPaginationOptions options) { if (StringUtils.isEmpty(userToken)) { 
throw new HyperwalletException("User token is required"); } String url = paginate(this.url + "/users/" + userToken + "/prepaid-cards", options); return apiClient.get(url, new TypeReference<HyperwalletList<HyperwalletPrepaidCard>>() { }); } /** * Suspend a prepaid card * * @param userToken User token * @param prepaidCardToken Prepaid card token * @return The status transition */ public HyperwalletStatusTransition suspendPrepaidCard(String userToken, String prepaidCardToken) { return createPrepaidCardStatusTransition(userToken, prepaidCardToken, new HyperwalletStatusTransition(HyperwalletStatusTransition.Status.SUSPENDED)); } /** * Unsuspend a prepaid card * * @param userToken User token * @param prepaidCardToken Prepaid card token * @return The status transition */ public HyperwalletStatusTransition unsuspendPrepaidCard(String userToken, String prepaidCardToken) { return createPrepaidCardStatusTransition(userToken, prepaidCardToken, new HyperwalletStatusTransition(HyperwalletStatusTransition.Status.UNSUSPENDED)); } /** * Mark a prepaid card as lost or stolen * * @param userToken User token * @param prepaidCardToken Prepaid card token * @return The status transition */ public HyperwalletStatusTransition lostOrStolenPrepaidCard(String userToken, String prepaidCardToken) { return createPrepaidCardStatusTransition(userToken, prepaidCardToken, new HyperwalletStatusTransition(HyperwalletStatusTransition.Status.LOST_OR_STOLEN)); } /** * Deactivate a prepaid card * * @param userToken User token * @param prepaidCardToken Prepaid card token * @return The status transition */ public HyperwalletStatusTransition deactivatePrepaidCard(String userToken, String prepaidCardToken) { return createPrepaidCardStatusTransition(userToken, prepaidCardToken, new HyperwalletStatusTransition(HyperwalletStatusTransition.Status.DE_ACTIVATED)); } /** * Lock a prepaid card * * @param userToken User token * @param prepaidCardToken Prepaid card token * @return The status transition */ public
HyperwalletStatusTransition lockPrepaidCard(String userToken, String prepaidCardToken) { return createPrepaidCardStatusTransition(userToken, prepaidCardToken, new HyperwalletStatusTransition(HyperwalletStatusTransition.Status.LOCKED)); } /** * Unlock a prepaid card * * @param userToken User token * @param prepaidCardToken Prepaid card token * @return The status transition */ public HyperwalletStatusTransition unlockPrepaidCard(String userToken, String prepaidCardToken) { return createPrepaidCardStatusTransition(userToken, prepaidCardToken, new HyperwalletStatusTransition(HyperwalletStatusTransition.Status.UNLOCKED)); }
// Workhorse behind the convenience wrappers above: validates tokens, copies the
// transition, clears server-managed fields, then POSTs the new status.
/** * Create Prepaid Card Status Transition * * @param userToken User token * @param prepaidCardToken Prepaid Card token * @param transition Status transition information * @return HyperwalletStatusTransition new status for Prepaid Card */ public HyperwalletStatusTransition createPrepaidCardStatusTransition(String userToken, String prepaidCardToken, HyperwalletStatusTransition transition) { if (transition == null) { throw new HyperwalletException("Transition is required"); } if (StringUtils.isEmpty(userToken)) { throw new HyperwalletException("User token is required"); } if (StringUtils.isEmpty(prepaidCardToken)) { throw new HyperwalletException("Prepaid Card token is required"); } if (!StringUtils.isEmpty(transition.getToken())) { throw new HyperwalletException("Status Transition token may not be present"); } transition = copy(transition); transition.setCreatedOn(null); transition.setFromStatus(null); transition.setToStatus(null); return apiClient.post(url + "/users/" + userToken + "/prepaid-cards/" + prepaidCardToken + "/status-transitions", transition, HyperwalletStatusTransition.class); } /** * Get Prepaid Card Status Transition * * @param userToken User token * @param prepaidCardToken Prepaid Card token * @param statusTransitionToken Status transition token * @return HyperwalletStatusTransition */ public HyperwalletStatusTransition
getPrepaidCardStatusTransition(String userToken, String prepaidCardToken, String statusTransitionToken) { if (StringUtils.isEmpty(userToken)) { throw new HyperwalletException("User token is required"); } if (StringUtils.isEmpty(prepaidCardToken)) { throw new HyperwalletException("Prepaid Card token is required"); } if (StringUtils.isEmpty(statusTransitionToken)) { throw new HyperwalletException("Transition token is required"); } return apiClient.get(url + "/users/" + userToken + "/prepaid-cards/" + prepaidCardToken + "/status-transitions/" + statusTransitionToken, HyperwalletStatusTransition.class); } /** * List All Prepaid Card Status Transition information * * @param userToken User token * @param prepaidCardToken Prepaid Card token * @return HyperwalletList of HyperwalletStatusTransition */ public HyperwalletList<HyperwalletStatusTransition> listPrepaidCardStatusTransitions(String userToken, String prepaidCardToken) { return listPrepaidCardStatusTransitions(userToken, prepaidCardToken, null); } /** * List Prepaid Card Status Transition information * * @param userToken User token * @param prepaidCardToken Prepaid Card token * @param options List filter option * @return HyperwalletList of HyperwalletStatusTransition */ public HyperwalletList<HyperwalletStatusTransition> listPrepaidCardStatusTransitions(String userToken, String prepaidCardToken, HyperwalletPaginationOptions options) { if (StringUtils.isEmpty(userToken)) { throw new HyperwalletException("User token is required"); } if (StringUtils.isEmpty(prepaidCardToken)) { throw new HyperwalletException("Prepaid Card token is required"); } String url = paginate(this.url + "/users/" + userToken + "/prepaid-cards/" + prepaidCardToken + "/status-transitions", options); return apiClient.get(url, new TypeReference<HyperwalletList<HyperwalletStatusTransition>>() { }); } // Bank Cards
/** * Create Bank Card * * @param bankCard Bank Card object to create * @return HyperwalletBankCard Bank Card object created */ public
// Bank card CRUD mirrors the prepaid-card section: create clears server-managed
// fields on a defensive copy and defaults type to BANK_CARD when unset.
HyperwalletBankCard createBankCard(HyperwalletBankCard bankCard) { if (bankCard == null) { throw new HyperwalletException("Bank Card is required"); } if (StringUtils.isEmpty(bankCard.getUserToken())) { throw new HyperwalletException("User token is required"); } if (!StringUtils.isEmpty(bankCard.getToken())) { throw new HyperwalletException("Bank Card token may not be present"); } if (bankCard.getType() == null) { bankCard.setType(HyperwalletTransferMethod.Type.BANK_CARD); } bankCard = copy(bankCard); bankCard.setStatus(null); bankCard.setCardType(null); bankCard.setCreatedOn(null); bankCard.setCardBrand(null); return apiClient.post(url + "/users/" + bankCard.getUserToken() + "/bank-cards", bankCard, HyperwalletBankCard.class); } /** * Get Bank Card * * @param userToken User token assigned * @param bankCardToken Bank Card token * @return HyperwalletBankCard Bank Card */ public HyperwalletBankCard getBankCard(String userToken, String bankCardToken) { if (StringUtils.isEmpty(userToken)) { throw new HyperwalletException("User token is required"); } if (StringUtils.isEmpty(bankCardToken)) { throw new HyperwalletException("Bank Card token is required"); } return apiClient.get(url + "/users/" + userToken + "/bank-cards/" + bankCardToken, HyperwalletBankCard.class); } /** * Update Bank Card * * @param bankCard Bank Card object to create * @return HyperwalletBankCard Bank Card object created */ public HyperwalletBankCard updateBankCard(HyperwalletBankCard bankCard) { if (bankCard == null) { throw new HyperwalletException("Bank Card is required"); } if (StringUtils.isEmpty(bankCard.getUserToken())) { throw new HyperwalletException("User token is required"); } if (StringUtils.isEmpty(bankCard.getToken())) { throw new HyperwalletException("Bank Card token is required"); } return apiClient.put(url + "/users/" + bankCard.getUserToken() + "/bank-cards/" + bankCard.getToken(), bankCard, HyperwalletBankCard.class); } /** * List User's Bank Card * * @param userToken User token
assigned * @return HyperwalletList of HyperwalletBankCard */ public HyperwalletList<HyperwalletBankCard> listBankCards(String userToken) { return listBankCards(userToken, null); } /** * List User's Bank Card * * @param userToken User token assigned * @param options List filter option * @return HyperwalletList of HyperwalletBankCard */ public HyperwalletList<HyperwalletBankCard> listBankCards(String userToken, HyperwalletPaginationOptions options) { if (StringUtils.isEmpty(userToken)) { throw new HyperwalletException("User token is required"); } String url = paginate(this.url + "/users/" + userToken + "/bank-cards", options); return apiClient.get(url, new TypeReference<HyperwalletList<HyperwalletBankCard>>() { }); } /** * Deactivate a bank card * * @param userToken User token * @param bankCardToken Bank card token * @return The status transition */ public HyperwalletStatusTransition deactivateBankCard(String userToken, String bankCardToken) { return deactivateBankCard(userToken, bankCardToken, null); } /** * Deactivate a bank card * * @param userToken User token * @param bankCardToken Bank card token * @param notes Comments regarding the status change * @return The status transition */ public HyperwalletStatusTransition deactivateBankCard(String userToken, String bankCardToken, String notes) { return createBankCardStatusTransition(userToken, bankCardToken, new HyperwalletStatusTransition(HyperwalletStatusTransition.Status.DE_ACTIVATED).notes(notes)); } /** * Create Bank Card Status Transition * * @param userToken User token * @param bankCardToken Bank Card token * @param transition Status transition information * @return HyperwalletStatusTransition new status for Bank Card */ public HyperwalletStatusTransition createBankCardStatusTransition(String userToken, String bankCardToken, HyperwalletStatusTransition transition) { if (transition == null) { throw new HyperwalletException("Transition is required"); } if (StringUtils.isEmpty(userToken)) { throw new
HyperwalletException("User token is required"); } if (StringUtils.isEmpty(bankCardToken)) { throw new HyperwalletException("Bank Card token is required"); } if (!StringUtils.isEmpty(transition.getToken())) { throw new HyperwalletException("Status Transition token may not be present"); } transition = copy(transition); transition.setCreatedOn(null); transition.setFromStatus(null); transition.setToStatus(null); return apiClient.post(url + "/users/" + userToken + "/bank-cards/" + bankCardToken + "/status-transitions", transition, HyperwalletStatusTransition.class); } /** * Get Bank Card Status Transition * * @param userToken User token * @param bankCardToken Bank Card token * @param statusTransitionToken Status transition token * @return HyperwalletStatusTransition */ public HyperwalletStatusTransition getBankCardStatusTransition(String userToken, String bankCardToken, String statusTransitionToken) { if (StringUtils.isEmpty(userToken)) { throw new HyperwalletException("User token is required"); } if (StringUtils.isEmpty(bankCardToken)) { throw new HyperwalletException("Bank Card token is required"); } if (StringUtils.isEmpty(statusTransitionToken)) { throw new HyperwalletException("Transition token is required"); } return apiClient.get(url + "/users/" + userToken + "/bank-cards/" + bankCardToken + "/status-transitions/" + statusTransitionToken, HyperwalletStatusTransition.class); } /** * List All Bank Card Status Transition information * * @param userToken User token * @param bankCardToken Bank Card token * @return HyperwalletList of HyperwalletStatusTransition */ public HyperwalletList<HyperwalletStatusTransition> listBankCardStatusTransitions(String userToken, String bankCardToken) { return listBankCardStatusTransitions(userToken, bankCardToken, null); } /** * List Bank Card Status Transition information * * @param userToken User token * @param bankCardToken Bank Card token * @param options List filter option * @return HyperwalletList of HyperwalletStatusTransition
*/ public HyperwalletList<HyperwalletStatusTransition> listBankCardStatusTransitions(String userToken, String bankCardToken, HyperwalletPaginationOptions options) { if (StringUtils.isEmpty(userToken)) { throw new HyperwalletException("User token is required"); } if (StringUtils.isEmpty(bankCardToken)) { throw new HyperwalletException("Bank Card token is required"); } String url = paginate(this.url + "/users/" + userToken + "/bank-cards/" + bankCardToken + "/status-transitions", options); return apiClient.get(url, new TypeReference<HyperwalletList<HyperwalletStatusTransition>>() { }); } // Paper Checks
// Paper check CRUD follows the same pattern: create rejects a caller-set token,
// defaults type to PAPER_CHECK, and clears server-managed fields on a copy.
/** * Create Paper Check * * @param paperCheck Paper Check object to create * @return HyperwalletPaperCheck Paper Check object created */ public HyperwalletPaperCheck createPaperCheck(HyperwalletPaperCheck paperCheck) { if (paperCheck == null) { throw new HyperwalletException("Paper Check is required"); } if (StringUtils.isEmpty(paperCheck.getUserToken())) { throw new HyperwalletException("User token is required"); } if (!StringUtils.isEmpty(paperCheck.getToken())) { throw new HyperwalletException("Paper Check token may not be present"); } if (paperCheck.getType() == null) { paperCheck.setType(HyperwalletTransferMethod.Type.PAPER_CHECK); } paperCheck = copy(paperCheck); paperCheck.setStatus(null); paperCheck.setCreatedOn(null); return apiClient.post(url + "/users/" + paperCheck.getUserToken() + "/paper-checks", paperCheck, HyperwalletPaperCheck.class); } /** * Get Paper Check * * @param userToken User token assigned * @param paperCheckToken Paper Check token * @return HyperwalletPaperCheck Paper Check */ public HyperwalletPaperCheck getPaperCheck(String userToken, String paperCheckToken) { if (StringUtils.isEmpty(userToken)) { throw new HyperwalletException("User token is required"); } if (StringUtils.isEmpty(paperCheckToken)) { throw new HyperwalletException("Paper Check token is required"); } return apiClient.get(url + "/users/" + userToken + "/paper-checks/" +
paperCheckToken, HyperwalletPaperCheck.class); } /** * Update Paper Check * * @param paperCheck Paper Check object to create * @return HyperwalletPaperCheck Paper Check object created */ public HyperwalletPaperCheck updatePaperCheck(HyperwalletPaperCheck paperCheck) { if (paperCheck == null) { throw new HyperwalletException("Paper Check is required"); } if (StringUtils.isEmpty(paperCheck.getUserToken())) { throw new HyperwalletException("User token is required"); } if (StringUtils.isEmpty(paperCheck.getToken())) { throw new HyperwalletException("Paper Check token is required"); } return apiClient.put(url + "/users/" + paperCheck.getUserToken() + "/paper-checks/" + paperCheck.getToken(), paperCheck, HyperwalletPaperCheck.class); } /** * List User's Paper Check * * @param userToken User token assigned * @return HyperwalletList of HyperwalletPaperCheck */ public HyperwalletList<HyperwalletPaperCheck> listPaperChecks(String userToken) { return listPaperChecks(userToken, null); } /** * List User's Paper Check * * @param userToken User token assigned * @param options List filter option * @return HyperwalletList of HyperwalletPaperCheck */ public HyperwalletList<HyperwalletPaperCheck> listPaperChecks(String userToken, HyperwalletPaginationOptions options) { if (StringUtils.isEmpty(userToken)) { throw new HyperwalletException("User token is required"); } String url = paginate(this.url + "/users/" + userToken + "/paper-checks", options); return apiClient.get(url, new TypeReference<HyperwalletList<HyperwalletPaperCheck>>() { }); } /** * Deactivate a Paper Check * * @param userToken User token * @param paperCheckToken Paper Check token * @return The status transition */ public HyperwalletStatusTransition deactivatePaperCheck(String userToken, String paperCheckToken) { return deactivatePaperCheck(userToken, paperCheckToken, null); } /** * Deactivate a Paper Check * * @param userToken User token * @param paperCheckToken Paper Check token * @param notes Comments regarding the status change * @return The status transition */
public HyperwalletStatusTransition deactivatePaperCheck(String userToken, String paperCheckToken, String notes) { return createPaperCheckStatusTransition(userToken, paperCheckToken, new HyperwalletStatusTransition(HyperwalletStatusTransition.Status.DE_ACTIVATED).notes(notes)); } /** * Create Paper Check Status Transition * * @param userToken User token * @param paperCheckToken Paper Check token * @param transition Status transition information * @return HyperwalletStatusTransition new status for Paper Check */ public HyperwalletStatusTransition createPaperCheckStatusTransition(String userToken, String paperCheckToken, HyperwalletStatusTransition transition) { if (transition == null) { throw new HyperwalletException("Transition is required"); } if (StringUtils.isEmpty(userToken)) { throw new HyperwalletException("User token is required"); } if (StringUtils.isEmpty(paperCheckToken)) { throw new HyperwalletException("Paper Check token is required"); } if (!StringUtils.isEmpty(transition.getToken())) { throw new HyperwalletException("Status Transition token may not be present"); } transition = copy(transition); transition.setCreatedOn(null); transition.setFromStatus(null); transition.setToStatus(null); return apiClient.post(url + "/users/" + userToken + "/paper-checks/" + paperCheckToken + "/status-transitions", transition, HyperwalletStatusTransition.class); } /** * Get Paper Check Status Transition * * @param userToken User token * @param paperCheckToken Paper Check token * @param statusTransitionToken Status transition token * @return HyperwalletStatusTransition */ public HyperwalletStatusTransition getPaperCheckStatusTransition(String userToken, String paperCheckToken, String statusTransitionToken) { if (StringUtils.isEmpty(userToken)) { throw new HyperwalletException("User token is required"); } if (StringUtils.isEmpty(paperCheckToken)) { throw new HyperwalletException("Paper Check token is required"); } if (StringUtils.isEmpty(statusTransitionToken)) { throw new 
HyperwalletException("Transition token is required"); } return apiClient.get(url + "/users/" + userToken + "/paper-checks/" + paperCheckToken + "/status-transitions/" + statusTransitionToken, HyperwalletStatusTransition.class); } /** * List All Paper Check Status Transition information * * @param userToken User token * @param paperCheckToken Paper Check token * @return HyperwalletList of HyperwalletStatusTransition */ public HyperwalletList<HyperwalletStatusTransition> listPaperCheckStatusTransitions(String userToken, String paperCheckToken) { return listPaperCheckStatusTransitions(userToken, paperCheckToken, null); } /** * List Paper Check Status Transition information * * @param userToken User token * @param paperCheckToken Paper Check token * @param options List filter option * @return HyperwalletList of HyperwalletStatusTransition */ public HyperwalletList<HyperwalletStatusTransition> listPaperCheckStatusTransitions(String userToken, String paperCheckToken, HyperwalletPaginationOptions options) { if (StringUtils.isEmpty(userToken)) { throw new HyperwalletException("User token is required"); } if (StringUtils.isEmpty(paperCheckToken)) { throw new HyperwalletException("Paper Check token is required"); } String url = paginate(this.url + "/users/" + userToken + "/paper-checks/" + paperCheckToken + "/status-transitions", options); return apiClient.get(url, new TypeReference<HyperwalletList<HyperwalletStatusTransition>>() { }); } // Transfers /** * Create Transfer Request * * @param transfer HyperwalletTransfer object to create * @return HyperwalletTransfer Transfer object created */ public HyperwalletTransfer createTransfer(HyperwalletTransfer transfer) { if (transfer == null) { throw new HyperwalletException("Transfer is required"); } if (StringUtils.isEmpty(transfer.getSourceToken())) { throw new HyperwalletException("Source token is required"); } if (StringUtils.isEmpty(transfer.getDestinationToken())) { throw new HyperwalletException("Destination token is 
required"); } if (StringUtils.isEmpty(transfer.getClientTransferId())) { throw new HyperwalletException("ClientTransferId is required"); } transfer = copy(transfer); transfer.setStatus(null); transfer.setCreatedOn(null); transfer.setExpiresOn(null); return apiClient.post(url + "/transfers", transfer, HyperwalletTransfer.class); } /** * Get Transfer Request * * @param transferToken Transfer token assigned * @return HyperwalletTransfer Transfer */ public HyperwalletTransfer getTransfer(String transferToken) { if (StringUtils.isEmpty(transferToken)) { throw new HyperwalletException("Transfer token is required"); } return apiClient.get(url + "/transfers/" + transferToken, HyperwalletTransfer.class); } /** * List Transfer Requests * * @param options List filter option * @return HyperwalletList of HyperwalletTransfer */ public HyperwalletList<HyperwalletTransfer> listTransfers(HyperwalletTransferListOptions options) { String url = paginate(this.url + "/transfers", options); if (options != null) { url = addParameter(url, "sourceToken", options.getSourceToken()); url = addParameter(url, "destinationToken", options.getDestinationToken()); } return apiClient.get(url, new TypeReference<HyperwalletList<HyperwalletTransfer>>() { }); } /** * List Transfer Requests * * @return HyperwalletList of HyperwalletTransfer */ public HyperwalletList<HyperwalletTransfer> listTransfers() { return listTransfers(null); } /** * Create Transfer Status Transition * * @param transferToken Transfer token assigned * @return HyperwalletStatusTransition new status for Transfer Request */ public HyperwalletStatusTransition createTransferStatusTransition(String transferToken, HyperwalletStatusTransition transition) { if (transition == null) { throw new HyperwalletException("Transition is required"); } if (StringUtils.isEmpty(transferToken)) { throw new HyperwalletException("Transfer token is required"); } if (!StringUtils.isEmpty(transition.getToken())) { throw new HyperwalletException("Status 
Transition token may not be present"); } transition = copy(transition); transition.setCreatedOn(null); transition.setFromStatus(null); transition.setToStatus(null); return apiClient.post(url + "/transfers/" + transferToken + "/status-transitions", transition, HyperwalletStatusTransition.class); } // PayPal Accounts /** * Create PayPal Account Request * * @param payPalAccount HyperwalletPayPalAccount object to create * @return HyperwalletPayPalAccount created PayPal account for the specified user */ public HyperwalletPayPalAccount createPayPalAccount(HyperwalletPayPalAccount payPalAccount) { if (payPalAccount == null) { throw new HyperwalletException("PayPal Account is required"); } if (StringUtils.isEmpty(payPalAccount.getUserToken())) { throw new HyperwalletException("User token is required"); } if (StringUtils.isEmpty(payPalAccount.getTransferMethodCountry())) { throw new HyperwalletException("Transfer Method Country is required"); } if (StringUtils.isEmpty(payPalAccount.getTransferMethodCurrency())) { throw new HyperwalletException("Transfer Method Currency is required"); } if (StringUtils.isEmpty(payPalAccount.getEmail())) { throw new HyperwalletException("Email is required"); } if (!StringUtils.isEmpty(payPalAccount.getToken())) { throw new HyperwalletException("PayPal Account token may not be present"); } if (payPalAccount.getType() == null) { payPalAccount.setType(HyperwalletTransferMethod.Type.PAYPAL_ACCOUNT); } payPalAccount = copy(payPalAccount); payPalAccount.setStatus(null); payPalAccount.setCreatedOn(null); return apiClient.post(url + "/users/" + payPalAccount.getUserToken() + "/paypal-accounts", payPalAccount, HyperwalletPayPalAccount.class); } /** * Get PayPal Account Request * * @param userToken User token assigned * @param payPalAccountToken PayPal Account token assigned * @return HyperwalletPayPalAccount PayPal Account */ public HyperwalletPayPalAccount getPayPalAccount(String userToken, String payPalAccountToken) { if 
(StringUtils.isEmpty(userToken)) { throw new HyperwalletException("User token is required"); } if (StringUtils.isEmpty(payPalAccountToken)) { throw new HyperwalletException("PayPal Account token is required"); } return apiClient.get(url + "/users/" + userToken + "/paypal-accounts/" + payPalAccountToken, HyperwalletPayPalAccount.class); } /** * List PayPal Accounts * * @param userToken User token assigned * @param options List filter option * @return HyperwalletList of HyperwalletPayPalAccount */ public HyperwalletList<HyperwalletPayPalAccount> listPayPalAccounts(String userToken, HyperwalletPaginationOptions options) { if (StringUtils.isEmpty(userToken)) { throw new HyperwalletException("User token is required"); } String url = paginate(this.url + "/users/" + userToken + "/paypal-accounts", options); return apiClient.get(url, new TypeReference<HyperwalletList<HyperwalletPayPalAccount>>() { }); } /** * List PayPal Accounts * * @param userToken User token assigned * @return HyperwalletList of HyperwalletPayPalAccount */ public HyperwalletList<HyperwalletPayPalAccount> listPayPalAccounts(String userToken) { return listPayPalAccounts(userToken, null); } /** * Deactivate PayPal Account * * @param userToken User token * @param payPalAccountToken PayPal Account token * @return HyperwalletStatusTransition deactivated PayPal account */ public HyperwalletStatusTransition deactivatePayPalAccount(String userToken, String payPalAccountToken) { return deactivatePayPalAccount(userToken, payPalAccountToken, null); } /** * Deactivate PayPal Account * * @param userToken User token * @param payPalAccountToken PayPal Account token * @param notes Comments regarding the status change * @return HyperwalletStatusTransition deactivated PayPal account */ public HyperwalletStatusTransition deactivatePayPalAccount(String userToken, String payPalAccountToken, String notes) { return createPayPalAccountStatusTransition(userToken, payPalAccountToken, new 
HyperwalletStatusTransition(HyperwalletStatusTransition.Status.DE_ACTIVATED).notes(notes)); } /** * Create PayPal Account Status Transition * * @param userToken User token * @param payPalAccountToken PayPal Account token * @param transition Status transition information * @return HyperwalletStatusTransition new status for PayPal Account */ public HyperwalletStatusTransition createPayPalAccountStatusTransition(String userToken, String payPalAccountToken, HyperwalletStatusTransition transition) { if (transition == null) { throw new HyperwalletException("Transition is required"); } if (StringUtils.isEmpty(userToken)) { throw new HyperwalletException("User token is required"); } if (StringUtils.isEmpty(payPalAccountToken)) { throw new HyperwalletException("PayPal Account token is required"); } if (!StringUtils.isEmpty(transition.getToken())) { throw new HyperwalletException("Status Transition token may not be present"); } transition = copy(transition); transition.setCreatedOn(null); transition.setFromStatus(null); transition.setToStatus(null); return apiClient.post(url + "/users/" + userToken + "/paypal-accounts/" + payPalAccountToken + "/status-transitions", transition, HyperwalletStatusTransition.class); } /** * Get PayPal Account Status Transition * * @param userToken User token * @param payPalAccountToken PayPal Account token * @param statusTransitionToken Status transition token * @return HyperwalletStatusTransition */ public HyperwalletStatusTransition getPayPalAccountStatusTransition(String userToken, String payPalAccountToken, String statusTransitionToken) { if (StringUtils.isEmpty(userToken)) { throw new HyperwalletException("User token is required"); } if (StringUtils.isEmpty(payPalAccountToken)) { throw new HyperwalletException("PayPal Account token is required"); } if (StringUtils.isEmpty(statusTransitionToken)) { throw new HyperwalletException("Transition token is required"); } return apiClient.get(url + "/users/" + userToken + "/paypal-accounts/" + 
payPalAccountToken + "/status-transitions/" + statusTransitionToken, HyperwalletStatusTransition.class); } /** * List All PayPal Account Status Transition information * * @param userToken User token * @param payPalAccountToken PayPal Account token * @return HyperwalletList of HyperwalletStatusTransition */ public HyperwalletList<HyperwalletStatusTransition> listPayPalAccountStatusTransitions(String userToken, String payPalAccountToken) { return listPayPalAccountStatusTransitions(userToken, payPalAccountToken, null); } /** * List PayPal Account Status Transition information * * @param userToken User token * @param payPalAccountToken PayPal Account token * @param options List filter option * @return HyperwalletList of HyperwalletStatusTransition */ public HyperwalletList<HyperwalletStatusTransition> listPayPalAccountStatusTransitions(String userToken, String payPalAccountToken, HyperwalletPaginationOptions options) { if (StringUtils.isEmpty(userToken)) { throw new HyperwalletException("User token is required"); } if (StringUtils.isEmpty(payPalAccountToken)) { throw new HyperwalletException("PayPal Account token is required"); } String url = paginate(this.url + "/users/" + userToken + "/paypal-accounts/" + payPalAccountToken + "/status-transitions", options); return apiClient.get(url, new TypeReference<HyperwalletList<HyperwalletStatusTransition>>() { }); } // Bank Accounts /** * Create Bank Account * * @param bankAccount bank account representation * @return HyperwalletBankAccount created bank account for the specicic user */ public HyperwalletBankAccount createBankAccount(HyperwalletBankAccount bankAccount) { if (bankAccount == null) { throw new HyperwalletException("Bank Account is required"); } if (StringUtils.isEmpty(bankAccount.getUserToken())) { throw new HyperwalletException("User token is required"); } if (!StringUtils.isEmpty(bankAccount.getToken())) { throw new HyperwalletException("Bank Account token may not be present"); } bankAccount = copy(bankAccount); 
bankAccount.createdOn(null); bankAccount.setStatus(null); return apiClient.post(url + "/users/" + bankAccount.getUserToken() + "/bank-accounts", bankAccount, HyperwalletBankAccount.class); } /** * Get Bank Account * * @param userToken User token assigned * @param transferMethodToken Bank account token assigned * @return HyperwalletBankAccount bank account information */ public HyperwalletBankAccount getBankAccount(String userToken, String transferMethodToken) { if (StringUtils.isEmpty(userToken)) { throw new HyperwalletException("User token is required"); } if (StringUtils.isEmpty(transferMethodToken)) { throw new HyperwalletException("Bank Account token is required"); } return apiClient.get(url + "/users/" + userToken + "/bank-accounts/" + transferMethodToken, HyperwalletBankAccount.class); } /** * Update Bank Account * * @param bankAccount Bank Account to update. * @return HyperwalletBankAccount updated Bank Account */ public HyperwalletBankAccount updateBankAccount(HyperwalletBankAccount bankAccount) { if (bankAccount == null) { throw new HyperwalletException("Bank Account is required"); } if (StringUtils.isEmpty(bankAccount.getUserToken())) { throw new HyperwalletException("User token is required"); } if (StringUtils.isEmpty(bankAccount.getToken())) { throw new HyperwalletException("Bank Account token is required"); } return apiClient.put(url + "/users/" + bankAccount.getUserToken() + "/bank-accounts/" + bankAccount.getToken(), bankAccount, HyperwalletBankAccount.class); } /** * List Bank Accounts * * @param userToken User token assigned * @return HyperwalletList of HyperwalletBankAccount */ public HyperwalletList<HyperwalletBankAccount> listBankAccounts(String userToken) { return listBankAccounts(userToken, null); } /** * List Bank Accounts * * @param userToken User token assigned * @param options List filter option * @return HyperwalletList of HyperwalletBankAccount */ public HyperwalletList<HyperwalletBankAccount> listBankAccounts(String userToken, 
HyperwalletPaginationOptions options) { if (StringUtils.isEmpty(userToken)) { throw new HyperwalletException("User token is required"); } String url = paginate(this.url + "/users/" + userToken + "/bank-accounts", options); return apiClient.get(url, new TypeReference<HyperwalletList<HyperwalletBankAccount>>() { }); } /** * Deactivate Bank Account * * @param userToken User token * @param bankAccountToken Bank Account token * @return HyperwalletStatusTransition deactivated bank account */ public HyperwalletStatusTransition deactivateBankAccount(String userToken, String bankAccountToken) { return createBankAccountStatusTransition(userToken, bankAccountToken, new HyperwalletStatusTransition(HyperwalletStatusTransition.Status.DE_ACTIVATED)); } /** * Create Bank Account Status Transition * * @param userToken User token * @param bankAccountToken Bank Account token * @param transition Status transition information * @return HyperwalletStatusTransition */ public HyperwalletStatusTransition createBankAccountStatusTransition(String userToken, String bankAccountToken, HyperwalletStatusTransition transition) { if (transition == null) { throw new HyperwalletException("Transition is required"); } if (StringUtils.isEmpty(userToken)) { throw new HyperwalletException("User token is required"); } if (StringUtils.isEmpty(bankAccountToken)) { throw new HyperwalletException("Bank Account token is required"); } if (!StringUtils.isEmpty(transition.getToken())) { throw new HyperwalletException("Status Transition token may not be present"); } transition = copy(transition); transition.setCreatedOn(null); transition.setFromStatus(null); transition.setToStatus(null); return apiClient.post(url + "/users/" + userToken + "/bank-accounts/" + bankAccountToken + "/status-transitions", transition, HyperwalletStatusTransition.class); } /** * List All Bank Account Status Transition * * @param userToken User token * @param bankAccountToken Bank Account token * @return HyperwalletList of 
HyperwalletStatusTransition */ public HyperwalletList<HyperwalletStatusTransition> listBankAccountStatusTransitions(String userToken, String bankAccountToken) { return listBankAccountStatusTransitions(userToken, bankAccountToken, null); } /** * List Bank Account Status Transition * * @param userToken User token * @param bankAccountToken Bank Account token * @param options List filter option * @return HyperwalletList of HyperwalletStatusTransition */ public HyperwalletList<HyperwalletStatusTransition> listBankAccountStatusTransitions(String userToken, String bankAccountToken, HyperwalletPaginationOptions options) { if (StringUtils.isEmpty(userToken)) { throw new HyperwalletException("User token is required"); } if (StringUtils.isEmpty(bankAccountToken)) { throw new HyperwalletException("Bank Account token is required"); } String url = paginate(this.url + "/users/" + userToken + "/bank-accounts/" + bankAccountToken + "/status-transitions", options); return apiClient.get(url, new TypeReference<HyperwalletList<HyperwalletStatusTransition>>() { }); } // Balances /** * List all User's Balances * * @param userToken User token assigned * @return HyperwalletList of HyperwalletBalance */ public HyperwalletList<HyperwalletBalance> listBalancesForUser(String userToken) { return listBalancesForUser(userToken, null); } /** * List all User's Balances * * @param userToken User token assigned * @param options List filter option * @return HyperwalletList list of HyperwalletBalance */ public HyperwalletList<HyperwalletBalance> listBalancesForUser(String userToken, HyperwalletBalanceListOptions options) { if (StringUtils.isEmpty(userToken)) { throw new HyperwalletException("User token is required"); } String url = this.url + "/users/" + userToken + "/balances"; if (options != null) { url = addParameter(url, "currency", options.getCurrency()); url = addParameter(url, "sortBy", options.getSortBy()); url = addParameter(url, "offset", options.getOffset()); url = addParameter(url, "limit", 
options.getLimit()); } return apiClient.get(url, new TypeReference<HyperwalletList<HyperwalletBalance>>() { }); } /** * List all Program account balances * * @param accountToken Account token assigned * @param programToken Program token assigned * @return HyperwalletList of HyperwalletBalance */ public HyperwalletList<HyperwalletBalance> listBalancesForAccount(String programToken, String accountToken) { return listBalancesForAccount(programToken, accountToken, null); } /** * List all Program account balances * * @param accountToken Account token assigned * @param programToken Program token assigned * @param options List filter option * @return HyperwalletList list of HyperwalletBalance */ public HyperwalletList<HyperwalletBalance> listBalancesForAccount(String programToken, String accountToken, HyperwalletBalanceListOptions options) { if (StringUtils.isEmpty(programToken)) { throw new HyperwalletException("Program token is required"); } if (StringUtils.isEmpty(accountToken)) { throw new HyperwalletException("Account token is required"); } String url = this.url + "/programs/" + programToken + "/accounts/" + accountToken + "/balances"; if (options != null) { url = addParameter(url, "currency", options.getCurrency()); url = addParameter(url, "sortBy", options.getSortBy()); url = addParameter(url, "offset", options.getOffset()); url = addParameter(url, "limit", options.getLimit()); } return apiClient.get(url, new TypeReference<HyperwalletList<HyperwalletBalance>>() { }); } /** * List all User's Prepaid Card Balances * * @param userToken User token assigned * @param prepaidCardToken Prepaid Card token assigned from User's Prepaid Card * @return HyperwalletList of HyperwalletBalances */ public HyperwalletList<HyperwalletBalance> listBalancesForPrepaidCard(String userToken, String prepaidCardToken) { return listBalancesForPrepaidCard(userToken, prepaidCardToken, null); } /** * List all User's Prepaid Card Balances * * @param userToken User token assigned * @param 
prepaidCardToken Prepaid Card token assigned from User's Prepaid Card * @param options List filter option * @return HyperwalletList of HyperwalletBalances */ public HyperwalletList<HyperwalletBalance> listBalancesForPrepaidCard(String userToken, String prepaidCardToken, HyperwalletBalanceListOptions options) { if (StringUtils.isEmpty(userToken)) { throw new HyperwalletException("User token is required"); } if (StringUtils.isEmpty(prepaidCardToken)) { throw new HyperwalletException("Prepaid Card token is required"); } String url = this.url + "/users/" + userToken + "/prepaid-cards/" + prepaidCardToken + "/balances"; if (options != null) { url = addParameter(url, "sortBy", options.getSortBy()); url = addParameter(url, "offset", options.getOffset()); url = addParameter(url, "limit", options.getLimit()); } return apiClient.get(url, new TypeReference<HyperwalletList<HyperwalletBalance>>() { }); } // Payments /** * Create Payment * * @param payment Payment * @return HyperwalletPayment created payment information */ public HyperwalletPayment createPayment(HyperwalletPayment payment) { if (payment == null) { throw new HyperwalletException("Payment is required"); } if (!StringUtils.isEmpty(payment.getToken())) { throw new HyperwalletException("Payment token may not be present"); } payment = copy(payment); payment.setCreatedOn(null); return apiClient.post(url + "/payments", payment, HyperwalletPayment.class); } /** * Get Payment * * @param paymentToken Payment token * @return HyperwalletPayment */ public HyperwalletPayment getPayment(String paymentToken) { if (StringUtils.isEmpty(paymentToken)) { throw new HyperwalletException("Payment token is required"); } return apiClient.get(url + "/payments/" + paymentToken, HyperwalletPayment.class); } /** * List all Payments * * @return HyperwalletList of HyperwalletPayment */ public HyperwalletList<HyperwalletPayment> listPayments() { return listPayments(null); } /** * List all Payments * * @param options List filter option * @return 
HyperwalletList of HyperwalletPayment */ public HyperwalletList<HyperwalletPayment> listPayments(HyperwalletPaymentListOptions options) { String url = paginate(this.url + "/payments", options); if (options != null) { url = addParameter(url, "releasedOn", convert(options.getReleasedOn())); url = addParameter(url, "currency", options.getCurrency()); } return apiClient.get(url, new TypeReference<HyperwalletList<HyperwalletPayment>>() { }); } /** * Create Payment Status Transition * * @param paymentToken Payment token * @param transition Status transition information * @return HyperwalletStatusTransition new status for Payment */ public HyperwalletStatusTransition createPaymentStatusTransition(String paymentToken, HyperwalletStatusTransition transition) { if (transition == null) { throw new HyperwalletException("Transition is required"); } if (StringUtils.isEmpty(paymentToken)) { throw new HyperwalletException("Payment token is required"); } if (!StringUtils.isEmpty(transition.getToken())) { throw new HyperwalletException("Status Transition token may not be present"); } transition = copy(transition); transition.setCreatedOn(null); transition.setFromStatus(null); transition.setToStatus(null); return apiClient.post(url + "/payments/" + paymentToken + "/status-transitions", transition, HyperwalletStatusTransition.class); } /** * Get Payment Status Transition * * @param paymentToken Payment token * @param statusTransitionToken Status transition token * @return HyperwalletStatusTransition */ public HyperwalletStatusTransition getPaymentStatusTransition(String paymentToken, String statusTransitionToken) { if (StringUtils.isEmpty(paymentToken)) { throw new HyperwalletException("Payment token is required"); } if (StringUtils.isEmpty(statusTransitionToken)) { throw new HyperwalletException("Transition token is required"); } return apiClient.get(url + "/payments/" + paymentToken + "/status-transitions/" + statusTransitionToken, HyperwalletStatusTransition.class); } /** * List 
All Payment Status Transition information * * @param paymentToken Payment token * @return HyperwalletList of HyperwalletStatusTransition */ public HyperwalletList<HyperwalletStatusTransition> listPaymentStatusTransitions( String paymentToken) { return listPaymentStatusTransitions(paymentToken, null); } /** * List Payment Status Transition information * * @param paymentToken Payment token * @param options List filter option * @return HyperwalletList of HyperwalletStatusTransition */ public HyperwalletList<HyperwalletStatusTransition> listPaymentStatusTransitions(String paymentToken, HyperwalletPaginationOptions options) { if (StringUtils.isEmpty(paymentToken)) { throw new HyperwalletException("Payment token is required"); } String url = paginate(this.url + "/payments/" + paymentToken + "/status-transitions", options); return apiClient.get(url, new TypeReference<HyperwalletList<HyperwalletStatusTransition>>() { }); } // Programs /** * Get Program * * @param programToken Program token * @return HyperwalletProgram */ public HyperwalletProgram getProgram(String programToken) { if (StringUtils.isEmpty(programToken)) { throw new HyperwalletException("Program token is required"); } return apiClient.get(url + "/programs/" + programToken, HyperwalletProgram.class); } // Program Accounts /** * Get Programs Account * * @param programToken Program token * @param accountToken Program account token * @return HyperwalletAccount */ public HyperwalletAccount getProgramAccount(String programToken, String accountToken) { if (StringUtils.isEmpty(programToken)) { throw new HyperwalletException("Program token is required"); } if (StringUtils.isEmpty(accountToken)) { throw new HyperwalletException("Account token is required"); } return apiClient.get(url + "/programs/" + programToken + "/accounts/" + accountToken, HyperwalletAccount.class); } // Transfer Method Configurations /** * Get Transfer Method Configuration * * @param userToken User token * @param country Country * @param currency 
Currency * @param type Type of Transfer Method to retrieve * @param profileType Type of User profile * @return HyperwalletTransferMethodConfiguration */ public HyperwalletTransferMethodConfiguration getTransferMethodConfiguration(String userToken, String country, String currency, HyperwalletTransferMethod.Type type, HyperwalletUser.ProfileType profileType) { if (StringUtils.isEmpty(userToken)) { throw new HyperwalletException("User token is required"); } if (StringUtils.isEmpty(country)) { throw new HyperwalletException("Country is required"); } if (StringUtils.isEmpty(currency)) { throw new HyperwalletException("Currency is required"); } if (type == null) { throw new HyperwalletException("Type is required"); } if (profileType == null) { throw new HyperwalletException("Profile Type is required"); } return apiClient.get(url + "/transfer-method-configurations" + "?userToken=" + userToken + "&country=" + country + "&currency=" + currency + "&type=" + type.name() + "&profileType=" + profileType.name(), HyperwalletTransferMethodConfiguration.class); } /** * List all Transfer Method Configuration associated with User * * @param userToken User token * @return HyperwalletList of HyperwalletTransferMethodConfiguration */ public HyperwalletList<HyperwalletTransferMethodConfiguration> listTransferMethodConfigurations(String userToken) { return listTransferMethodConfigurations(userToken, null); } /** * List all Transfer Method Configuration associated with User * * @param userToken User token * @param options List filter options * @return HyperwalletList of HyperwalletTransferMethodConfiguration */ public HyperwalletList<HyperwalletTransferMethodConfiguration> listTransferMethodConfigurations(String userToken, HyperwalletPaginationOptions options) { if (StringUtils.isEmpty(userToken)) { throw new HyperwalletException("User token is required"); } String url = paginate(this.url + "/transfer-method-configurations?userToken=" + userToken, options); return apiClient.get(url, new 
TypeReference<HyperwalletList<HyperwalletTransferMethodConfiguration>>() { }); }

    // Receipts

    /**
     * List all program account receipts.
     *
     * @param programToken Program token
     * @param accountToken Program account token
     * @return HyperwalletList of HyperwalletReceipt
     */
    public HyperwalletList<HyperwalletReceipt> listReceiptsForProgramAccount(String programToken, String accountToken) {
        // Convenience overload: delegate with no filter/pagination options.
        return listReceiptsForProgramAccount(programToken, accountToken, null);
    }

    /**
     * List all program account receipts.
     *
     * @param programToken Program token
     * @param accountToken Program account token
     * @param options      List filter options (may be null)
     * @return HyperwalletList of HyperwalletReceipt
     * @throws HyperwalletException if programToken or accountToken is missing
     */
    public HyperwalletList<HyperwalletReceipt> listReceiptsForProgramAccount(String programToken, String accountToken, HyperwalletReceiptPaginationOptions options) {
        if (StringUtils.isEmpty(programToken)) {
            throw new HyperwalletException("Program token is required");
        }
        if (StringUtils.isEmpty(accountToken)) {
            throw new HyperwalletException("Account token is required");
        }
        String url = paginate(this.url + "/programs/" + programToken + "/accounts/" + accountToken + "/receipts", options);
        // The receipt "type" filter is not part of the shared pagination options,
        // so it is appended to the query string separately.
        if (options != null && options.getType() != null) {
            url = addParameter(url, "type", options.getType().name());
        }
        return apiClient.get(url, new TypeReference<HyperwalletList<HyperwalletReceipt>>() { });
    }

    /**
     * List all user receipts.
     *
     * @param userToken User token
     * @return HyperwalletList of HyperwalletReceipt
     */
    public HyperwalletList<HyperwalletReceipt> listReceiptsForUser(String userToken) {
        // Convenience overload: delegate with no filter/pagination options.
        return listReceiptsForUser(userToken, null);
    }

    /**
     * List all user receipts.
     *
     * @param userToken User token
     * @param options   List filter options (may be null)
     * @return HyperwalletList of HyperwalletReceipt
     * @throws HyperwalletException if userToken is missing
     */
    public HyperwalletList<HyperwalletReceipt> listReceiptsForUser(String userToken, HyperwalletReceiptPaginationOptions options) {
        if (StringUtils.isEmpty(userToken)) {
            throw new HyperwalletException("User token is required");
        }
        String url = paginate(this.url + "/users/" + userToken + "/receipts", options);
        if (options != null && options.getType() != null) {
            url = addParameter(url, "type", options.getType().name());
        }
        return apiClient.get(url, new TypeReference<HyperwalletList<HyperwalletReceipt>>() { });
    }

    /**
     * List all prepaid card receipts.
     *
     * @param userToken        User token
     * @param prepaidCardToken Prepaid card token
     * @return HyperwalletList of HyperwalletReceipt
     */
    public HyperwalletList<HyperwalletReceipt> listReceiptsForPrepaidCard(String userToken, String prepaidCardToken) {
        // Convenience overload: delegate with no filter/pagination options.
        return listReceiptsForPrepaidCard(userToken, prepaidCardToken, null);
    }

    /**
     * List all prepaid card receipts.
     *
     * @param userToken        User token
     * @param prepaidCardToken Prepaid card token
     * @param options          List filter options (may be null)
     * @return HyperwalletList of HyperwalletReceipt
     * @throws HyperwalletException if userToken or prepaidCardToken is missing
     */
    public HyperwalletList<HyperwalletReceipt> listReceiptsForPrepaidCard(String userToken, String prepaidCardToken, HyperwalletReceiptPaginationOptions options) {
        if (StringUtils.isEmpty(userToken)) {
            throw new HyperwalletException("User token is required");
        }
        if (StringUtils.isEmpty(prepaidCardToken)) {
            throw new HyperwalletException("Prepaid card token is required");
        }
        String url = paginate(this.url + "/users/" + userToken + "/prepaid-cards/" + prepaidCardToken + "/receipts", options);
        if (options != null && options.getType() != null) {
            url = addParameter(url, "type", options.getType().name());
        }
        return apiClient.get(url, new TypeReference<HyperwalletList<HyperwalletReceipt>>() { });
    }

    // Webhook Notification

    /**
     * Retrieve a single webhook event notification.
     *
     * @param webhookToken Webhook token
     * @return HyperwalletWebhookNotification
     * @throws HyperwalletException if webhookToken is missing
     */
    public HyperwalletWebhookNotification getWebhookEvent(String webhookToken) {
        if (StringUtils.isEmpty(webhookToken)) {
            throw new HyperwalletException("Webhook token is required");
        }
        return apiClient.get(url + "/webhook-notifications/" + webhookToken, HyperwalletWebhookNotification.class);
    }

    /**
     * List all webhook event notifications.
     *
     * @return HyperwalletList of HyperwalletWebhookNotification
     */
    public HyperwalletList<HyperwalletWebhookNotification> listWebhookEvents() {
        // Convenience overload: delegate with no filter/pagination options.
        return listWebhookEvents(null);
    }

    /**
     * List all webhook event notifications.
     *
     * @param options List filter options (may be null)
     * @return HyperwalletList of HyperwalletWebhookNotification
     */
    public HyperwalletList<HyperwalletWebhookNotification> listWebhookEvents(HyperwalletWebhookNotificationPaginationOptions options) {
        String url = paginate(this.url + "/webhook-notifications", options);
        // NOTE(review): unlike the receipt listings, getType() is appended here
        // without .name(), which suggests it is already a String for webhook
        // options - confirm against HyperwalletWebhookNotificationPaginationOptions.
        if (options != null && options.getType() != null) {
            url = addParameter(url, "type", options.getType());
        }
        return apiClient.get(url, new TypeReference<HyperwalletList<HyperwalletWebhookNotification>>() {});
    }

    // Transfer Methods

    /**
     * Create a Transfer Method from a pre-populated object.
     *
     * @param jsonCacheToken String JSON cache token (sent as the Json-Cache-Token header)
     * @param transferMethod TransferMethod object to create; must carry the user token
     * @return HyperwalletTransferMethod Transfer Method object created
     * @throws HyperwalletException if the user token or jsonCacheToken is missing
     */
    public HyperwalletTransferMethod createTransferMethod(String jsonCacheToken, HyperwalletTransferMethod transferMethod) {
        if (transferMethod == null || StringUtils.isEmpty(transferMethod.getUserToken())) {
            throw new HyperwalletException("User token is required");
        }
        if (StringUtils.isEmpty(jsonCacheToken)) {
            throw new HyperwalletException("JSON token is required");
        }
        // Work on a copy and clear server-assigned fields so they are not sent.
        transferMethod = copy(transferMethod);
        transferMethod.setToken(null);
        transferMethod.setStatus(null);
        transferMethod.setCreatedOn(null);
        HashMap<String, String> headers = new HashMap<String, String>();
        headers.put("Json-Cache-Token", jsonCacheToken);
        return apiClient.post(url + "/users/" + transferMethod.getUserToken() + "/transfer-methods", transferMethod, HyperwalletTransferMethod.class, headers);
    }

    /**
     * Create a Transfer Method for a user, with all details taken from the
     * JSON cache entry referenced by the token.
     *
     * @param jsonCacheToken String JSON cache token (sent as the Json-Cache-Token header)
     * @param userToken      String user token
     * @return HyperwalletTransferMethod Transfer Method object created
     * @throws HyperwalletException if userToken or jsonCacheToken is missing
     */
    public HyperwalletTransferMethod createTransferMethod(String jsonCacheToken, String userToken) {
        if (StringUtils.isEmpty(userToken)) {
            throw new HyperwalletException("User token is required");
        }
        if (StringUtils.isEmpty(jsonCacheToken)) {
            throw new HyperwalletException("JSON token is required");
        }
        HyperwalletTransferMethod transferMethod = new HyperwalletTransferMethod();
        transferMethod.setUserToken(userToken);
        HashMap<String, String> headers = new HashMap<String, String>();
        headers.put("Json-Cache-Token", jsonCacheToken);
        return apiClient.post(url + "/users/" + transferMethod.getUserToken() + "/transfer-methods", transferMethod, HyperwalletTransferMethod.class, headers);
    }

    // Internal utils

    // Appends the shared pagination/filter parameters to a listing URL.
    // A null options object leaves the URL untouched.
    private String paginate(String url, HyperwalletPaginationOptions options) {
        if (options == null) {
            return url;
        }
        url = addParameter(url, "createdAfter", convert(options.getCreatedAfter()));
        url = addParameter(url, "createdBefore", convert(options.getCreatedBefore()));
        url = addParameter(url, "sortBy", options.getSortBy());
        url = addParameter(url, "offset", options.getOffset());
        url = addParameter(url, "limit", options.getLimit());
        return url;
    }

    // Appends key=value to the query string, choosing '?' or '&' depending on
    // whether the URL already has parameters. Null key/value is a no-op.
    // NOTE(review): the value is not URL-encoded here - callers pass tokens and
    // enum names, which appear to be URL-safe; confirm for new call sites.
    private String addParameter(String url, String key, Object value) {
        if (url == null || key == null || value == null) {
            return url;
        }
        return url + (url.indexOf("?") == -1 ? "?" : "&") + key + "=" + value;
    }

    // Formats a date as an ISO-8601 UTC timestamp for query parameters.
    private String convert(Date in) {
        if (in == null) {
            return null;
        }
        DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'", Locale.US);
        dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
        return dateFormat.format(in);
    }

    // Fills in the client-level default program token when the user has none.
    private void setProgramToken(HyperwalletUser user) {
        if (user != null && user.getProgramToken() == null) {
            user.setProgramToken(this.programToken);
        }
    }

    // Fills in the client-level default program token when the payment has none.
    private void setProgramToken(HyperwalletPayment payment) {
        if (payment != null && payment.getProgramToken() == null) {
            payment.setProgramToken(this.programToken);
        }
    }

    // The copy(...) helpers below deep-copy a model object via a JSON
    // round-trip so mutations made before posting never leak back to the
    // caller's instance.

    private HyperwalletUser copy(HyperwalletUser user) {
        user = HyperwalletJsonUtil.fromJson(HyperwalletJsonUtil.toJson(user), HyperwalletUser.class);
        setProgramToken(user);
        return user;
    }

    private HyperwalletPayment copy(HyperwalletPayment payment) {
        payment = HyperwalletJsonUtil.fromJson(HyperwalletJsonUtil.toJson(payment), HyperwalletPayment.class);
        setProgramToken(payment);
        return payment;
    }

    private HyperwalletPrepaidCard copy(HyperwalletPrepaidCard method) {
        method = HyperwalletJsonUtil.fromJson(HyperwalletJsonUtil.toJson(method), HyperwalletPrepaidCard.class);
        return method;
    }

    private HyperwalletBusinessStakeholder copy(HyperwalletBusinessStakeholder method) {
        method = HyperwalletJsonUtil.fromJson(HyperwalletJsonUtil.toJson(method), HyperwalletBusinessStakeholder.class);
        return method;
    }

    private HyperwalletBankCard copy(HyperwalletBankCard card) {
        card = HyperwalletJsonUtil.fromJson(HyperwalletJsonUtil.toJson(card), HyperwalletBankCard.class);
        return card;
    }

    private HyperwalletPaperCheck copy(HyperwalletPaperCheck check) {
        check = HyperwalletJsonUtil.fromJson(HyperwalletJsonUtil.toJson(check), HyperwalletPaperCheck.class);
        return check;
    }

    private HyperwalletBankAccount copy(HyperwalletBankAccount method) {
        method = HyperwalletJsonUtil.fromJson(HyperwalletJsonUtil.toJson(method), HyperwalletBankAccount.class);
        return method;
    }

    private HyperwalletStatusTransition copy(HyperwalletStatusTransition statusTransition) {
        statusTransition = HyperwalletJsonUtil.fromJson(HyperwalletJsonUtil.toJson(statusTransition), HyperwalletStatusTransition.class);
        return statusTransition;
    }

    private HyperwalletTransferMethod copy(HyperwalletTransferMethod transferMethod) {
        transferMethod = HyperwalletJsonUtil.fromJson(HyperwalletJsonUtil.toJson(transferMethod), HyperwalletTransferMethod.class);
        return transferMethod;
    }

    private HyperwalletTransfer copy(HyperwalletTransfer transfer) {
        transfer = HyperwalletJsonUtil.fromJson(HyperwalletJsonUtil.toJson(transfer), HyperwalletTransfer.class);
        return transfer;
    }

    private HyperwalletPayPalAccount copy(HyperwalletPayPalAccount payPalAccount) {
        payPalAccount = HyperwalletJsonUtil.fromJson(HyperwalletJsonUtil.toJson(payPalAccount), HyperwalletPayPalAccount.class);
        return payPalAccount;
    }
}
package org.kohsuke.github;

import java.util.Locale;

/**
 * Hook event types recognized by GitHub.
 *
 * <p>The constant order is stable; do not reorder existing entries.</p>
 */
public enum GHEvent {
    CHECK_RUN,
    CHECK_SUITE,
    COMMIT_COMMENT,
    CONTENT_REFERENCE,
    CREATE,
    DELETE,
    DEPLOY_KEY,
    DEPLOYMENT,
    DEPLOYMENT_STATUS,
    DOWNLOAD,
    FOLLOW,
    FORK,
    FORK_APPLY,
    GITHUB_APP_AUTHORIZATION,
    GIST,
    GOLLUM,
    INSTALLATION,
    INSTALLATION_REPOSITORIES,
    INTEGRATION_INSTALLATION_REPOSITORIES,
    ISSUE_COMMENT,
    ISSUES,
    LABEL,
    MARKETPLACE_PURCHASE,
    MEMBER,
    MEMBERSHIP,
    META,
    MILESTONE,
    ORGANIZATION,
    ORG_BLOCK,
    PACKAGE,
    PAGE_BUILD,
    PROJECT_CARD,
    PROJECT_COLUMN,
    PROJECT,
    PING,
    PUBLIC,
    PULL_REQUEST,
    PULL_REQUEST_REVIEW,
    PULL_REQUEST_REVIEW_COMMENT,
    PUSH,
    REGISTRY_PACKAGE,
    RELEASE,
    REPOSITORY_DISPATCH, // only valid for org hooks
    REPOSITORY,
    REPOSITORY_IMPORT,
    REPOSITORY_VULNERABILITY_ALERT,
    SECURITY_ADVISORY,
    STAR,
    STATUS,
    TEAM,
    TEAM_ADD,
    WATCH,
    WORKFLOW_DISPATCH,
    WORKFLOW_RUN,

    /**
     * Special event type that means "every possible event"
     */
    ALL;

    /**
     * Returns GitHub's internal representation of this event:
     * the lower-cased constant name, or {@code "*"} for {@link #ALL}.
     */
    String symbol() {
        return this == ALL ? "*" : name().toLowerCase(Locale.ENGLISH);
    }
}
package com.indeed.proctor.service;

import com.google.common.base.Predicates;
import com.google.common.collect.Maps;
import com.indeed.proctor.common.ProctorResult;
import com.indeed.proctor.common.model.Audit;
import com.indeed.proctor.common.model.TestBucket;

import java.util.Collections;
import java.util.List;
import java.util.Map;

/**
 * ProctorResult intended for JSON serialization for the /groups/identify method.
 */
public class JsonResult {

    // Map of test name to bucket assignment.
    final private Map<String, JsonTestBucket> groups;

    // Serialized context used to process this request.
    final private Map<String, Object> context;

    final private Audit audit;

    public JsonResult(final ProctorResult result,
                      final List<String> testFilter,
                      final Map<String, Object> context,
                      final Audit audit) {
        this.context = context;
        this.audit = audit;
        this.groups = generateJsonBuckets(result, testFilter);
    }

    /**
     * Builds the name-to-bucket map, attaching each test's version and
     * keeping only tests named in {@code testFilter} (a null filter keeps all).
     */
    private Map<String, JsonTestBucket> generateJsonBuckets(final ProctorResult result, final List<String> testFilter) {
        final Map<String, JsonTestBucket> jsonBuckets = Maps.newHashMap();
        // Each bucket is paired with the version recorded for its test.
        final Map<String, Integer> versions = result.getTestVersions();
        for (final Map.Entry<String, TestBucket> assignment : result.getBuckets().entrySet()) {
            final String testName = assignment.getKey();
            if (testFilter != null && !testFilter.contains(testName)) {
                // Excluded by the caller-supplied filter.
                continue;
            }
            jsonBuckets.put(testName, new JsonTestBucket(assignment.getValue(), versions.get(testName)));
        }
        return jsonBuckets;
    }

    public Map<String, JsonTestBucket> getGroups() {
        return groups;
    }

    public Map<String, Object> getContext() {
        return context;
    }

    public Audit getAudit() {
        return audit;
    }
}
package wwwc.nees.joint.module.kao;

import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import wwwc.nees.joint.compiler.annotations.Iri;
import org.openrdf.model.Literal;
import org.openrdf.model.Statement;
import org.openrdf.model.URI;
import org.openrdf.model.Value;
import org.openrdf.model.ValueFactory;
import org.openrdf.model.vocabulary.RDF;
import org.openrdf.query.GraphQueryResult;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryResult;
import wwwc.nees.joint.model.JOINTResource;

/**
 * Retrieves instances from an RDF repository and materializes them as
 * generated "*Impl" Java objects, populating setter methods via reflection
 * from the statements stored for each subject URI.
 *
 * @author Olavo
 */
public class RetrieveOperations {

    // Converts between RDF literals and Java native types.
    private DatatypeManager datatypeManager;
    // Sesame connection used for all statement lookups.
    private RepositoryConnection connection;
    // Value factory of the current connection (used to build URIs).
    private ValueFactory f;
    // Maps ontology class URIs to the Java class names generated for them.
    private Map<String, String> packages;
    private static final String OBJECT_CLASS = "java.lang.Object";
    private static final String SET_CLASS = "java.util.Set";
    // Setter names that are infrastructure, not ontology properties,
    // and must therefore be skipped when populating an instance.
    private static final String METHOD_SET_INNERFIELDS = "setInnerModifiedFields";
    private static final String METHOD_SET_LAZYLOADED = "setLazyLoaded";
    private static final String METHOD_SET_URI = "setURI";
    // Prefix shared by all setter methods.
    private static final String PREF_SETTER = "set";
    // Suffix of the generated concrete classes (e.g. Person -> PersonImpl).
    private static final String SUF_IMPL_CLASS = "Impl";
    private GraphQueryConstruct graphQueryConstruct;

    public RetrieveOperations(RepositoryConnection connection) {
        this.packages = ConceptsPackageInfo.getPackagesInfo(this.getClass());
        this.datatypeManager = DatatypeManager.getInstance();
        this.connection = connection;
        this.f = this.connection.getValueFactory();
        this.graphQueryConstruct = new GraphQueryConstruct(connection);
    }

    /**
     * Retrieves the desired instance in the repository.
     *
     * @param <T>
     * @param ontologyURI a <code>String</code> with the ontology prefix
     * @param instanceName a <code>String</code> with the instance name
     * @param clazz a <code>Class</code> with the instance type
     * @param connection a sesame <code>RepositoryConnection</code>
     * @param contexts optional named graphs to restrict the lookup to
     * @return T the desired instance, or null if it does not exist
     * @throws java.lang.Exception any exception
     */
    public <T> T retrieveInstance(String ontologyURI, String instanceName, Class<T> clazz,
            RepositoryConnection connection, URI... contexts) throws Exception {
        this.connection = connection;
        this.f = this.connection.getValueFactory();
        return (T) this.convertOriginalForImpl(ontologyURI + instanceName, clazz, contexts);
    }

    /**
     * Retrieves all the instances of the class, passed in the constructor.
     *
     * @param <T>
     * @param clazz a <code>Class</code> with the instance type
     * @param connection a sesame <code>RepositoryConnection</code>
     * @param contexts optional named graphs to restrict the lookup to
     * @return a <code>List</code> with the instances
     * @throws java.lang.Exception any exception
     */
    public <T> List<T> retrieveAllInstances(Class<T> clazz,
            RepositoryConnection connection, URI... contexts) throws Exception {
        this.connection = connection;
        this.f = this.connection.getValueFactory();
        // Creates a new java.util.List
        List<T> listInstances = new ArrayList<>();
        // All subjects typed with the URI taken from the class's @Iri annotation.
        RepositoryResult<Statement> stts = this.connection.getStatements(null, RDF.TYPE,
                this.f.createURI(((Iri) clazz.getAnnotation(Iri.class)).value()), true, contexts);
        // NOTE(review): each iteration converts a single-element list, so the
        // batched conversion below runs once per instance - confirm whether
        // collecting all names first and converting once was intended.
        while (stts.hasNext()) {
            List<String> instancesName = new ArrayList<>();
            Statement statement = stts.next();
            instancesName.add(statement.getSubject().stringValue());
            listInstances.addAll((Collection<? extends T>) (T) this.convertCollectionOriginalForImpl(instancesName, clazz, contexts));
        }
        stts.close();
        return listInstances;
    }

    /**
     * Looks up the generated Java class name for the rdf:type of a subject.
     * Returns "java.lang.Object" when the subject has no type statement or
     * its type URI is not mapped to a generated class.
     */
    public String getClassFromBase(String subj, URI... contexts) throws Exception {
        ValueFactory f = connection.getValueFactory();
        URI sub = f.createURI(subj);
        // Query the rdf:type of the instance.
        RepositoryResult<Statement> statements = this.connection.getStatements(sub, RDF.TYPE, null, true, contexts);
        // Only the first type statement is considered.
        while (statements.hasNext()) {
            Statement st = statements.next();
            statements.close();
            String uriObj = st.getObject().stringValue();
            String nameClasse = this.packages.get(uriObj);
            if (nameClasse == null) {
                return OBJECT_CLASS;
            }
            return nameClasse;
        }
        statements.close();
        return OBJECT_CLASS;
    }

    // Builds a bare JOINTResource carrying only the URI (used when the type
    // is plain java.lang.Object).
    private Object createJOINTResourceObject(String instanceName) {
        //creates a new instance of JOINTResource
        JOINTResource obj = new JOINTResource();
        //calls the method setURI
        obj.setURI(instanceName);
        //return the object
        return obj;
    }

    /**
     * Creates a stub object for a referenced instance without loading its
     * properties (lazy loading); returns null if reflection fails.
     */
    public Object getNotLoadedObject(String instanceName, String clazzName) {
        //creates a null object
        Object obj = null;
        try {
            // checks if it is a java object
            if (clazzName.equals(OBJECT_CLASS)) {
                //if yes, then create a JOINT resource instance
                obj = this.createJOINTResourceObject(instanceName);
            } else {
                //creates an instance with the concrete class
                Class clazz = Class.forName(clazzName + SUF_IMPL_CLASS);
                obj = clazz.newInstance();
                //casts the object to the upper class JOINTResource and
                //calls the methods setURI and setLazyLoaded
                ((JOINTResource) obj).setURI(instanceName);
                ((JOINTResource) obj).setLazyLoaded(false);
            }
        } catch (ClassNotFoundException | InstantiationException | IllegalAccessException ex) {
            Logger.getLogger(LazyLoader.class.getName()).log(Level.SEVERE, null, ex);
        }
        //return the object
        return obj;
    }

    // Groups statement objects by predicate URI:
    // predicate -> all values asserted for it.
    private Map<String, List<Value>> sortPropertiesAndValues(List<Statement> statements) {
        //creates a map to hold all values of the properties
        Map<String, List<Value>> mapProperties = new HashMap<>();
        //puts the property values in the map
        for (Statement s : statements) {
            String prop = s.getPredicate().stringValue();
            if (mapProperties.containsKey(prop)) {
                mapProperties.get(prop).add(s.getObject());
            } else {
                List<Value> values = new ArrayList<>();
                values.add(s.getObject());
                mapProperties.put(prop, values);
            }
        }
        return mapProperties;
    }

    /**
     * Materializes a single instance: loads all statements for the subject
     * and populates every @Iri-annotated setter of the generated Impl class
     * via reflection. Returns null when the subject has no rdf:type.
     */
    public Object convertOriginalForImpl(String instanceName, Class clazz, URI... contexts) throws Exception {
        URI suj = f.createURI(instanceName);
        //checks if this instance is in the triple store
        boolean objectNull = this.connection.hasStatement(suj, RDF.TYPE, null, true, contexts);
        if (!objectNull) {
            return null;
        }
        // checks if it is a java object
        if (clazz.getName().equals(OBJECT_CLASS)) {
            //if yes, then create a JOINT resource instance
            return this.createJOINTResourceObject(instanceName);
        }
        //creates an instance with the concrete class
        Class classImpl = Class.forName(clazz.getName() + SUF_IMPL_CLASS);
        Object obj = classImpl.newInstance();
        //casts the object to the upper class JOINTResource and
        //calls the methods setURI and setLazyLoaded
        ((JOINTResource) obj).setURI(instanceName);
        ((JOINTResource) obj).setLazyLoaded(true);
        //gets all methods of the desired class
        Method[] allMethodsClassImpl = classImpl.getMethods();
        //retrieves all values of the properties of the instance
        List<Statement> statements = graphQueryConstruct.getStatementsAsList(suj.toString(), null, null, contexts);
        //creates a map to hold all values of the properties
        Map<String, List<Value>> mapProperties = this.sortPropertiesAndValues(statements);
        //for each method, searches for the setter ones
        for (Method method : allMethodsClassImpl) {
            //retrieves the method name
            String nomeMetodoImpl = method.getName();
            //checks if it is a setter one
            if (nomeMetodoImpl.startsWith(PREF_SETTER)) {
                //if it is one of the infrastructure setters
                //(setURI / setLazyLoaded / setInnerModifiedFields), skip it
                if (nomeMetodoImpl.startsWith(METHOD_SET_URI)
                        || nomeMetodoImpl.startsWith(METHOD_SET_LAZYLOADED)
                        || nomeMetodoImpl.startsWith(METHOD_SET_INNERFIELDS)) {
                    continue;
                }
                //bypass Java access checks
                method.setAccessible(true);
                //gets the name of the method parameter class
                String parameterClassName = method.getParameterTypes()[0].getName();
                //retrieves the associated annotation that contains the predicate
                Iri iri = method.getAnnotation(Iri.class);
                //if the property has no value, it enters the first branch;
                //if the property is functional (single-valued), the second;
                //otherwise it has multiple values
                if (!mapProperties.containsKey(iri.value())) {
                    //if the parameter is a Set, invoke the method
                    //with an empty new HashSet
                    if (parameterClassName.equals(SET_CLASS)) {
                        method.invoke(obj, new HashSet());
                    }
                } else if (!parameterClassName.equals(SET_CLASS)) {
                    //gets the Value from the object of this property
                    Value objValue = mapProperties.get(iri.value()).get(0);
                    //gets the associated URI
                    String valueURI = objValue.stringValue();
                    //if the valueURI is not empty
                    if (!valueURI.isEmpty()) {
                        //if it is a datatype
                        if (this.datatypeManager.isDatatype(objValue)) {
                            //maps to a specific Java native class
                            method.invoke(obj, this.datatypeManager.
                                    convertLiteralToDataype((Literal) objValue, parameterClassName));
                            //else it is an instance
                        } else {
                            //gets the class name from the triple store
                            parameterClassName = this.getClassFromBase(valueURI, contexts);
                            //gets a new instance with its properties not loaded
                            method.invoke(obj, this.
                                    getNotLoadedObject(valueURI, parameterClassName));
                        }
                    }
                } else {
                    //gets all values of the property and puts them in the list
                    List<Value> listValues = mapProperties.get(iri.value());
                    //creates the set to be used in the method
                    Set<Object> returnSet = new HashSet<>();
                    //gets the first value for type searching
                    Value v = listValues.get(0);
                    //if it is a datatype
                    if (this.datatypeManager.isDatatype(v)) {
                        //crawls the list of values converting to a
                        //specific Java native class
                        for (Value objValue : listValues) {
                            returnSet.add(this.datatypeManager.convertLiteralToDataype((Literal) objValue));
                        }
                        //else it is an instance
                    } else {
                        //gets the class name from the triple store
                        parameterClassName = this.getClassFromBase(v.stringValue(), contexts);
                        //crawls the list of values converting to the
                        //specific class
                        for (Value objValue : listValues) {
                            String objIt = objValue.stringValue();
                            //gets a new instance with its properties not loaded
                            returnSet.add(this.getNotLoadedObject(objIt, parameterClassName));
                        }
                    }
                    //invokes the method with the converted parameter
                    method.invoke(obj, returnSet);
                }
            }
        }
        //calls the setInnerModifiedFields to erase the modified fields
        //of the instance (update mechanics)
        ((JOINTResource) obj).setInnerModifiedFields(new ArrayList<String>());
        return obj;
    }

    /**
     * Batch variant of {@link #convertOriginalForImpl}: fetches the
     * statements for all given subjects with a single graph query, then
     * populates one Impl object per subject via reflection.
     */
    public List<Object> convertCollectionOriginalForImpl(List<String> instancesName, Class clazz, URI... contexts) throws Exception {
        List<Object> returnList = new ArrayList<>();
        // checks if it is a java object
        if (clazz.getName().equals(OBJECT_CLASS)) {
            //crawls the list of instance names
            for (String name : instancesName) {
                //if yes, then create a JOINT resource instance
                returnList.add(this.createJOINTResourceObject(name));
            }
            return returnList;
        }
        //gets the concrete desired class
        Class classImpl = Class.forName(clazz.getName() + SUF_IMPL_CLASS);
        //gets all methods of the concrete class
        Method[] allMethodsClassImpl = classImpl.getMethods();
        //constructs a query to get all information about the objects that will
        //be parsed
        GraphQueryResult stts = graphQueryConstruct.getStatementsAsGraphQuery(instancesName, null, null, contexts);
        //creates a map with key - uri/object - list of statements
        Map<String, List<Statement>> cInformation = new HashMap<>();
        //iterates the previous graph result
        while (stts.hasNext()) {
            //gets the statement
            Statement st = stts.next();
            //gets the uri key
            String uri = st.getSubject().stringValue();
            //checks if the instanceURI is already in the map, if not
            if (!cInformation.containsKey(uri)) {
                //creates a new list of statements
                List<Statement> objects = new ArrayList<>();
                //adds this one
                objects.add(st);
                // puts in the map with the associated uri as a key
                cInformation.put(uri, objects);
            } else {
                //else, gets the list and adds this statement
                cInformation.get(uri).add(st);
            }
        }
        stts.close();
        for (String instanceURI : instancesName) {
            //creates an instance with the concrete class
            Object obj = classImpl.newInstance();
            //casts the object to the upper class JOINTResource and
            //calls the methods setURI and setLazyLoaded
            ((JOINTResource) obj).setURI(instanceURI);
            ((JOINTResource) obj).setLazyLoaded(true);
            //retrieves the statements of all properties of this subject
            List<Statement> statements = cInformation.get(instanceURI);
            //creates a map to hold all values of the properties
            Map<String, List<Value>> mapProperties = this.sortPropertiesAndValues(statements);
            //for each method, searches for the setter ones
            for (Method method : allMethodsClassImpl) {
                //bypass Java access checks
                method.setAccessible(true);
                //retrieves the method name
                String nomeMetodoImpl = method.getName();
                //checks if it is a setter one
                if (nomeMetodoImpl.startsWith(PREF_SETTER)) {
                    //if it is one of the infrastructure setters
                    //(setURI / setLazyLoaded / setInnerModifiedFields), skip it
                    if (nomeMetodoImpl.startsWith(METHOD_SET_URI)
                            || nomeMetodoImpl.startsWith(METHOD_SET_LAZYLOADED)
                            || nomeMetodoImpl.startsWith(METHOD_SET_INNERFIELDS)) {
                        continue;
                    }
                    //gets the name of the method parameter class
                    String parameterClassName = method.getParameterTypes()[0].getName();
                    //retrieves the associated annotation that contains the predicate
                    Iri iri = method.getAnnotation(Iri.class);
                    //if the property has no value, it enters the first branch;
                    //if the property is functional (single-valued), the second;
                    //otherwise it has multiple values
                    if (!mapProperties.containsKey(iri.value())) {
                        //if the parameter is a Set, invoke the method
                        //with an empty new HashSet
                        if (parameterClassName.equals(SET_CLASS)) {
                            method.invoke(obj, new HashSet());
                        }
                    } else if (!parameterClassName.equals(SET_CLASS)) {
                        //gets the Value from the object of this property
                        Value objValue = mapProperties.get(iri.value()).get(0);
                        //gets the associated URI
                        String valueURI = objValue.stringValue();
                        //if the valueURI is not empty
                        if (!valueURI.isEmpty()) {
                            //if it is a datatype
                            if (this.datatypeManager.isDatatype(objValue)) {
                                //maps to a specific Java native class
                                method.invoke(obj, this.datatypeManager.
                                        convertLiteralToDataype((Literal) objValue, parameterClassName));
                                //else it is an instance
                            } else {
                                //gets the class name from the triple store
                                parameterClassName = this.getClassFromBase(valueURI, contexts);
                                //gets a new instance with its properties not loaded
                                method.invoke(obj, this.
                                        getNotLoadedObject(valueURI, parameterClassName));
                            }
                        }
                    } else {
                        //gets all values of the property and puts them in the list
                        List<Value> listValues = mapProperties.get(iri.value());
                        //creates the set to be used in the method
                        Set<Object> returnSet = new HashSet<>();
                        //gets the first value for type searching
                        Value v = listValues.get(0);
                        //if it is a datatype
                        if (this.datatypeManager.isDatatype(v)) {
                            //crawls the list of values converting to a
                            //specific Java native class
                            for (Value objValue : listValues) {
                                returnSet.add(this.datatypeManager.convertLiteralToDataype((Literal) objValue));
                            }
                            //else it is an instance
                        } else {
                            //gets the class name from the triple store
                            parameterClassName = this.getClassFromBase(v.stringValue(), contexts);
                            //crawls the list of values converting to the
                            //specific class
                            for (Value objValue : listValues) {
                                String objIt = objValue.stringValue();
                                //gets a new instance with its properties not loaded
                                returnSet.add(this.getNotLoadedObject(objIt, parameterClassName));
                            }
                        }
                        //invokes the method with the converted parameter
                        method.invoke(obj, returnSet);
                    }
                }
            }
            //calls the setInnerModifiedFields to erase the modified fields
            //of the instance (update mechanics)
            ((JOINTResource) obj).setInnerModifiedFields(new ArrayList<String>());
            //adds the object to the returnList
            returnList.add(obj);
        }
        return returnList;
    }
}
package com.jakewharton.trakt;

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Iterator;
import java.util.List;

import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.reflect.TypeToken;

import com.jakewharton.apibuilder.ApiBuilder;
import com.jakewharton.apibuilder.ApiException;

/**
 * Trakt-specific API builder extension which provides helper methods for
 * adding fields, parameters, and post-parameters commonly used in the API.
 *
 * @param <T> Native class type of the HTTP method call result.
 * @author Jake Wharton <jakewharton@gmail.com>
 */
public abstract class TraktApiBuilder<T> extends ApiBuilder {
    /** API key field name. */
    protected static final String FIELD_API_KEY = "apikey";

    protected static final String FIELD_USERNAME = "username";
    protected static final String FIELD_DATE = "date";
    protected static final String FIELD_DAYS = "days";
    protected static final String FIELD_QUERY = "query";
    protected static final String FIELD_SEASON = "season";
    protected static final String FIELD_EPISODE = "episode";
    protected static final String FIELD_EXTENDED = "extended";

    private static final String POST_PLUGIN_VERSION = "plugin_version";
    private static final String POST_MEDIA_CENTER_VERSION = "media_center_version";
    private static final String POST_MEDIA_CENTER_DATE = "media_center_date";

    /**
     * Format for encoding a {@link java.util.Date} in a URL.
     *
     * <p>NOTE(review): {@link SimpleDateFormat} is not thread-safe; this shared
     * instance assumes builders are used from a single thread — confirm before
     * sharing builders across threads.</p>
     */
    private static final SimpleDateFormat URL_DATE_FORMAT = new SimpleDateFormat("yyyyMMdd");

    /** Trakt API URL base. */
    private static final String BASE_URL = "http://api.trakt.tv";

    /** Number of milliseconds in a single second. */
    /*package*/ static final long MILLISECONDS_IN_SECOND = 1000;

    /** Valued-list seperator. */
    private static final char SEPERATOR = ',';

    /** Valid HTTP request methods. */
    protected static enum HttpMethod {
        Get, Post
    }

    /** Service instance. */
    private final TraktApiService service;

    /** Type token of return type. */
    private final TypeToken<T> token;

    /** HTTP request method to use. */
    private final HttpMethod method;

    /** String representation of JSON POST body. */
    private JsonObject postBody;

    /**
     * Initialize a new builder for an HTTP GET call.
     *
     * @param service Service to bind to.
     * @param token Return type token.
     * @param methodUri URI method format string.
     */
    public TraktApiBuilder(TraktApiService service, TypeToken<T> token, String methodUri) {
        this(service, token, methodUri, HttpMethod.Get);
    }

    /**
     * Initialize a new builder for the specified HTTP method call.
     *
     * @param service Service to bind to.
     * @param token Return type token.
     * @param urlFormat URL format string.
     * @param method HTTP method.
     */
    public TraktApiBuilder(TraktApiService service, TypeToken<T> token, String urlFormat, HttpMethod method) {
        super(BASE_URL + urlFormat);

        this.service = service;
        this.token = token;
        this.method = method;
        this.postBody = new JsonObject();

        // Every request carries the service's API key.
        this.field(FIELD_API_KEY, this.service.getApiKey());
    }

    /**
     * Execute remote API method and unmarshall the result to its native type.
     *
     * @return Instance of result type.
     * @throws ApiException if validation fails.
     */
    public final T fire() {
        try {
            this.performValidation();
        } catch (Exception e) {
            throw new ApiException(e);
        }

        return this.service.unmarshall(this.token, this.execute());
    }

    /**
     * Perform any required validation before firing off the request.
     */
    protected void performValidation() {
        //Override me!
    }

    /**
     * Mark current builder as Trakt developer method. This will automatically
     * add the debug fields to the post body.
     */
    protected final void markAsDeveloperMethod() {
        this.postParameter(POST_PLUGIN_VERSION, service.getPluginVersion());
        this.postParameter(POST_MEDIA_CENTER_VERSION, service.getMediaCenterVersion());
        this.postParameter(POST_MEDIA_CENTER_DATE, service.getMediaCenterDate());
    }

    /**
     * <p>Execute the remote API method and return the JSON object result.<p>
     *
     * <p>This method can be overridden to select a specific subset of the JSON
     * object. The overriding implementation should still call 'super.execute()'
     * and then perform the filtering from there.</p>
     *
     * @return JSON object instance.
     */
    protected final JsonElement execute() {
        String url = this.buildUrl();
        // Strip trailing slashes one character at a time.
        // BUG FIX: the previous code removed url.length() - 2 characters'
        // worth (two chars per slash), silently corrupting any URL that
        // ended with a single trailing slash.
        while (url.endsWith("/")) {
            url = url.substring(0, url.length() - 1);
        }

        switch (this.method) {
            case Get:
                return this.service.get(url);
            case Post:
                return this.service.post(url, this.postBody.toString());
            default:
                throw new IllegalArgumentException("Unknown HttpMethod type " + this.method.toString());
        }
    }

    /**
     * Set the API key.
     *
     * @param apiKey API key string.
     * @return Current instance for builder pattern.
     */
    /*package*/ final ApiBuilder api(String apiKey) {
        return this.field(FIELD_API_KEY, apiKey);
    }

    /**
     * Add a URL parameter value encoded as Unix seconds.
     *
     * @param name Name.
     * @param value Value.
     * @return Current instance for builder pattern.
     */
    protected final ApiBuilder parameter(String name, Date value) {
        return this.parameter(name, Long.toString(value.getTime() / MILLISECONDS_IN_SECOND));
    }

    /**
     * Add a URL parameter value. A null or empty enumeration yields an empty
     * parameter value.
     *
     * @param name Name.
     * @param value Value.
     * @return Current instance for builder pattern.
     */
    protected final <K extends TraktEnumeration> ApiBuilder parameter(String name, K value) {
        if ((value == null) || (value.toString() == null) || (value.toString().length() == 0)) {
            return this.parameter(name, "");
        } else {
            return this.parameter(name, value.toString());
        }
    }

    /**
     * Add a URL parameter value built from a comma-separated, URL-encoded
     * list of values.
     *
     * @param name Name.
     * @param valueList List of values.
     * @return Current instance for builder pattern.
     */
    protected final <K extends Object> ApiBuilder parameter(String name, List<K> valueList) {
        StringBuilder builder = new StringBuilder();
        Iterator<K> iterator = valueList.iterator();
        while (iterator.hasNext()) {
            builder.append(encodeUrl(iterator.next().toString()));
            if (iterator.hasNext()) {
                builder.append(SEPERATOR);
            }
        }
        return this.parameter(name, builder.toString());
    }

    /**
     * Add a URL field value formatted as yyyyMMdd.
     *
     * @param name Name.
     * @param date Value.
     * @return Current instance for builder pattern.
     */
    protected final ApiBuilder field(String name, Date date) {
        return this.field(name, URL_DATE_FORMAT.format(date));
    }

    /**
     * Add a URL field value. A null or empty enumeration sets the bare field
     * with no value.
     *
     * @param name Name.
     * @param value Value.
     * @return Current instance for builder pattern.
     */
    protected final <K extends TraktEnumeration> ApiBuilder field(String name, K value) {
        if ((value == null) || (value.toString() == null) || (value.toString().length() == 0)) {
            return this.field(name);
        } else {
            return this.field(name, value.toString());
        }
    }

    /** Whether the POST body already contains the named property. */
    protected final boolean hasPostParameter(String name) {
        return this.postBody.has(name);
    }

    /** Add a string property to the JSON POST body. */
    protected final TraktApiBuilder<T> postParameter(String name, String value) {
        this.postBody.addProperty(name, value);
        return this;
    }

    /** Add an integer property (stored as a string) to the JSON POST body. */
    protected final TraktApiBuilder<T> postParameter(String name, int value) {
        return this.postParameter(name, Integer.toString(value));
    }

    /**
     * Add an enumeration property to the JSON POST body; null or empty
     * values are silently skipped.
     */
    protected final <K extends TraktEnumeration> TraktApiBuilder<T> postParameter(String name, K value) {
        if ((value != null) && (value.toString() != null) && (value.toString().length() > 0)) {
            return this.postParameter(name, value.toString());
        }
        return this;
    }
}
package org.zwobble.mammoth;

import java.util.Set;

/**
 * The result of converting a document.
 *
 * @param <T> type of the generated value
 */
public interface Result<T> {
    /**
     * The generated value.
     *
     * @return the value produced by the conversion
     */
    T getValue();

    /**
     * Any warnings generated during the conversion.
     *
     * @return warning messages produced while converting
     */
    Set<String> getWarnings();
}
package com.jgardella.app.backend; // Attendance requirement definition public class Requirement { private ReqType reqType; private int amount; private String[] eventTypes; public enum ReqType { CONSECUTIVE, ABSOLUTE, LAST }; public Requirement(ReqType type, int amount, String... eventTypes) { this.reqType = type; this.amount = amount; this.eventTypes = eventTypes; } public ReqType getType() { return reqType; } public int getAmount() { return amount; } public String[] getEventTypes() { return eventTypes; } }
package com.lordmau5.ffs.tile;

import buildcraft.api.transport.IPipeConnection;
import buildcraft.api.transport.IPipeTile;
import com.lordmau5.ffs.FancyFluidStorage;
import com.lordmau5.ffs.util.ExtendedBlock;
import com.lordmau5.ffs.util.GenericUtil;
import com.lordmau5.ffs.util.Position3D;
import cpw.mods.fml.common.Optional;
import dan200.computercraft.api.lua.ILuaContext;
import dan200.computercraft.api.lua.LuaException;
import dan200.computercraft.api.peripheral.IComputerAccess;
import dan200.computercraft.api.peripheral.IPeripheral;
import framesapi.IMoveCheck;
import li.cil.oc.api.machine.Arguments;
import li.cil.oc.api.machine.Context;
import li.cil.oc.api.network.ManagedPeripheral;
import li.cil.oc.api.network.SimpleComponent;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.network.NetworkManager;
import net.minecraft.network.Packet;
import net.minecraft.network.play.server.S35PacketUpdateTileEntity;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.AxisAlignedBB;
import net.minecraft.world.World;
import net.minecraftforge.common.util.ForgeDirection;
import net.minecraftforge.fluids.*;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * Valve tile entity for a multiblock fluid tank. One valve per tank is the
 * "master" and owns the fluid state; all other valves and frames delegate to
 * it. Also exposes the tank to BuildCraft pipes, ComputerCraft and
 * OpenComputers peripherals, and blocks frame-based moving.
 */
@Optional.InterfaceList(value = {
        @Optional.Interface(iface = "buildcraft.api.transport.IPipeConnection", modid = "BuildCraftAPI|Transport"),
        @Optional.Interface(iface = "dan200.computercraft.api.peripheral.IPeripheral", modid = "ComputerCraft"),
        @Optional.Interface(iface = "li.cil.oc.api.network.SimpleComponent", modid = "OpenComputers"),
        @Optional.Interface(iface = "li.cil.oc.api.network.ManagedPeripheral", modid = "OpenComputers"),
        @Optional.Interface(iface = "framesapi.IMoveCheck", modid = "funkylocomotion")
})
public class TileEntityValve extends TileEntity implements IFluidTank, IFluidHandler,
        IPipeConnection, // BuildCraft
        IPeripheral, // ComputerCraft
        SimpleComponent, ManagedPeripheral, // OpenComputers
        IMoveCheck // Funky Locomotion
{

    // Maximum interior extent probed in each direction when detecting the tank.
    private final int maxSize = 9;
    protected int mbPerVirtualTank = FancyFluidStorage.instance.MB_PER_TANK_BLOCK;

    public boolean isValid;
    private boolean isMaster;
    // Set in validate(); triggers a one-shot tank rebuild on the next tick.
    private boolean initiated;

    public int tankHeight = 0;
    // Height of this valve above the bottom frame layer (used for auto-output).
    public int valveHeightPosition = 0;
    private boolean autoOutput;

    // Direction from the valve block into the tank interior.
    private ForgeDirection inside = ForgeDirection.UNKNOWN;

    private TileEntityValve master;
    private List<TileEntityTankFrame> tankFrames;
    private List<TileEntityValve> otherValves;

    // [0] = frame blocks, [1] = inner-frame blocks, [2] = interior air blocks
    // (as produced by GenericUtil.getTankFrame).
    private Map<Position3D, ExtendedBlock>[] maps;

    /**
     * Length of the inside
     *
     * 0 = Down
     * 1 = Up
     * 2 = North
     * 3 = South
     * 4 = West
     * 5 = East
     */
    private int[] length = new int[6];
    public Position3D bottomDiagFrame, topDiagFrame;

    // TANK LOGIC
    private FluidStack fluidStack;
    private int fluidCapacity;

    public TileEntityValve() {
        tankFrames = new ArrayList<>();
        otherValves = new ArrayList<>();
    }

    @Override
    public void validate() {
        super.validate();
        initiated = true;
    }

    @Override
    public void updateEntity() {
        if (worldObj.isRemote)
            return;

        if (initiated) {
            initiated = false;
            if (isMaster())
                buildTank(inside);
            return;
        }

        if (!isValid())
            return;

        if (getFluid() == null)
            return;

        if (getAutoOutput()) { // Auto outputs at 50mB/t (1B/s) if enabled
            if (getFluidAmount() != 0) {
                float height = (float) getFluidAmount() / (float) getCapacity() * (float) getTankHeight();
                if (height > (valveHeightPosition - 0.5f)) { // Valves can output until the liquid is at their halfway point.
                    ForgeDirection out = inside.getOpposite();
                    TileEntity tile = worldObj.getTileEntity(xCoord + out.offsetX, yCoord + out.offsetY, zCoord + out.offsetZ);
                    if (tile != null) {
                        int maxAmount = 0;
                        if (tile instanceof TileEntityValve)
                            maxAmount = 1000; // When two tanks are connected by valves, allow faster output
                        else if (tile instanceof IFluidHandler)
                            maxAmount = 50;

                        if (maxAmount != 0) {
                            IFluidHandler handler = (IFluidHandler) tile;
                            FluidStack fillStack = getFluid().copy();
                            fillStack.amount = Math.min(getFluidAmount(), maxAmount);
                            // Simulate first; only drain what the neighbor actually accepts.
                            if (handler.fill(inside, fillStack, false) > 0) {
                                drain(handler.fill(inside, fillStack, true), true);
                            }
                        }
                    }
                }
            }
        }

        // Slowly collect rain when holding water and exposed to the sky.
        if (getFluid() != null && getFluid().getFluid() == FluidRegistry.WATER) {
            if (worldObj.isRaining()) {
                int rate = (int) Math.floor(worldObj.rainingStrength * 5 * worldObj.getBiomeGenForCoords(xCoord, zCoord).rainfall);
                if (yCoord == worldObj.getPrecipitationHeight(xCoord, zCoord) - 1) {
                    FluidStack waterStack = getFluid().copy();
                    waterStack.amount = rate * 10;
                    fill(waterStack, true);
                }
            }
        }
    }

    public int getTankHeight() {
        return isMaster() ? tankHeight : getMaster().tankHeight;
    }

    private void setInside(ForgeDirection inside) {
        this.inside = inside;
    }

    public ForgeDirection getInside() {
        return this.inside;
    }

    /**
     * (Re)detect and assemble the multiblock tank, using {@code inside} as the
     * direction into the interior. Server-side only.
     */
    public void buildTank(ForgeDirection inside) {
        if (worldObj.isRemote)
            return;

        fluidCapacity = 0;
        tankFrames.clear();
        otherValves.clear();

        if (this.inside == ForgeDirection.UNKNOWN)
            setInside(inside);

        if (!calculateInside())
            return;

        if (!setupTank())
            return;

        updateBlockAndNeighbors();
    }

    /**
     * Measure the air cavity starting one block inward from the valve.
     * Fills {@code length} per direction; fails if the floor scan found no air.
     */
    private boolean calculateInside() {
        int xIn = xCoord + inside.offsetX;
        int yIn = yCoord + inside.offsetY;
        int zIn = zCoord + inside.offsetZ;

        for (ForgeDirection dr : ForgeDirection.VALID_DIRECTIONS) {
            for (int i = 0; i < maxSize; i++) {
                if (!worldObj.isAirBlock(xIn + dr.offsetX * i, yIn + dr.offsetY * i, zIn + dr.offsetZ * i)) {
                    length[dr.ordinal()] = i - 1;
                    break;
                }
            }
        }
        return length[0] != -1;
    }

    /** Derive and assign a slave valve's "inside" direction from the interior air map. */
    private void setSlaveValveInside(Map<Position3D, ExtendedBlock> airBlocks, TileEntityValve slave) {
        List<Position3D> possibleAirBlocks = new ArrayList<>();
        for (ForgeDirection dr : ForgeDirection.VALID_DIRECTIONS) {
            if (worldObj.isAirBlock(slave.xCoord + dr.offsetX, slave.yCoord + dr.offsetY, slave.zCoord + dr.offsetZ))
                possibleAirBlocks.add(new Position3D(slave.xCoord + dr.offsetX, slave.yCoord + dr.offsetY, slave.zCoord + dr.offsetZ));
        }

        Position3D insideAir = null;
        for (Position3D pos : possibleAirBlocks) {
            if (airBlocks.containsKey(pos)) {
                insideAir = pos;
                break;
            }
        }

        if (insideAir == null)
            return;

        Position3D dist = insideAir.getDistance(new Position3D(slave.xCoord, slave.yCoord, slave.zCoord));
        for (ForgeDirection dr : ForgeDirection.VALID_DIRECTIONS) {
            if (dist.equals(new Position3D(dr.offsetX, dr.offsetY, dr.offsetZ))) {
                slave.setInside(dr);
                break;
            }
        }
    }

    /** Compute the diagonal frame corners from the measured lengths and fetch the block maps. */
    private void fetchMaps() {
        bottomDiagFrame = new Position3D(xCoord + inside.offsetX + length[ForgeDirection.WEST.ordinal()] * ForgeDirection.WEST.offsetX + ForgeDirection.WEST.offsetX,
                yCoord + inside.offsetY + length[ForgeDirection.DOWN.ordinal()] * ForgeDirection.DOWN.offsetY + ForgeDirection.DOWN.offsetY,
                zCoord + inside.offsetZ + length[ForgeDirection.NORTH.ordinal()] * ForgeDirection.NORTH.offsetZ + ForgeDirection.NORTH.offsetZ);

        topDiagFrame = new Position3D(xCoord + inside.offsetX + length[ForgeDirection.EAST.ordinal()] * ForgeDirection.EAST.offsetX + ForgeDirection.EAST.offsetX,
                yCoord + inside.offsetY + length[ForgeDirection.UP.ordinal()] * ForgeDirection.UP.offsetY + ForgeDirection.UP.offsetY,
                zCoord + inside.offsetZ + length[ForgeDirection.SOUTH.ordinal()] * ForgeDirection.SOUTH.offsetZ + ForgeDirection.SOUTH.offsetZ);

        maps = GenericUtil.getTankFrame(worldObj, bottomDiagFrame, topDiagFrame);
    }

    /**
     * Validate the frame, compute capacity, adopt slave valves, and convert
     * frame blocks into {@link TileEntityTankFrame}s. Returns false if the
     * structure is not a valid tank.
     */
    private boolean setupTank() {
        fetchMaps();

        otherValves = new ArrayList<>();
        tankFrames = new ArrayList<>();

        Position3D pos = new Position3D(xCoord, yCoord, zCoord);
        valveHeightPosition = Math.abs(bottomDiagFrame.getDistance(pos).getY());
        tankHeight = topDiagFrame.getDistance(bottomDiagFrame).getY() - 1;

        ExtendedBlock bottomDiagBlock = new ExtendedBlock(worldObj.getBlock(bottomDiagFrame.getX(), bottomDiagFrame.getY(), bottomDiagFrame.getZ()),
                worldObj.getBlockMetadata(bottomDiagFrame.getX(), bottomDiagFrame.getY(), bottomDiagFrame.getZ()));
        ExtendedBlock topDiagBlock = new ExtendedBlock(worldObj.getBlock(topDiagFrame.getX(), topDiagFrame.getY(), topDiagFrame.getZ()),
                worldObj.getBlockMetadata(topDiagFrame.getX(), topDiagFrame.getY(), topDiagFrame.getZ()));

        if (!bottomDiagBlock.equals(topDiagBlock) || !GenericUtil.isValidTankBlock(worldObj, bottomDiagFrame, bottomDiagBlock))
            return false;

        for (Map.Entry<Position3D, ExtendedBlock> airCheck : maps[2].entrySet()) {
            if (!worldObj.isAirBlock(airCheck.getKey().getX(), airCheck.getKey().getY(), airCheck.getKey().getZ())) {
                // BUG FIX: compare strings with equals(), not reference identity (==).
                if (airCheck.getValue().getBlock().getUnlocalizedName().equals("railcraft.residual.heat"))
                    continue; // Just to be /sure/ that railcraft isn't messing with us

                return false;
            }
        }

        if (FancyFluidStorage.instance.INSIDE_CAPACITY) {
            fluidCapacity = (maps[2].size()) * mbPerVirtualTank;
        } else {
            fluidCapacity = (maps[0].size() + maps[1].size() + maps[2].size()) * mbPerVirtualTank;
        }

        for (Map.Entry<Position3D, ExtendedBlock> frameCheck : maps[0].entrySet()) {
            if (!frameCheck.getValue().equals(bottomDiagBlock))
                return false;
        }

        List<TileEntityValve> valves = new ArrayList<>();
        for (Map.Entry<Position3D, ExtendedBlock> insideFrameCheck : maps[1].entrySet()) {
            pos = insideFrameCheck.getKey();
            ExtendedBlock check = insideFrameCheck.getValue();
            TileEntity tile = worldObj.getTileEntity(pos.getX(), pos.getY(), pos.getZ());
            if (check.equals(bottomDiagBlock) || GenericUtil.isBlockGlass(check.getBlock(), check.getMetadata()) || tile instanceof TileEntityTankFrame)
                continue;

            if (tile instanceof TileEntityValve) {
                TileEntityValve valve = (TileEntityValve) tile;
                if (valve == this)
                    continue;

                if (valve.fluidStack != null) {
                    this.fluidStack = valve.fluidStack;
                }
                valves.add(valve);
                continue;
            }
            return false;
        }

        // Make sure we don't overfill a tank. If the new tank is smaller than the old one, excess liquid disappear.
        // BUG FIX: guard against NPE when the assembled tank holds no fluid yet.
        if (this.fluidStack != null) {
            this.fluidStack.amount = Math.min(this.fluidStack.amount, this.fluidCapacity);
        }

        for (TileEntityValve valve : valves) {
            pos = new Position3D(valve.xCoord, valve.yCoord, valve.zCoord);
            valve.valveHeightPosition = Math.abs(bottomDiagFrame.getDistance(pos).getY());

            valve.isMaster = false;
            valve.setMaster(this);
            setSlaveValveInside(maps[2], valve);
        }
        isMaster = true;

        for (Map.Entry<Position3D, ExtendedBlock> setTiles : maps[0].entrySet()) {
            pos = setTiles.getKey();
            TileEntityTankFrame tankFrame;
            if (setTiles.getValue().getBlock() != FancyFluidStorage.blockTankFrame) {
                tankFrame = new TileEntityTankFrame(this, setTiles.getValue());
                worldObj.setBlock(pos.getX(), pos.getY(), pos.getZ(), FancyFluidStorage.blockTankFrame, setTiles.getValue().getMetadata(), 2);
                worldObj.setTileEntity(pos.getX(), pos.getY(), pos.getZ(), tankFrame);
                tankFrame.markForUpdate();
            } else {
                tankFrame = (TileEntityTankFrame) worldObj.getTileEntity(pos.getX(), pos.getY(), pos.getZ());
                tankFrame.setValve(this);
            }
            tankFrames.add(tankFrame);
        }

        for (Map.Entry<Position3D, ExtendedBlock> setTiles : maps[1].entrySet()) {
            pos = setTiles.getKey();
            TileEntity tile = worldObj.getTileEntity(pos.getX(), pos.getY(), pos.getZ());
            if (tile != null) {
                if (tile instanceof TileEntityValve && tile != this)
                    otherValves.add((TileEntityValve) tile);

                if (tile instanceof TileEntityTankFrame) {
                    ((TileEntityTankFrame) tile).setValve(this);
                    tankFrames.add((TileEntityTankFrame) tile);
                }
            } else {
                TileEntityTankFrame tankFrame = new TileEntityTankFrame(this, setTiles.getValue());
                worldObj.setBlock(pos.getX(), pos.getY(), pos.getZ(), FancyFluidStorage.blockTankFrame, setTiles.getValue().getMetadata(), 2);
                worldObj.setTileEntity(pos.getX(), pos.getY(), pos.getZ(), tankFrame);
                tankFrame.markForUpdate();
                tankFrames.add(tankFrame);
            }
        }

        isValid = true;
        return true;
    }

    /**
     * Tear the tank down (e.g. a frame was broken), restoring frame blocks to
     * their original blocks. {@code frame} is the frame that triggered the
     * break and is skipped during restoration.
     */
    public void breakTank(TileEntity frame) {
        if (worldObj.isRemote)
            return;

        if (!isMaster()) {
            if (getMaster() != this)
                getMaster().breakTank(frame);
            return;
        }

        for (TileEntityValve valve : otherValves) {
            valve.fluidStack = getFluid();
            valve.master = null;
            valve.isValid = false;
            valve.updateBlockAndNeighbors();
        }

        for (TileEntityTankFrame tankFrame : tankFrames) {
            if (frame == tankFrame)
                continue;

            ExtendedBlock block = tankFrame.getBlock();
            Position3D pos = new Position3D(tankFrame.xCoord, tankFrame.yCoord, tankFrame.zCoord);
            if (block == null || worldObj.isAirBlock(pos.getX(), pos.getY(), pos.getZ()))
                continue;

            worldObj.removeTileEntity(tankFrame.xCoord, tankFrame.yCoord, tankFrame.zCoord);
            worldObj.setBlock(pos.getX(), pos.getY(), pos.getZ(), block.getBlock(), block.getMetadata(), 2);
        }
        isValid = false;
        this.updateBlockAndNeighbors();

        otherValves = new ArrayList<>();
        tankFrames = new ArrayList<>();
    }

    public boolean isValid() {
        return isValid;
    }

    /** Push state to clients, sync slave valves, and notify the outside neighbor (e.g. BC pipes). */
    private void updateBlockAndNeighbors() {
        if (worldObj.isRemote)
            return;

        this.markForUpdate(false);

        if (otherValves != null) {
            for (TileEntityValve otherValve : otherValves) {
                otherValve.isValid = isValid;
                otherValve.markForUpdate(true);
            }
        }

        ForgeDirection outside = getInside().getOpposite();
        TileEntity outsideTile = worldObj.getTileEntity(xCoord + outside.offsetX, yCoord + outside.offsetY, zCoord + outside.offsetZ);
        if (outsideTile != null) {
            //BC Check
            if (FancyFluidStorage.proxy.BUILDCRAFT_LOADED) {
                if (outsideTile instanceof IPipeTile)
                    ((IPipeTile) outsideTile).scheduleNeighborChange();
            }
        }
        worldObj.markBlockForUpdate(xCoord + outside.offsetX, yCoord + outside.offsetY, zCoord + outside.offsetZ);
    }

    public boolean isMaster() {
        return isMaster;
    }

    /** Returns the master valve, or this valve when no master is set. */
    public TileEntityValve getMaster() {
        return master == null ? this : master;
    }

    public void setMaster(TileEntityValve master) {
        this.master = master;
    }

    public boolean getAutoOutput() {
        return isValid() && (isMaster() ? this.autoOutput : getMaster().getAutoOutput());
    }

    public void setAutoOutput(boolean autoOutput) {
        if (!isMaster()) {
            getMaster().setAutoOutput(autoOutput);
            return;
        }
        this.autoOutput = autoOutput;
        updateBlockAndNeighbors();
    }

    @Override
    public void readFromNBT(NBTTagCompound tag) {
        super.readFromNBT(tag);
        isValid = tag.getBoolean("isValid");
        inside = ForgeDirection.getOrientation(tag.getInteger("inside"));
        isMaster = tag.getBoolean("master");
        if (isMaster()) {
            if (tag.getBoolean("hasFluid")) {
                fluidStack = new FluidStack(FluidRegistry.getFluid(tag.getInteger("fluidID")), tag.getInteger("fluidAmount"));
                fluidCapacity = tag.getInteger("fluidCapacity");
            } else {
                fluidStack = null;
            }
            autoOutput = tag.getBoolean("autoOutput");
            tankHeight = tag.getInteger("tankHeight");
        }
        if (tag.hasKey("bottomDiagF")) {
            int[] bottomDiagF = tag.getIntArray("bottomDiagF");
            int[] topDiagF = tag.getIntArray("topDiagF");
            bottomDiagFrame = new Position3D(bottomDiagF[0], bottomDiagF[1], bottomDiagF[2]);
            topDiagFrame = new Position3D(topDiagF[0], topDiagF[1], topDiagF[2]);
        }
    }

    @Override
    public void writeToNBT(NBTTagCompound tag) {
        tag.setBoolean("isValid", isValid);
        tag.setInteger("inside", inside.ordinal());
        tag.setBoolean("master", isMaster());
        if (isMaster()) {
            tag.setBoolean("hasFluid", fluidStack != null);
            if (fluidStack != null) {
                tag.setInteger("fluidID", fluidStack.getFluidID());
                tag.setInteger("fluidAmount", fluidStack.amount);
                tag.setInteger("fluidCapacity", fluidCapacity);
            }
            tag.setBoolean("autoOutput", autoOutput);
            tag.setInteger("tankHeight", tankHeight);
        }
        if (bottomDiagFrame != null && topDiagFrame != null) {
            tag.setIntArray("bottomDiagF", new int[]{bottomDiagFrame.getX(), bottomDiagFrame.getY(), bottomDiagFrame.getZ()});
            tag.setIntArray("topDiagF", new int[]{topDiagFrame.getX(), topDiagFrame.getY(), topDiagFrame.getZ()});
        }
        super.writeToNBT(tag);
    }

    @Override
    public void onDataPacket(NetworkManager net, S35PacketUpdateTileEntity pkt) {
        readFromNBT(pkt.func_148857_g());
        // Resolve the master valve reference sent by getDescriptionPacket().
        if ((!isMaster() || master == null) && pkt.func_148857_g().hasKey("masterValve")) {
            int[] masterCoords = pkt.func_148857_g().getIntArray("masterValve");
            TileEntity tile = worldObj.getTileEntity(masterCoords[0], masterCoords[1], masterCoords[2]);
            if (tile != null && tile instanceof TileEntityValve) {
                master = (TileEntityValve) tile;
            }
        }
        markForUpdate(true);
    }

    @Override
    public Packet getDescriptionPacket() {
        NBTTagCompound tag = new NBTTagCompound();
        writeToNBT(tag);
        if (!isMaster() && master != null) {
            tag.setIntArray("masterValve", new int[]{master.xCoord, master.yCoord, master.zCoord});
        }
        return new S35PacketUpdateTileEntity(xCoord, yCoord, zCoord, 0, tag);
    }

    /** Mark this valve — and, unless {@code onlyThis}, the whole tank — for a client update. */
    private void markForUpdate(boolean onlyThis) {
        if (!worldObj.isRemote) {
            if (!onlyThis) {
                for (TileEntityValve valve : otherValves) {
                    valve.updateBlockAndNeighbors();
                }
                for (TileEntityTankFrame frame : tankFrames)
                    worldObj.markBlockForUpdate(frame.xCoord, frame.yCoord, frame.zCoord);
            }
            worldObj.markBlockForUpdate(xCoord, yCoord, zCoord);
        }
    }

    @Override
    public AxisAlignedBB getRenderBoundingBox() {
        if (bottomDiagFrame == null || topDiagFrame == null)
            return super.getRenderBoundingBox();

        return AxisAlignedBB.getBoundingBox(bottomDiagFrame.getX(), bottomDiagFrame.getY(), bottomDiagFrame.getZ(),
                topDiagFrame.getX(), topDiagFrame.getY(), topDiagFrame.getZ());
    }

    // Tank logic!

    @Override
    public FluidStack getFluid() {
        if (!isValid())
            return null;

        return getMaster() == this ? fluidStack : getMaster().fluidStack;
    }

    @Override
    public int getFluidAmount() {
        if (!isValid() || getFluid() == null)
            return 0;

        return getFluid().amount;
    }

    @Override
    public int getCapacity() {
        if (!isValid())
            return 0;

        return getMaster() == this ? fluidCapacity : getMaster().fluidCapacity;
    }

    @Override
    public FluidTankInfo getInfo() {
        if (!isValid())
            return null;

        return new FluidTankInfo(getMaster());
    }

    @Override
    public int fill(FluidStack resource, boolean doFill) {
        if (getMaster() == this) {
            if (!isValid() || fluidStack != null && !fluidStack.isFluidEqual(resource))
                return 0;

            int possibleAmount = resource.amount;
            if (fluidStack != null)
                possibleAmount += getFluid().amount;

            // NOTE(review): on overflow this returns the overflow amount rather
            // than the amount accepted, which diverges from the usual
            // IFluidTank.fill contract — confirm intent before changing.
            int rest = resource.amount;
            if (possibleAmount > fluidCapacity) {
                rest = possibleAmount - fluidCapacity;
                possibleAmount = fluidCapacity;
            }

            if (doFill) {
                if (fluidStack == null)
                    fluidStack = resource;

                fluidStack.amount = possibleAmount;
                getMaster().markForUpdate(true);
            }
            return rest;
        } else
            return getMaster().fill(resource, doFill);
    }

    @Override
    public FluidStack drain(int maxDrain, boolean doDrain) {
        if (getMaster() == this) {
            if (!isValid() || fluidStack == null)
                return null;

            int possibleAmount = fluidStack.amount - maxDrain;
            int drained = maxDrain;
            if (possibleAmount < 0) {
                drained += possibleAmount;
                possibleAmount = 0;
            }
            FluidStack returnStack = new FluidStack(fluidStack, drained);

            if (doDrain) {
                fluidStack.amount = possibleAmount;
                if (possibleAmount == 0)
                    fluidStack = null;

                getMaster().markForUpdate(true);
            }
            return returnStack;
        } else
            return getMaster().drain(maxDrain, doDrain);
    }

    // IFluidHandler

    @Override
    public int fill(ForgeDirection from, FluidStack resource, boolean doFill) {
        return getMaster() == this ? fill(resource, doFill) : getMaster().fill(resource, doFill);
    }

    @Override
    public FluidStack drain(ForgeDirection from, FluidStack resource, boolean doDrain) {
        return getMaster() == this ? drain(resource.amount, doDrain) : getMaster().drain(resource.amount, doDrain);
    }

    @Override
    public FluidStack drain(ForgeDirection from, int maxDrain, boolean doDrain) {
        return getMaster() == this ? drain(maxDrain, doDrain) : getMaster().drain(maxDrain, doDrain);
    }

    @Override
    public boolean canFill(ForgeDirection from, Fluid fluid) {
        return isValid() && ((getFluid() != null && getFluid().getFluid() == fluid && getFluid().amount < getCapacity()) || getFluid() == null);
    }

    @Override
    public boolean canDrain(ForgeDirection from, Fluid fluid) {
        return isValid() && getFluid() != null && getFluid().getFluid() == fluid && getFluid().amount > 0;
    }

    @Override
    public FluidTankInfo[] getTankInfo(ForgeDirection from) {
        if (!isValid())
            return null;

        return getMaster() == this ? new FluidTankInfo[]{getInfo()} : getMaster().getTankInfo(from);
    }

    @Optional.Method(modid = "BuildCraftAPI|Transport")
    @Override
    public ConnectOverride overridePipeConnection(IPipeTile.PipeType pipeType, ForgeDirection from) {
        if (!isValid())
            return ConnectOverride.DISCONNECT;

        return ConnectOverride.CONNECT;
    }

    /** Shared method-name list for the ComputerCraft and OpenComputers peripherals. */
    public String[] methodNames() {
        return new String[]{"getFluidName", "getFluidAmount", "getFluidCapacity", "setAutoOutput", "doesAutoOutput"};
    }

    @Optional.Method(modid = "ComputerCraft")
    @Override
    public String getType() {
        return "ffs_valve";
    }

    @Optional.Method(modid = "ComputerCraft")
    @Override
    public String[] getMethodNames() {
        return methodNames();
    }

    @Optional.Method(modid = "ComputerCraft")
    @Override
    public Object[] callMethod(IComputerAccess computer, ILuaContext context, int method, Object[] arguments) throws LuaException, InterruptedException {
        switch (method) {
            case 0: { // getFluidName
                if (this.getFluid() == null)
                    return null;

                return new Object[]{this.getFluid().getLocalizedName()};
            }
            case 1: { // getFluidAmount
                return new Object[]{this.getFluidAmount()};
            }
            case 2: { // getFluidCapacity
                return new Object[]{this.getCapacity()};
            }
            case 3: { // setAutoOutput
                // With no argument, toggle the current state.
                if (arguments.length == 0) {
                    arguments = new Object[]{!this.getAutoOutput()};
                }
                if (!(arguments[0] instanceof Boolean)) {
                    throw new LuaException("expected argument 1 to be of type \"boolean\", found \"" + arguments[0].getClass().getSimpleName() + "\"");
                }
                this.setAutoOutput((boolean) arguments[0]);
                return new Object[]{this.getAutoOutput()};
            }
            case 4: { // doesAutoOutput
                return new Object[]{this.getAutoOutput()};
            }
            default:
        }
        return null;
    }

    @Optional.Method(modid = "ComputerCraft")
    @Override
    public void attach(IComputerAccess computer) {
    }

    @Optional.Method(modid = "ComputerCraft")
    @Override
    public void detach(IComputerAccess computer) {
    }

    @Optional.Method(modid = "ComputerCraft")
    @Override
    public boolean equals(IPeripheral other) {
        return false;
    }

    @Optional.Method(modid = "OpenComputers")
    @Override
    public String getComponentName() {
        return "ffs_valve";
    }

    @Optional.Method(modid = "OpenComputers")
    @Override
    public String[] methods() {
        return methodNames();
    }

    @Optional.Method(modid = "OpenComputers")
    @Override
    public Object[] invoke(String method, Context context, Arguments args) throws Exception {
        switch (method) {
            case "getFluidName": { // getFluidName
                if (this.getFluid() == null)
                    return null;

                return new Object[]{this.getFluid().getLocalizedName()};
            }
            case "getFluidAmount": { // getFluidAmount
                return new Object[]{this.getFluidAmount()};
            }
            case "getFluidCapacity": { // getCapacity
                return new Object[]{this.getCapacity()};
            }
            case "setAutoOutput": { // setAutoOutput
                this.setAutoOutput(args.optBoolean(0, !this.getAutoOutput()));
                return new Object[]{this.getAutoOutput()};
            }
            case "doesAutoOutput": { // doesAutoOutput
                return new Object[]{this.getAutoOutput()};
            }
            default:
        }
        return null;
    }

    @Optional.Method(modid = "funkylocomotion")
    @Override
    public boolean canMove(World worldObj, int x, int y, int z) {
        // Multiblock tanks must not be frame-moved; positions are baked into state.
        return false;
    }
}
package tars.commons.core;

/**
 * Container for user visible messages.
 */
public class Messages {

    public static final String MESSAGE_UNKNOWN_COMMAND = "Unknown command";
    public static final String MESSAGE_INVALID_COMMAND_FORMAT = "Invalid command format! \n%1$s";
    public static final String MESSAGE_INVALID_DATE = "Invalid date";
    public static final String MESSAGE_INVALID_PERSON_DISPLAYED_INDEX = "The task index provided is invalid";
    public static final String MESSAGE_PERSONS_LISTED_OVERVIEW = "%1$d tasks listed!";

    /** Static-only message container; prevent instantiation. */
    private Messages() {
    }
}
package com.microsoft.sqlserver.jdbc; import java.math.BigDecimal; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.ArrayList; import java.util.List; import java.util.Locale; public class Geometry { private ByteBuffer buffer; private InternalSpatialDatatype internalType; private String wkt; private byte[] wkb; private int srid; private byte version = 1; private byte serializationProperties; private int numberOfPoints; private int numberOfFigures; private int numberOfShapes; private int numberOfSegments; private double points[]; private double zValues[]; private double mValues[]; private Figure figures[]; private Shape shapes[]; private Segment segments[]; private StringBuffer WKTsb; private int currentPointIndex = 0; private int currentFigureIndex = 0; private int currentSegmentIndex = 0; private int currentShapeIndex = 0; //serialization properties private boolean hasZvalues = false; private boolean hasMvalues = false; //TODO: when is a geometry/geography not valid? //Also, from the driver's point of view, should this ever be false? private boolean isValid = false; private boolean isSinglePoint = false; private boolean isSingleLineSegment = false; //TODO: how do i use this? 
private boolean isLargerThanHemisphere = false; private final byte FA_INTERIOR_RING = 0; private final byte FA_STROKE = 1; private final byte FA_EXTERIOR_RING = 2; private final byte FA_POINT = 0; private final byte FA_LINE = 1; private final byte FA_ARC = 2; private final byte FA_COMPOSITE_CURVE = 3; private final byte SEGMENT_LINE = 0; private final byte SEGMENT_ARC = 1; private final byte SEGMENT_FIRST_LINE = 2; private final byte SEGMENT_FIRST_ARC = 3; private final byte hasZvaluesMask = 0b00000001; private final byte hasMvaluesMask = 0b00000010; private final byte isValidMask = 0b00000100; private final byte isSinglePointMask = 0b00001000; private final byte isSingleLineSegmentMask = 0b00010000; private final byte isLargerThanHemisphereMask = 0b00100000; // WKT to WKB properties private int currentWktPos = 0; private List<Point> pointList = new ArrayList<Point>(); private List<Figure> figureList = new ArrayList<Figure>(); private List<Shape> shapeList = new ArrayList<Shape>(); private List<Segment> segmentList = new ArrayList<Segment>(); private List<Integer> version_one_shape_indexes = new ArrayList<Integer>(); public Geometry(String WellKnownText, int srid) { this.wkt = WellKnownText; this.srid = srid; //TODO: do lazy conversion later parseWKTForSerialization(currentWktPos, -1, false); serializeToWkb(); } public Geometry(byte[] wkb) { this.wkb = wkb; buffer = ByteBuffer.wrap(wkb); buffer.order(ByteOrder.LITTLE_ENDIAN); parseWkb(); WKTsb = new StringBuffer(); constructWKT(internalType, numberOfPoints, numberOfFigures, numberOfSegments, numberOfShapes); wkt = WKTsb.toString(); } public InternalSpatialDatatype getInternalType() { return internalType; } public int getSRID() { return srid; } public byte[] getWkb() { return wkb; } public String toString() { return wkt; } private void serializeToWkb() { ByteBuffer buf = ByteBuffer.allocate(determineWkbCapacity()); createSerializationProperties(); buf.order(ByteOrder.LITTLE_ENDIAN); buf.putInt(srid); 
buf.put(version); buf.put(serializationProperties); if (!isSinglePoint && !isSingleLineSegment) { buf.putInt(numberOfPoints); } for (int i = 0; i < numberOfPoints; i++) { buf.putDouble(points[2 * i]); buf.putDouble(points[2 * i + 1]); } if (hasZvalues) { for (int i = 0; i < numberOfPoints; i++) { buf.putDouble(zValues[i]); } } if (hasMvalues) { for (int i = 0; i < numberOfPoints; i++) { buf.putDouble(mValues[i]); } } if (isSinglePoint || isSingleLineSegment) { wkb = buf.array(); return; } buf.putInt(numberOfFigures); for (int i = 0; i < numberOfFigures; i++) { buf.put(figures[i].getFiguresAttribute()); buf.putInt(figures[i].getPointOffset()); } buf.putInt(numberOfShapes); for (int i = 0; i < numberOfShapes; i++) { buf.putInt(shapes[i].getParentOffset()); buf.putInt(shapes[i].getFigureOffset()); buf.put(shapes[i].getOpenGISType()); } if (version == 2 && null != segments) { buf.putInt(numberOfSegments); for (int i = 0; i < numberOfSegments; i++) { buf.put(segments[i].getSegmentType()); } } wkb = buf.array(); return; } private void createSerializationProperties() { serializationProperties = 0; if (hasZvalues) { serializationProperties += hasZvaluesMask; } if (hasMvalues) { serializationProperties += hasMvaluesMask; } if (isValid) { serializationProperties += isValidMask; } if (isSinglePoint) { serializationProperties += isSinglePointMask; } if (isSingleLineSegment) { serializationProperties += isSingleLineSegmentMask; } //TODO look into how the isLargerThanHemisphere is created if (version == 2) { if (isLargerThanHemisphere) { serializationProperties += isLargerThanHemisphereMask; } } } private int determineWkbCapacity() { int totalSize = 0; totalSize+=6; // SRID + version + SerializationPropertiesByte if (isSinglePoint || isSingleLineSegment) { totalSize += 16 * numberOfPoints; if (hasZvalues) { totalSize += 8 * numberOfPoints; } if (hasMvalues) { totalSize += 8 * numberOfPoints; } return totalSize; } int pointSize = 16; if (hasZvalues) { pointSize += 8; } if 
(hasMvalues) { pointSize += 8; } totalSize += 12; // 4 bytes for 3 ints, each representing the number of points, shapes and figures totalSize += numberOfPoints * pointSize; totalSize += numberOfFigures * 5; totalSize += numberOfShapes * 9; if (version == 2) { totalSize += 4; // 4 bytes for 1 int, representing the number of segments totalSize += numberOfSegments; } return totalSize; } private void parseWkb() { srid = buffer.getInt(); version = buffer.get(); serializationProperties = buffer.get(); interpretSerializationPropBytes(); readNumberOfPoints(); readPoints(); if (hasZvalues) { readZvalues(); } if (hasMvalues) { readMvalues(); } //TODO: do I need to do anything when it's isSinglePoint or isSingleLineSegment? if (!(isSinglePoint || isSingleLineSegment)) { readNumberOfFigures(); readFigures(); readNumberOfShapes(); readShapes(); } determineInternalType(); if (version == 2 && internalType.getTypeCode() != 8) { readNumberOfSegments(); readSegments(); } } private void interpretSerializationPropBytes() { hasZvalues = (serializationProperties & hasZvaluesMask) != 0; hasMvalues = (serializationProperties & hasMvaluesMask) != 0; isValid = (serializationProperties & isValidMask) != 0; isSinglePoint = (serializationProperties & isSinglePointMask) != 0; isSingleLineSegment = (serializationProperties & isSingleLineSegmentMask) != 0; } private void readNumberOfPoints() { if (isSinglePoint) { numberOfPoints = 1; } else if (isSingleLineSegment) { numberOfPoints = 2; } else { numberOfPoints = buffer.getInt(); } } private void readPoints() { points = new double[2 * numberOfPoints]; for (int i = 0; i < numberOfPoints; i++) { points[2 * i] = buffer.getDouble(); points[2 * i + 1] = buffer.getDouble(); } } private void readZvalues() { zValues = new double[numberOfPoints]; for (int i = 0; i < numberOfPoints; i++) { zValues[i] = buffer.getDouble(); } } private void readMvalues() { mValues = new double[numberOfPoints]; for (int i = 0; i < numberOfPoints; i++) { mValues[i] = 
buffer.getDouble(); } } private void readNumberOfFigures() { numberOfFigures = buffer.getInt(); } private void readFigures() { byte fa; int po; figures = new Figure[numberOfFigures]; for (int i = 0; i < numberOfFigures; i++) { fa = buffer.get(); po = buffer.getInt(); figures[i] = new Figure(fa, po); } } private void readNumberOfShapes() { numberOfShapes = buffer.getInt(); } private void readShapes() { int po; int fo; byte ogt; shapes = new Shape[numberOfShapes]; for (int i = 0; i < numberOfShapes; i++) { po = buffer.getInt(); fo = buffer.getInt(); ogt = buffer.get(); shapes[i] = new Shape(po, fo, ogt); } } private void readNumberOfSegments() { numberOfSegments = buffer.getInt(); } private void readSegments() { byte st; segments = new Segment[numberOfSegments]; for (int i = 0; i < numberOfSegments; i++) { st = buffer.get(); segments[i] = new Segment(st); } } private void determineInternalType() { if (isSinglePoint) { internalType = InternalSpatialDatatype.POINT; } else if (isSingleLineSegment) { internalType = InternalSpatialDatatype.LINESTRING; } else { internalType = InternalSpatialDatatype.valueOf(shapes[0].getOpenGISType()); } } private void constructWKT(InternalSpatialDatatype isd, int pointIndexEnd, int figureIndexEnd, int segmentIndexEnd, int shapeIndexEnd) { if (null == points || numberOfPoints == 0) { wkt = internalType + " EMPTY"; return; } WKTsb.append(isd.getTypeName()); WKTsb.append("("); switch (isd) { case POINT: constructPointWKT(currentPointIndex); break; case LINESTRING: case CIRCULARSTRING: constructLineWKT(currentPointIndex, pointIndexEnd); break; case POLYGON: case MULTIPOINT: case MULTILINESTRING: constructShapeWKT(currentFigureIndex, figureIndexEnd); break; case COMPOUNDCURVE: constructCompoundcurveWKT(currentSegmentIndex, segmentIndexEnd, pointIndexEnd); break; case MULTIPOLYGON: constructMultipolygonWKT(currentFigureIndex, figureIndexEnd); break; case GEOMETRYCOLLECTION: constructGeometryCollectionWKT(shapeIndexEnd); break; case 
CURVEPOLYGON: constructCurvepolygonWKT(currentFigureIndex, figureIndexEnd, currentSegmentIndex, segmentIndexEnd); break; case FULLGLOBE: //TODO: return error return; default: break; } WKTsb.append(")"); } private void constructPointWKT(int pointIndex) { int firstPointIndex = pointIndex * 2; int secondPointIndex = firstPointIndex + 1; int zValueIndex = pointIndex; int mValueIndex = pointIndex; WKTsb.append(points[firstPointIndex]); WKTsb.append(" "); WKTsb.append(points[secondPointIndex]); WKTsb.append(" "); if (hasZvalues && !Double.isNaN(zValues[zValueIndex])) { WKTsb.append(zValues[zValueIndex]); WKTsb.append(" "); if (hasMvalues && !Double.isNaN(mValues[mValueIndex])) { WKTsb.append(mValues[mValueIndex]); WKTsb.append(" "); } } currentPointIndex++; WKTsb.setLength(WKTsb.length() - 1); // truncate last space } private void constructLineWKT(int pointStartIndex, int pointEndIndex) { for (int i = pointStartIndex; i < pointEndIndex; i++) { constructPointWKT(i); // add ', ' to separate points, except for the last point if (i != pointEndIndex - 1) { WKTsb.append(", "); } } } private void constructShapeWKT(int figureStartIndex, int figureEndIndex) { // Method for constructing shapes (simple Geometry/Geography entities that are contained within a single bracket) for (int i = figureStartIndex; i < figureEndIndex; i++) { WKTsb.append("("); if (i != numberOfFigures - 1) { //not the last figure constructLineWKT(figures[i].getPointOffset(), figures[i + 1].getPointOffset()); } else { constructLineWKT(figures[i].getPointOffset(), numberOfPoints); } if (i != figureEndIndex - 1) { WKTsb.append("), "); } else { WKTsb.append(")"); } } } private void constructCompoundcurveWKT(int segmentStartIndex, int segmentEndIndex, int pointEndIndex) { for (int i = segmentStartIndex; i < segmentEndIndex; i++) { byte segment = segments[i].getSegmentType(); constructSegmentWKT(i, segment, pointEndIndex); if (i == segmentEndIndex - 1) { WKTsb.append(")"); break; } switch (segment) { case 0: case 2: 
if (segments[i + 1].getSegmentType() != 0) { WKTsb.append("), "); } break; case 1: case 3: if (segments[i + 1].getSegmentType() != 1) { WKTsb.append("), "); } break; default: return; } } } private void constructSegmentWKT(int currentSegment, byte segment, int pointEndIndex) { switch (segment) { case 0: WKTsb.append(", "); constructLineWKT(currentPointIndex, currentPointIndex + 1); if (currentSegment == segments.length - 1) { // last segment break; } else if (segments[currentSegment + 1].getSegmentType() != 0) { // not being followed by another line, but not the last segment currentPointIndex = currentPointIndex - 1; incrementPointNumStartIfPointNotReused(pointEndIndex); } break; case 1: WKTsb.append(", "); constructLineWKT(currentPointIndex, currentPointIndex + 2); if (currentSegment == segments.length - 1) { // last segment break; } else if (segments[currentSegment + 1].getSegmentType() != 1) { // not being followed by another arc, but not the last segment currentPointIndex = currentPointIndex - 1; // only increment pointNumStart by one less than what we should be, since the last point will be reused incrementPointNumStartIfPointNotReused(pointEndIndex); } break; case 2: WKTsb.append("("); constructLineWKT(currentPointIndex, currentPointIndex + 2); if (currentSegment == segments.length - 1) { // last segment break; } else if (segments[currentSegment + 1].getSegmentType() != 0) { // not being followed by another line, but not the last segment currentPointIndex = currentPointIndex - 1; // only increment pointNumStart by one less than what we should be, since the last point will be reused incrementPointNumStartIfPointNotReused(pointEndIndex); } break; case 3: WKTsb.append("CIRCULARSTRING("); constructLineWKT(currentPointIndex, currentPointIndex + 3); if (currentSegment == segments.length - 1) { // last segment break; } else if (segments[currentSegment + 1].getSegmentType() != 1) { // not being followed by another arc currentPointIndex = currentPointIndex - 1; // only 
increment pointNumStart by one less than what we should be, since the last point will be reused incrementPointNumStartIfPointNotReused(pointEndIndex); } break; default: return; } } private void incrementPointNumStartIfPointNotReused(int pointEndIndex) { // We need to increment PointNumStart if the last point was actually not re-used in the points array. // 0 for pointNumEnd indicates that this check is not applicable. if (currentPointIndex + 1 >= pointEndIndex) { currentPointIndex++; } } private void constructMultipolygonWKT(int figureStartIndex, int figureEndIndex) { for (int i = figureStartIndex; i < figureEndIndex; i++) { if (figures[i].getFiguresAttribute() == 2) { // exterior ring WKTsb.append("(("); } else { // interior ring WKTsb.append("("); } if (i == figures.length - 1) { // last figure constructLineWKT(figures[i].getPointOffset(), numberOfPoints); } else { constructLineWKT(figures[i].getPointOffset(), figures[i + 1].getPointOffset()); } if (i == figureEndIndex - 1) { // last polygon of this multipolygon, close off the Multipolygon and return WKTsb.append("))"); return; } else if (figures[i + 1].getFiguresAttribute() == 2) { // not the last polygon, followed by an exterior ring WKTsb.append(")), "); } else { // not the last polygon, followed by an interior ring WKTsb.append("), "); } } } private void constructCurvepolygonWKT(int figureStartIndex, int figureEndIndex, int segmentStartIndex, int segmentEndIndex) { for (int i = figureStartIndex; i < figureEndIndex; i++) { switch (figures[i].getFiguresAttribute()) { case 1: // line WKTsb.append("("); if (i == figures.length - 1) { constructLineWKT(currentPointIndex, numberOfPoints); } else { constructLineWKT(currentPointIndex, figures[i + 1].getPointOffset()); //currentPointIndex = figures[i + 1].getPointOffset(); } WKTsb.append(")"); break; case 2: // arc WKTsb.append("CIRCULARSTRING("); if (i == figures.length - 1) { constructLineWKT(currentPointIndex, numberOfPoints); } else { 
constructLineWKT(currentPointIndex, figures[i + 1].getPointOffset()); //currentPointIndex = figures[i + 1].getPointOffset(); } WKTsb.append(")"); break; case 3: // composite curve WKTsb.append("COMPOUNDCURVE("); int pointEndIndex = 0; if (i == figures.length - 1) { pointEndIndex = numberOfPoints; } else { pointEndIndex = figures[i + 1].getPointOffset(); } while (currentPointIndex < pointEndIndex) { byte segment = segments[segmentStartIndex].getSegmentType(); constructSegmentWKT(segmentStartIndex, segment, pointEndIndex); if (segmentStartIndex >= segmentEndIndex - 1) { WKTsb.append(")"); // about to exit while loop, but not the last segment = we are closing Compoundcurve. } else if (!(currentPointIndex < pointEndIndex)) { WKTsb.append("))"); } else { switch (segment) { case 0: case 2: if (segments[segmentStartIndex + 1].getSegmentType() != 0) { WKTsb.append("), "); } break; case 1: case 3: if (segments[segmentStartIndex + 1].getSegmentType() != 1) { WKTsb.append("), "); } break; default: return; } } segmentStartIndex++; } break; default: return; } if (i == figureEndIndex - 1) { WKTsb.append(")"); } else { WKTsb.append(", "); } } } private void constructGeometryCollectionWKT(int shapeEndIndex) { currentShapeIndex++; constructGeometryCollectionWKThelper(shapeEndIndex); } private void constructGeometryCollectionWKThelper(int shapeEndIndex) { //phase 1: assume that there is no multi - stuff and no geometrycollection while (currentShapeIndex < shapeEndIndex) { InternalSpatialDatatype isd = InternalSpatialDatatype.valueOf(shapes[currentShapeIndex].getOpenGISType()); int figureIndex = shapes[currentShapeIndex].getFigureOffset(); int pointIndexEnd = numberOfPoints; int figureIndexEnd = numberOfFigures; int segmentIndexEnd = numberOfSegments; int shapeIndexEnd = numberOfShapes; int figureIndexIncrement = 0; int segmentIndexIncrement = 0; int localCurrentSegmentIndex = 0; int localCurrentShapeIndex = 0; switch (isd) { case POINT: figureIndexIncrement++; currentShapeIndex++; 
break; case LINESTRING: case CIRCULARSTRING: figureIndexIncrement++; currentShapeIndex++; pointIndexEnd = figures[figureIndex + 1].getPointOffset(); break; case POLYGON: case CURVEPOLYGON: if (currentShapeIndex < shapes.length - 1) { figureIndexEnd = shapes[currentShapeIndex + 1].getFigureOffset(); } figureIndexIncrement = figureIndexEnd - currentFigureIndex; currentShapeIndex++; // Needed to keep track of which segment we are at, inside the for loop localCurrentSegmentIndex = currentSegmentIndex; if (isd.equals(InternalSpatialDatatype.CURVEPOLYGON)) { // assume Version 2 for (int i = currentFigureIndex; i < figureIndexEnd; i++) { // Only Compoundcurves (with figure attribute 3) can have segments if (figures[i].getFiguresAttribute() == 3) { int pointOffsetEnd; if (i == figures.length - 1) { pointOffsetEnd = numberOfPoints; } else { pointOffsetEnd = figures[i + 1].getPointOffset(); } int increment = calculateSegmentIncrement(localCurrentSegmentIndex, pointOffsetEnd - figures[i].getPointOffset()); segmentIndexIncrement = segmentIndexIncrement + increment; localCurrentSegmentIndex = localCurrentSegmentIndex + increment; } } } segmentIndexEnd = localCurrentSegmentIndex; break; case MULTIPOINT: case MULTILINESTRING: case MULTIPOLYGON: //Multipoint and MultiLineString can go on for multiple Shapes, but eventually //the parentOffset will signal the end of the object, or it's reached the end of the //shapes array. //There is also no possibility that a MultiPoint or MultiLineString would branch //into another parent. 
int thisShapesParentOffset = shapes[currentShapeIndex].getParentOffset(); // Increment shapeStartIndex to account for the shape index that either Multipoint, MultiLineString // or MultiPolygon takes up currentShapeIndex++; while (currentShapeIndex < shapes.length - 1 && shapes[currentShapeIndex].getParentOffset() != thisShapesParentOffset) { figureIndexEnd = shapes[currentShapeIndex + 1].getFigureOffset(); currentShapeIndex++; } figureIndexIncrement = figureIndexEnd - currentFigureIndex; break; case GEOMETRYCOLLECTION: WKTsb.append(isd.getTypeName()); WKTsb.append("("); int geometryCollectionParentIndex = shapes[currentShapeIndex].getParentOffset(); // Needed to keep track of which shape we are at, inside the for loop localCurrentShapeIndex = currentShapeIndex; while (localCurrentShapeIndex < shapes.length - 1 && shapes[localCurrentShapeIndex + 1].getParentOffset() > geometryCollectionParentIndex) { localCurrentShapeIndex++; } // increment localCurrentShapeIndex one more time since it will be used as a shapeEndIndex parameter // for constructGeometryCollectionWKT, and the shapeEndIndex parameter is used non-inclusively localCurrentShapeIndex++; currentShapeIndex++; constructGeometryCollectionWKThelper(localCurrentShapeIndex); if (currentShapeIndex < shapeEndIndex) { WKTsb.append("), "); } else { WKTsb.append(")"); } continue; case COMPOUNDCURVE: if (currentFigureIndex == figures.length - 1) { pointIndexEnd = numberOfPoints; } else { pointIndexEnd = figures[currentFigureIndex + 1].getPointOffset(); } int increment = calculateSegmentIncrement(currentSegmentIndex, pointIndexEnd - figures[currentFigureIndex].getPointOffset()); segmentIndexIncrement = increment; segmentIndexEnd = currentSegmentIndex + increment; figureIndexIncrement++; currentShapeIndex++; break; case FULLGLOBE: WKTsb.append("FULLGLOBE"); break; default: break; } constructWKT(isd, pointIndexEnd, figureIndexEnd, segmentIndexEnd, shapeIndexEnd); currentFigureIndex = currentFigureIndex + 
figureIndexIncrement; currentSegmentIndex = currentSegmentIndex + segmentIndexIncrement; if (currentShapeIndex < shapeEndIndex) { WKTsb.append(", "); } } } //Calculates how many segments will be used by this CompoundCurve private int calculateSegmentIncrement(int segmentStart, int pointDifference) { int segmentIncrement = 0; while (pointDifference > 0) { switch (segments[segmentStart].getSegmentType()) { case 0: pointDifference = pointDifference - 1; if (segmentStart == segments.length - 1 || pointDifference < 1) { // last segment break; } else if (segments[segmentStart + 1].getSegmentType() != 0) { // one point will be reused pointDifference = pointDifference + 1; } break; case 1: pointDifference = pointDifference - 2; if (segmentStart == segments.length - 1 || pointDifference < 1) { // last segment break; } else if (segments[segmentStart + 1].getSegmentType() != 1) { // one point will be reused pointDifference = pointDifference + 1; } break; case 2: pointDifference = pointDifference - 2; if (segmentStart == segments.length - 1 || pointDifference < 1) { // last segment break; } else if (segments[segmentStart + 1].getSegmentType() != 0) { // one point will be reused pointDifference = pointDifference + 1; } break; case 3: pointDifference = pointDifference - 3; if (segmentStart == segments.length - 1 || pointDifference < 1) { // last segment break; } else if (segments[segmentStart + 1].getSegmentType() != 1) { // one point will be reused pointDifference = pointDifference + 1; } break; default: return segmentIncrement; } segmentStart++; segmentIncrement++; } return segmentIncrement; } private void parseWKTForSerialization(int startPos, int parentShapeIndex, boolean isGeoCollection) { //after every iteration of this while loop, the currentWktPosition will be set to the //end of the geometry/geography shape, except for the very first iteration of it. 
//This means that there has to be comma (that separates the previous shape with the next shape), //or we expect a ')' that will close the entire shape and exit the method. while (hasMoreToken()) { if (startPos != 0) { if (wkt.charAt(currentWktPos) == ')') { return; } else if (wkt.charAt(currentWktPos) == ',') { currentWktPos++; } else { //TODO: throw exception here? //return; } } String nextToken = getNextStringToken().toUpperCase(Locale.US); String nextPotentialToken; int thisShapeIndex; InternalSpatialDatatype isd = InternalSpatialDatatype.valueOf(nextToken); byte fa = 0; readOpenBracket(); if (version == 1 && (nextToken.equals("CIRCULARSTRING") || nextToken.equals("COMPOUNDCURVE") || nextToken.equals("CURVEPOLYGON"))) { version = 2; } switch (nextToken) { case "POINT": if (startPos == 0 && nextToken.toUpperCase().equals("POINT")) { isSinglePoint = true; } if (isGeoCollection) { shapeList.add(new Shape(parentShapeIndex, figureList.size(), isd.getTypeCode())); figureList.add(new Figure(FA_LINE, pointList.size())); } readPointWkt(); break; case "LINESTRING": case "CIRCULARSTRING": shapeList.add(new Shape(parentShapeIndex, figureList.size(), isd.getTypeCode())); fa = isd.getTypeCode() == InternalSpatialDatatype.LINESTRING.getTypeCode() ? 
FA_STROKE : FA_EXTERIOR_RING; figureList.add(new Figure(fa, pointList.size())); readLineWkt(); if (startPos == 0 && nextToken.toUpperCase().equals("LINESTRING") && pointList.size() == 2) { isSingleLineSegment = true; } break; case "POLYGON": case "MULTIPOINT": case "MULTILINESTRING": thisShapeIndex = shapeList.size(); shapeList.add(new Shape(parentShapeIndex, figureList.size(), isd.getTypeCode())); readShapeWkt(thisShapeIndex, nextToken); break; case "MULTIPOLYGON": thisShapeIndex = shapeList.size(); shapeList.add(new Shape(parentShapeIndex, figureList.size(), isd.getTypeCode())); while (currentWktPos < wkt.length() && wkt.charAt(currentWktPos) != ')') { shapeList.add(new Shape(thisShapeIndex, figureList.size(), InternalSpatialDatatype.POLYGON.getTypeCode())); //exterior polygon readOpenBracket(); readShapeWkt(thisShapeIndex, nextToken); readCloseBracket(); if (wkt.charAt(currentWktPos) == ',') { // more polygons to follow readComma(); } else if (wkt.charAt(currentWktPos) == ')') { // about to exit while loop continue; } else { // unexpected input throw new IllegalArgumentException(); } } break; case "COMPOUNDCURVE": shapeList.add(new Shape(parentShapeIndex, figureList.size(), isd.getTypeCode())); figureList.add(new Figure(FA_COMPOSITE_CURVE, pointList.size())); readCompoundCurveWkt(true); break; case "CURVEPOLYGON": shapeList.add(new Shape(parentShapeIndex, figureList.size(), isd.getTypeCode())); while (currentWktPos < wkt.length() && wkt.charAt(currentWktPos) != ')') { nextPotentialToken = getNextStringToken().toUpperCase(Locale.US); if (nextPotentialToken.equals("CIRCULARSTRING")) { figureList.add(new Figure(FA_ARC, pointList.size())); readOpenBracket(); readLineWkt(); readCloseBracket(); } else if (nextPotentialToken.equals("COMPOUNDCURVE")) { figureList.add(new Figure(FA_COMPOSITE_CURVE, pointList.size())); readOpenBracket(); readCompoundCurveWkt(true); readCloseBracket(); } else if (wkt.charAt(currentWktPos) == '(') { //LineString figureList.add(new 
Figure(FA_LINE, pointList.size())); readOpenBracket(); readLineWkt(); readCloseBracket(); } else { throw new IllegalArgumentException(); } if (wkt.charAt(currentWktPos) == ',') { // more polygons to follow readComma(); } else if (wkt.charAt(currentWktPos) == ')') { // about to exit while loop continue; } else { // unexpected input throw new IllegalArgumentException(); } } break; case "GEOMETRYCOLLECTION": thisShapeIndex = shapeList.size(); shapeList.add(new Shape(parentShapeIndex, figureList.size(), isd.getTypeCode())); parseWKTForSerialization(currentWktPos, thisShapeIndex, true); break; case "FULLGLOBE": break; default: break; } //all geometry methods return when the depth reaches 0. ( gives + 1 depth and ) takes away 1 depth. readCloseBracket(); } populateStructures(); } private void readCompoundCurveWkt(boolean isFirstIteration) { while (currentWktPos < wkt.length() && wkt.charAt(currentWktPos) != ')') { String nextPotentialToken = getNextStringToken().toUpperCase(Locale.US); if (nextPotentialToken.equals("CIRCULARSTRING")) { readOpenBracket(); readSegmentWkt(SEGMENT_FIRST_ARC, isFirstIteration); readCloseBracket(); } else if (wkt.charAt(currentWktPos) == '(') {//LineString readOpenBracket(); readSegmentWkt(SEGMENT_FIRST_LINE, isFirstIteration); readCloseBracket(); } else { throw new IllegalArgumentException(); } isFirstIteration = false; if (wkt.charAt(currentWktPos) == ',') { // more polygons to follow readComma(); } else if (wkt.charAt(currentWktPos) == ')') { // about to exit while loop continue; } else { // unexpected input throw new IllegalArgumentException(); } } } private void readPointWkt() { int numOfCoordinates = 0; double sign; double coords[] = new double[4]; while (numOfCoordinates < 4) { sign = 1; if (wkt.charAt(currentWktPos) == '-') { sign = -1; currentWktPos++; } int startPos = currentWktPos; if (wkt.charAt(currentWktPos) == ')') { break; } while (currentWktPos < wkt.length() && (Character.isDigit(wkt.charAt(currentWktPos)) || 
wkt.charAt(currentWktPos) == '.' || wkt.charAt(currentWktPos) == 'E' || wkt.charAt(currentWktPos) == 'e')) { currentWktPos++; } try { coords[numOfCoordinates] = sign * new BigDecimal(wkt.substring(startPos, currentWktPos)).doubleValue(); } catch (Exception e) { //modify to conversion exception throw new IllegalArgumentException(); } skipWhiteSpaces(); if (wkt.charAt(currentWktPos) == ',') { currentWktPos++; skipWhiteSpaces(); numOfCoordinates++; break; } skipWhiteSpaces(); numOfCoordinates++; } if (numOfCoordinates == 4) { hasZvalues = true; hasMvalues = true; } else if (numOfCoordinates == 3) { hasZvalues = true; } pointList.add(new Point(coords[0], coords[1], coords[2], coords[3])); } private void readLineWkt() { while (currentWktPos < wkt.length() && wkt.charAt(currentWktPos) != ')') { readPointWkt(); } } private void readShapeWkt(int parentShapeIndex, String nextToken) { byte fa = FA_POINT; while (currentWktPos < wkt.length() && wkt.charAt(currentWktPos) != ')') { if (nextToken.equals("MULTIPOINT")) { shapeList.add(new Shape(parentShapeIndex, figureList.size(), InternalSpatialDatatype.POINT.getTypeCode())); } else if (nextToken.equals("MULTILINESTRING")) { shapeList.add(new Shape(parentShapeIndex, figureList.size(), InternalSpatialDatatype.LINESTRING.getTypeCode())); } if (version == 1) { if (nextToken.equals("MULTIPOINT")) { fa = FA_STROKE; } else if (nextToken.equals("MULTILINESTRING") || nextToken.equals("POLYGON")) { fa = FA_EXTERIOR_RING; } version_one_shape_indexes.add(figureList.size()); } else if (version == 2) { if (nextToken.equals("MULTIPOINT") || nextToken.equals("MULTILINESTRING") || nextToken.equals("POLYGON") || nextToken.equals("MULTIPOLYGON")) { fa = FA_LINE; } } figureList.add(new Figure(fa, pointList.size())); readOpenBracket(); readLineWkt(); readCloseBracket(); skipWhiteSpaces(); if (wkt.charAt(currentWktPos) == ',') { // more rings to follow readComma(); } else if (wkt.charAt(currentWktPos) == ')') { // about to exit while loop continue; } 
else { // unexpected input throw new IllegalArgumentException(); } } } private void readSegmentWkt(int segmentType, boolean isFirstIteration) { segmentList.add(new Segment((byte) segmentType)); int segmentLength = segmentType; // under 2 means 0 or 1 (possible values). 0 (line) has 1 point, and 1 (arc) has 2 points, so increment by one if (segmentLength < 2) { segmentLength++; } for (int i = 0; i < segmentLength; i++) { //If a segment type of 2 (first line) or 3 (first arc) is not from the very first iteration of the while loop, //then the first point has to be a duplicate point from the previous segment, so skip the first point. if (i == 0 && !isFirstIteration && segmentType >= 2) { skipFirstPointWkt(); } else { readPointWkt(); } } if (currentWktPos < wkt.length() && wkt.charAt(currentWktPos) != ')') { if (segmentType == SEGMENT_FIRST_ARC || segmentType == SEGMENT_ARC) { readSegmentWkt(SEGMENT_ARC, false); } else if (segmentType == SEGMENT_FIRST_LINE | segmentType == SEGMENT_LINE) { readSegmentWkt(SEGMENT_LINE, false); } } } private void skipFirstPointWkt() { int numOfCoordinates = 0; while (numOfCoordinates < 4) { if (wkt.charAt(currentWktPos) == '-') { currentWktPos++; } if (wkt.charAt(currentWktPos) == ')') { break; } while (currentWktPos < wkt.length() && (Character.isDigit(wkt.charAt(currentWktPos)) || wkt.charAt(currentWktPos) == '.' 
|| wkt.charAt(currentWktPos) == 'E' || wkt.charAt(currentWktPos) == 'e')) { currentWktPos++; } skipWhiteSpaces(); if (wkt.charAt(currentWktPos) == ',') { currentWktPos++; skipWhiteSpaces(); numOfCoordinates++; break; } skipWhiteSpaces(); numOfCoordinates++; } } private void readOpenBracket() { skipWhiteSpaces(); if (wkt.charAt(currentWktPos) == '(') { currentWktPos++; skipWhiteSpaces(); } else { throw new IllegalArgumentException(); } } private void readCloseBracket() { skipWhiteSpaces(); if (wkt.charAt(currentWktPos) == ')') { currentWktPos++; skipWhiteSpaces(); } else { throw new IllegalArgumentException(); } } private void readComma() { skipWhiteSpaces(); if (wkt.charAt(currentWktPos) == ',') { currentWktPos++; skipWhiteSpaces(); } else { throw new IllegalArgumentException(); } } private boolean hasMoreToken() { skipWhiteSpaces(); return currentWktPos < wkt.length(); } private void skipWhiteSpaces() { while (currentWktPos < wkt.length() && Character.isWhitespace(wkt.charAt(currentWktPos))) { currentWktPos++; } } private String getNextStringToken() { skipWhiteSpaces(); int endIndex = currentWktPos; while (endIndex < wkt.length() && Character.isLetter(wkt.charAt(endIndex))) { endIndex++; } int temp = currentWktPos; currentWktPos = endIndex; skipWhiteSpaces(); return wkt.substring(temp, endIndex); } private void populateStructures() { if (pointList.size() > 0) { points = new double[pointList.size() * 2]; for (int i = 0; i < pointList.size(); i++) { points[i * 2] = pointList.get(i).getX(); points[i * 2 + 1] = pointList.get(i).getY(); } if (hasZvalues) { zValues = new double[pointList.size()]; for (int i = 0; i < pointList.size(); i++) { zValues[i] = pointList.get(i).getZ(); } } if (hasMvalues) { mValues = new double[pointList.size()]; for (int i = 0; i < pointList.size(); i++) { mValues[i] = pointList.get(i).getM(); } } } // if version is 2, then we need to check for potential shapes (polygon & multi-shapes) that were // given their figure attributes as if it was 
// version 1, since we don't know what would be the version of the
    // geometry/geography before we parse the entire WKT.
    if (version == 2) {
        for (int i = 0; i < version_one_shape_indexes.size(); i++) {
            figureList.get(version_one_shape_indexes.get(i)).setFiguresAttribute((byte) 1);
        }
    }

    // Copy the accumulated lists into the flat arrays used by the serializer.
    if (figureList.size() > 0) {
        figures = new Figure[figureList.size()];
        for (int i = 0; i < figureList.size(); i++) {
            figures[i] = figureList.get(i);
        }
    }

    if (shapeList.size() > 0) {
        shapes = new Shape[shapeList.size()];
        for (int i = 0; i < shapeList.size(); i++) {
            shapes[i] = shapeList.get(i);
        }
    }

    if (segmentList.size() > 0) {
        segments = new Segment[segmentList.size()];
        for (int i = 0; i < segmentList.size(); i++) {
            segments[i] = segmentList.get(i);
        }
    }

    numberOfPoints = pointList.size();
    numberOfFigures = figureList.size();
    numberOfShapes = shapeList.size();
    numberOfSegments = segmentList.size();
}
}

/**
 * A figure record of the spatial serialization format: an attribute byte describing the
 * figure's role (version dependent) and the index of the figure's first point in the
 * flat points array.
 */
class Figure {
    private byte figuresAttribute;
    private int pointOffset;

    Figure(byte figuresAttribute, int pointOffset) {
        this.figuresAttribute = figuresAttribute;
        this.pointOffset = pointOffset;
    }

    public byte getFiguresAttribute() {
        return figuresAttribute;
    }

    public int getPointOffset() {
        return pointOffset;
    }

    // Mutable: the attribute is rewritten after WKT parsing when the serialization
    // version turns out to be 2 (see populateStructures).
    public void setFiguresAttribute(byte fa) {
        figuresAttribute = fa;
    }
}

/**
 * A shape record: the index of the parent shape, the index of the shape's first figure,
 * and its OpenGIS type code.
 */
class Shape {
    private int parentOffset;
    private int figureOffset;
    private byte openGISType;

    Shape(int parentOffset, int figureOffset, byte openGISType) {
        this.parentOffset = parentOffset;
        this.figureOffset = figureOffset;
        this.openGISType = openGISType;
    }

    public int getParentOffset() {
        return parentOffset;
    }

    public int getFigureOffset() {
        return figureOffset;
    }

    public byte getOpenGISType() {
        return openGISType;
    }
}

/** A segment record: the type byte of one compound-curve segment. */
class Segment {
    private byte segmentType;

    Segment(byte segmentType) {
        this.segmentType = segmentType;
    }

    public byte getSegmentType() {
        return segmentType;
    }
}

/** An immutable x/y/z/m coordinate tuple collected while parsing WKT. */
class Point {
    private final double x;
    private final double y;
    private final double z;
    private final double m;

    Point(double x, double y, double z, double m) {
        this.x = x;
        this.y = y;
        this.z = z;
        this.m = m;
    }

    public double getX() {
        return x;
    }

    public double getY() {
        return y;
    }

    public double getZ() {
        return z;
    }

    public double getM() {
        return m;
    }
}
package techreborn.init; import ic2.api.item.IC2Items; import net.minecraft.init.Blocks; import net.minecraft.init.Items; import net.minecraft.item.ItemStack; import net.minecraftforge.fluids.FluidRegistry; import net.minecraftforge.fluids.FluidStack; import net.minecraftforge.oredict.OreDictionary; import techreborn.api.BlastFurnaceRecipe; import techreborn.api.TechRebornAPI; import techreborn.api.recipe.RecipeHandler; import techreborn.api.recipe.machines.AlloySmelterRecipe; import techreborn.api.recipe.machines.AssemblingMachineRecipe; import techreborn.api.recipe.machines.CentrifugeRecipe; import techreborn.api.recipe.machines.ChemicalReactorRecipe; import techreborn.api.recipe.machines.GrinderRecipe; import techreborn.api.recipe.machines.ImplosionCompressorRecipe; import techreborn.api.recipe.machines.IndustrialSawmillRecipe; import techreborn.api.recipe.machines.LatheRecipe; import techreborn.api.recipe.machines.PlateCuttingMachineRecipe; import techreborn.config.ConfigTechReborn; import techreborn.items.*; import techreborn.util.CraftingHelper; import techreborn.util.LogHelper; import cpw.mods.fml.common.registry.GameRegistry; public class ModRecipes { public static ConfigTechReborn config; public static void init() { addShaplessRecipes(); addShappedRecipes(); addSmeltingRecipes(); addAlloySmelterRecipes(); addLatheRecipes(); addPlateCuttingMachineRecipes(); addUUrecipes(); addHammerRecipes(); addIndustrialSawmillRecipes(); } public static void addShappedRecipes() { // Storage Blocks CraftingHelper.addShapedOreRecipe(new ItemStack(ModBlocks.storage, 1, 0), "AAA", "AAA", "AAA", 'A', "ingotSilver"); CraftingHelper.addShapedOreRecipe(new ItemStack(ModBlocks.storage, 1, 1), "AAA", "AAA", "AAA", 'A', "ingotAluminium"); CraftingHelper.addShapedOreRecipe(new ItemStack(ModBlocks.storage, 1, 2), "AAA", "AAA", "AAA", 'A', "ingotTitanium"); CraftingHelper.addShapedOreRecipe(new ItemStack(ModBlocks.storage, 1, 3), "AAA", "AAA", "AAA", 'A', "gemSapphire"); 
CraftingHelper.addShapedOreRecipe(new ItemStack(ModBlocks.storage, 1, 4), "AAA", "AAA", "AAA", 'A', "gemRuby"); CraftingHelper.addShapedOreRecipe(new ItemStack(ModBlocks.storage, 1, 5), "AAA", "AAA", "AAA", 'A', "gemGreenSapphire"); CraftingHelper.addShapedOreRecipe(new ItemStack(ModBlocks.storage, 1, 6), "AAA", "AAA", "AAA", 'A', "ingotChrome"); CraftingHelper.addShapedOreRecipe(new ItemStack(ModBlocks.storage, 1, 7), "AAA", "AAA", "AAA", 'A', "ingotElectrum"); CraftingHelper.addShapedOreRecipe(new ItemStack(ModBlocks.storage, 1, 8), "AAA", "AAA", "AAA", 'A', "ingotTungsten"); CraftingHelper.addShapedOreRecipe(new ItemStack(ModBlocks.storage, 1, 9), "AAA", "AAA", "AAA", 'A', "ingotLead"); CraftingHelper.addShapedOreRecipe(new ItemStack(ModBlocks.storage, 1, 10), "AAA", "AAA", "AAA", 'A', "ingotZinc"); CraftingHelper.addShapedOreRecipe(new ItemStack(ModBlocks.storage, 1, 11), "AAA", "AAA", "AAA", 'A', "ingotBrass"); CraftingHelper.addShapedOreRecipe(new ItemStack(ModBlocks.storage, 1, 12), "AAA", "AAA", "AAA", 'A', "ingotSteel"); CraftingHelper.addShapedOreRecipe(new ItemStack(ModBlocks.storage, 1, 13), "AAA", "AAA", "AAA", 'A', "ingotPlatinum"); CraftingHelper.addShapedOreRecipe(new ItemStack(ModBlocks.storage, 1, 14), "AAA", "AAA", "AAA", 'A', "ingotNickel"); CraftingHelper.addShapedOreRecipe(new ItemStack(ModBlocks.storage, 1, 15), "AAA", "AAA", "AAA", 'A', "ingotInvar"); LogHelper.info("Shapped Recipes Added"); } public static void addShaplessRecipes() { CraftingHelper.addShapelessOreRecipe(new ItemStack(ModItems.ingots, 9, 4), "blockSilver"); CraftingHelper.addShapelessOreRecipe(new ItemStack(ModItems.ingots, 9, 5), "blockAluminium"); CraftingHelper.addShapelessOreRecipe(new ItemStack(ModItems.ingots, 9, 6), "blockTitanium"); CraftingHelper.addShapelessOreRecipe(new ItemStack(ModItems.gems, 9, 1), "blockSapphire"); CraftingHelper.addShapelessOreRecipe(new ItemStack(ModItems.gems, 9, 0), "blockRuby"); CraftingHelper.addShapelessOreRecipe(new 
ItemStack(ModItems.gems, 9, 2), "blockGreenSapphire"); CraftingHelper.addShapelessOreRecipe(new ItemStack(ModItems.ingots, 9, 7), "blockChrome"); CraftingHelper.addShapelessOreRecipe(new ItemStack(ModItems.ingots, 9, 8), "blockElectrum"); CraftingHelper.addShapelessOreRecipe(new ItemStack(ModItems.ingots, 9, 9), "blockTungsten"); CraftingHelper.addShapelessOreRecipe(new ItemStack(ModItems.ingots, 9, 10), "blockLead"); CraftingHelper.addShapelessOreRecipe(new ItemStack(ModItems.ingots, 9, 11), "blockZinc"); CraftingHelper.addShapelessOreRecipe(new ItemStack(ModItems.ingots, 9, 12), "blockBrass"); CraftingHelper.addShapelessOreRecipe(new ItemStack(ModItems.ingots, 9, 13), "blockSteel"); CraftingHelper.addShapelessOreRecipe(new ItemStack(ModItems.ingots, 9, 14), "blockPlatinum"); CraftingHelper.addShapelessOreRecipe(new ItemStack(ModItems.ingots, 9, 15), "blockNickel"); CraftingHelper.addShapelessOreRecipe(new ItemStack(ModItems.ingots, 9, 16), "blockInvar"); CraftingHelper.addShapelessOreRecipe(new ItemStack(ModItems.rockCutter, 1, 27), Items.apple); LogHelper.info("Shapless Recipes Added"); } public static void addSmeltingRecipes() { GameRegistry.addSmelting(new ItemStack(ModItems.dusts, 1, 27), new ItemStack(Items.iron_ingot), 1F); GameRegistry.addSmelting(new ItemStack(ModItems.dusts, 1, 23), new ItemStack(Items.gold_ingot), 1F); GameRegistry.addSmelting(new ItemStack(ModItems.dusts, 1, 14), IC2Items.getItem("copperIngot"), 1F); GameRegistry.addSmelting(new ItemStack(ModItems.dusts, 1, 51), IC2Items.getItem("tinIngot"), 1F); GameRegistry.addSmelting(new ItemStack(ModItems.dusts, 1, 7), IC2Items.getItem("bronzeIngot"), 1F); GameRegistry.addSmelting(new ItemStack(ModItems.dusts, 1, 29), IC2Items.getItem("leadIngot"), 1F); GameRegistry.addSmelting(new ItemStack(ModItems.dusts, 1, 45), IC2Items.getItem("silverIngot"), 1F); LogHelper.info("Smelting Recipes Added"); } public static void addHammerRecipes(){ ItemStack hammerIron = new ItemStack(ModItems.hammerIron, 1, 
OreDictionary.WILDCARD_VALUE); ItemStack hammerDiamond = new ItemStack(ModItems.hammerDiamond, 1, OreDictionary.WILDCARD_VALUE); GameRegistry.addShapelessRecipe(new ItemStack(ModItems.plate, 1, 13), hammerIron, new ItemStack(Items.iron_ingot)); } public static void addAlloySmelterRecipes(){ //Bronze RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemIngots.getIngotByName("copper", 3), ItemIngots.getIngotByName("tin", 1), ItemIngots.getIngotByName("bronze", 4), 200, 16)); RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemIngots.getIngotByName("copper", 3), ItemDusts.getDustByName("tin", 1), ItemIngots.getIngotByName("bronze", 4), 200, 16)); RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemDusts.getDustByName("copper", 3), ItemIngots.getIngotByName("tin", 1), ItemIngots.getIngotByName("bronze", 4), 200, 16)); RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemDusts.getDustByName("copper", 3), ItemDusts.getDustByName("tin", 1), ItemIngots.getIngotByName("bronze", 4), 200, 16)); //Electrum RecipeHandler.addRecipe(new AlloySmelterRecipe(new ItemStack(Items.gold_ingot, 1), ItemIngots.getIngotByName("silver", 1), ItemIngots.getIngotByName("electrum", 2), 200, 16)); RecipeHandler.addRecipe(new AlloySmelterRecipe(new ItemStack(Items.gold_ingot, 1), ItemDusts.getDustByName("silver", 1), ItemIngots.getIngotByName("electrum", 2), 200, 16)); RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemDusts.getDustByName("gold", 1), ItemIngots.getIngotByName("silver", 1), ItemIngots.getIngotByName("electrum", 2), 200, 16)); RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemDusts.getDustByName("gold", 1), ItemDusts.getDustByName("silver", 1), ItemIngots.getIngotByName("electrum", 2), 200, 16)); //Invar RecipeHandler.addRecipe(new AlloySmelterRecipe(new ItemStack(Items.iron_ingot, 2), ItemIngots.getIngotByName("nickel", 1), ItemIngots.getIngotByName("invar", 3), 200, 16)); RecipeHandler.addRecipe(new AlloySmelterRecipe(new ItemStack(Items.iron_ingot, 2), 
ItemDusts.getDustByName("nickel", 1), ItemIngots.getIngotByName("invar", 3), 200, 16)); RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemDusts.getDustByName("iron", 2), ItemIngots.getIngotByName("nickel", 1), ItemIngots.getIngotByName("invar", 3), 200, 16)); RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemDusts.getDustByName("iron", 2), ItemDusts.getDustByName("nickel", 1), ItemIngots.getIngotByName("invar", 3), 200, 16)); //Cupronickel RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemIngots.getIngotByName("copper", 1), ItemIngots.getIngotByName("nickel", 1), ItemIngots.getIngotByName("cupronickel", 2), 200, 16)); RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemIngots.getIngotByName("copper", 1), ItemDusts.getDustByName("nickel", 1), ItemIngots.getIngotByName("cupronickel", 2), 200, 16)); RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemDusts.getDustByName("copper", 1), ItemIngots.getIngotByName("nickel", 1), ItemIngots.getIngotByName("cupronickel", 2), 200, 16)); RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemDusts.getDustByName("copper", 1), ItemDusts.getDustByName("nickel", 1), ItemIngots.getIngotByName("cupronickel", 2), 200, 16)); //Nichrome RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemIngots.getIngotByName("chrome", 1), ItemIngots.getIngotByName("nickel", 4), ItemIngots.getIngotByName("nichrome", 5), 200, 16)); RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemIngots.getIngotByName("chrome", 1), ItemDusts.getDustByName("nickel", 4), ItemIngots.getIngotByName("nichrome", 5), 200, 16)); RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemDusts.getDustByName("chrome", 1), ItemIngots.getIngotByName("nickel", 4), ItemIngots.getIngotByName("nichrome", 5), 200, 16)); RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemDusts.getDustByName("chrome", 1), ItemDusts.getDustByName("nickel", 4), ItemIngots.getIngotByName("nichrome", 5), 200, 16)); //Magnalium RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemDusts.getDustByName("magnesium", 1), 
ItemIngots.getIngotByName("aluminum", 4), ItemIngots.getIngotByName("magnalium", 3), 200, 16)); RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemDusts.getDustByName("magnesium", 1), ItemDusts.getDustByName("aluminum", 4), ItemIngots.getIngotByName("magnalium", 3), 200, 16)); //Battery Alloy RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemIngots.getIngotByName("lead", 4), ItemIngots.getIngotByName("antimony", 1), ItemIngots.getIngotByName("batteryAlloy", 5), 200, 16)); RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemIngots.getIngotByName("lead", 4), ItemDusts.getDustByName("antimony", 1), ItemIngots.getIngotByName("batteryAlloy", 5), 200, 16)); RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemDusts.getDustByName("lead", 4), ItemIngots.getIngotByName("antimony", 1), ItemIngots.getIngotByName("batteryAlloy", 5), 200, 16)); RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemDusts.getDustByName("lead", 4), ItemDusts.getDustByName("antimony", 1), ItemIngots.getIngotByName("batteryAlloy", 5), 200, 16)); //Brass if(OreDictionary.doesOreNameExist("ingotBrass")) { ItemStack brassStack = OreDictionary.getOres("ingotBrass").get(0); brassStack.stackSize = 4; RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemIngots.getIngotByName("copper", 3), ItemIngots.getIngotByName("zinc", 1), brassStack, 200, 16)); RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemIngots.getIngotByName("copper", 3), ItemDusts.getDustByName("zinc", 1), brassStack, 200, 16)); RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemDusts.getDustByName("copper", 3), ItemIngots.getIngotByName("zinc", 1), brassStack, 200, 16)); RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemDusts.getDustByName("copper", 3), ItemDusts.getDustByName("zinc", 1), brassStack, 200, 16)); } //Red Alloy if(OreDictionary.doesOreNameExist("ingotRedAlloy")) { ItemStack redAlloyStack = OreDictionary.getOres("ingotRedAlloy").get(0); redAlloyStack.stackSize = 1; RecipeHandler.addRecipe(new AlloySmelterRecipe(new 
ItemStack(Items.redstone, 4), ItemIngots.getIngotByName("copper", 1), redAlloyStack, 200, 16)); RecipeHandler.addRecipe(new AlloySmelterRecipe(new ItemStack(Items.redstone, 4), new ItemStack(Items.iron_ingot, 1), redAlloyStack, 200, 16)); } //Blue Alloy if(OreDictionary.doesOreNameExist("ingotBlueAlloy")) { ItemStack blueAlloyStack = OreDictionary.getOres("ingotBlueAlloy").get(0); blueAlloyStack.stackSize = 1; RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemDusts.getDustByName("teslatite", 4), ItemIngots.getIngotByName("silver", 1), blueAlloyStack, 200, 16)); } //Blue Alloy if(OreDictionary.doesOreNameExist("ingotPurpleAlloy") && OreDictionary.doesOreNameExist("dustInfusedTeslatite")) { ItemStack purpleAlloyStack = OreDictionary.getOres("ingotPurpleAlloy").get(0); purpleAlloyStack.stackSize = 1; ItemStack infusedTeslatiteStack = OreDictionary.getOres("ingotPurpleAlloy").get(0); infusedTeslatiteStack.stackSize = 8; RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemIngots.getIngotByName("redAlloy", 1), ItemIngots.getIngotByName("blueAlloy", 1), purpleAlloyStack, 200, 16)); RecipeHandler.addRecipe(new AlloySmelterRecipe(new ItemStack(Items.gold_ingot, 1), infusedTeslatiteStack, purpleAlloyStack, 200, 16)); } //Aluminum Brass if(OreDictionary.doesOreNameExist("ingotAluminumBrass")) { ItemStack aluminumBrassStack = OreDictionary.getOres("ingotAluminumBrass").get(0); aluminumBrassStack.stackSize = 4; RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemIngots.getIngotByName("copper", 3), ItemIngots.getIngotByName("aluminum", 1), aluminumBrassStack, 200, 16)); RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemIngots.getIngotByName("copper", 3), ItemDusts.getDustByName("aluminum", 1), aluminumBrassStack, 200, 16)); RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemDusts.getDustByName("copper", 3), ItemIngots.getIngotByName("aluminum", 1), aluminumBrassStack, 200, 16)); RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemDusts.getDustByName("copper", 3), 
ItemDusts.getDustByName("aluminum", 1), aluminumBrassStack, 200, 16)); } //Manyullyn if(OreDictionary.doesOreNameExist("ingotManyullyn") && OreDictionary.doesOreNameExist("ingotCobalt") && OreDictionary.doesOreNameExist("ingotArdite")) { ItemStack manyullynStack = OreDictionary.getOres("ingotManyullyn").get(0); manyullynStack.stackSize = 1; ItemStack cobaltStack = OreDictionary.getOres("ingotCobalt").get(0); cobaltStack.stackSize = 1; ItemStack arditeStack = OreDictionary.getOres("ingotArdite").get(0); arditeStack.stackSize = 1; RecipeHandler.addRecipe(new AlloySmelterRecipe(cobaltStack, arditeStack, manyullynStack, 200, 16)); RecipeHandler.addRecipe(new AlloySmelterRecipe(cobaltStack, ItemDusts.getDustByName("ardite", 1), manyullynStack, 200, 16)); RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemDusts.getDustByName("cobalt", 1), arditeStack, manyullynStack, 200, 16)); RecipeHandler.addRecipe(new AlloySmelterRecipe(ItemDusts.getDustByName("cobalt", 1), ItemDusts.getDustByName("ardite", 1), manyullynStack, 200, 16)); } //Conductive Iron if(OreDictionary.doesOreNameExist("ingotConductiveIron")) { ItemStack conductiveIronStack = OreDictionary.getOres("ingotConductiveIron").get(0); conductiveIronStack.stackSize = 1; RecipeHandler.addRecipe(new AlloySmelterRecipe(new ItemStack(Items.redstone, 1), new ItemStack(Items.iron_ingot, 1), conductiveIronStack, 200, 16)); } //Redstone Alloy if(OreDictionary.doesOreNameExist("ingotRedstoneAlloy") && OreDictionary.doesOreNameExist("itemSilicon")) { ItemStack redstoneAlloyStack = OreDictionary.getOres("ingotRedstoneAlloy").get(0); redstoneAlloyStack.stackSize = 1; ItemStack siliconStack = OreDictionary.getOres("itemSilicon").get(0); siliconStack.stackSize = 1; RecipeHandler.addRecipe(new AlloySmelterRecipe(new ItemStack(Items.redstone, 1), siliconStack, redstoneAlloyStack, 200, 16)); } //Pulsating Iron if(OreDictionary.doesOreNameExist("ingotPhasedIron")) { ItemStack pulsatingIronStack = 
OreDictionary.getOres("ingotPhasedIron").get(0); pulsatingIronStack.stackSize = 1; RecipeHandler.addRecipe(new AlloySmelterRecipe(new ItemStack(Items.iron_ingot, 1), new ItemStack(Items.ender_pearl, 1), pulsatingIronStack, 200, 16)); RecipeHandler.addRecipe(new AlloySmelterRecipe(new ItemStack(Items.iron_ingot, 1), ItemDusts.getDustByName("enderPearl", 1), pulsatingIronStack, 200, 16)); } //Vibrant Alloy if(OreDictionary.doesOreNameExist("ingotEnergeticAlloy") && OreDictionary.doesOreNameExist("ingotPhasedGold")) { ItemStack energeticAlloyStack = OreDictionary.getOres("ingotEnergeticAlloy").get(0); energeticAlloyStack.stackSize = 1; ItemStack vibrantAlloyStack = OreDictionary.getOres("ingotPhasedGold").get(0); vibrantAlloyStack.stackSize = 1; RecipeHandler.addRecipe(new AlloySmelterRecipe(energeticAlloyStack, new ItemStack(Items.ender_pearl, 1), vibrantAlloyStack, 200, 16)); RecipeHandler.addRecipe(new AlloySmelterRecipe(energeticAlloyStack, ItemDusts.getDustByName("enderPearl", 1), vibrantAlloyStack, 200, 16)); } //Soularium if(OreDictionary.doesOreNameExist("ingotSoularium")) { ItemStack soulariumStack = OreDictionary.getOres("ingotSoularium").get(0); soulariumStack.stackSize = 1; RecipeHandler.addRecipe(new AlloySmelterRecipe(new ItemStack(Blocks.soul_sand, 1), new ItemStack(Items.gold_ingot, 1), soulariumStack, 200, 16)); } } public static void addLatheRecipes() { //Metal Rods RecipeHandler.addRecipe(new LatheRecipe(ItemIngots.getIngotByName("brass", 1), ItemRods.getRodByName("brass", 1), 300, 16)); RecipeHandler.addRecipe(new LatheRecipe(ItemIngots.getIngotByName("bronze", 1), ItemRods.getRodByName("bronze", 1), 380, 16)); RecipeHandler.addRecipe(new LatheRecipe(ItemIngots.getIngotByName("copper", 1), ItemRods.getRodByName("copper", 1), 300, 16)); RecipeHandler.addRecipe(new LatheRecipe(ItemIngots.getIngotByName("electrum", 1), ItemRods.getRodByName("electrum", 1), 740, 16)); RecipeHandler.addRecipe(new LatheRecipe(new ItemStack(Items.gold_ingot), 
ItemRods.getRodByName("gold", 1), 980, 16)); RecipeHandler.addRecipe(new LatheRecipe(ItemIngots.getIngotByName("invar", 1), ItemRods.getRodByName("invar", 1), 280, 16)); RecipeHandler.addRecipe(new LatheRecipe(ItemIngots.getIngotByName("iridium", 1), ItemRods.getRodByName("iridium", 1), 960, 16)); RecipeHandler.addRecipe(new LatheRecipe(new ItemStack(Items.iron_ingot), ItemRods.getRodByName("iron", 1), 280, 16)); RecipeHandler.addRecipe(new LatheRecipe(ItemIngots.getIngotByName("lead", 1), ItemRods.getRodByName("lead", 1), 1020, 16)); RecipeHandler.addRecipe(new LatheRecipe(ItemIngots.getIngotByName("nickel", 1), ItemRods.getRodByName("nickel", 1), 280, 16)); RecipeHandler.addRecipe(new LatheRecipe(ItemIngots.getIngotByName("platinum", 1), ItemRods.getRodByName("platinum", 1), 960, 16)); RecipeHandler.addRecipe(new LatheRecipe(ItemIngots.getIngotByName("silver", 1), ItemRods.getRodByName("silver", 1), 520, 16)); RecipeHandler.addRecipe(new LatheRecipe(ItemIngots.getIngotByName("steel", 1), ItemRods.getRodByName("steel", 1), 280, 16)); RecipeHandler.addRecipe(new LatheRecipe(ItemIngots.getIngotByName("tin", 1), ItemRods.getRodByName("tin", 1), 580, 16)); RecipeHandler.addRecipe(new LatheRecipe(ItemIngots.getIngotByName("titanium", 1), ItemRods.getRodByName("titanium", 1), 240, 16)); RecipeHandler.addRecipe(new LatheRecipe(ItemIngots.getIngotByName("tungstensteel", 1), ItemRods.getRodByName("tungstensteel", 1), 580, 16)); //Laser Focus RecipeHandler.addRecipe(new LatheRecipe(ItemPlates.getPlateByName("ruby", 1), ItemParts.getPartByName("laserFocus", 1), 10, 16)); RecipeHandler.addRecipe(new LatheRecipe(ItemPlates.getPlateByName("redGarnet", 1), ItemParts.getPartByName("laserFocus", 1), 10, 16)); } public static void addPlateCuttingMachineRecipes() { //Storage Blocks if(OreDictionary.doesOreNameExist("blockAluminum")) { ItemStack blockStack = OreDictionary.getOres("blockAluminum").get(0); RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(blockStack, 
ItemPlates.getPlateByName("aluminum", 9), 200, 16)); } if(OreDictionary.doesOreNameExist("blockBrass")) { ItemStack blockStack = OreDictionary.getOres("blockBrass").get(0); RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(blockStack, ItemPlates.getPlateByName("brass", 9), 200, 116)); } if(OreDictionary.doesOreNameExist("blockBronze")) { ItemStack blockStack = OreDictionary.getOres("blockBronze").get(0); RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(blockStack, ItemPlates.getPlateByName("bronze", 9), 200, 16)); } if(OreDictionary.doesOreNameExist("blockCoal")) { ItemStack blockStack = OreDictionary.getOres("blockCoal").get(0); RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(blockStack, ItemPlates.getPlateByName("carbon", 9), 200, 16)); } if(OreDictionary.doesOreNameExist("blockChrome")) { ItemStack blockStack = OreDictionary.getOres("blockChrome").get(0); RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(blockStack, ItemPlates.getPlateByName("chrome", 9), 200, 16)); } if(OreDictionary.doesOreNameExist("blockCopper")) { ItemStack blockStack = OreDictionary.getOres("blockCopper").get(0); RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(blockStack, ItemPlates.getPlateByName("copper", 9), 200, 16)); } if(OreDictionary.doesOreNameExist("blockDiamond")) { ItemStack blockStack = OreDictionary.getOres("blockDiamond").get(0); RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(blockStack, ItemPlates.getPlateByName("diamond", 9), 200, 16)); } if(OreDictionary.doesOreNameExist("blockElectrum")) { ItemStack blockStack = OreDictionary.getOres("blockElectrum").get(0); RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(blockStack, ItemPlates.getPlateByName("electrum", 9), 200, 16)); } if(OreDictionary.doesOreNameExist("blockEmerald")) { ItemStack blockStack = OreDictionary.getOres("blockEmerald").get(0); RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(blockStack, ItemPlates.getPlateByName("emerald", 9), 200, 16)); } 
if(OreDictionary.doesOreNameExist("blockGold")) { ItemStack blockStack = OreDictionary.getOres("blockGold").get(0); RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(blockStack, ItemPlates.getPlateByName("gold", 9), 200, 16)); } if(OreDictionary.doesOreNameExist("blockInvar")) { ItemStack blockStack = OreDictionary.getOres("blockInvar").get(0); RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(blockStack, ItemPlates.getPlateByName("invar", 9), 200, 16)); } if(OreDictionary.doesOreNameExist("blockIridium")) { ItemStack blockStack = OreDictionary.getOres("blockIridium").get(0); RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(blockStack, ItemPlates.getPlateByName("iridium", 9), 200, 16)); } if(OreDictionary.doesOreNameExist("blockIron")) { ItemStack blockStack = OreDictionary.getOres("blockIron").get(0); RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(blockStack, ItemPlates.getPlateByName("iron", 9), 200, 16)); } if(OreDictionary.doesOreNameExist("blockLapis")) { ItemStack blockStack = OreDictionary.getOres("blockLapis").get(0); RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(blockStack, ItemPlates.getPlateByName("lapis", 9), 200, 16)); } if(OreDictionary.doesOreNameExist("blockLead")) { ItemStack blockStack = OreDictionary.getOres("blockLead").get(0); RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(blockStack, ItemPlates.getPlateByName("lead", 9), 200, 16)); } if(OreDictionary.doesOreNameExist("blockNickel")) { ItemStack blockStack = OreDictionary.getOres("blockNickel").get(0); RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(blockStack, ItemPlates.getPlateByName("nickel", 9), 200, 16)); } if(OreDictionary.doesOreNameExist("blockOsmium")) { ItemStack blockStack = OreDictionary.getOres("blockOsmium").get(0); RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(blockStack, ItemPlates.getPlateByName("osmium", 9), 200, 16)); } if(OreDictionary.doesOreNameExist("blockPeridot")) { ItemStack blockStack = 
OreDictionary.getOres("blockPeridot").get(0); RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(blockStack, ItemPlates.getPlateByName("peridot", 9), 200, 16)); } if(OreDictionary.doesOreNameExist("blockPlatinum")) { ItemStack blockStack = OreDictionary.getOres("blockPlatinum").get(0); RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(blockStack, ItemPlates.getPlateByName("platinum", 9), 200, 16)); } if(OreDictionary.doesOreNameExist("blockRedGarnet")) { ItemStack blockStack = OreDictionary.getOres("blockRedGarnet").get(0); RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(blockStack, ItemPlates.getPlateByName("redGarnet", 9), 200, 16)); } if(OreDictionary.doesOreNameExist("BlockRedstone")) { ItemStack blockStack = OreDictionary.getOres("blockRedstone").get(0); RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(blockStack, ItemPlates.getPlateByName("redstone", 9), 200, 16)); } if(OreDictionary.doesOreNameExist("blockRuby")) { ItemStack blockStack = OreDictionary.getOres("blockRuby").get(0); RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(blockStack, ItemPlates.getPlateByName("ruby", 9), 200, 16)); } if(OreDictionary.doesOreNameExist("blockSapphire")) { ItemStack blockStack = OreDictionary.getOres("blockSapphire").get(0); RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(blockStack, ItemPlates.getPlateByName("sapphire", 9), 200, 16)); } if(OreDictionary.doesOreNameExist("blockSilver")) { ItemStack blockStack = OreDictionary.getOres("blockSilver").get(0); RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(blockStack, ItemPlates.getPlateByName("silver", 9), 200, 16)); } if(OreDictionary.doesOreNameExist("blockSteel")) { ItemStack blockStack = OreDictionary.getOres("blockSteel").get(0); RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(blockStack, ItemPlates.getPlateByName("steel", 9), 200, 16)); } if(OreDictionary.doesOreNameExist("blockTeslatite")) { ItemStack blockStack = OreDictionary.getOres("blockTeslatite").get(0); 
RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(blockStack, ItemPlates.getPlateByName("teslatite", 9), 200, 16)); } if(OreDictionary.doesOreNameExist("blockTin")) { ItemStack blockStack = OreDictionary.getOres("blockTin").get(0); RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(blockStack, ItemPlates.getPlateByName("tin", 9), 200, 16)); } if(OreDictionary.doesOreNameExist("blockTitanium")) { ItemStack blockStack = OreDictionary.getOres("blockTitanium").get(0); RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(blockStack, ItemPlates.getPlateByName("titanium", 9), 200, 16)); } if(OreDictionary.doesOreNameExist("blockTungsten")) { ItemStack blockStack = OreDictionary.getOres("blockTungsten").get(0); RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(blockStack, ItemPlates.getPlateByName("tungsten", 9), 200, 16)); } if(OreDictionary.doesOreNameExist("blockTungstensteel")) { ItemStack blockStack = OreDictionary.getOres("blockTungstensteel").get(0); RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(blockStack, ItemPlates.getPlateByName("tungstensteel", 9), 200, 16)); } if(OreDictionary.doesOreNameExist("blockYellowGarnet")) { ItemStack blockStack = OreDictionary.getOres("blockYellowGarnet").get(0); RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(blockStack, ItemPlates.getPlateByName("yellowGarnet", 9), 200, 16)); } if(OreDictionary.doesOreNameExist("blockZinc")) { ItemStack blockStack = OreDictionary.getOres("blockZinc").get(0); RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(blockStack, ItemPlates.getPlateByName("zinc", 9), 200, 16)); } //Obsidian RecipeHandler.addRecipe(new PlateCuttingMachineRecipe(new ItemStack(Blocks.obsidian), ItemPlates.getPlateByName("obsidian", 9), 100, 4)); } public static void addIndustrialSawmillRecipes() { ItemStack pulpStack = OreDictionary.getOres("pulpWood").get(0); RecipeHandler.addRecipe(new IndustrialSawmillRecipe(new ItemStack(Blocks.log, 1, 0), null, new FluidStack(FluidRegistry.WATER, 1000), new 
ItemStack(Blocks.planks, 6, 0), pulpStack, null, 200, 30, false)); RecipeHandler.addRecipe(new IndustrialSawmillRecipe(new ItemStack(Blocks.log, 1, 0), IC2Items.getItem("waterCell"), null, new ItemStack(Blocks.planks, 6, 0), pulpStack, IC2Items.getItem("cell"), 200, 30, false)); RecipeHandler.addRecipe(new IndustrialSawmillRecipe(new ItemStack(Blocks.log, 1, 0), new ItemStack(Items.water_bucket), null, new ItemStack(Blocks.planks, 6, 0), pulpStack, new ItemStack(Items.bucket), 200, 30, false)); RecipeHandler.addRecipe(new IndustrialSawmillRecipe(new ItemStack(Blocks.log, 1, 1), null, new FluidStack(FluidRegistry.WATER, 1000), new ItemStack(Blocks.planks, 6, 1), pulpStack, null, 200, 30, false)); RecipeHandler.addRecipe(new IndustrialSawmillRecipe(new ItemStack(Blocks.log, 1, 1), IC2Items.getItem("waterCell"), null, new ItemStack(Blocks.planks, 6, 1), pulpStack, IC2Items.getItem("cell"), 200, 30, false)); RecipeHandler.addRecipe(new IndustrialSawmillRecipe(new ItemStack(Blocks.log, 1, 1), new ItemStack(Items.water_bucket), null, new ItemStack(Blocks.planks, 6, 1), pulpStack, new ItemStack(Items.bucket), 200, 30, false)); RecipeHandler.addRecipe(new IndustrialSawmillRecipe(new ItemStack(Blocks.log, 1, 2), null, new FluidStack(FluidRegistry.WATER, 1000), new ItemStack(Blocks.planks, 6, 2), pulpStack, null, 200, 30, false)); RecipeHandler.addRecipe(new IndustrialSawmillRecipe(new ItemStack(Blocks.log, 1, 2), IC2Items.getItem("waterCell"), null, new ItemStack(Blocks.planks, 6, 2), pulpStack, IC2Items.getItem("cell"), 200, 30, false)); RecipeHandler.addRecipe(new IndustrialSawmillRecipe(new ItemStack(Blocks.log, 1, 2), new ItemStack(Items.water_bucket), null, new ItemStack(Blocks.planks, 6, 2), pulpStack, new ItemStack(Items.bucket), 200, 30, false)); RecipeHandler.addRecipe(new IndustrialSawmillRecipe(new ItemStack(Blocks.log, 1, 3), null, new FluidStack(FluidRegistry.WATER, 1000), new ItemStack(Blocks.planks, 6, 3), pulpStack, null, 200, 30, false)); 
// --- Tail of the sawmill recipe registration (method head is above this chunk). ---
// Each IndustrialSawmillRecipe takes: input log, optional container item (cell/bucket),
// optional fluid, three outputs (planks, pulp, empty container), time, EU/t, oreDict flag.
        // Jungle log (Blocks.log meta 3) -> 6 jungle planks, via water cell or water bucket.
        RecipeHandler.addRecipe(new IndustrialSawmillRecipe(new ItemStack(Blocks.log, 1, 3), IC2Items.getItem("waterCell"), null, new ItemStack(Blocks.planks, 6, 3), pulpStack, IC2Items.getItem("cell"), 200, 30, false));
        RecipeHandler.addRecipe(new IndustrialSawmillRecipe(new ItemStack(Blocks.log, 1, 3), new ItemStack(Items.water_bucket), null, new ItemStack(Blocks.planks, 6, 3), pulpStack, new ItemStack(Items.bucket), 200, 30, false));
        // Acacia log (Blocks.log2 meta 0) -> 6 acacia planks, via tank fluid, water cell, or bucket.
        RecipeHandler.addRecipe(new IndustrialSawmillRecipe(new ItemStack(Blocks.log2, 1, 0), null, new FluidStack(FluidRegistry.WATER, 1000), new ItemStack(Blocks.planks, 6, 4), pulpStack, null, 200, 30, false));
        RecipeHandler.addRecipe(new IndustrialSawmillRecipe(new ItemStack(Blocks.log2, 1, 0), IC2Items.getItem("waterCell"), null, new ItemStack(Blocks.planks, 6, 4), pulpStack, IC2Items.getItem("cell"), 200, 30, false));
        RecipeHandler.addRecipe(new IndustrialSawmillRecipe(new ItemStack(Blocks.log2, 1, 0), new ItemStack(Items.water_bucket), null, new ItemStack(Blocks.planks, 6, 4), pulpStack, new ItemStack(Items.bucket), 200, 30, false));
        // Dark oak log (Blocks.log2 meta 1) -> 6 dark oak planks, same three water-source variants.
        RecipeHandler.addRecipe(new IndustrialSawmillRecipe(new ItemStack(Blocks.log2, 1, 1), null, new FluidStack(FluidRegistry.WATER, 1000), new ItemStack(Blocks.planks, 6, 5), pulpStack, null, 200, 30, false));
        RecipeHandler.addRecipe(new IndustrialSawmillRecipe(new ItemStack(Blocks.log2, 1, 1), IC2Items.getItem("waterCell"), null, new ItemStack(Blocks.planks, 6, 5), pulpStack, IC2Items.getItem("cell"), 200, 30, false));
        RecipeHandler.addRecipe(new IndustrialSawmillRecipe(new ItemStack(Blocks.log2, 1, 1), new ItemStack(Items.water_bucket), null, new ItemStack(Blocks.planks, 6, 5), pulpStack, new ItemStack(Items.bucket), 200, 30, false));
    }

    /**
     * Registers the shaped crafting recipes that turn UU-Matter into resources.
     * Every recipe is individually gated behind a ConfigTechReborn flag so pack
     * authors can disable it; the pattern strings encode the 3x3 grid layout
     * ('U' = UU-Matter, space = empty slot).
     */
    public static void addUUrecipes() {
        // NOTE(review): "UUrecipesIridiamOre" looks like a typo for "IridiumOre" in the
        // config class — confirm before renaming, since it is an external field.
        if(ConfigTechReborn.UUrecipesIridiamOre)
            CraftingHelper.addShapedOreRecipe((IC2Items.getItem("iridiumOre")), "UUU", " U ", "UUU", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesWood)
            CraftingHelper.addShapedOreRecipe(new ItemStack(Blocks.log, 8), " U ", " ", " ", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesStone)
            CraftingHelper.addShapedOreRecipe(new ItemStack(Blocks.stone, 16), " ", " U ", " ", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesSnowBlock)
            CraftingHelper.addShapedOreRecipe(new ItemStack(Blocks.snow, 16), "U U", " ", " ", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesGrass)
            CraftingHelper.addShapedOreRecipe(new ItemStack(Blocks.grass, 16), " ", "U ", "U ", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesObsidian)
            CraftingHelper.addShapedOreRecipe(new ItemStack(Blocks.obsidian, 12), "U U", "U U", " ", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesGlass)
            CraftingHelper.addShapedOreRecipe(new ItemStack(Blocks.glass, 32), " U ", "U U", " U ", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesWater)
            CraftingHelper.addShapedOreRecipe(new ItemStack(Blocks.water, 1), " ", " U ", " U ", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesLava)
            CraftingHelper.addShapedOreRecipe(new ItemStack(Blocks.lava, 1), " U ", " U ", " U ", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesCocoa)
            CraftingHelper.addShapedOreRecipe(new ItemStack(Items.dye, 32, 3), "UU ", " U", "UU ", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesGlowstoneBlock)
            CraftingHelper.addShapedOreRecipe(new ItemStack(Blocks.glowstone, 8), " U ", "U U", "UUU", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesCactus)
            CraftingHelper.addShapedOreRecipe(new ItemStack(Blocks.cactus, 48), " U ", "UUU", "U U", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesSugarCane)
            CraftingHelper.addShapedOreRecipe(new ItemStack(Items.reeds, 48), "U U", "U U", "U U", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesVine)
            CraftingHelper.addShapedOreRecipe(new ItemStack(Blocks.vine, 24), "U ", "U ", "U ", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesSnowBall)
            CraftingHelper.addShapedOreRecipe(new ItemStack(Items.snowball, 16), " ", " ", "UUU", 'U', ModItems.uuMatter);
        // NOTE(review): the clay-ball recipe is the only one NOT gated behind a config
        // flag — presumably an oversight; confirm whether a UUrecipesClay flag exists.
        CraftingHelper.addShapedOreRecipe(new ItemStack(Items.clay_ball, 48), "UU ", "U ", "UU ", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipeslilypad)
            CraftingHelper.addShapedOreRecipe(new ItemStack(Blocks.waterlily, 64), "U U", " U ", " U ", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesGunpowder)
            CraftingHelper.addShapedOreRecipe(new ItemStack(Items.gunpowder, 15), "UUU", "U ", "UUU", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesBone)
            CraftingHelper.addShapedOreRecipe(new ItemStack(Items.bone, 32), "U ", "UU ", "U ", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesFeather)
            CraftingHelper.addShapedOreRecipe(new ItemStack(Items.feather, 32), " U ", " U ", "U U", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesInk)
            CraftingHelper.addShapedOreRecipe(new ItemStack(Items.dye, 48), " UU", " UU", " U ", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesEnderPearl)
            CraftingHelper.addShapedOreRecipe(new ItemStack(Items.ender_pearl, 1), "UUU", "U U", " U ", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesCoal)
            CraftingHelper.addShapedOreRecipe(new ItemStack(Items.coal, 5), " U", "U ", " U", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesIronOre)
            CraftingHelper.addShapedOreRecipe(new ItemStack(Blocks.iron_ore, 2), "U U", " U ", "U U", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesGoldOre)
            CraftingHelper.addShapedOreRecipe(new ItemStack(Blocks.gold_ore, 2), " U ", "UUU", " U ", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesRedStone)
            CraftingHelper.addShapedOreRecipe(new ItemStack(Items.redstone, 24), " ", " U ", "UUU", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesLapis)
            CraftingHelper.addShapedOreRecipe(new ItemStack(Items.dye, 9, 4), " U ", " U ", " UU", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesEmeraldOre)
            CraftingHelper.addShapedOreRecipe(new ItemStack(Blocks.emerald_ore, 1), "UU ", "U U", " UU", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesEmerald)
            CraftingHelper.addShapedOreRecipe(new ItemStack(Items.emerald, 2), "UUU", "UUU", " U ", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesDiamond)
            CraftingHelper.addShapedOreRecipe(new ItemStack(Items.diamond, 1), "UUU", "UUU", "UUU", 'U', ModItems.uuMatter);
        // Dust recipes: ModItems.dusts metadata selects the dust type (77 = tin, 21 = copper,
        // 42 = lead, 58 = platinum, 79 = tungsten, 78 = titanium, 2 = aluminum).
        if(ConfigTechReborn.UUrecipesTinDust)
            CraftingHelper.addShapedOreRecipe(new ItemStack(ModItems.dusts, 10, 77), " ", "U U", " U", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesCopperDust)
            CraftingHelper.addShapedOreRecipe(new ItemStack(ModItems.dusts, 10, 21), " U", "U U", " ", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesLeadDust)
            CraftingHelper.addShapedOreRecipe(new ItemStack(ModItems.dusts, 14, 42), "UUU", "UUU", "U ", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesPlatinumDust)
            CraftingHelper.addShapedOreRecipe(new ItemStack(ModItems.dusts, 1, 58), " U", "UUU", "UUU", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesTungstenDust)
            CraftingHelper.addShapedOreRecipe(new ItemStack(ModItems.dusts, 1, 79), "U ", "UUU", "UUU", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesTitaniumDust)
            CraftingHelper.addShapedOreRecipe(new ItemStack(ModItems.dusts, 2, 78), "UUU", " U ", " U ", 'U', ModItems.uuMatter);
        if(ConfigTechReborn.UUrecipesAluminumDust)
            CraftingHelper.addShapedOreRecipe(new ItemStack(ModItems.dusts, 16, 2), " U ", " U ", "UUU", 'U', ModItems.uuMatter);
        // Optionally hide the UU recipes again (e.g. from recipe viewers).
        if(ConfigTechReborn.HideUuRecipes)
            hideUUrecipes();
    }

    /**
     * Hides the UU-Matter recipes when {@code ConfigTechReborn.HideUuRecipes} is set.
     * Currently a no-op.
     */
    public static void hideUUrecipes() {
        //TODO
    }
}
// This software may be modified and distributed under the terms

package wyil.util;

import java.util.Arrays;

import wybs.lang.NameID;
import wyil.lang.Bytecode;
import wyil.lang.Constant;
import wyil.lang.Type;
import wyil.lang.Bytecode.Blocks;
import wyil.lang.Bytecode.Extras;
import wyil.lang.Bytecode.OperandGroups;
import wyil.lang.Bytecode.Operands;
import wyil.lang.Bytecode.Schema;

/**
 * Base representation for a WyIL bytecode. A bytecode is described purely by
 * integer references: a flat array of top-level operands, an optional array of
 * operand groups (each itself an int array), and an optional array of block
 * references. Any of the three arrays may be {@code null}, meaning "none".
 *
 * <p>This class also hosts the static {@link #schemas} table, which maps each
 * opcode to a {@link Schema} describing its operand layout and how to
 * reconstruct the concrete {@link Bytecode} instance from raw components.</p>
 */
public abstract class AbstractBytecode {
	// All three arrays may be null; accessors normalise null to empty/zero.
	private final int[] operands;
	private final int[][] operandGroups;
	private final int[] blocks;

	/** Bytecode with no operands, groups or blocks. */
	public AbstractBytecode() {
		this.operands = null;
		this.operandGroups = null;
		this.blocks = null;
	}

	/** Bytecode with a single top-level operand. */
	public AbstractBytecode(int operand) {
		this.operands = new int[] { operand };
		this.operandGroups = null;
		this.blocks = null;
	}

	/** Bytecode with the given top-level operands. */
	public AbstractBytecode(int[] operands) {
		this.operands = operands;
		this.operandGroups = null;
		this.blocks = null;
	}

	/** Bytecode with operand groups only. */
	public AbstractBytecode(int[][] operandGroups) {
		this.operands = null;
		this.operandGroups = operandGroups;
		this.blocks = null;
	}

	/** Bytecode with one operand and the given operand groups. */
	public AbstractBytecode(int operand, int[][] operandGroups) {
		this.operands = new int[] { operand };
		this.operandGroups = operandGroups;
		this.blocks = null;
	}

	/** Bytecode with operands and operand groups. */
	public AbstractBytecode(int[] operands, int[][] operandGroups) {
		this.operands = operands;
		this.operandGroups = operandGroups;
		this.blocks = null;
	}

	/** Bytecode with one operand, operand groups, and blocks. */
	public AbstractBytecode(int operand, int[][] operandGroups, int[] blocks) {
		this.operands = new int[] { operand };
		this.operandGroups = operandGroups;
		this.blocks = blocks;
	}

	/** Bytecode with operands, operand groups, and blocks. */
	public AbstractBytecode(int[] operands, int[][] operandGroups, int[] blocks) {
		this.operands = operands;
		this.operandGroups = operandGroups;
		this.blocks = blocks;
	}

	@Override
	public int hashCode() {
		// Note: '&' binds tighter than '^' in Java; the parentheses make the
		// existing grouping explicit without changing the computed value.
		return getOpcode() ^ (Arrays.hashCode(getOperands()) & Arrays.deepHashCode(operandGroups));
	}

	@Override
	public boolean equals(Object o) {
		if (o instanceof AbstractBytecode) {
			AbstractBytecode bo = (AbstractBytecode) o;
			// FIX: previously this compared operandGroups against itself
			// (Arrays.deepEquals(operandGroups, operandGroups)), which is always
			// true, so bytecodes differing only in operand groups compared equal.
			return getOpcode() == bo.getOpcode()
					&& Arrays.equals(getOperands(), bo.getOperands())
					&& Arrays.deepEquals(operandGroups, bo.operandGroups)
					&& Arrays.equals(blocks, bo.blocks);
		}
		return false;
	}

	/**
	 * Return the opcode value of this bytecode.
	 *
	 * @return
	 */
	public abstract int getOpcode();

	/**
	 * Return the top-level operands in this bytecode (never null; an empty
	 * array is returned when there are none).
	 *
	 * @return
	 */
	public int[] getOperands() {
		if (operands == null) {
			return new int[0];
		} else {
			return operands;
		}
	}

	/**
	 * Return the number of top-level operands in this bytecode.
	 *
	 * @return
	 */
	public int numberOfOperands() {
		if (operands == null) {
			return 0;
		} else {
			return operands.length;
		}
	}

	/**
	 * Return the ith top-level operand in this bytecode.
	 *
	 * @param i
	 * @return
	 */
	public int getOperand(int i) {
		return operands[i];
	}

	/**
	 * Get the number of operand groups in this bytecode.
	 *
	 * @return
	 */
	public int numberOfOperandGroups() {
		if (operandGroups == null) {
			return 0;
		} else {
			return operandGroups.length;
		}
	}

	/**
	 * Get the ith operand group in this bytecode.
	 *
	 * @param i
	 * @return
	 */
	public int[] getOperandGroup(int i) {
		return operandGroups[i];
	}

	/**
	 * Determine the number of blocks contained in this bytecode.
	 *
	 * @return
	 */
	public int numberOfBlocks() {
		if (blocks == null) {
			return 0;
		} else {
			return blocks.length;
		}
	}

	/**
	 * Get the ith block contained in this statement.
	 *
	 * @param i
	 * @return
	 */
	public int getBlock(int i) {
		return blocks[i];
	}

	/**
	 * Get the blocks contained in this statement (never null).
	 *
	 * @return
	 */
	public int[] getBlocks() {
		if (blocks == null) {
			return new int[0];
		} else {
			return blocks;
		}
	}

	/**
	 * Schema table indexed by opcode. Each entry describes the expected operand
	 * layout for that opcode and constructs the corresponding Bytecode instance.
	 */
	public static final Schema[] schemas = new Schema[255];

	static {
		// --- Operators and simple expressions ---
		schemas[Bytecode.OPCODE_add] = new Schema(Operands.TWO) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Operator(operands, Bytecode.OperatorKind.ADD); }
		};
		schemas[Bytecode.OPCODE_aliasdecl] = new Schema(Operands.ONE) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.AliasDeclaration(operands[0]); }
		};
		schemas[Bytecode.OPCODE_array] = new Schema(Operands.MANY) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Operator(operands, Bytecode.OperatorKind.ARRAYCONSTRUCTOR); }
		};
		schemas[Bytecode.OPCODE_arrayindex] = new Schema(Operands.TWO) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Operator(operands, Bytecode.OperatorKind.ARRAYINDEX); }
		};
		schemas[Bytecode.OPCODE_arraygen] = new Schema(Operands.TWO) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Operator(operands, Bytecode.OperatorKind.ARRAYGENERATOR); }
		};
		schemas[Bytecode.OPCODE_arraylength] = new Schema(Operands.ONE) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Operator(operands, Bytecode.OperatorKind.ARRAYLENGTH); }
		};
		// --- Statements ---
		schemas[Bytecode.OPCODE_assert] = new Schema(Operands.ONE) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Assert(operands[0]); }
		};
		schemas[Bytecode.OPCODE_assign] = new Schema(Operands.ZERO, OperandGroups.TWO) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Assign(groups[0], groups[1]); }
		};
		schemas[Bytecode.OPCODE_assume] = new Schema(Operands.ONE) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Assume(operands[0]); }
		};
		schemas[Bytecode.OPCODE_bitwiseinvert] = new Schema(Operands.ONE) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Operator(operands, Bytecode.OperatorKind.BITWISEINVERT); }
		};
		schemas[Bytecode.OPCODE_bitwiseor] = new Schema(Operands.TWO) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Operator(operands, Bytecode.OperatorKind.BITWISEOR); }
		};
		schemas[Bytecode.OPCODE_bitwisexor] = new Schema(Operands.TWO) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Operator(operands, Bytecode.OperatorKind.BITWISEXOR); }
		};
		schemas[Bytecode.OPCODE_bitwiseand] = new Schema(Operands.TWO) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Operator(operands, Bytecode.OperatorKind.BITWISEAND); }
		};
		schemas[Bytecode.OPCODE_block] = new Schema(Operands.MANY) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Block(operands); }
		};
		schemas[Bytecode.OPCODE_break] = new Schema(Operands.ZERO) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Break(); }
		};
		schemas[Bytecode.OPCODE_const] = new Schema(Operands.ZERO, Extras.CONSTANT) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Const((Constant) extras[0]); }
		};
		schemas[Bytecode.OPCODE_continue] = new Schema(Operands.ZERO) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Continue(); }
		};
		schemas[Bytecode.OPCODE_convert] = new Schema(Operands.ONE) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Convert(operands[0]); }
		};
		schemas[Bytecode.OPCODE_debug] = new Schema(Operands.ONE) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Debug(operands[0]); }
		};
		schemas[Bytecode.OPCODE_dereference] = new Schema(Operands.ONE) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Operator(operands, Bytecode.OperatorKind.DEREFERENCE); }
		};
		schemas[Bytecode.OPCODE_div] = new Schema(Operands.TWO) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Operator(operands, Bytecode.OperatorKind.DIV); }
		};
		// do-while: condition operand, {invariants, modified} groups, body block.
		schemas[Bytecode.OPCODE_dowhile] = new Schema(Operands.ONE, OperandGroups.TWO, Blocks.ONE) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) {
				int body = blocks[0];
				int condition = operands[0];
				int[] invariants = groups[0];
				int[] modified = groups[1];
				return new Bytecode.DoWhile(body, condition, invariants, modified);
			}
		};
		schemas[Bytecode.OPCODE_eq] = new Schema(Operands.TWO) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Operator(operands, Bytecode.OperatorKind.EQ); }
		};
		schemas[Bytecode.OPCODE_if] = new Schema(Operands.ONE, OperandGroups.ZERO, Blocks.ONE) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) {
				int trueBranch = blocks[0];
				return new Bytecode.If(operands[0], trueBranch);
			}
		};
		schemas[Bytecode.OPCODE_ifelse] = new Schema(Operands.ONE, OperandGroups.ZERO, Blocks.TWO) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) {
				int trueBranch = blocks[0];
				int falseBranch = blocks[1];
				return new Bytecode.If(operands[0], trueBranch, falseBranch);
			}
		};
		schemas[Bytecode.OPCODE_fail] = new Schema(Operands.ZERO) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Fail(); }
		};
		schemas[Bytecode.OPCODE_fieldload] = new Schema(Operands.ONE, Extras.STRING) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.FieldLoad(operands[0], (String) extras[0]); }
		};
		schemas[Bytecode.OPCODE_gt] = new Schema(Operands.TWO) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Operator(operands, Bytecode.OperatorKind.GT); }
		};
		schemas[Bytecode.OPCODE_ge] = new Schema(Operands.TWO) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Operator(operands, Bytecode.OperatorKind.GTEQ); }
		};
		schemas[Bytecode.OPCODE_invoke] = new Schema(Operands.MANY, Extras.TYPE, Extras.NAME) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Invoke((Type.FunctionOrMethod) extras[0], operands, (NameID) extras[1]); }
		};
		// NOTE(review): the schema declares Operands.ONE, yet construct() takes
		// arguments from operands[1..] (always empty for a one-operand layout)
		// and never reads groups[0] — looks inconsistent; confirm against the
		// serialiser before changing.
		schemas[Bytecode.OPCODE_indirectinvoke] = new Schema(Operands.ONE, OperandGroups.ONE, Extras.TYPE) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) {
				int[] arguments = Arrays.copyOfRange(operands, 1, operands.length);
				return new Bytecode.IndirectInvoke((Type.FunctionOrMethod) extras[0], operands[0], arguments);
			}
		};
		schemas[Bytecode.OPCODE_is] = new Schema(Operands.TWO) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Operator(operands, Bytecode.OperatorKind.IS); }
		};
		// lambda: body operand, {parameters, environment} groups, function type extra.
		schemas[Bytecode.OPCODE_lambda] = new Schema(Operands.ONE, OperandGroups.TWO, Extras.TYPE) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) {
				Type.FunctionOrMethod type = (Type.FunctionOrMethod) extras[0];
				int body = operands[0];
				int[] parameters = groups[0];
				int[] environment = groups[1];
				return new Bytecode.Lambda(type, body, parameters, environment);
			}
		};
		schemas[Bytecode.OPCODE_lt] = new Schema(Operands.TWO) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Operator(operands, Bytecode.OperatorKind.LT); }
		};
		schemas[Bytecode.OPCODE_le] = new Schema(Operands.TWO) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Operator(operands, Bytecode.OperatorKind.LTEQ); }
		};
		schemas[Bytecode.OPCODE_logicalor] = new Schema(Operands.TWO) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Operator(operands, Bytecode.OperatorKind.OR); }
		};
		schemas[Bytecode.OPCODE_logicaland] = new Schema(Operands.TWO) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Operator(operands, Bytecode.OperatorKind.AND); }
		};
		schemas[Bytecode.OPCODE_logicalnot] = new Schema(Operands.ONE) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Operator(operands, Bytecode.OperatorKind.NOT); }
		};
		schemas[Bytecode.OPCODE_mul] = new Schema(Operands.TWO) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Operator(operands, Bytecode.OperatorKind.MUL); }
		};
		schemas[Bytecode.OPCODE_namedblock] = new Schema(Operands.ZERO, OperandGroups.ZERO, Blocks.ONE, Extras.STRING) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) {
				String name = (String) extras[0];
				return new Bytecode.NamedBlock(blocks[0], name);
			}
		};
		schemas[Bytecode.OPCODE_ne] = new Schema(Operands.TWO) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Operator(operands, Bytecode.OperatorKind.NEQ); }
		};
		schemas[Bytecode.OPCODE_neg] = new Schema(Operands.ONE) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Operator(operands, Bytecode.OperatorKind.NEG); }
		};
		schemas[Bytecode.OPCODE_newobject] = new Schema(Operands.ONE) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Operator(operands, Bytecode.OperatorKind.NEW); }
		};
		schemas[Bytecode.OPCODE_record] = new Schema(Operands.MANY) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Operator(operands, Bytecode.OperatorKind.RECORDCONSTRUCTOR); }
		};
		schemas[Bytecode.OPCODE_rem] = new Schema(Operands.TWO) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Operator(operands, Bytecode.OperatorKind.REM); }
		};
		schemas[Bytecode.OPCODE_return] = new Schema(Operands.MANY) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Return(operands); }
		};
		schemas[Bytecode.OPCODE_shl] = new Schema(Operands.TWO) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Operator(operands, Bytecode.OperatorKind.LEFTSHIFT); }
		};
		schemas[Bytecode.OPCODE_shr] = new Schema(Operands.TWO) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Operator(operands, Bytecode.OperatorKind.RIGHTSHIFT); }
		};
		schemas[Bytecode.OPCODE_skip] = new Schema(Operands.ZERO) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Skip(); }
		};
		schemas[Bytecode.OPCODE_sub] = new Schema(Operands.TWO) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.Operator(operands, Bytecode.OperatorKind.SUB); }
		};
		schemas[Bytecode.OPCODE_switch] = new Schema(Operands.ONE, OperandGroups.ZERO, Extras.SWITCH_ARRAY) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) {
				Bytecode.Case[] cases = (Bytecode.Case[]) extras[0];
				return new Bytecode.Switch(operands[0], cases);
			}
		};
		schemas[Bytecode.OPCODE_vardecl] = new Schema(Operands.ZERO, Extras.STRING) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) {
				String name = (String) extras[0];
				return new Bytecode.VariableDeclaration(name);
			}
		};
		schemas[Bytecode.OPCODE_vardeclinit] = new Schema(Operands.ONE, Extras.STRING) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) {
				String name = (String) extras[0];
				return new Bytecode.VariableDeclaration(name, operands[0]);
			}
		};
		schemas[Bytecode.OPCODE_varcopy] = new Schema(Operands.ONE) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.VariableAccess(true, operands[0]); }
		};
		schemas[Bytecode.OPCODE_varmove] = new Schema(Operands.ONE) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) { return new Bytecode.VariableAccess(false, operands[0]); }
		};
		schemas[Bytecode.OPCODE_while] = new Schema(Operands.ONE, OperandGroups.TWO, Blocks.ONE) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) {
				int body = blocks[0];
				int condition = operands[0];
				int[] invariants = groups[0];
				int[] modified = groups[1];
				return new Bytecode.While(body, condition, invariants, modified);
			}
		};
		// Quantifiers: SOME and ALL share one schema; each operand group is a
		// (variable, start, end) range triple.
		schemas[Bytecode.OPCODE_some] = schemas[Bytecode.OPCODE_all] = new Schema(Operands.ONE, OperandGroups.MANY) {
			@Override public Bytecode construct(int opcode, int[] operands, int[][] groups, int[] blocks, Object[] extras) {
				int body = operands[0];
				Bytecode.Range[] ranges = new Bytecode.Range[groups.length];
				for (int i = 0; i != ranges.length; i = i + 1) {
					int[] group = groups[i];
					ranges[i] = new Bytecode.Range(group[0], group[1], group[2]);
				}
				Bytecode.QuantifierKind kind;
				switch (opcode) {
				case Bytecode.OPCODE_some:
					kind = Bytecode.QuantifierKind.SOME;
					break;
				case Bytecode.OPCODE_all:
					kind = Bytecode.QuantifierKind.ALL;
					break;
				default:
					// dead code: this schema is only registered for SOME and ALL
					throw new IllegalArgumentException();
				}
				return new Bytecode.Quantifier(kind, body, ranges);
			}
		};
	}
}
package com.pixable.trackingwrap;

import android.content.Context;
import android.util.Log;
import android.widget.Toast;

import com.flurry.android.FlurryAgent;

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

/**
 * Facade for the tracking wrap library, exposed as a singleton. Call
 * {@link #initialize} exactly once (ideally from
 * {@link android.app.Application#onCreate}) and retrieve the instance via
 * {@link #getInstance}.
 *
 * Forward the activity lifecycle through the {@code onXXX} methods and report
 * custom events through {@link #trackEvent}.
 */
public class TrackingWrap {
    private static final String TAG = TrackingWrap.class.getSimpleName();

    private static TrackingWrap INSTANCE;

    private final TrackingConfiguration configuration;
    // Destinations whose SDK has actually been set up in onApplicationCreate.
    private final Set<TrackingDestination> initializedDestinations = new HashSet<>();

    private TrackingWrap(TrackingConfiguration configuration) {
        this.configuration = configuration;
    }

    public static void initialize(TrackingConfiguration configuration) {
        INSTANCE = new TrackingWrap(configuration);
    }

    public static TrackingWrap getInstance() {
        if (INSTANCE == null) {
            throw new IllegalStateException("The tracking wrap singleton is not initialized");
        }
        return INSTANCE;
    }

    /**
     * Sets up the SDKs of the requested destinations. Only destinations passed
     * here can be used later on.
     *
     * @param context      application context
     * @param destinations destinations to be initialized
     */
    public void onApplicationCreate(Context context, TrackingDestination... destinations) {
        for (TrackingDestination destination : destinations) {
            TrackingDestination.Platform platform = destination.getPlatform();
            if (platform == TrackingDestination.Platform.GOOGLE_ANALYTICS) {
                throw new UnsupportedOperationException("not yet");
            } else if (platform == TrackingDestination.Platform.MIXPANEL) {
                throw new UnsupportedOperationException("not yet");
            } else if (platform == TrackingDestination.Platform.FLURRY) {
                FlurryAgent.init(context, destination.getAppKey());
                initializedDestinations.add(destination);
            }
        }
    }

    /**
     * Forwards {@code onStart} to every initialized destination. Call from the
     * {@code onStart} of every activity, passing the activity context.
     *
     * @param context activity context, not the global application context
     */
    public void onActivityStart(Context context) {
        for (TrackingDestination destination : initializedDestinations) {
            TrackingDestination.Platform platform = destination.getPlatform();
            if (platform == TrackingDestination.Platform.GOOGLE_ANALYTICS) {
                throw new UnsupportedOperationException("not yet");
            } else if (platform == TrackingDestination.Platform.MIXPANEL) {
                throw new UnsupportedOperationException("not yet");
            } else if (platform == TrackingDestination.Platform.FLURRY) {
                FlurryAgent.onStartSession(context);
            }
        }
    }

    /**
     * Forwards {@code onStop} to every initialized destination. Call from the
     * {@code onStop} of every activity, passing the activity context.
     *
     * @param context activity context, not the global application context
     */
    public void onActivityStop(Context context) {
        for (TrackingDestination destination : initializedDestinations) {
            TrackingDestination.Platform platform = destination.getPlatform();
            if (platform == TrackingDestination.Platform.GOOGLE_ANALYTICS) {
                throw new UnsupportedOperationException("not yet");
            } else if (platform == TrackingDestination.Platform.MIXPANEL) {
                throw new UnsupportedOperationException("not yet");
            } else if (platform == TrackingDestination.Platform.FLURRY) {
                FlurryAgent.onEndSession(context);
            }
        }
    }

    /**
     * Reports the given event to the given platforms, after emitting any debug
     * output (logcat/toast) enabled in the configuration.
     */
    public void trackEvent(Context context, TrackingEvent event, TrackingDestination.Platform... platforms) {
        // Debug output first, as configured.
        for (TrackingConfiguration.DebugPrint debugPrint : configuration.getDebugPrints()) {
            if (debugPrint == TrackingConfiguration.DebugPrint.LOGCAT) {
                Log.d(TAG, "Track " + event + " to " + Arrays.asList(platforms));
            } else if (debugPrint == TrackingConfiguration.DebugPrint.TOAST) {
                Toast.makeText(context, "Track " + event, Toast.LENGTH_LONG).show();
            }
        }
        // Then the actual tracking, platform by platform.
        for (TrackingDestination.Platform platform : platforms) {
            if (platform == TrackingDestination.Platform.GOOGLE_ANALYTICS) {
                Log.e(TAG, "Not implemented yet");
            } else if (platform == TrackingDestination.Platform.MIXPANEL) {
                Log.e(TAG, "Not implemented yet");
            } else if (platform == TrackingDestination.Platform.FLURRY) {
                FlurryAgent.logEvent(event.getName(), event.getProperties());
            }
        }
    }
}
package com.reason.lang.core.psi; import com.intellij.extapi.psi.ASTWrapperPsiElement; import com.intellij.lang.ASTNode; import com.intellij.psi.PsiElement; import com.reason.lang.MlTypes; import org.jetbrains.annotations.NotNull; public class PsiLocalOpen extends ASTWrapperPsiElement { private final MlTypes m_types; public PsiLocalOpen(ASTNode node, MlTypes types) { super(node); m_types = types; } @NotNull public String getName() { PsiElement firstChild = getFirstChild(); StringBuilder sb = new StringBuilder(firstChild.getText()); PsiElement nextSibling = firstChild.getNextSibling(); while (nextSibling != null && nextSibling.getNode().getElementType() != m_types.SCOPED_EXPR) { sb.append(nextSibling.getText()); nextSibling = nextSibling.getNextSibling(); } String name = sb.toString(); return name.substring(0, name.length() - 1); } @Override public boolean canNavigate() { return false; } @Override public String toString() { return "Local open " + getName(); } }
package com.redhat.ukiservices.utils;

import java.util.ArrayList;
import java.util.List;

import org.jdom2.Document;
import org.jdom2.Element;

import io.vertx.core.json.JsonArray;
import io.vertx.core.json.JsonObject;

/**
 * Converts RSS feed XML documents into Vert.x JSON structures.
 */
public class RssUtils {
    private static final String CHANNEL = "channel";
    private static final String ITEM = "item";
    private static final String GUID = "guid";
    private static final String TITLE = "title";
    private static final String PUBLISH_DATE = "pubDate";
    private static final String ROAD = "road";
    private static final String REGION = "region";
    private static final String COUNTY = "county";
    private static final String CATEGORIES = "categories";
    private static final String CATEGORY = "category";
    private static final String DESCRIPTION = "description";

    /**
     * Maps every {@code <item>} under the document's {@code <channel>} element
     * to a {@link JsonObject}, preserving document order.
     */
    public static List<JsonObject> toJson(Document doc) {
        List<JsonObject> results = new ArrayList<>();
        for (Element item : doc.getRootElement().getChild(CHANNEL).getChildren(ITEM)) {
            results.add(itemToJson(item));
        }
        return results;
    }

    // Converts a single RSS <item> element into its JSON representation.
    private static JsonObject itemToJson(Element item) {
        JsonObject json = new JsonObject();
        json.put(GUID, item.getChildText(GUID));
        json.put(TITLE, item.getChildText(TITLE));
        json.put(PUBLISH_DATE, item.getChildText(PUBLISH_DATE));
        json.put(ROAD, item.getChildText(ROAD));
        json.put(REGION, item.getChildText(REGION));
        json.put(COUNTY, item.getChildText(COUNTY));
        JsonArray categoryList = new JsonArray();
        for (Element category : item.getChildren(CATEGORY)) {
            categoryList.add(new JsonObject().put(CATEGORY, category.getText()));
        }
        json.put(CATEGORIES, categoryList);
        json.put(DESCRIPTION, item.getChildText(DESCRIPTION));
        return json;
    }
}
package com.rvprg.raft.protocol.impl; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map.Entry; import java.util.Random; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.slf4j.Marker; import org.slf4j.MarkerFactory; import com.google.inject.Inject; import com.google.protobuf.InvalidProtocolBufferException; import com.rvprg.raft.configuration.Configuration; import com.rvprg.raft.log.Log; import com.rvprg.raft.log.LogEntryFactory; import com.rvprg.raft.log.LogException; import com.rvprg.raft.protocol.Raft; import com.rvprg.raft.protocol.RaftObserver; import com.rvprg.raft.protocol.Role; import com.rvprg.raft.protocol.messages.ProtocolMessages.AppendEntries; import com.rvprg.raft.protocol.messages.ProtocolMessages.AppendEntriesResponse; import com.rvprg.raft.protocol.messages.ProtocolMessages.DynamicMembershipChangeCommand; import com.rvprg.raft.protocol.messages.ProtocolMessages.DynamicMembershipChangeCommand.CommandType; import com.rvprg.raft.protocol.messages.ProtocolMessages.LogEntry; import com.rvprg.raft.protocol.messages.ProtocolMessages.LogEntry.LogEntryType; import com.rvprg.raft.protocol.messages.ProtocolMessages.RaftMessage; import com.rvprg.raft.protocol.messages.ProtocolMessages.RaftMessage.MessageType; import com.rvprg.raft.protocol.messages.ProtocolMessages.RequestVote; import 
com.rvprg.raft.protocol.messages.ProtocolMessages.RequestVoteResponse; import com.rvprg.raft.protocol.messages.ProtocolMessages.RequestVoteResponse.Builder; import com.rvprg.raft.protocol.messages.ProtocolMessages.SnapshotDownloadRequest; import com.rvprg.raft.sm.SnapshotDescriptor; import com.rvprg.raft.sm.SnapshotInstallException; import com.rvprg.raft.sm.Snapshotable; import com.rvprg.raft.sm.StateMachine; import com.rvprg.raft.transport.ChannelPipelineInitializer; import com.rvprg.raft.transport.Member; import com.rvprg.raft.transport.MemberConnector; import com.rvprg.raft.transport.MemberId; import com.rvprg.raft.transport.MessageReceiver; import com.rvprg.raft.transport.impl.SnapshotReceiver; import com.rvprg.raft.transport.impl.SnapshotSender; import com.rvprg.raft.transport.impl.SnapshotTransferCompletedEvent; import com.rvprg.raft.transport.impl.SnapshotTransferConnectionClosedEvent; import com.rvprg.raft.transport.impl.SnapshotTransferConnectionOpenEvent; import com.rvprg.raft.transport.impl.SnapshotTransferEvent; import com.rvprg.raft.transport.impl.SnapshotTransferExceptionThrownEvent; import com.rvprg.raft.transport.impl.SnapshotTransferStartedEvent; import io.netty.channel.Channel; import io.netty.channel.EventLoopGroup; import io.netty.channel.nio.NioEventLoopGroup; import io.netty.util.concurrent.ScheduledFuture; import net.jcip.annotations.GuardedBy; public class RaftImpl implements Raft { private final Logger logger = LoggerFactory.getLogger(RaftImpl.class); private final static Marker severe = MarkerFactory.getMarker("SEVERE"); private final MemberId selfId; private final Configuration configuration; private final RaftMemberConnector memberConnector; private final MessageReceiver messageReceiver; private final Log log; private final AtomicReference<ScheduledFuture<?>> newElectionInitiatorTask = new AtomicReference<>(); private final AtomicReference<ScheduledFuture<?>> electionTimeoutMonitorTask = new AtomicReference<>(); private final 
AtomicReference<ScheduledFuture<?>> periodicHeartbeatTask = new AtomicReference<>(); private final AtomicReference<CompletableFuture<?>> logCompactionTask = new AtomicReference<>(CompletableFuture.completedFuture(true)); private final ConcurrentHashMap<Long, ApplyCommandFuture> replicationCompletableFutures = new ConcurrentHashMap<>(); private final ConcurrentHashMap<MemberId, AtomicReference<ScheduledFuture<?>>> replicationRetryTasks = new ConcurrentHashMap<>(); private final ReentrantReadWriteLock indexesLock = new ReentrantReadWriteLock(); private final ConcurrentHashMap<MemberId, AtomicLong> nextIndexes = new ConcurrentHashMap<>(); private final ConcurrentHashMap<MemberId, AtomicLong> matchIndexes = new ConcurrentHashMap<>(); private final ConcurrentHashMap<MemberId, Channel> snapshotRecipients = new ConcurrentHashMap<>(); private final SnapshotSender snapshotSender; private final Object snapshotInstallLock = new Object(); @GuardedBy("snapshotInstallLock") private SnapshotReceiver snapshotReceiver = null; private final Object dynamicMembershipChangeLock = new Object(); @GuardedBy("dynamicMembershipChangeLock") private ApplyCommandResult dynamicMembershipChangeInProgress = null; private final RaftObserver observer; private final Object stateLock = new Object(); @GuardedBy("stateLock") private int currentTerm = 0; @GuardedBy("stateLock") private int votesReceived = 0; @GuardedBy("stateLock") private MemberId votedFor = null; @GuardedBy("stateLock") private MemberId leader; @GuardedBy("stateLock") private Role role = Role.Follower; @GuardedBy("stateLock") private boolean started = false; @GuardedBy("stateLock") private AtomicReference<EventLoopGroup> eventLoop = new AtomicReference<EventLoopGroup>(null); private final MemberConnectorObserverImpl memberConnectorObserver; private final Random random; private final StateMachine stateMachine; private final AtomicBoolean catchingUpMember = new AtomicBoolean(false); private final ChannelPipelineInitializer 
channelPipelineInitializer; @Inject public RaftImpl(Configuration configuration, MemberConnector memberConnector, MessageReceiver messageReceiver, Log log, StateMachine stateMachine, RaftObserver observer) throws InterruptedException, SnapshotInstallException, FileNotFoundException, IOException { this(configuration, memberConnector, messageReceiver, log, stateMachine, log.getTerm(), Role.Follower, observer); } public RaftImpl(Configuration configuration, MemberConnector memberConnector, MessageReceiver messageReceiver, Log log, StateMachine stateMachine, int initTerm, Role initRole, RaftObserver observer) throws InterruptedException, SnapshotInstallException, FileNotFoundException, IOException { this.memberConnector = new RaftMemberConnector(memberConnector); this.messageReceiver = messageReceiver; this.selfId = messageReceiver.getMemberId(); this.log = log; this.currentTerm = initTerm; this.role = initRole; this.leader = null; this.configuration = configuration; this.random = new Random(); this.observer = observer == null ? 
RaftObserver.getDefaultInstance() : observer; this.stateMachine = stateMachine; this.votedFor = log.getVotedFor(); this.channelPipelineInitializer = messageReceiver.getChannelPipelineInitializer(); this.snapshotSender = new SnapshotSender(this.channelPipelineInitializer, new MemberId(messageReceiver.getMemberId().getHostName(), configuration.getSnapshotSenderPort()), x -> { snapshotSenderEventHandler(x); }); if (messageReceiver.getMemberId() != null) { initializeFromTheLatestSnapshot(); } memberConnectorObserver = new MemberConnectorObserverImpl(this, this.channelPipelineInitializer); configuration.getMemberIds().forEach(memberId -> memberConnector.register(memberId, memberConnectorObserver)); } private void initializeFromTheLatestSnapshot() throws InterruptedException, SnapshotInstallException, FileNotFoundException, IOException { SnapshotDescriptor latestSnapshot = SnapshotDescriptor.getLatestSnapshotDescriptor(configuration.getSnapshotFolderPath()); if (latestSnapshot != null) { loadFromSnapshot(latestSnapshot); this.snapshotSender.setSnapshotDescriptor(latestSnapshot); } } private void loadFromSnapshot(SnapshotDescriptor latestSnapshot) throws InterruptedException, SnapshotInstallException, FileNotFoundException, IOException { try (InputStream inputStream = latestSnapshot.getInputStream()) { // FIXME: synchronization this.stateMachine.installSnapshot(inputStream); this.log.setFakeLogEntryAndCommit(latestSnapshot.getIndex(), latestSnapshot.getTerm()); } } private void cancelAllSnapshotTransfers() { snapshotRecipients.forEach((memberId, channel) -> { logger.info("MemberId: {}. Aborting snapshot transfer."); channel.disconnect(); }); snapshotRecipients.clear(); } private void snapshotSenderEventHandler(SnapshotTransferEvent x) { if (x instanceof SnapshotTransferConnectionOpenEvent) { SnapshotTransferConnectionOpenEvent event = (SnapshotTransferConnectionOpenEvent) x; logger.info("MemberId: {} connected to the snapshot sender. 
SnapshotDescriptor: {}.", event.getMemberId(), event.getSnapshotDescriptor()); Channel prevValue = snapshotRecipients.put(event.getMemberId(), event.getChannel()); if (prevValue != null) { prevValue.close(); } } else if (x instanceof SnapshotTransferStartedEvent) { SnapshotTransferStartedEvent event = (SnapshotTransferStartedEvent) x; logger.info("MemberId: {}. SnapshotDescriptor: {}. Snapshot transfer started.", event.getMemberId(), event.getSnapshotDescriptor()); } else if (x instanceof SnapshotTransferCompletedEvent) { SnapshotTransferCompletedEvent event = (SnapshotTransferCompletedEvent) x; logger.info("MemberId: {}. SnapshotDescriptor: {}. Snapshot transfer completed.", event.getMemberId(), event.getSnapshotDescriptor()); snapshotRecipients.remove(event.getMemberId()); } else if (x instanceof SnapshotTransferConnectionClosedEvent) { SnapshotTransferConnectionClosedEvent event = (SnapshotTransferConnectionClosedEvent) x; logger.info("MemberId: {}. SnapshotDescriptor: {}. Closing.", event.getMemberId(), event.getSnapshotDescriptor()); snapshotRecipients.remove(event.getMemberId()); } else if (x instanceof SnapshotTransferExceptionThrownEvent) { SnapshotTransferExceptionThrownEvent event = (SnapshotTransferExceptionThrownEvent) x; logger.info("MemberId: {}. SnapshotDescriptor: {}. Error occured.", event.getMemberId(), event.getSnapshotDescriptor(), event.getThrowable()); Channel prevValue = snapshotRecipients.remove(event.getMemberId()); if (prevValue != null) { prevValue.close(); } } } private void initializeEventLoop() { EventLoopGroup prevEventLoop = eventLoop.getAndSet( configuration.getMainEventLoopThreadPoolSize() > 0 ? 
new NioEventLoopGroup(configuration.getMainEventLoopThreadPoolSize()) : new NioEventLoopGroup()); if (prevEventLoop != null) { prevEventLoop.shutdownGracefully(); } } @Override public void start() throws InterruptedException { synchronized (stateLock) { initializeEventLoop(); messageReceiver.start(this); memberConnector.connectAllRegistered(); if (isVotingMember()) { scheduleHeartbeatMonitorTask(); } started = true; observer.started(); memberConnector.getRegisteredMemberIds().forEach( memberId -> replicationRetryTasks.put(memberId, new AtomicReference<ScheduledFuture<?>>(null))); } } @Override public void shutdown() throws InterruptedException { synchronized (stateLock) { cancelHeartbeatMonitorTask(); cancelElectionTimeoutTask(); cancelAllSnapshotTransfers(); messageReceiver.shutdown(); snapshotSender.shutdown(); eventLoop.get().shutdownGracefully(); started = false; observer.shutdown(); } } @Override public void consumeRequestVote(Member member, RequestVote requestVote) { boolean ignoreMessage = checkTermRecency(requestVote.getTerm()); if (ignoreMessage || isCatchingUpMember()) { return; } Builder response = RequestVoteResponse.newBuilder(); boolean grantVote = false; synchronized (stateLock) { response.setTerm(getCurrentTerm()); MemberId candidateId = MemberId.fromString(requestVote.getCandidateId()); if (votedFor == null || votedFor.equals(candidateId)) { try { grantVote = checkCandidatesLogIsUpToDate(requestVote); } catch (LogException e) { logger.error(severe, "Member: {}. Term: {}. checkCandidatesLogIsUpToDate failed.", member.getMemberId(), getCurrentTerm(), e); } } if (grantVote) { votedFor = candidateId; log.setVotedFor(votedFor); logger.debug("Member: {}. Term: {}. 
Giving vote to: {}.", selfId, getCurrentTerm(), votedFor); } } RaftMessage responseMessage = RaftMessage.newBuilder() .setType(MessageType.RequestVoteResponse) .setRequestVoteResponse(response.setVoteGranted(grantVote).build()) .build(); member.getChannel().writeAndFlush(responseMessage); } @Override public void consumeRequestVoteResponse(Member member, RequestVoteResponse requestVoteResponse) { boolean ignoreMessage = checkTermRecency(requestVoteResponse.getTerm()); if (ignoreMessage) { observer.voteRejected(); return; } if (getRole() != Role.Candidate) { return; } synchronized (stateLock) { boolean sameTerm = requestVoteResponse.getTerm() == getCurrentTerm(); if (sameTerm && requestVoteResponse.getVoteGranted()) { observer.voteReceived(); ++votesReceived; logger.debug("Member: {}. Term: {}. Votes received: {}.", selfId, getCurrentTerm(), votesReceived); } else { observer.voteRejected(); } if (votesReceived >= getMajority()) { becomeLeader(); } } } private boolean checkCandidatesLogIsUpToDate(RequestVote requestVote) throws LogException { if (log.getLast().getTerm() == requestVote.getLastLogTerm()) { return requestVote.getLastLogIndex() >= log.getLastIndex(); } return requestVote.getLastLogTerm() >= log.getLast().getTerm(); } private int compareAndUpdateCurrentTerm(int term) { synchronized (stateLock) { if (term > currentTerm) { currentTerm = term; log.setTerm(currentTerm); return 1; } else if (term < currentTerm) { return -1; } } return 0; } private void becomeLeader() { synchronized (stateLock) { if (getRole() != Role.Candidate) { return; } logger.debug("Member: {}. Term: {}. Votes Received: {}. 
BECAME LEADER.", selfId, getCurrentTerm(), votesReceived); cancelElectionTimeoutTask(); scheduleSendHeartbeats(); schedulePeriodicHeartbeatTask(); role = Role.Leader; leader = selfId; votesReceived = 0; memberConnector.getRegisteredMemberIds().forEach( memberId -> { updateMemberIdRelatedBookkeeping(memberId); }); } applyNoOperationCommand(); observer.electionWon(getCurrentTerm(), this); } private void updateMemberIdRelatedBookkeeping(MemberId memberId) { nextIndexes.put(memberId, new AtomicLong(log.getLastIndex() + 1)); matchIndexes.put(memberId, new AtomicLong(0)); replicationRetryTasks.putIfAbsent(memberId, new AtomicReference<ScheduledFuture<?>>(null)); scheduleAppendEntries(memberId); } private void removeMemberIdRelatedBookkeeping(MemberId memberId) { cancelAppendEntriesRetry(memberId); replicationRetryTasks.remove(memberId); indexesLock.writeLock().lock(); try { nextIndexes.remove(memberId); matchIndexes.remove(memberId); } finally { indexesLock.writeLock().unlock(); } } private void becomeFollower() { if (isStarted()) { cancelElectionTimeoutTask(); cancelPeriodicHeartbeatTask(); if (isVotingMember()) { scheduleHeartbeatMonitorTask(); } } synchronized (stateLock) { role = Role.Follower; votedFor = null; log.setVotedFor(null); votesReceived = 0; memberConnector.getRegisteredMemberIds().forEach( memberId -> { replicationRetryTasks.putIfAbsent(memberId, new AtomicReference<ScheduledFuture<?>>(null)); cancelTask(replicationRetryTasks.get(memberId).getAndSet(null)); }); memberConnector.unregisterAllCatchingUpServers(); cancelAllSnapshotTransfers(); replicationCompletableFutures.values().forEach(future -> future.complete(false)); replicationCompletableFutures.clear(); } } private boolean checkTermRecency(int term) { int comparisonResult = compareAndUpdateCurrentTerm(term); if (comparisonResult == -1) { return true; } if (comparisonResult == 1) { becomeFollower(); } return false; } @Override public void consumeAppendEntries(Member member, AppendEntries 
appendEntries) { boolean ignoreMessage = checkTermRecency(appendEntries.getTerm()); if (ignoreMessage) { return; } synchronized (stateLock) { if (appendEntries.hasLeaderId()) { MemberId otherLeader = MemberId.fromString(appendEntries.getLeaderId()); if (leader != null && !leader.equals(otherLeader) && role != Role.Follower) { becomeFollower(); } leader = otherLeader; } } try { if (appendEntries.hasInstallSnapshot()) { processInstallSnapshot(appendEntries.getInstallSnapshot()); } else if (appendEntries.getLogEntriesCount() == 0) { processHeartbeat(appendEntries); log.commit(appendEntries.getLeaderCommitIndex(), stateMachine); } else { boolean successFlag = processAppendEntries(appendEntries); long indexOfFirstNewEntry = appendEntries.getPrevLogIndex() + 1; long indexOfLastNewEntry = appendEntries.getPrevLogIndex() + appendEntries.getLogEntriesCount(); if (successFlag) { log.commit(Math.min(appendEntries.getLeaderCommitIndex(), indexOfLastNewEntry), stateMachine); } else { log.commit(appendEntries.getLeaderCommitIndex(), stateMachine); } sendAppendEntriesResponse(member, getAppendEntriesResponse(indexOfFirstNewEntry, indexOfLastNewEntry, successFlag)); } logCompaction(); } catch (LogException e) { logger.error(severe, "Member: {}. Term: {}. consumeAppendEntries failed.", member.getMemberId(), getCurrentTerm(), e); } } private void writeSnapshotAndCompactLog() throws FileNotFoundException, IOException, LogException { if (!(stateMachine instanceof Snapshotable)) { logger.info("{} doesn't support snapshots. ", stateMachine); return; } Snapshotable snapshotable = (Snapshotable) stateMachine; snapshotable.begin(); long commitIndex = log.getCommitIndex(); LogEntry logEntry = log.get(commitIndex); int term = logEntry.getTerm(); SnapshotDescriptor descriptor = new SnapshotDescriptor(configuration.getSnapshotFolderPath(), commitIndex, term); try (OutputStream outputStream = descriptor.getOutputStream()) { logger.info("{} snapshot writing started. 
SnapshotDescriptor={}.", stateMachine, descriptor); long fileSize = snapshotable.write(outputStream); logger.info("{} snapshot writing finished. File size {} bytes.", stateMachine, fileSize); logger.info("{} compaction started.", log); log.truncate(commitIndex); logger.info("{} compaction finished.", log); snapshotSender.setSnapshotDescriptor(descriptor); } catch (Exception e) { logger.info("Error producing snapshot. SnapshotDescriptor={}.", descriptor, e); } finally { snapshotable.end(); } } private void logCompaction() throws LogException { if (configuration.getLogCompactionThreshold() == 0 || log.getCommitIndex() - log.getFirstIndex() < configuration.getLogCompactionThreshold()) { return; } logCompactionTask.updateAndGet(compactionTask -> { if (!compactionTask.isDone()) { return compactionTask; } return CompletableFuture.runAsync(() -> { try { writeSnapshotAndCompactLog(); } catch (Exception e) { logger.error(severe, "{} Snapshot/Log Compaction failed.", log, e); } }, eventLoop.get()); }); } private void sendAppendEntriesResponse(Member member, AppendEntriesResponse appendEntriesResponse) { RaftMessage responseMessage = RaftMessage.newBuilder() .setType(MessageType.AppendEntriesResponse) .setAppendEntriesResponse(appendEntriesResponse).build(); member.getChannel().writeAndFlush(responseMessage); } private AppendEntriesResponse getAppendEntriesResponse(long indexOfFirstNewEntry, long indexOfLastNewEntry, boolean successFlag) { AppendEntriesResponse.Builder appendEntriesResponse = AppendEntriesResponse.newBuilder(); return appendEntriesResponse .setSuccess(successFlag) .setTerm(getCurrentTerm()) .setStartIndex(indexOfFirstNewEntry) .setEndIndex(indexOfLastNewEntry) .setLogLength(log.getLastIndex()) .build(); } private void processInstallSnapshot(SnapshotDownloadRequest request) { synchronized (snapshotInstallLock) { if (snapshotReceiver == null || snapshotReceiver.isDone()) { final MemberId snapshotSenderMemberId = MemberId.fromString(request.getMemberId()); final 
String snapshotId = request.getSnapshotId(); final File fileName = new File(configuration.getSnapshotFolderPath(), snapshotId); try { snapshotReceiver = new SnapshotReceiver(channelPipelineInitializer, selfId, snapshotSenderMemberId, snapshotId, fileName, request.getSize(), (File file, Throwable e) -> { try { if (e != null) { throw new IllegalStateException(e); } loadFromSnapshot(new SnapshotDescriptor(file)); } catch (Exception e1) { logger.error(severe, "Member: {}. SnapshotId: {}. Installation failed.", snapshotSenderMemberId, snapshotId, e1); } }); } catch (Exception e) { logger.error(severe, "Member: {}. SnapshotId: {}. Receiver initialization failed.", snapshotSenderMemberId, snapshotId, e); } } } } private void processRaftCommands(List<LogEntry> logEntries) { List<LogEntry> raftCommands = logEntries.stream().filter(x -> x.getType() == LogEntryType.RaftProtocolCommand) .collect(Collectors.toList()); for (LogEntry le : raftCommands) { try { DynamicMembershipChangeCommand command = DynamicMembershipChangeCommand.parseFrom(le.getEntry().toByteArray()); MemberId memberId = MemberId.fromString(command.getMemberId()); if (command.getType() == CommandType.AddMember) { logger.info("Member: {}. Term: {}. Adding {} to the cluster.", selfId, getCurrentTerm(), memberId); if (!memberId.equals(selfId)) { memberConnector.register(memberId, memberConnectorObserver); memberConnector.connect(memberId); } } else { memberConnector.unregister(memberId); logger.info("Member: {}. Term: {}. 
Removing {} from from cluster.", selfId, getCurrentTerm(), memberId); } } catch (InvalidProtocolBufferException e) { logger.error("Error on processing raft command.", e); } } } private boolean processAppendEntries(AppendEntries appendEntries) throws LogException { List<LogEntry> logEntries = appendEntries.getLogEntriesList(); processRaftCommands(logEntries); return log.append(appendEntries.getPrevLogIndex(), appendEntries.getPrevLogTerm(), logEntries); } @Override public void consumeAppendEntriesResponse(Member member, AppendEntriesResponse appendEntriesResponse) { boolean ignoreMessage = checkTermRecency(appendEntriesResponse.getTerm()); if (ignoreMessage) { return; } if (!isLeader()) { return; } boolean isAccepted = appendEntriesResponse.getSuccess(); if (isAccepted) { processSuccessfulAppendEntriesResponse(member, appendEntriesResponse); } else { processFailedAppendEntriesResponse(member, appendEntriesResponse); } } private void processSuccessfulAppendEntriesResponse(Member member, AppendEntriesResponse appendEntriesResponse) { indexesLock.writeLock().lock(); try { AtomicLong nextIndexRef = nextIndexes.get(member.getMemberId()); AtomicLong matchIndexRef = matchIndexes.get(member.getMemberId()); if (nextIndexRef.get() != appendEntriesResponse.getStartIndex()) { return; } long newMatchIndex = appendEntriesResponse.getEndIndex(); long newNextIndex = appendEntriesResponse.getEndIndex() + 1; nextIndexRef.set(newNextIndex); matchIndexRef.set(newMatchIndex); if (newNextIndex <= log.getLastIndex()) { scheduleAppendEntries(member.getMemberId()); } else { cancelAppendEntriesRetry(member.getMemberId()); } } finally { indexesLock.writeLock().unlock(); } try { checkReplicationStatus(); } catch (LogException e) { logger.error(severe, "Member: {}. Term: {}. 
Replication check failed due to log exception.", member.getMemberId(), getCurrentTerm(), e); } } private void processFailedAppendEntriesResponse(Member member, AppendEntriesResponse appendEntriesResponse) { indexesLock.writeLock().lock(); try { AtomicLong nextIndexRef = nextIndexes.get(member.getMemberId()); if (nextIndexRef.get() == appendEntriesResponse.getStartIndex()) { nextIndexRef.set(appendEntriesResponse.getLogLength() + 1); } } finally { indexesLock.writeLock().unlock(); } scheduleAppendEntries(member.getMemberId()); } private void processHeartbeat(AppendEntries heartbeat) { observer.heartbeatReceived(); scheduleHeartbeatMonitorTask(); } private void cancelTask(ScheduledFuture<?> task) { if (task != null) { task.cancel(true); } } private void cancelHeartbeatMonitorTask() { cancelTask(newElectionInitiatorTask.get()); } private void cancelElectionTimeoutTask() { cancelTask(electionTimeoutMonitorTask.get()); } private void cancelPeriodicHeartbeatTask() { cancelTask(periodicHeartbeatTask.get()); } private void scheduleHeartbeatMonitorTask() { cancelTask(newElectionInitiatorTask.getAndSet(getNextElectionTask())); } private ScheduledFuture<?> getNextElectionTask() { observer.nextElectionScheduled(); final int timeout = random.nextInt(configuration.getElectionMaxTimeout() - configuration.getElectionMinTimeout()) + configuration.getElectionMinTimeout(); return eventLoop.get().schedule(() -> RaftImpl.this.heartbeatTimedout(), timeout, TimeUnit.MILLISECONDS); } private void scheduleElectionTimeoutTask() { final int timeout = random.nextInt(configuration.getElectionMaxTimeout() - configuration.getElectionMinTimeout()) + configuration.getElectionMinTimeout(); ScheduledFuture<?> prevTask = electionTimeoutMonitorTask.getAndSet( eventLoop.get().schedule(() -> RaftImpl.this.electionTimedout(), timeout, TimeUnit.MILLISECONDS)); cancelTask(prevTask); } private void schedulePeriodicHeartbeatTask() { ScheduledFuture<?> prevTask = periodicHeartbeatTask.getAndSet( 
eventLoop.get().scheduleAtFixedRate(() -> RaftImpl.this.scheduleSendHeartbeats(), 0, configuration.getHeartbeatInterval(), TimeUnit.MILLISECONDS)); cancelTask(prevTask); } private ApplyCommandResult applyNoOperationCommand() { return applyCommand(LogEntryType.NoOperationCommand, new byte[0]); } private long logAppend(LogEntry logEntry) { if (logEntry.getType() == LogEntryType.RaftProtocolCommand) { List<LogEntry> raftCommand = new ArrayList<>(); raftCommand.add(logEntry); processRaftCommands(raftCommand); } return log.append(logEntry); } private ApplyCommandResult applyCommand(LogEntryType type, byte[] command) { if (!isLeader()) { return new ApplyCommandResult(null, getLeaderMemberId()); } LogEntry logEntry = LogEntryFactory.create(getCurrentTerm(), type, command); ApplyCommandFuture applyCommandFuture = scheduleReplication(logAppend(logEntry)); synchronized (stateLock) { return new ApplyCommandResult(applyCommandFuture, getLeaderMemberId()); } } @Override public ApplyCommandResult applyCommand(byte[] command) { return applyCommand(LogEntryType.StateMachineCommand, command); } private ApplyCommandFuture scheduleReplication(long index) { ApplyCommandFuture applyCommandFuture = new ApplyCommandFuture(); replicationCompletableFutures.put(index, applyCommandFuture); for (MemberId memberId : memberConnector.getRegisteredMemberIds()) { long nextIndex = 0; indexesLock.readLock().lock(); try { AtomicLong nextIndexInt = nextIndexes.get(memberId); if (nextIndexInt == null) { throw new IllegalStateException("nextIndex is not initialized"); } nextIndex = nextIndexInt.get(); } finally { indexesLock.readLock().unlock(); } if (nextIndex <= log.getLastIndex()) { scheduleAppendEntries(memberId); } } return applyCommandFuture; } private void cancelAppendEntriesRetry(MemberId memberId) { cancelTask(replicationRetryTasks.get(memberId).get()); } private void scheduleAppendEntriesRetry(MemberId memberId) { try { observer.appendEntriesRetryScheduled(memberId); ScheduledFuture<?> future 
= eventLoop.get().schedule(() -> this.scheduleAppendEntries(memberId), configuration.getReplicationRetryInterval(), TimeUnit.MILLISECONDS); cancelTask(replicationRetryTasks.get(memberId).getAndSet(future)); } catch (RejectedExecutionException e) { logger.error(severe, "Member: {}. Term: {}. Scheduling replication retry failed.", memberId, getCurrentTerm(), e); } } private void scheduleAppendEntries(MemberId memberId) { scheduleAppendEntriesRetry(memberId); Member member = memberConnector.getActiveMember(memberId); if (member != null) { try { scheduleSendMessageToMember(member, getAppendEntries(memberId)); } catch (LogException e) { logger.error(severe, "Member: {}. Term: {}. Could not schedule replication.", memberId, getCurrentTerm(), e); } } } private void checkReplicationStatus() throws LogException { Iterator<Entry<Long, ApplyCommandFuture>> it = replicationCompletableFutures.entrySet().iterator(); while (it.hasNext()) { Entry<Long, ApplyCommandFuture> entry = it.next(); long currIndex = entry.getKey(); int replicationCount = 0; for (AtomicLong matchIndex : matchIndexes.values()) { indexesLock.readLock().lock(); try { if (matchIndex.get() >= currIndex) { replicationCount++; } } finally { indexesLock.readLock().unlock(); } } if (replicationCount >= getMajority()) { if (log.get(currIndex).getTerm() == getCurrentTerm()) { log.commit(entry.getKey(), stateMachine); logCompaction(); } entry.getValue().complete(true); it.remove(); } } } private int getMajority() { return (memberConnector.getVotingMembersCount() + 1) / 2 + 1; } private void electionTimedout() { observer.electionTimedout(); logger.debug("Member: {}. Term: {}. 
Election timedout.", selfId, getCurrentTerm()); initiateElection(); } private void heartbeatTimedout() { observer.heartbeatTimedout(); initiateElection(); } private void initiateElection() { cancelHeartbeatMonitorTask(); scheduleElectionTimeoutTask(); synchronized (stateLock) { role = Role.Candidate; ++currentTerm; log.setTerm(currentTerm); votedFor = null; log.setVotedFor(votedFor); leader = null; votesReceived = 0; logger.debug("Member: {}. Term: {}. New election.", selfId, getCurrentTerm()); } synchronized (stateLock) { if (votedFor == null) { ++votesReceived; votedFor = selfId; logger.debug("Member: {}. Term: {}. Votes Received: {}. Voted for itself.", selfId, getCurrentTerm(), votesReceived); } else { return; } } scheduleSendVoteRequests(); } private void scheduleSendMessageToEachMember(RaftMessage msg) { memberConnector.getAllActiveMembers().forEach(member -> scheduleSendMessageToMember(member, msg)); } private void scheduleSendMessageToEachVotingMember(RaftMessage msg) { memberConnector.getAllActiveVotingMembers().forEach(member -> scheduleSendMessageToMember(member, msg)); } private ScheduledFuture<?> scheduleSendMessageToMember(Member member, RaftMessage msg) { try { return member.getChannel().eventLoop().schedule(() -> RaftImpl.this.sendMessage(member, msg), 0, TimeUnit.MILLISECONDS); } catch (RejectedExecutionException e) { logger.error("Member: {}. Term: {}. Message type: {}. Message sending failed.", member, getCurrentTerm(), msg.getType(), e); } return null; } private void scheduleSendVoteRequests() { try { scheduleSendMessageToEachVotingMember(getRequestVoteMessage()); } catch (LogException e) { logger.error(severe, "Term: {}. 
scheduleSendVoteRequests failed.", getCurrentTerm(), e); } } private void scheduleSendHeartbeats() { scheduleSendMessageToEachMember(getHeartbeatMessage()); } private RaftMessage getAppendEntries(MemberId memberId) throws LogException { long nextIndex = 0; indexesLock.readLock().lock(); try { nextIndex = nextIndexes.get(memberId).get(); } finally { indexesLock.readLock().unlock(); } AppendEntries.Builder req = AppendEntries.newBuilder(); MemberId leaderMemberId = getLeaderMemberId(); if (log.getFirstIndex() > nextIndex) { SnapshotDescriptor snapshot = snapshotSender.getSnapshotDescriptor(); req.setTerm(getCurrentTerm()) .setLeaderId(leaderMemberId != null ? leaderMemberId.toString() : null) .setInstallSnapshot(SnapshotDownloadRequest.newBuilder() .setSnapshotId(snapshot.getSnapshotId()) .setSize(snapshot.getSize()) .setMemberId(snapshotSender.getMemberId().toString())); } else { List<LogEntry> logEntries = log.get(nextIndex, configuration.getMaxNumberOfLogEntriesPerRequest()); if (logEntries.size() == 0) { return getHeartbeatMessage(); } long prevLogIndex = nextIndex - 1; int prevLogTerm = log.get(prevLogIndex).getTerm(); long commitIndex = log.getCommitIndex(); req.setTerm(getCurrentTerm()) .setPrevLogIndex(prevLogIndex) .setPrevLogTerm(prevLogTerm) .setLeaderCommitIndex(commitIndex) .setLeaderId(leaderMemberId != null ? 
leaderMemberId.toString() : null); req.addAllLogEntries(logEntries); } RaftMessage requestVote = RaftMessage.newBuilder() .setType(MessageType.AppendEntries) .setAppendEntries(req) .build(); return requestVote; } private RaftMessage getRequestVoteMessage() throws LogException { RequestVote req = RequestVote.newBuilder() .setTerm(getCurrentTerm()) .setCandidateId(selfId.toString()) .setLastLogIndex(log.getLastIndex()) .setLastLogTerm(log.getLast().getTerm()).build(); RaftMessage requestVote = RaftMessage.newBuilder() .setType(MessageType.RequestVote) .setRequestVote(req) .build(); return requestVote; } private RaftMessage getHeartbeatMessage() { AppendEntries.Builder req = AppendEntries.newBuilder(); synchronized (stateLock) { req.setTerm(getCurrentTerm()); if (leader != null) { req.setLeaderId(leader.toString()); } } RaftMessage requestVote = RaftMessage.newBuilder() .setType(MessageType.AppendEntries) .setAppendEntries(req.setLeaderCommitIndex(log.getCommitIndex())) .build(); return requestVote; } private void sendMessage(Member member, RaftMessage req) { Channel memberChannel = member.getChannel(); if (memberChannel.isActive()) { if (req.getType() == RaftMessage.MessageType.RequestVote) { logger.debug("Member: {}. Term: {}. 
Vote request sent to: {}.", selfId, getCurrentTerm(), member.getMemberId()); } memberChannel.writeAndFlush(req); } } @Override public Role getRole() { synchronized (stateLock) { return role; } } @Override public int getCurrentTerm() { synchronized (stateLock) { return currentTerm; } } @Override public boolean isStarted() { synchronized (stateLock) { return started; } } @Override public Configuration getConfiguration() { return configuration; } @Override public MemberId getMemberId() { return selfId; } private boolean isLeader() { return getRole() == Role.Leader; } @Override public Log getLog() { return log; } @Override public AddCatchingUpMemberResult addCatchingUpMember(MemberId memberId) { if (!isLeader()) { return new AddCatchingUpMemberResult(false, getLeaderMemberId()); } if (memberConnector.getRegisteredMemberIds().contains(memberId)) { throw new IllegalArgumentException("Member has already been added"); } memberConnector.registerAsCatchingUpMember(memberId, memberConnectorObserver); memberConnector.connect(memberId); updateMemberIdRelatedBookkeeping(memberId); return new AddCatchingUpMemberResult(true, getLeaderMemberId()); } @Override public RemoveCatchingUpMemberResult removeCatchingUpMember(MemberId memberId) { if (!memberConnector.isCatchingUpMember(memberId)) { throw new IllegalArgumentException("Unknown member"); } memberConnector.unregister(memberId); removeMemberIdRelatedBookkeeping(memberId); return new RemoveCatchingUpMemberResult(true, getLeaderMemberId()); } private ApplyCommandResult addRemoveMemberDynamically(CommandType type, MemberId memberId) { return applyCommand(LogEntryType.RaftProtocolCommand, DynamicMembershipChangeCommand.newBuilder() .setType(type) .setMemberId(memberId.toString()) .build().toByteArray()); } @Override public ApplyCommandResult addMemberDynamically(MemberId memberId) { if (!isLeader()) { return new ApplyCommandResult(null, getLeaderMemberId()); } if (!memberConnector.getRegisteredMemberIds().contains(memberId)) { throw 
new IllegalArgumentException("Member must be added as a catching up member first."); } synchronized (dynamicMembershipChangeLock) { if (dynamicMembershipChangeInProgress != null && dynamicMembershipChangeInProgress.getResult() != null && !dynamicMembershipChangeInProgress.getResult().isDone()) { throw new IllegalStateException("Member adding is in progress."); } dynamicMembershipChangeInProgress = addRemoveMemberDynamically(CommandType.AddMember, memberId); return dynamicMembershipChangeInProgress; } } @Override public ApplyCommandResult removeMemberDynamically(MemberId memberId) { if (!isLeader()) { return new ApplyCommandResult(null, getLeaderMemberId()); } if (selfId.equals(memberId) && isLeader()) { throw new IllegalArgumentException("Can't remove leader. Please initiate leader stepdown first."); } synchronized (dynamicMembershipChangeLock) { if (dynamicMembershipChangeInProgress != null && dynamicMembershipChangeInProgress.getResult() != null && !dynamicMembershipChangeInProgress.getResult().isDone()) { throw new IllegalStateException("Member removing is in progress."); } dynamicMembershipChangeInProgress = addRemoveMemberDynamically(CommandType.RemoveMember, memberId); return dynamicMembershipChangeInProgress; } } @Override public boolean isVotingMember() { return !catchingUpMember.get(); } @Override public boolean isCatchingUpMember() { return catchingUpMember.get(); } @Override public void becomeCatchingUpMember() { catchingUpMember.set(true); cancelHeartbeatMonitorTask(); becomeFollower(); } @Override public void becomeVotingMember() { catchingUpMember.set(false); scheduleHeartbeatMonitorTask(); } public MemberId getLeaderMemberId() { synchronized (stateLock) { return leader; } } }
package com.secret.fastalign.main;

import jaligner.Alignment;
import jaligner.SmithWatermanGotoh;
import jaligner.matrix.MatrixLoader;
import jaligner.matrix.MatrixLoaderException;

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import java.util.logging.Level;
import java.util.logging.Logger;

import com.secret.fastalign.general.FastaData;
import com.secret.fastalign.general.Sequence;
import com.secret.fastalign.utils.IntervalTree;
import com.secret.fastalign.utils.Utils;

/**
 * Estimates PPV/Sensitivity/Specificity of an all-vs-all overlapper by random
 * sampling (or an exhaustive O(n^2) pass when the trial count is 0). Ground
 * truth comes from a blasr M4 mapping of the same sequences to a reference:
 * two sequences "truly" overlap when their reference intervals intersect.
 */
public class EstimateROC {
	// Minimum Smith-Waterman identity for a DP-verified overlap to count as true.
	private static final double MIN_IDENTITY = 0.60;
	private static final int DEFAULT_NUM_TRIALS = 10000;
	private static final int DEFAULT_MIN_OVL = 500;
	private static final boolean DEFAULT_DO_DP = false;
	// Toggled from args[6] in main(); enables per-trial diagnostics on stderr.
	private static boolean DEBUG = false;

	/** A closed integer interval [first, second] in reference coordinates. */
	private static class Pair {
		public int first;
		public int second;

		public Pair(int startInRef, int endInRef) {
			this.first = startInRef;
			this.second = endInRef;
		}

		/** Interval length, inclusive of both endpoints. */
		@SuppressWarnings("unused")
		public int size() {
			return (Math.max(this.first, this.second) - Math.min(this.first, this.second) + 1);
		}
	}

	/** One reported overlap between reads id1 (A) and id2 (B), with per-read coordinates. */
	private static class Overlap {
		public int afirst;
		public int bfirst;
		public int asecond;
		public int bsecond;
		public boolean isFwd;
		public String id1;
		public String id2;

		public Overlap() {
			// do nothing
		}

		@Override
		public String toString() {
			// NOTE(review): the B-range punctuation looks off — "), " is appended
			// before bsecond instead of between bfirst and bsecond. Diagnostic-only.
			StringBuilder stringBuilder = new StringBuilder();
			stringBuilder.append("Overlap Aid=");
			stringBuilder.append(this.id1);
			stringBuilder.append(" (");
			stringBuilder.append(this.afirst);
			stringBuilder.append(", ");
			stringBuilder.append(this.asecond);
			stringBuilder.append("), Bid=");
			stringBuilder.append(this.id2);
			stringBuilder.append(" (");
			stringBuilder.append(this.bfirst);
			stringBuilder.append("), ");
			stringBuilder.append(this.bsecond);
			return stringBuilder.toString();
		}
	}

	// Shared RNG; seeded with the fixed 'seed' below (runs are reproducible
	// because the time-based reseeding block in the constructor is disabled).
	private static Random generator = null;
	public static int seed = 0;

	// chromosome name -> interval tree of sequence indices mapped onto it.
	private HashMap<String, IntervalTree<Integer>> clusters = new HashMap<String, IntervalTree<Integer>>();
	// sequence id -> chromosome it maps to (best-scoring mapping wins).
	private HashMap<String, String> seqToChr = new HashMap<String, String>(10000000);
	// sequence id -> blasr score of its retained mapping (lower is better, see processReference).
	private HashMap<String, Integer> seqToScore = new HashMap<String, Integer>(10000000);
	// sequence id -> reference interval of its retained mapping.
	private HashMap<String, Pair> seqToPosition = new HashMap<String, Pair>(10000000);
	// dense index <-> sequence id, for uniform random picking.
	private HashMap<Integer, String> seqToName = new HashMap<Integer, String>(10000000);
	private HashMap<String, Integer> seqNameToIndex = new HashMap<String, Integer>(10000000);
	// canonical "idA_idB" names of every loaded overlap (deduplicated).
	private HashSet<String> ovlNames = new HashSet<String>(10000000*10);
	private HashMap<String, Overlap> ovlInfo = new HashMap<String, Overlap>(10000000*10);
	// dense index -> overlap name, for uniform random picking in estimatePPV().
	private HashMap<Integer, String> ovlToName = new HashMap<Integer, String>(10000000*10);

	private int minOvlLen = DEFAULT_MIN_OVL;
	private int numTrials = DEFAULT_NUM_TRIALS;
	private boolean doDP = false;

	// Confusion-matrix tallies accumulated by the estimate* methods.
	private long tp = 0;
	private long fn = 0;
	private long tn = 0;
	private long fp = 0;
	private double ppv = 0;
	// Raw read sequences (1-based ids map to index id-1); only needed for DP verification.
	private Sequence[] dataSeq = null;

	/** Prints the command-line contract to stderr. */
	public static void printUsage() {
		System.err
				.println("This program uses random sampling to estimate PPV/Sensitivity/Specificity");
		System.err.println("The program requires 2 arguments:");
		System.err
				.println("\t1. A blasr M4 file mapping sequences to a reference (or reference subset)");
		System.err
				.println("\t2. All-vs-all mappings of same sequences in CA ovl format");
		System.err
				.println("\t3. Fasta sequences");
		System.err.println("\t4. Minimum overlap length (default: " + DEFAULT_MIN_OVL);
		System.err.println("\t5. Number of random trials, 0 means full compute (default : " + DEFAULT_NUM_TRIALS);
		System.err.println("\t6. Compute DP during PPV true/false");
		System.err.println("\t7. Debug output true/false");
	}

	/**
	 * Entry point. args: [0]=M4 reference mapping, [1]=overlaps, [2]=fasta,
	 * optional [3]=min overlap, [4]=trials (0 = exhaustive), [5]=do DP, [6]=debug.
	 */
	public static void main(String[] args) throws Exception {
		if (args.length < 3) {
			printUsage();
			System.exit(1);
		}
		EstimateROC g = null;
		// Pick the constructor matching how many optional arguments were given.
		if (args.length > 5) {
			g = new EstimateROC(Integer.parseInt(args[3]),
					Integer.parseInt(args[4]), Boolean.parseBoolean(args[5]));
		} else if (args.length > 4) {
			g = new EstimateROC(Integer.parseInt(args[3]),
					Integer.parseInt(args[4]));
		} else if (args.length > 3) {
			g = new EstimateROC(Integer.parseInt(args[3]));
		} else {
			g = new EstimateROC();
		}
		if (args.length > 6) {
			DEBUG = Boolean.parseBoolean(args[6]);
		}

		System.err.println("Running, reference: " + args[0] + " matches: " + args[1]);
		System.err.println("Number trials: " + (g.numTrials == 0 ? "all" : g.numTrials));
		System.err.println("Minimum ovl: " + g.minOvlLen);

		// load and cluster reference
		System.err.print("Loading reference...");
		long startTime = System.nanoTime();
		long totalTime = startTime;
		g.processReference(args[0]);
		System.err.println("done " + (System.nanoTime() - startTime) * 1.0e-9 + "s.");

		// load fasta
		System.err.print("Loading fasta...");
		startTime = System.nanoTime();
		g.loadFasta(args[2]);
		System.err.println("done " + (System.nanoTime() - startTime) * 1.0e-9 + "s.");

		// load matches
		System.err.print("Loading matches...");
		startTime = System.nanoTime();
		g.processOverlaps(args[1]);
		System.err.println("done " + (System.nanoTime() - startTime) * 1.0e-9 + "s.");

		if (g.numTrials == 0) {
			// Exhaustive pairwise comparison instead of sampling.
			System.err.print("Computing full statistics O(" + g.seqToName.size() + "^2) operations!...");
			startTime = System.nanoTime();
			g.fullEstimate();
			System.err.println("done " + (System.nanoTime() - startTime) * 1.0e-9 + "s.");
		} else {
			System.err.print("Computing sensitivity...");
			startTime = System.nanoTime();
			g.estimateSensitivity();
			System.err.println("done " + (System.nanoTime() - startTime) * 1.0e-9 + "s.");

			// now estimate FP/TN by picking random match and checking reference
			// mapping
			System.err.print("Computing specificity...");
			startTime = System.nanoTime();
			g.estimateSpecificity();
			System.err.println("done " + (System.nanoTime() - startTime) * 1.0e-9 + "s.");

			// last but not least PPV, pick random subset of our matches and see what percentage are true
			System.err.print("Computing PPV...");
			startTime = System.nanoTime();
			g.estimatePPV();
			System.err.println("done " + (System.nanoTime() - startTime) * 1.0e-9 + "s.");
		}
		System.err.println("Total time: " + (System.nanoTime() - totalTime) * 1.0e-9 + "s.");

		System.out.println("Estimated sensitivity:\t"
				+ Utils.DECIMAL_FORMAT.format((double) g.tp / (double)(g.tp + g.fn)));
		System.out.println("Estimated specificity:\t"
				+ Utils.DECIMAL_FORMAT.format((double) g.tn / (double)(g.fp + g.tn)));
		System.out.println("Estimated PPV:\t "
				+ Utils.DECIMAL_FORMAT.format(g.ppv));
	}

	public EstimateROC() {
		this(DEFAULT_MIN_OVL, DEFAULT_NUM_TRIALS);
	}

	public EstimateROC(int minOvlLen) {
		this(minOvlLen, DEFAULT_NUM_TRIALS);
	}

	public EstimateROC(int minOvlLen, int numTrials) {
		this(minOvlLen, numTrials, DEFAULT_DO_DP);
	}

	@SuppressWarnings("unused")
	public EstimateROC(int minOvlLen, int numTrials, boolean doDP) {
		this.minOvlLen = minOvlLen;
		this.numTrials = numTrials;
		this.doDP = doDP;

		// Disabled time-based seed derivation: runs are deterministic with
		// seed 0 unless this block is re-enabled.
		if (false) {
			GregorianCalendar t = new GregorianCalendar();
			int t1 = t.get(Calendar.SECOND);
			int t2 = t.get(Calendar.MINUTE);
			int t3 = t.get(Calendar.HOUR_OF_DAY);
			int t4 = t.get(Calendar.DAY_OF_MONTH);
			int t5 = t.get(Calendar.MONTH);
			int t6 = t.get(Calendar.YEAR);
			seed = t6 + 65 * (t5 + 12 * (t4 + 31 * (t3 + 24 * (t2 + 60 * t1))));
		}

		generator = new Random(seed);
	}

	/** Canonical, order-independent key for the pair (id, id2). */
	private String getOvlName(String id, String id2) {
		return (id.compareTo(id2) <= 0 ? id + "_" + id2 : id2 + "_" + id);
	}

	/** Uniformly picks one of the sequences that mapped to the reference. */
	private String pickRandomSequence() {
		int val = generator.nextInt(this.seqToName.size());
		return this.seqToName.get(val);
	}

	/** Uniformly picks one of the loaded overlap names. */
	private String pickRandomMatch() {
		int val = generator.nextInt(this.ovlToName.size());
		return this.ovlToName.get(val);
	}

	/**
	 * Reference-coordinate overlap length between two sequences; exits the
	 * process if they map to different chromosomes.
	 */
	private int getOverlapSize(String id, String id2) {
		String chr = this.seqToChr.get(id);
		String chr2 = this.seqToChr.get(id2);
		Pair p1 = this.seqToPosition.get(id);
		Pair p2 = this.seqToPosition.get(id2);
		if (!chr.equalsIgnoreCase(chr2)) {
			System.err.println("Error: comparing wrong chromosomes betweeen sequences "
					+ id + " and sequence " + id2);
			System.exit(1);
		}
		return Utils.getRangeOverlap(p1.first, p1.second, p2.first, p2.second);
	}

	/**
	 * All sequences (other than id itself) whose reference interval overlaps
	 * id's by at least 'min' bases — the ground-truth neighbor set.
	 */
	private HashSet<String> getSequenceMatches(String id, int min) {
		String chr = this.seqToChr.get(id);
		Pair p1 = this.seqToPosition.get(id);
		List<Integer> intersect = this.clusters.get(chr).get(p1.first, p1.second);
		HashSet<String> result = new HashSet<String>();
		Iterator<Integer> it = intersect.iterator();
		while (it.hasNext()) {
			String id2 = this.seqToName.get(it.next());
			Pair p2 = this.seqToPosition.get(id2);
			String chr2 = this.seqToChr.get(id2);
			if (!chr.equalsIgnoreCase(chr2)) {
				System.err.println("Error: comparing wrong chromosomes betweeen sequences "
						+ id + " and sequence in its cluster " + id2);
				System.exit(1);
			}
			int overlap = Utils.getRangeOverlap(p1.first, p1.second, p2.first, p2.second);
			if (overlap >= min && !id.equalsIgnoreCase(id2)) {
				result.add(id2);
			}
		}
		return result;
	}

	/**
	 * Parses one overlap record. 6/7 columns = CA ovl format (coordinates are
	 * derived from offsets and read lengths, so dataSeq must be loaded);
	 * 13 columns = blasr M4 format. Unparseable lines produce a warning and an
	 * Overlap with null ids, which the caller skips.
	 */
	@SuppressWarnings("unused")
	private Overlap getOverlapInfo(String line) {
		Overlap overlap = new Overlap();
		String[] splitLine = line.trim().split("\\s+");

		try {
			if (splitLine.length == 7 || splitLine.length == 6) {
				overlap.id1 = splitLine[0];
				overlap.id2 = splitLine[1];
				double score = Double.parseDouble(splitLine[5]) * 5;
				int aoffset = Integer.parseInt(splitLine[3]);
				int boffset = Integer.parseInt(splitLine[4]);
				// NOTE(review): this local is never copied into overlap.isFwd,
				// so CA-format overlaps always keep the default (false) — verify.
				boolean isFwd = ("N".equals(splitLine[2]));
				if (this.dataSeq != null) {
					int alen = this.dataSeq[Integer.parseInt(overlap.id1)-1].length();
					int blen = this.dataSeq[Integer.parseInt(overlap.id2)-1].length();
					overlap.afirst = Math.max(0, aoffset);
					overlap.asecond = Math.min(alen, alen + boffset);
					overlap.bfirst = -1*Math.min(0, aoffset);
					overlap.bsecond = Math.min(blen, blen - boffset);
				}
			} else if (splitLine.length == 13) {
				overlap.afirst = Integer.parseInt(splitLine[5]);
				overlap.asecond = Integer.parseInt(splitLine[6]);
				overlap.bfirst = Integer.parseInt(splitLine[9]);
				overlap.bsecond = Integer.parseInt(splitLine[10]);
				overlap.isFwd = (Integer.parseInt(splitLine[8]) == 0);
				if (!overlap.isFwd) {
					// Reverse-strand hit: flip B coordinates to forward orientation.
					overlap.bsecond = Integer.parseInt(splitLine[11]) - Integer.parseInt(splitLine[9]);
					overlap.bfirst = Integer.parseInt(splitLine[11]) - Integer.parseInt(splitLine[10]);
				}
				// Strip "/..." and ",..." decorations from the read names.
				overlap.id1 = splitLine[0];
				if (overlap.id1.indexOf("/") != -1) {
					overlap.id1 = overlap.id1.substring(0, splitLine[0].indexOf("/"));
				}
				if (overlap.id1.indexOf(",") != -1) {
					overlap.id1 = overlap.id1.split(",")[1];
				}
				overlap.id2 = splitLine[1];
				if (overlap.id2.indexOf(",") != -1) {
					overlap.id2 = overlap.id2.split(",")[1];
				}
			}
		} catch (NumberFormatException e) {
			System.err.println("Warning: could not parse input line: " + line + " " + e.getMessage());
		}
		return overlap;
	}

	/** Loads all read sequences into memory (needed for DP verification). */
	private void loadFasta(String file) throws IOException {
		FastaData data = new FastaData(file, 0);
		data.enqueueFullFile();
		this.dataSeq = data.toArray();
	}

	/**
	 * Loads the all-vs-all overlap file, keeping one record per unordered pair
	 * of reads and only pairs where both reads mapped to the reference.
	 */
	private void processOverlaps(String file) throws Exception {
		BufferedReader bf = new BufferedReader(new InputStreamReader(
				new FileInputStream(file)));

		String line = null;
		int counter = 0;
		while ((line = bf.readLine()) != null) {
			Overlap ovl = getOverlapInfo(line);

			String id = ovl.id1;
			String id2 = ovl.id2;
			if (id == null || id2 == null) {
				continue;
			}
			// Skip self-overlaps.
			if (id.equalsIgnoreCase(id2)) {
				continue;
			}
			// Skip overlaps involving reads with no reference mapping.
			if (this.seqToChr.get(id) == null || this.seqToChr.get(id2) == null) {
				continue;
			}
			String ovlName = getOvlName(id, id2);
			if (this.ovlNames.contains(ovlName)) {
				continue;
			}
			this.ovlNames.add(ovlName);
			this.ovlToName.put(counter, ovlName);
			this.ovlInfo.put(ovlName, ovl);
			counter++;
			if (counter % 100000 == 0) {
				System.err.println("Loaded " + counter);
			}
		}
		System.err.print("Processed " + this.ovlNames.size() + " overlaps");
		if (this.ovlNames.isEmpty()) {
			System.err
					.println("Error: No sequence matches to reference loaded!");
			System.exit(1);
		}
		bf.close();
	}

	/**
	 * We are parsing file of the format 18903/0_100 ref000001|lambda_NEB3011
	 * -462 96.9697 0 0 99 100 0 2 101 48502 254 21589/0_100
	 * ref000001|lambda_NEB3011 -500 100 0 0 100 100 1 4 104 48502 254
	 * 15630/0_100 ref000001|lambda_NEB3011 -478 98 0 0 100 100 0 5 105 48502
	 * 254
	 *
	 * Keeps each read's best-scoring (lowest blasr score) reference mapping,
	 * then builds one interval tree per chromosome for neighbor queries.
	 **/
	@SuppressWarnings("unused")
	private void processReference(String file) throws Exception {
		BufferedReader bf = new BufferedReader(new InputStreamReader(
				new FileInputStream(file)));

		String line = null;
		int counter = 0;
		while ((line = bf.readLine()) != null) {
			String[] splitLine = line.trim().split("\\s+");

			String id = splitLine[0];
			if (id.indexOf("/") != -1) {
				id = id.substring(0, splitLine[0].indexOf("/"));
			}
			if (id.indexOf(",") != -1) {
				id = id.split(",")[1];
			}
			int start = Integer.parseInt(splitLine[5]);
			int end = Integer.parseInt(splitLine[6]);
			int length = Integer.parseInt(splitLine[7]);
			int startInRef = Integer.parseInt(splitLine[9]);
			int endInRef = Integer.parseInt(splitLine[10]);
			int score = Integer.parseInt(splitLine[2]);
			String chr = splitLine[1];
			if (!this.clusters.containsKey(chr)) {
				this.clusters.put(chr, new IntervalTree<Integer>());
			}
			if (this.seqToPosition.containsKey(id)) {
				// Blasr scores are negative; a smaller value is a better hit.
				if (score < this.seqToScore.get(id)) {
					// replace
					this.seqToPosition.put(id, new Pair(startInRef, endInRef));
					this.seqToChr.put(id, chr);
					this.seqToScore.put(id, score);
				}
			} else {
				this.seqToPosition.put(id, new Pair(startInRef, endInRef));
				this.seqToChr.put(id, chr);
				this.seqToName.put(counter, id);
				this.seqNameToIndex.put(id, counter);
				this.seqToScore.put(id, score);
				counter++;
			}
		}
		bf.close();

		// Second pass: index every retained mapping in its chromosome's tree.
		for (String id : this.seqToPosition.keySet()) {
			String chr = this.seqToChr.get(id);
			if (!this.clusters.containsKey(chr)) {
				this.clusters.put(chr, new IntervalTree<Integer>());
			}
			Pair p = this.seqToPosition.get(id);
			this.clusters.get(chr).addInterval(p.first, p.second, this.seqNameToIndex.get(id));
		}

		System.err.print("Processed " + this.clusters.size() + " chromosomes, "
				+ this.seqToPosition.size() + " sequences matching ref");
		if (this.seqToPosition.isEmpty()) {
			System.err
					.println("Error: No sequence matches to reference loaded!");
			System.exit(1);
		}
	}

	/** True when the overlapper reported the pair (id, id2). */
	private boolean overlapExists(String id, String id2) {
		return this.ovlNames.contains(getOvlName(id, id2));
	}

	/** Tallies each ground-truth neighbor as TP (reported) or FN (missed). */
	private void checkMatches(String id, HashSet<String> matches) {
		for (String m : matches) {
			if (overlapExists(id, m)) {
				this.tp++;
			} else {
				this.fn++;
				if (DEBUG) {
					System.err.println("Overlap between sequences: " + id + ", "
							+ m + " is missing.");
				}
			}
		}
	}

	/**
	 * Verifies a reported overlap by Smith-Waterman alignment of the two
	 * clipped read regions; returns true when identity exceeds MIN_IDENTITY.
	 * Always false when DP verification is disabled.
	 */
	private boolean computeDP(String id, String id2) {
		if (this.doDP == false) {
			return false;
		}
		// Silence jaligner's chatty loggers before aligning.
		Logger logger = Logger.getLogger(SmithWatermanGotoh.class.getName());
		logger.setLevel(Level.OFF);
		logger = Logger.getLogger(MatrixLoader.class.getName());
		logger.setLevel(Level.OFF);

		Overlap ovl = this.ovlInfo.get(getOvlName(id, id2));
		jaligner.Sequence s1 = new jaligner.Sequence(this.dataSeq[Integer.parseInt(ovl.id1)-1].toString().substring(ovl.afirst, ovl.asecond));
		jaligner.Sequence s2 = null;
		if (ovl.isFwd) {
			s2 = new jaligner.Sequence(this.dataSeq[Integer.parseInt(ovl.id2)-1].toString().substring(ovl.bfirst, ovl.bsecond));
		} else {
			s2 = new jaligner.Sequence(this.dataSeq[Integer.parseInt(ovl.id2)-1].getReverseCompliment().toString().substring(ovl.bfirst, ovl.bsecond));
		}
		Alignment alignment;
		try {
			alignment = SmithWatermanGotoh.align(s1, s2, MatrixLoader.load("IDENTITY"), 2f, 1f);
		} catch (MatrixLoaderException e) {
			return false;
		}
		return ((double)alignment.getSimilarity()/s1.length() > MIN_IDENTITY);
	}

	private void estimateSensitivity() {
		// we estimate TP/FN by randomly picking a sequence, getting its
		// cluster, and checking our matches
		for (int i = 0; i < this.numTrials; i++) {
			// pick cluster
			String id = pickRandomSequence();
			HashSet<String> matches = getSequenceMatches(id, this.minOvlLen);
			if (DEBUG) {
				System.err.println("Estimated sensitivity trial #" + i + " " + id + " matches " + matches);
			}
			checkMatches(id, matches);
		}
	}

	private void estimateSpecificity() {
		// we estimate FP/TN by randomly picking two sequences
		for (int i = 0; i < this.numTrials; i++) {
			// pick cluster
			String id = pickRandomSequence();
			String other = pickRandomSequence();
			while (id.equalsIgnoreCase(other)) {
				other = pickRandomSequence();
			}
			HashSet<String> matches = getSequenceMatches(id, 0);
			// FP: reported but not a ground-truth neighbor; TN: neither.
			if (overlapExists(id, other)) {
				if (!matches.contains(other)) {
					this.fp++;
				}
			} else {
				if (!matches.contains(other)) {
					this.tn++;
				}
			}
		}
	}

	/**
	 * Samples reported overlaps of at least minOvlLen and counts the fraction
	 * confirmed by the reference mapping (or by DP when enabled).
	 * NOTE(review): the while-loop never terminates if no overlap reaches
	 * minOvlLen, and ovlName can never be null afterwards — the null branch
	 * appears unreachable. Verify against intent before changing.
	 */
	private void estimatePPV() {
		int numTP = 0;
		for (int i = 0; i < this.numTrials; i++) {
			int ovlLen = 0;
			String[] ovl = null;
			String ovlName = null;
			while (ovlLen < this.minOvlLen) {
				// pick an overlap
				ovlName = pickRandomMatch();
				Overlap o = this.ovlInfo.get(ovlName);
				ovlLen = Utils.getRangeOverlap(o.afirst, o.asecond, o.bfirst, o.bsecond);
			}
			if (ovlName == null) {
				System.err.println("Could not find any computed overlaps > " + this.minOvlLen);
				System.exit(1);
			} else {
				ovl = ovlName.split("_");
				String id = ovl[0];
				String id2 = ovl[1];
				HashSet<String> matches = getSequenceMatches(id, 0);
				if (matches.contains(id2)) {
					numTP++;
				} else {
					if (computeDP(id, id2)) {
						numTP++;
					} else {
						if (DEBUG) {
							System.err.println("Overlap between sequences: " + id
									+ ", " + id2 + " is not correct.");
						}
					}
				}
			}
		}

		// now our formula for PPV. Estimate percent of our matches which are true
		this.ppv = (double)numTP / (double)this.numTrials;
	}

	/** Exhaustive O(n^2) pass over all sequence pairs; fills tp/fp/tn/fn and ppv exactly. */
	@SuppressWarnings("cast")
	private void fullEstimate() {
		for (int i = 0; i < this.seqToName.size(); i++) {
			String id = this.seqToName.get(i);
			for (int j = i+1; j < this.seqToName.size(); j++) {
				String id2 = this.seqToName.get(j);
				if (id == null || id2 == null) {
					continue;
				}
				HashSet<String> matches = getSequenceMatches(id, 0);
				if (!overlapExists(id, id2)) {
					if (!matches.contains(id2)) {
						this.tn++;
					} else if (getOverlapSize(id, id2) > this.minOvlLen) {
						this.fn++;
					}
				} else {
					if (matches.contains(id2)) {
						this.tp++;
					} else {
						if (computeDP(id, id2)) {
							this.tp++;
						} else {
							this.fp++;
						}
					}
				}
			}
		}
		this.ppv = (double) this.tp / ((double)this.tp+(double)this.fp);
	}
}
package com.speedledger.measure.jenkins;

import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Map;

/**
 * Shared constants for build serialization.
 * Package-private: Java allows only one public top-level class per file,
 * and {@link Build} is the primary type here.
 */
class BuildData {
    // ISO 8601 date format used to render build timestamps.
    // NOTE(review): SimpleDateFormat is not thread-safe — guard concurrent
    // access or migrate to java.time.format.DateTimeFormatter.
    static final DateFormat DATE_FORMATTER = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ");
}

/**
 * Jenkins build.
 */
public class Build {

    /** ISO 8601 formatted timestamp of the build (see {@link BuildData#DATE_FORMATTER}). */
    private String timestamp;
    private int number;
    private String jobName;
    private String result;
    private long startTime;
    private long duration;
    private Map<String, String> environment;

    public Build() {
    }

    public Build(String timestamp, int number, String jobName, String result,
                 long startTime, long duration, Map<String, String> environment) {
        this.timestamp = timestamp;
        this.number = number;
        this.jobName = jobName;
        this.result = result;
        this.startTime = startTime;
        this.duration = duration;
        this.environment = environment;
    }

    public String getJobName() {
        return jobName;
    }

    public void setJobName(String jobName) {
        this.jobName = jobName;
    }

    public int getNumber() {
        return number;
    }

    public void setNumber(int number) {
        this.number = number;
    }

    public String getResult() {
        return result;
    }

    public void setResult(String result) {
        this.result = result;
    }

    public long getStartTime() {
        return startTime;
    }

    public void setStartTime(long startTime) {
        this.startTime = startTime;
    }

    public long getDuration() {
        return duration;
    }

    public void setDuration(long duration) {
        this.duration = duration;
    }

    public Map<String, String> getEnvironment() {
        return environment;
    }

    public void setEnvironment(Map<String, String> environment) {
        this.environment = environment;
    }

    /**
     * Sets the timestamp from a {@link Calendar}, formatted as ISO 8601.
     * The constant must be qualified with its declaring class — the original
     * unqualified reference did not compile.
     */
    public void setTimestamp(Calendar timestamp) {
        this.timestamp = BuildData.DATE_FORMATTER.format(timestamp.getTime());
    }

    @Override
    public String toString() {
        return "Build{" +
                "@timestamp" + timestamp +
                ", number=" + number +
                ", jobName='" + jobName + '\'' +
                ", result='" + result + '\'' +
                ", startTime=" + startTime +
                ", duration=" + duration +
                ", environment=" + environment +
                '}';
    }
}
package com.speedledger.measure.jenkins;

import java.util.Map;

/**
 * Jenkins build.
 */
public class Build {

    private int number;
    private String jobName;
    private String result;
    private long startTime;
    private long duration;
    private Map<String, String> environment;
    private Map<Object, Object> systemProperties;

    public Build() {
    }

    /**
     * Creates a fully populated build record.
     * The original constructor contained a self-assignment of
     * {@code systemProperties} (no such parameter exists) and an unterminated
     * assignment to a nonexistent {@code timestamp} field that did not
     * compile; both lines are removed. {@code systemProperties} stays unset
     * here and can be supplied via {@link #setSystemProperties(Map)}.
     */
    public Build(int number, String jobName, String result, long startTime,
                 long duration, Map<String, String> environment) {
        this.number = number;
        this.jobName = jobName;
        this.result = result;
        this.startTime = startTime;
        this.duration = duration;
        this.environment = environment;
    }

    public String getJobName() {
        return jobName;
    }

    public void setJobName(String jobName) {
        this.jobName = jobName;
    }

    public int getNumber() {
        return number;
    }

    public void setNumber(int number) {
        this.number = number;
    }

    public String getResult() {
        return result;
    }

    public void setResult(String result) {
        this.result = result;
    }

    public long getStartTime() {
        return startTime;
    }

    public void setStartTime(long startTime) {
        this.startTime = startTime;
    }

    public long getDuration() {
        return duration;
    }

    public void setDuration(long duration) {
        this.duration = duration;
    }

    public Map<String, String> getEnvironment() {
        return environment;
    }

    public void setEnvironment(Map<String, String> environment) {
        this.environment = environment;
    }

    public Map<Object, Object> getSystemProperties() {
        return systemProperties;
    }

    public void setSystemProperties(Map<Object, Object> systemProperties) {
        this.systemProperties = systemProperties;
    }

    @Override
    public String toString() {
        // NOTE(review): systemProperties is intentionally omitted here to
        // preserve the original output format.
        return "Build{" +
                "number=" + number +
                ", jobName='" + jobName + '\'' +
                ", result='" + result + '\'' +
                ", startTime=" + startTime +
                ", duration=" + duration +
                ", environment=" + environment +
                '}';
    }
}
package com.tinkerpop.gremlin.pipes; import com.tinkerpop.gremlin.pipes.util.Table; import com.tinkerpop.pipes.AbstractPipe; import com.tinkerpop.pipes.sideeffect.SideEffectPipe; import com.tinkerpop.pipes.util.AsPipe; import groovy.lang.Closure; import java.util.ArrayList; import java.util.Collection; import java.util.List; public class TablePipe<S> extends AbstractPipe<S, S> implements SideEffectPipe<S, Table> { private Table table; private final Closure[] closures; private int currentClosure; private final List<AsPipe> asPipes = new ArrayList<AsPipe>(); private final boolean doClosures; public TablePipe(final Table table, final Collection<String> columnNames, final GremlinPipeline pipeline, final Closure... closures) { this.table = table; this.closures = closures; if (this.doClosures = this.closures.length > 0) currentClosure = 0; final List<String> tempNames = new ArrayList<String>(); for (final AsPipe asPipe : (List<AsPipe>) pipeline.getAsPipes()) { final String columnName = asPipe.getName(); if (null == columnNames || columnNames.contains(columnName)) { tempNames.add(columnName); this.asPipes.add(asPipe); } } if (tempNames.size() > 0) table.setColumnNames(tempNames.toArray(new String[tempNames.size()])); } public Table getSideEffect() { return this.table; } public S processNextStart() { final S s = this.starts.next(); final List row = new ArrayList(); for (final AsPipe asPipe : this.asPipes) { if (doClosures) { row.add(this.closures[currentClosure++ % closures.length].call(asPipe.getCurrentEnd())); } else { row.add(asPipe.getCurrentEnd()); } } this.table.addRow(row); return s; } public void reset() { this.table = new Table(); this.currentClosure = 0; super.reset(); } }
package com.xing.qa.selenium.grid.hub;

import org.json.JSONException;
import org.json.JSONObject;
import org.openqa.grid.internal.Registry;
import org.openqa.grid.internal.RemoteProxy;
import org.openqa.grid.web.Hub;
import org.openqa.grid.web.servlet.RegistryBasedServlet;
import org.openqa.selenium.Capabilities;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.InputStream;
import java.io.Writer;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Console information and more as JSON.
 *
 * @author Jens Hausherr (jens.hausherr@xing.com)
 */
public class Console extends RegistryBasedServlet {

    private final Logger log = Logger.getLogger(getClass().getName());

    // Selenium core version/revision read from VERSION.txt on the classpath;
    // either may remain null when the file is missing or incomplete.
    private String coreVersion;
    private String coreRevision;

    public Console() {
        this(null);
    }

    public Console(Registry registry) {
        super(registry);
        getVersion();
    }

    /**
     * Routes GET requests: "/requests" returns the pending-session view,
     * anything else the full hub status. JSON serialization failures are
     * reported as a 500 with a JSON error body.
     */
    @Override
    protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        try {
            if ("/requests".equals(req.getPathInfo())) {
                sendJson(pendingRequests(), req, resp);
            } else {
                sendJson(status(), req, resp);
            }
        } catch (JSONException je) {
            resp.setContentType("application/json");
            resp.setCharacterEncoding("UTF-8");
            resp.setStatus(500);
            JSONObject error = new JSONObject();
            try {
                error.put("message", je.getMessage());
                error.put("location", je.getStackTrace());
                error.write(resp.getWriter());
            } catch (JSONException e1) {
                log.log(Level.WARNING, "Failed to write error response", e1);
            }
        }
    }

    /**
     * Writes the given JSON object as a 200 response.
     * Failures are logged, not rethrown — the response may already be
     * partially committed at that point.
     */
    protected void sendJson(JSONObject jo, HttpServletRequest req, HttpServletResponse resp) {
        resp.setContentType("application/json");
        resp.setCharacterEncoding("UTF-8");
        resp.setStatus(200);

        Writer w = null;
        try {
            w = resp.getWriter();
            jo.write(w);
        } catch (IOException e) {
            log.log(Level.WARNING, "Error writing response", e);
        } catch (JSONException e) {
            log.log(Level.WARNING, "Failed to serialize JSON response", e);
        }
    }

    /**
     * Summarizes the hub's new-session queue: pending count, the desired
     * capabilities of queued requests, and the configured wait timeout.
     */
    protected JSONObject pendingRequests() throws JSONException {
        JSONObject pending = new JSONObject();
        int p = getRegistry().getNewSessionRequestCount();
        int to = getRegistry().getNewSessionWaitTimeout();
        List<Map<String, ?>> desired;
        if (p > 0) {
            desired = new ArrayList<Map<String, ?>>();
            for (Capabilities c : getRegistry().getDesiredCapabilities()) {
                desired.add(c.asMap());
            }
        } else {
            desired = Collections.emptyList();
        }
        pending.put("pending", p);
        pending.put("requested_capabilities", desired);
        pending.put("timeout", to);
        return pending;
    }

    /**
     * Full hub status: version, configuration, endpoint details, a rendered
     * entry per registered node, and the pending-request summary.
     */
    protected JSONObject status() throws JSONException {
        JSONObject status = new JSONObject();
        Hub h = getRegistry().getHub();

        List<JSONObject> nodes = new ArrayList<JSONObject>();
        for (RemoteProxy proxy : getRegistry().getAllProxies()) {
            JSONRenderer beta = new WebProxyJsonRenderer(proxy);
            nodes.add(beta.render());
        }

        // NOTE(review): when VERSION.txt was unreadable this renders "nullnull";
        // preserved for output compatibility.
        status.put("version", coreVersion + coreRevision);
        status.put("configuration", getRegistry().getConfiguration().getAllParams());
        status.put("host", h.getHost());
        status.put("port", h.getPort());
        status.put("registration_url", h.getRegistrationURL());
        status.put("nodes", nodes);
        status.put("requests", pendingRequests());
        return status;
    }

    /**
     * Loads selenium.core.version/revision from VERSION.txt on the classpath.
     * Fix: the resource stream is now always closed (the original leaked it).
     */
    private void getVersion() {
        final Properties p = new Properties();
        InputStream stream = Thread.currentThread().getContextClassLoader().getResourceAsStream("VERSION.txt");
        if (stream == null) {
            log.severe("Couldn't determine version number");
            return;
        }
        try {
            p.load(stream);
        } catch (IOException e) {
            log.severe("Cannot load version from VERSION.txt" + e.getMessage());
        } finally {
            try {
                stream.close();
            } catch (IOException ignored) {
                // best-effort close of a classpath resource; nothing to recover
            }
        }
        coreVersion = p.getProperty("selenium.core.version");
        coreRevision = p.getProperty("selenium.core.revision");
        if (coreVersion == null) {
            log.severe("Cannot load selenium.core.version from VERSION.txt");
        }
    }
}
package de.alpharogroup.io;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Serializable;
import java.nio.charset.Charset;

/**
 * The class {@link StringOutputStream} collects all written bytes in memory
 * and exposes them as a {@link String} decoded with a configurable
 * {@link Charset} (UTF-8 when none is set).
 */
public class StringOutputStream extends OutputStream implements Serializable
{

	/**
	 * The serialVersionUID.
	 */
	private static final long serialVersionUID = 1L;

	/** The buffer holding every byte written to this stream. */
	private final ByteArrayOutputStream byteBuffer = new ByteArrayOutputStream();

	/** The charset used to decode {@link #toString()} and encode {@link #write(String)}. */
	private Charset charset;

	/**
	 * Sets the charset used by this {@link StringOutputStream} object.
	 * Replaces the former Lombok {@code @Setter} with an identical explicit
	 * method, removing the third-party annotation dependency.
	 *
	 * @param charset the charset to use; {@code null} resets to the UTF-8 default.
	 */
	public void setCharset(final Charset charset)
	{
		this.charset = charset;
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public void close() throws IOException
	{
		byteBuffer.close();
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public String toString()
	{
		return new String(byteBuffer.toByteArray(), getCharset());
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public void write(final byte[] b) throws IOException
	{
		byteBuffer.write(b);
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public void write(final byte[] b, final int off, final int len)
	{
		byteBuffer.write(b, off, len);
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public void write(final int b)
	{
		byteBuffer.write(b);
	}

	/**
	 * Gets the charset that is used for write in this {@link StringOutputStream} object.
	 * Note: if not set the default charset of "UTF-8" will be taken.
	 *
	 * @return the charset that is used for write in this {@link StringOutputStream} object.
	 */
	public Charset getCharset()
	{
		if (charset == null)
		{
			charset = Charset.forName("UTF-8");
		}
		return charset;
	}

	/**
	 * Write the given {@link String} object to this {@link StringOutputStream} object.
	 *
	 * @param value the string to write.
	 * @throws IOException Signals that an I/O exception has occurred.
	 */
	public void write(final String value) throws IOException
	{
		// Fix: encode with the stream's charset so toString() round-trips the
		// text; the original used the platform default charset here, which
		// could disagree with the charset used for decoding.
		write(value.getBytes(getCharset()));
	}
}
package edu.wright.hendrix11.c3; import org.apache.commons.lang3.StringUtils; import javax.faces.component.UIComponent; import javax.faces.context.FacesContext; import javax.faces.context.ResponseWriter; import javax.faces.render.FacesRenderer; import javax.faces.render.Renderer; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.logging.Logger; /** * @author Joe Hendrix */ @FacesRenderer(rendererType = ChartComponent.DEFAULT_RENDERER, componentFamily = ChartComponent.COMPONENT_FAMILY) public class ChartRenderer extends Renderer { private static final Logger LOG = Logger.getLogger(ChartRenderer.class.getName()); @Override public void encodeBegin(FacesContext context, UIComponent component) throws IOException { ChartComponent chart = (ChartComponent) component; ChartModel model = chart.getChartModel(); ResponseWriter writer = context.getResponseWriter(); writer.startElement("div", null); writer.writeAttribute("id", chart.getId(), "id"); if ( chart.getStyle() != null ) { writer.writeAttribute("style", chart.getStyle(), "style"); } if ( chart.getStyleClass() != null ) { writer.writeAttribute("class", chart.getStyleClass(), "styleClass"); } writer.endElement("div"); encodeScript(chart, model, writer); } private void encodeScript(ChartComponent chart, ChartModel model, ResponseWriter writer) throws IOException { writer.startElement("script", null); writer.writeAttribute("type", "text/javascript", null); writer.write("var chart = c3.generate({"); writer.write("bindto:' writer.write(chart.getId()); writer.write("',"); encodeData(chart, model, writer); encodeColors(model, writer); encodeAxis(chart, model, writer); encodeLegend(chart, writer); encodeGrid(chart, writer); writer.write("});"); writer.endElement("script"); } private void encodeColors(ChartModel model, ResponseWriter writer) throws IOException { if(model.getColors() != null && !model.getColors().isEmpty()) { writer.write(",color:{pattern:['"); 
writer.write(StringUtils.join(model.getColors(), "','")); writer.write("']}"); } } private void encodeLegend(ChartComponent chart, ResponseWriter writer) throws IOException { if ( chart.getShowLegend() != null || chart.getLegendPosition() != null ) { writer.write(",legend:{"); if ( chart.getShowLegend() != null ) { writer.write("show:"); writer.write(chart.getShowLegend().toString()); } if ( chart.getShowLegend() != null && chart.getLegendPosition() != null ) { writer.write(","); } if ( chart.getLegendPosition() != null ) { writer.write("position:'"); writer.write(chart.getLegendPosition()); writer.write("'"); } writer.write("}"); } } private void encodeGrid(ChartComponent chart, ResponseWriter writer) throws IOException { if ( chart.getGridX() != null || chart.getGridY() != null ) { writer.write(",grid:{"); if ( chart.getGridX() != null ) { writer.write("x:{show:"); writer.write(chart.getGridX().toString()); writer.write("}"); } if ( chart.getGridX() != null && chart.getGridY() != null ) { writer.write(","); } if ( chart.getGridY() != null ) { writer.write("y:{show:"); writer.write(chart.getGridY().toString()); writer.write("}"); } writer.write("}"); } } private void encodeAxis(ChartComponent chart, ChartModel model, ResponseWriter writer) throws IOException { if ( model.getxAxis() != null || model.getyAxis() != null ) { writer.write(",axis:{"); if ( model.getxAxis() != null ) { writer.write(model.getxAxis().toString()); } if ( model.getxAxis() != null && model.getyAxis() != null ) { writer.write(","); } if ( model.getyAxis() != null ) { writer.write(model.getyAxis().toString()); } writer.write("}"); } } private void encodeData(ChartComponent chart, ChartModel model, ResponseWriter writer) throws IOException { writer.write("data:{x:'x',"); if ( model.hasArrayData() ) { encodeArrayData(chart, model, writer); } else { writer.write("columns:[['x',"); writer.write("'"); writer.write(StringUtils.join(model.getAxisLabels(), "','")); writer.write("'"); 
writer.write("],['"); if ( model.getxAxis() != null && model.getxAxis().getLabel() != null && model.getxAxis().getLabel().hasText() ) { writer.write(model.getxAxis().getLabel().getText()); } else { writer.write("data"); } writer.write("',"); writer.write(StringUtils.join(model.getData().values(), ',')); writer.write("]]"); } if ( chart.getType() != null ) { writer.write(",type:'"); writer.write(chart.getType()); writer.write("'"); } writer.write("}"); } private void encodeArrayData(ChartComponent chart, ChartModel model, ResponseWriter writer) throws IOException { List<String> barLabels = model.getBarLabels(); if ( barLabels == null || barLabels.isEmpty() ) { barLabels = new ArrayList<>(); for ( int i = 0; i < model.getArrayData().values().iterator().next().length; i++ ) { barLabels.add("data" + i); } } List<StringBuilder> data = new ArrayList<>(); for ( Object label : model.getAxisLabels() ) { StringBuilder sb = new StringBuilder(); sb.append("["); sb.append("'"); sb.append(label); sb.append("','"); sb.append(StringUtils.join(model.getArrayData(label), "','")); sb.append("']"); data.add(sb); } writer.write("rows:[['x','"); writer.write(StringUtils.join(barLabels, "','")); writer.write("'],"); writer.write(StringUtils.join(data, ",")); writer.write("]"); } }
package gui.sub_controllers; import graph.SequenceGraph; import gui.GraphDrawer; import javafx.animation.Animation; import javafx.animation.KeyFrame; import javafx.animation.Timeline; import javafx.concurrent.Task; import javafx.event.ActionEvent; import javafx.event.EventHandler; import javafx.scene.Node; import javafx.scene.control.Button; import javafx.scene.input.KeyCode; import javafx.scene.input.KeyEvent; import javafx.scene.input.MouseEvent; import javafx.util.Duration; public class PanningController { /** * The speed at which to pan. */ private static final double PANN_FACTOR = 0.005; /** * The the threshold to update the subGraph. */ private static final int RENDER_THRESHOLD = 1000; /** * The amount of nodes to render. */ static final int RENDER_RANGE = 2000; /** * The amount of nodes to shift. */ private static final int RENDER_SHIFT = 1000; private GraphDrawer drawer; private Button rightPannButton; private Button leftPannButton; private Timeline timelineRight; private Timeline timelineLeft; private boolean updating; /** * Constructor for the panning controller. * @param drawer - the drawer. * @param leftPannButton - the pan left button. * @param rightPannButton - the pan right button, */ public PanningController(GraphDrawer drawer, Button leftPannButton, Button rightPannButton) { this.drawer = drawer; this.leftPannButton = leftPannButton; this.rightPannButton = rightPannButton; initializeTimer(); initializeButtons(); } /** * Timer for panning. */ private void initializeTimer() { timelineRight = new Timeline(new KeyFrame(Duration.millis(10), new EventHandler<ActionEvent>() { @Override public void handle(ActionEvent event) { panRight(); } })); timelineRight.setCycleCount(Animation.INDEFINITE); timelineLeft = new Timeline(new KeyFrame(Duration.millis(10), new EventHandler<ActionEvent>() { @Override public void handle(ActionEvent event) { panLeft(); } })); timelineLeft.setCycleCount(Animation.INDEFINITE); } /** * initialize function for the pan buttons. 
*/ private void initializeButtons() { rightPannButton.addEventHandler(MouseEvent.MOUSE_PRESSED, new EventHandler<MouseEvent>() { @Override public void handle(MouseEvent event) { timelineRight.play(); } }); rightPannButton.addEventHandler(MouseEvent.MOUSE_RELEASED, new EventHandler<MouseEvent>() { @Override public void handle(MouseEvent event) { timelineRight.pause(); } }); leftPannButton.addEventHandler(MouseEvent.MOUSE_PRESSED, new EventHandler<MouseEvent>() { @Override public void handle(MouseEvent event) { timelineLeft.play(); } }); leftPannButton.addEventHandler(MouseEvent.MOUSE_RELEASED, new EventHandler<MouseEvent>() { @Override public void handle(MouseEvent event) { timelineLeft.pause(); } }); } /** * listener for key presses. * @param canvasPanel - the canvas which to apply the listener to, */ public void initializeKeys(Node canvasPanel) { canvasPanel.requestFocus(); canvasPanel.addEventHandler(KeyEvent.KEY_PRESSED, new EventHandler<KeyEvent>() { @Override public void handle(KeyEvent event) { if (event.getCode() == KeyCode.RIGHT) { timelineRight.play(); } else if (event.getCode() == KeyCode.LEFT) { timelineLeft.play(); } event.consume(); } }); canvasPanel.addEventHandler(KeyEvent.KEY_RELEASED, new EventHandler<KeyEvent>() { @Override public void handle(KeyEvent event) { if (event.getCode() == KeyCode.RIGHT) { timelineRight.stop(); } else if (event.getCode() == KeyCode.LEFT) { timelineLeft.stop(); } event.consume(); } }); } /** * Pan right method. 
*/ private void panRight() { if (!updating) { if (drawer.getGraph().getRightBoundIndex() < drawer.getGraph().getFullGraphRightBoundIndex()) { if (drawer.getxDifference() + drawer.getZoomLevel() + RENDER_THRESHOLD > drawer.getRange()) { updating = true; new Thread(new Task<Integer>() { @Override protected Integer call() throws Exception { System.out.println("OLD: getRightBoundID: " + drawer.getGraph().getRightBoundID() + ", getFullGraphRightBoundID: " + drawer.getGraph().getFullGraphRightBoundID() + ", getCentreNodeID: " + drawer.getGraph().getCenterNodeID()); SequenceGraph newGraph = drawer.getGraph().copy(); newGraph.createSubGraph(drawer.getGraph().getCenterNodeID() + RENDER_SHIFT, RENDER_RANGE, drawer.getGraph().getPartPath()); int leftMostID = drawer.getMostLeftNode().getId(); drawer.setGraph(newGraph); drawer.initGraph(); //drawer.setxDifference(drawer.getColumnWidth(drawer.getGraph().getNode(leftMostID).getColumn())); drawer.moveShapes(drawer.getColumnWidth(drawer.getGraph().getNode(leftMostID).getColumn())); System.out.println("NEW: getRightBoundID: " + drawer.getGraph().getRightBoundID() + ", getFullGraphRightBoundID: " + drawer.getGraph().getFullGraphRightBoundID() + ", getCentreNodeID: " + drawer.getGraph().getCenterNodeID()); updating = false; return null; } }).start(); } } } if (drawer.getGraph().getNodes().containsKey(drawer.getGraph().getFullGraphRightBoundID())) { if (drawer.getxDifference() + drawer.getZoomLevel() > drawer.getColumnWidth(drawer.getGraph().getColumns().size())) { return; } } drawer.moveShapes(drawer.getxDifference() + drawer.getZoomLevel() * PANN_FACTOR); } /** * Pan left method. 
*/ private void panLeft() { if (!updating) { if (drawer.getGraph().getLeftBoundIndex() > drawer.getGraph().getFullGraphLeftBoundIndex()) { if (drawer.getxDifference() - RENDER_THRESHOLD < 0) { updating = true; new Thread(new Task<Integer>() { @Override protected Integer call() throws Exception { System.out.println("OLD: getLeftBoundID: " + drawer.getGraph().getLeftBoundID() + ", getFullGraphLeftBoundID: " + drawer.getGraph().getFullGraphLeftBoundID() + ", getCentreNodeID: " + drawer.getGraph().getCenterNodeID()); SequenceGraph newGraph = drawer.getGraph().copy(); newGraph.createSubGraph(drawer.getGraph().getCenterNodeID() - RENDER_SHIFT, RENDER_RANGE, drawer.getGraph().getPartPath()); int leftMostID = drawer.getMostLeftNode().getId(); drawer.setGraph(newGraph); drawer.initGraph(); //drawer.setxDifference(drawer.getColumnWidth(drawer.getGraph().getNode(leftMostID).getColumn())); drawer.moveShapes(drawer.getColumnWidth(drawer.getGraph().getNode(leftMostID).getColumn())); System.out.println("NEW: getLeftBoundID: " + drawer.getGraph().getLeftBoundID() + ", getFullGraphLeftBoundID: " + drawer.getGraph().getFullGraphLeftBoundID() + ", getCentreNodeID: " + drawer.getGraph().getCenterNodeID()); updating = false; return null; } }).start(); } } } if (drawer.getGraph().getNodes().containsKey(drawer.getGraph().getFullGraphLeftBoundID())) { if (drawer.getxDifference() < 0) { return; } } drawer.moveShapes(drawer.getxDifference() - drawer.getZoomLevel() * PANN_FACTOR); } }
package gunn.brewski.app.data;

import android.content.ContentResolver;
import android.content.ContentUris;
import android.net.Uri;
import android.provider.BaseColumns;
import android.text.format.Time;

/**
 * Contract class for the Brewski content provider: defines the authority, the URI
 * paths, the per-table column names and the URI builder/parser helpers used by both
 * the provider and its clients.
 */
public class BrewskiContract {
    // The "Content authority" is a name for the entire content provider, similar to the
    // relationship between a domain name and its website. A convenient string to use for the
    // content authority is the package name for the app, which is guaranteed to be unique on the
    // device.
    // NOTE(review): this value still reads "com.example.android.sunshine.app" although the app
    // package is gunn.brewski.app — presumably left over from the Sunshine template. It must
    // match the provider's authority in AndroidManifest.xml; verify before changing.
    public static final String CONTENT_AUTHORITY = "com.example.android.sunshine.app";

    // Use CONTENT_AUTHORITY to create the base of all URI's which apps will use to contact
    // the content provider.
    public static final Uri BASE_CONTENT_URI = Uri.parse("content://" + CONTENT_AUTHORITY);

    // Possible paths (appended to base content URI for possible URI's)
    // For instance, content://com.example.android.sunshine.app/weather/ is a valid path for
    // looking at weather data. content://com.example.android.sunshine.app/givemeroot/ will fail,
    // as the ContentProvider hasn't been given any information on what to do with "givemeroot".
    public static final String PATH_WEATHER = "weather";
    public static final String PATH_LOCATION = "location";
    public static final String PATH_PROFILE = "profile";
    public static final String PATH_CATEGORY = "category";
    public static final String PATH_BEER = "beer";
    public static final String PATH_BREWERY = "brewery";
    public static final String PATH_STYLE = "style";
    public static final String PATH_X_ANALYSIS = "x_analysis";

    // To make it easy to query for the exact date, we normalize all dates that go into
    // the database to the start of the the Julian day at UTC.
    public static long normalizeDate(long startDate) {
        // normalize the start date to the beginning of the (UTC) day
        Time time = new Time();
        time.set(startDate);
        int julianDay = Time.getJulianDay(startDate, time.gmtoff);
        return time.setJulianDay(julianDay);
    }

    /* Inner class that defines the table contents of the profile table. */
    public static final class ProfileEntry implements BaseColumns {

        public static final Uri CONTENT_URI =
                BASE_CONTENT_URI.buildUpon().appendPath(PATH_PROFILE).build();

        public static final String CONTENT_TYPE =
                ContentResolver.CURSOR_DIR_BASE_TYPE + "/" + CONTENT_AUTHORITY + "/" + PATH_PROFILE;
        public static final String CONTENT_ITEM_TYPE =
                ContentResolver.CURSOR_ITEM_BASE_TYPE + "/" + CONTENT_AUTHORITY + "/" + PATH_PROFILE;

        // Table name
        public static final String TABLE_NAME = "profile";

        public static Uri buildProfileUri(long id) {
            return ContentUris.withAppendedId(CONTENT_URI, id);
        }
    }

    /* Inner class that defines the table contents of the category table. */
    public static final class CategoryEntry implements BaseColumns {

        public static final Uri CONTENT_URI =
                BASE_CONTENT_URI.buildUpon().appendPath(PATH_CATEGORY).build();

        public static final String CONTENT_TYPE =
                ContentResolver.CURSOR_DIR_BASE_TYPE + "/" + CONTENT_AUTHORITY + "/" + PATH_CATEGORY;
        public static final String CONTENT_ITEM_TYPE =
                ContentResolver.CURSOR_ITEM_BASE_TYPE + "/" + CONTENT_AUTHORITY + "/" + PATH_CATEGORY;

        // Table name
        public static final String TABLE_NAME = "category";

        public static Uri buildCategoryUri(long id) {
            return ContentUris.withAppendedId(CONTENT_URI, id);
        }
    }

    /* Inner class that defines the table contents of the beer table. */
    public static final class BeerEntry implements BaseColumns {

        public static final Uri CONTENT_URI =
                BASE_CONTENT_URI.buildUpon().appendPath(PATH_BEER).build();

        public static final String CONTENT_TYPE =
                ContentResolver.CURSOR_DIR_BASE_TYPE + "/" + CONTENT_AUTHORITY + "/" + PATH_BEER;
        public static final String CONTENT_ITEM_TYPE =
                ContentResolver.CURSOR_ITEM_BASE_TYPE + "/" + CONTENT_AUTHORITY + "/" + PATH_BEER;

        // Table name
        public static final String TABLE_NAME = "beer";

        public static Uri buildBeerUri(long id) {
            return ContentUris.withAppendedId(CONTENT_URI, id);
        }
    }

    /* Inner class that defines the table contents of the brewery table. */
    public static final class BreweryEntry implements BaseColumns {

        public static final Uri CONTENT_URI =
                BASE_CONTENT_URI.buildUpon().appendPath(PATH_BREWERY).build();

        public static final String CONTENT_TYPE =
                ContentResolver.CURSOR_DIR_BASE_TYPE + "/" + CONTENT_AUTHORITY + "/" + PATH_BREWERY;
        public static final String CONTENT_ITEM_TYPE =
                ContentResolver.CURSOR_ITEM_BASE_TYPE + "/" + CONTENT_AUTHORITY + "/" + PATH_BREWERY;

        // Table name
        public static final String TABLE_NAME = "brewery";

        public static Uri buildBreweryUri(long id) {
            return ContentUris.withAppendedId(CONTENT_URI, id);
        }
    }

    /* Inner class that defines the table contents of the style table. */
    public static final class StyleEntry implements BaseColumns {

        public static final Uri CONTENT_URI =
                BASE_CONTENT_URI.buildUpon().appendPath(PATH_STYLE).build();

        public static final String CONTENT_TYPE =
                ContentResolver.CURSOR_DIR_BASE_TYPE + "/" + CONTENT_AUTHORITY + "/" + PATH_STYLE;
        public static final String CONTENT_ITEM_TYPE =
                ContentResolver.CURSOR_ITEM_BASE_TYPE + "/" + CONTENT_AUTHORITY + "/" + PATH_STYLE;

        // Table name
        public static final String TABLE_NAME = "style";

        public static Uri buildStyleUri(long id) {
            return ContentUris.withAppendedId(CONTENT_URI, id);
        }
    }

    /* Inner class that defines the table contents of the x_analysis table. */
    public static final class XAnalysisEntry implements BaseColumns {

        public static final Uri CONTENT_URI =
                BASE_CONTENT_URI.buildUpon().appendPath(PATH_X_ANALYSIS).build();

        public static final String CONTENT_TYPE =
                ContentResolver.CURSOR_DIR_BASE_TYPE + "/" + CONTENT_AUTHORITY + "/" + PATH_X_ANALYSIS;
        public static final String CONTENT_ITEM_TYPE =
                ContentResolver.CURSOR_ITEM_BASE_TYPE + "/" + CONTENT_AUTHORITY + "/" + PATH_X_ANALYSIS;

        // Table name
        public static final String TABLE_NAME = "x_analysis";

        public static Uri buildXAnalysisUri(long id) {
            return ContentUris.withAppendedId(CONTENT_URI, id);
        }
    }

    /* Inner class that defines the table contents of the location table */
    public static final class LocationEntry implements BaseColumns {

        public static final Uri CONTENT_URI =
                BASE_CONTENT_URI.buildUpon().appendPath(PATH_LOCATION).build();

        public static final String CONTENT_TYPE =
                ContentResolver.CURSOR_DIR_BASE_TYPE + "/" + CONTENT_AUTHORITY + "/" + PATH_LOCATION;
        public static final String CONTENT_ITEM_TYPE =
                ContentResolver.CURSOR_ITEM_BASE_TYPE + "/" + CONTENT_AUTHORITY + "/" + PATH_LOCATION;

        // Table name
        public static final String TABLE_NAME = "location";

        // The location setting string is what will be sent to openweathermap
        // as the location query.
        public static final String COLUMN_LOCATION_SETTING = "location_setting";

        // Human readable location string, provided by the API. Because for styling,
        // "Mountain View" is more recognizable than 94043.
        public static final String COLUMN_CITY_NAME = "city_name";

        // In order to uniquely pinpoint the location on the map when we launch the
        // map intent, we store the latitude and longitude as returned by openweathermap.
        public static final String COLUMN_COORD_LAT = "coord_lat";
        public static final String COLUMN_COORD_LONG = "coord_long";

        public static Uri buildLocationUri(long id) {
            return ContentUris.withAppendedId(CONTENT_URI, id);
        }
    }

    /* Inner class that defines the table contents of the weather table */
    public static final class WeatherEntry implements BaseColumns {

        public static final Uri CONTENT_URI =
                BASE_CONTENT_URI.buildUpon().appendPath(PATH_WEATHER).build();

        public static final String CONTENT_TYPE =
                ContentResolver.CURSOR_DIR_BASE_TYPE + "/" + CONTENT_AUTHORITY + "/" + PATH_WEATHER;
        public static final String CONTENT_ITEM_TYPE =
                ContentResolver.CURSOR_ITEM_BASE_TYPE + "/" + CONTENT_AUTHORITY + "/" + PATH_WEATHER;

        public static final String TABLE_NAME = "weather";

        // Column with the foreign key into the location table.
        public static final String COLUMN_LOC_KEY = "location_id";

        // Date, stored as long in milliseconds since the epoch
        public static final String COLUMN_DATE = "date";

        // Weather id as returned by API, to identify the icon to be used
        public static final String COLUMN_WEATHER_ID = "weather_id";

        // Short description and long description of the weather, as provided by API.
        // e.g "clear" vs "sky is clear".
        public static final String COLUMN_SHORT_DESC = "short_desc";

        // Min and max temperatures for the day (stored as floats)
        public static final String COLUMN_MIN_TEMP = "min";
        public static final String COLUMN_MAX_TEMP = "max";

        // Humidity is stored as a float representing percentage
        public static final String COLUMN_HUMIDITY = "humidity";

        // Pressure is stored as a float
        public static final String COLUMN_PRESSURE = "pressure";

        // Windspeed is stored as a float representing windspeed mph
        public static final String COLUMN_WIND_SPEED = "wind";

        // Degrees are meteorological degrees (e.g, 0 is north, 180 is south). Stored as floats.
        public static final String COLUMN_DEGREES = "degrees";

        public static Uri buildWeatherUri(long id) {
            return ContentUris.withAppendedId(CONTENT_URI, id);
        }

        // Builds content://<authority>/weather/<locationSetting>.
        public static Uri buildWeatherLocation(String locationSetting) {
            return CONTENT_URI.buildUpon().appendPath(locationSetting).build();
        }

        // Same as above plus a normalized start-date query parameter.
        public static Uri buildWeatherLocationWithStartDate(
                String locationSetting, long startDate) {
            long normalizedDate = normalizeDate(startDate);
            return CONTENT_URI.buildUpon().appendPath(locationSetting)
                    .appendQueryParameter(COLUMN_DATE, Long.toString(normalizedDate)).build();
        }

        // Builds content://<authority>/weather/<locationSetting>/<normalizedDate>.
        public static Uri buildWeatherLocationWithDate(String locationSetting, long date) {
            return CONTENT_URI.buildUpon().appendPath(locationSetting)
                    .appendPath(Long.toString(normalizeDate(date))).build();
        }

        // Extracts the location setting (path segment 1) from a weather URI.
        public static String getLocationSettingFromUri(Uri uri) {
            return uri.getPathSegments().get(1);
        }

        // Extracts the date (path segment 2) from a weather-with-date URI.
        public static long getDateFromUri(Uri uri) {
            return Long.parseLong(uri.getPathSegments().get(2));
        }

        // Extracts the start date from the query parameter; 0 when absent.
        public static long getStartDateFromUri(Uri uri) {
            String dateString = uri.getQueryParameter(COLUMN_DATE);
            if (null != dateString && dateString.length() > 0)
                return Long.parseLong(dateString);
            else
                return 0;
        }
    }
}
package info.iconmaster.typhon.model;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import info.iconmaster.typhon.TyphonInput;
import info.iconmaster.typhon.antlr.TyphonParser.ExprContext;
import info.iconmaster.typhon.antlr.TyphonParser.StatContext;
import info.iconmaster.typhon.antlr.TyphonParser.TypeContext;
import info.iconmaster.typhon.compiler.CodeBlock;
import info.iconmaster.typhon.types.TemplateType;
import info.iconmaster.typhon.types.Type;
import info.iconmaster.typhon.types.TypeRef;
import info.iconmaster.typhon.util.SourceInfo;

/**
 * This represents a Typhon function.
 * It has parameters, templates, and zero or more return types.
 *
 * @author iconmaster
 *
 */
public class Function extends TyphonModelEntity implements MemberAccess {
	/**
	 * The name of this function.
	 * Must be a valid Typhon identifier.
	 * May be null (if this function is anonymous, for example).
	 */
	private String name;

	/**
	 * The return types of this function.
	 */
	private List<TypeRef> retType = new ArrayList<>();

	/**
	 * The template parameters for this function.
	 */
	private List<TemplateType> template = new ArrayList<>();

	/**
	 * The parameters for this function.
	 */
	private List<Parameter> params = new ArrayList<>();

	/**
	 * The code that is executed when this function is called.
	 */
	private CodeBlock code;

	/**
	 * The ANTLR rule representing the return types.
	 */
	private List<TypeContext> rawRetType;

	/**
	 * The form of the supplied function, as it was declared.
	 * Only set if this function derives from source code.
	 */
	private Form form;

	/**
	 * The ANTLR rule representing the function's code, if the function is of BLOCK form.
	 * Check the form to see what this list contains.
	 */
	private List<?> rawCode;

	/**
	 * This represents how the function was declared.
	 *
	 * @author iconmaster
	 */
	public static enum Form {
		/**
		 * Block ('{}') form.
		 * rawCode should be a {@link List}&lt;{@link StatContext}&gt;.
		 */
		BLOCK,
		/**
		 * Expression ('=&gt;') form.
		 * rawCode should be a {@link List}&lt;{@link ExprContext}&gt;.
		 */
		EXPR,
		/**
		 * Stub (';') form.
		 * rawCode should be null.
		 */
		STUB,
	}

	/**
	 * Creates a function without source information.
	 *
	 * @param input The compiler input this function belongs to.
	 * @param name The function's name; may be null for anonymous functions.
	 */
	public Function(TyphonInput input, String name) {
		super(input);
		this.name = name;
	}

	/**
	 * Creates a function with source information.
	 *
	 * @param input The compiler input this function belongs to.
	 * @param source Where in the source this function was declared.
	 * @param name The function's name; may be null for anonymous functions.
	 */
	public Function(TyphonInput input, SourceInfo source, String name) {
		super(input, source);
		this.name = name;
	}

	/**
	 * @return The name of this function. May be null (if this function is anonymous, for example).
	 */
	public String getName() {
		return name;
	}

	/**
	 * @return The return types of this function.
	 */
	public List<TypeRef> getRetType() {
		return retType;
	}

	/**
	 * @return The template parameters for this function.
	 */
	public List<TemplateType> getTemplate() {
		// NOTE(review): getTypePackage() is invoked purely for its side effect of
		// rebuilding the template package — presumably to keep it registered with the
		// correct parent; confirm before removing.
		getTypePackage();
		return template;
	}

	/**
	 * @return The parameters for this function.
	 */
	public List<Parameter> getParams() {
		return params;
	}

	/**
	 * @return The code that is executed when this function is called.
	 */
	public CodeBlock getCode() {
		return code;
	}

	/**
	 * @param code The new code that is executed when this function is called.
	 */
	public void setCode(CodeBlock code) {
		this.code = code;
	}

	/**
	 * @return The ANTLR rule representing the return types.
	 */
	public List<TypeContext> getRawRetType() {
		return rawRetType;
	}

	/**
	 * @return The form of the function as it was declared.
	 */
	public Form getForm() {
		return form;
	}

	/**
	 * @return The ANTLR rule representing the function's code. Check the form to see what this list contains.
	 */
	public List<?> getRawCode() {
		return rawCode;
	}

	/**
	 * Sets the raw ANTLR data for this function.
	 *
	 * @param rawRetType The ANTLR rule representing the return types.
	 * @param form The form of the function as it was declared.
	 * @param rawCode The ANTLR rule representing the function's code. See {@link Form} for details.
	 */
	public void setRawData(List<TypeContext> rawRetType, Form form, List<?> rawCode) {
		super.setRawData();
		this.rawRetType = rawRetType;
		this.form = form;
		this.rawCode = rawCode;
	}

	/**
	 * The package this function belongs to.
	 */
	private Package parent;

	/**
	 * @return The package this function belongs to.
	 */
	public Package getParent() {
		return parent;
	}

	/**
	 * NOTE: Don't call this, call <tt>{@link Package}.addFunction()</tt> instead.
	 *
	 * @param parent The new package this function belongs to.
	 */
	public void setParent(Package parent) {
		this.parent = parent;
	}

	/**
	 * The member template of a function is its template parameter list.
	 */
	@Override
	public List<TemplateType> getMemberTemplate() {
		return getTemplate();
	}

	/**
	 * Constructs a library function.
	 *
	 * @param tni The compiler input this function belongs to.
	 * @param name The function's name.
	 * @param template The template parameters.
	 * @param args The parameters.
	 * @param retTypes The return types, as type references.
	 */
	public Function(TyphonInput tni, String name, TemplateType[] template, Parameter[] args, TypeRef[] retTypes) {
		this(tni, name);

		getTemplate().addAll(Arrays.asList(template));
		getParams().addAll(Arrays.asList(args));
		getRetType().addAll(Arrays.asList(retTypes));

		markAsLibrary();
	}

	/**
	 * Constructs a library function. Convenience overload that wraps each raw
	 * {@link Type} in a {@link TypeRef}.
	 *
	 * @param tni The compiler input this function belongs to.
	 * @param name The function's name.
	 * @param template The template parameters.
	 * @param args The parameters.
	 * @param retTypes The return types, as raw types.
	 */
	public Function(TyphonInput tni, String name, TemplateType[] template, Parameter[] args, Type[] retTypes) {
		this(tni, name);

		getTemplate().addAll(Arrays.asList(template));
		getParams().addAll(Arrays.asList(args));
		getRetType().addAll(Arrays.asList(retTypes).stream().map((a)->new TypeRef(a)).collect(Collectors.toList()));

		markAsLibrary();
	}

	/**
	 * The member parent of a function is its enclosing package.
	 */
	@Override
	public MemberAccess getMemberParent() {
		return getParent();
	}

	/**
	 * @return If this is an instance function: The type this function is part of. If this is a static function: Null.
	 */
	public Type getFieldOf() {
		// functions explicitly annotated @static never belong to a type
		if (hasAnnot(tni.corePackage.ANNOT_STATIC)) {
			return null;
		}

		// walk up the member chain looking for an enclosing Type
		MemberAccess access = this;
		while (access != null) {
			if (access instanceof Type) {
				return (Type) access;
			}
			access = access.getMemberParent();
		}

		return null;
	}

	@Override
	public String toString() {
		return "Function("+getRetType()+" "+name+getParams()+")";
	}

	/**
	 * Builds the template substitution map for this function: each template parameter
	 * maps to its default value, or to its base type when no default was declared.
	 */
	public Map<TemplateType, TypeRef> getFuncTemplateMap() {
		Map<TemplateType, TypeRef> result = new HashMap<>();

		for (TemplateType t : getTemplate()) {
			result.put(t, t.getDefaultValue() == null ? t.getBaseType() : t.getDefaultValue());
		}

		return result;
	}

	/**
	 * Anonymous package holding this function's template types so they can be
	 * resolved as members; rebuilt on every call to {@link #getTypePackage()}.
	 */
	private Package typePackage;

	/**
	 * Rebuilds and returns the package exposing this function's template types.
	 * The previous package (if any) is detached from its parent first — presumably
	 * to avoid stale registrations; confirm against Package's bookkeeping.
	 */
	public Package getTypePackage() {
		if (typePackage != null) {
			typePackage.getParent().removeSubpackage(typePackage);
		}

		typePackage = new Package(source, null, getParent() == null ? tni.corePackage : getParent()) {
			@Override
			public MemberAccess getMemberParent() {
				return Function.this;
			}
		};

		for (TemplateType t : template) {
			typePackage.addType(t);
		}

		return typePackage;
	}

	/**
	 * Members visible through this function are the template types in its type package.
	 */
	@Override
	public List<MemberAccess> getMembers(Map<TemplateType, TypeRef> templateMap) {
		return getTypePackage().getMembers(templateMap);
	}

	/**
	 * @return True if this function is static, i.e. not a field of any type.
	 */
	public boolean isStatic() {
		return getFieldOf() == null;
	}

	/**
	 * Maps the declaring type of each virtual base function this function overrides
	 * to that base function.
	 */
	private Map<Type, Function> virtualBases = new HashMap<>();

	public Map<Type, Function> getVirtualBases() {
		return virtualBases;
	}

	/**
	 * Maps the declaring type of each override of this function to that override.
	 */
	private Map<Type, Function> virtualOverrides = new HashMap<>();

	/**
	 * @return The override map, or null for static functions (which cannot be virtual).
	 */
	public Map<Type, Function> getVirtualOverrides() {
		if (isStatic()) {
			return null;
		}
		return virtualOverrides;
	}

	/**
	 * Links an override to its virtual base, recording the relationship in both
	 * functions' maps, keyed by each function's declaring type.
	 */
	public static void setOverride(Function virtual, Function override) {
		virtual.getVirtualOverrides().put(override.getFieldOf(), override);
		override.getVirtualBases().put(virtual.getFieldOf(), virtual);
	}
}
package innovimax.mixthem.arguments;

import innovimax.mixthem.io.InputResource;
import java.io.File;
import java.io.InputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.LinkOption;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Enumeration;
import java.util.EnumMap;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.zip.ZipEntry;
import java.util.zip.ZipException;
import java.util.zip.ZipFile;

/**
 * <p>Mix-them command line arguments management.</p>
 * <p>Parses the command line in the fixed order:
 * {@code [filemode] [-rule [#param...]] [selection] (files... | --zip zipfile | --jar jarfile)}.</p>
 * @author Innovimax
 * @version 1.0
 */
public class Arguments {

    private FileMode fileMode = null;
    private Rule rule = null;
    private Map<RuleParam, ParamValue> ruleParams = null;
    private Set<Integer> selection = null;
    private final List<InputResource> inputs = new ArrayList<InputResource>();

    private void setFileMode(final FileMode fileMode) {
        this.fileMode = fileMode;
    }

    /** @return the file mode (char or byte), never null after {@link #checkArguments}. */
    public FileMode getFileMode() {
        return this.fileMode;
    }

    private void setRule(final Rule rule) {
        this.rule = rule;
    }

    /** @return the mixing rule, never null after {@link #checkArguments}. */
    public Rule getRule() {
        return this.rule;
    }

    void setRuleParameters(final Map<RuleParam, ParamValue> ruleParams) {
        this.ruleParams = ruleParams;
    }

    /** @return the parsed rule parameters (possibly empty, never null after {@link #checkArguments}). */
    public Map<RuleParam, ParamValue> getRuleParameters() {
        return this.ruleParams;
    }

    // FIX: this setter was missing although checkArguments() calls it (compile error in the original).
    void setSelection(final Set<Integer> selection) {
        this.selection = selection;
    }

    /** @return the input selection (currently always empty, see findSelectionArgument). */
    public Set<Integer> getSelection() {
        return this.selection;
    }

    void addInput(final InputResource input) {
        this.inputs.add(input);
    }

    /** @return the resolved input resources (files or zip/jar entry streams). */
    public List<InputResource> getInputs() {
        return this.inputs;
    }

    /**
     * Parses and validates the whole command line.
     *
     * @param args raw command line arguments
     * @return a fully populated {@link Arguments} instance
     * @throws ArgumentException if any argument is missing, unreadable or inconsistent
     * @throws IOException if a zip/jar entry stream cannot be opened
     * @throws ZipException if the zip/jar file is corrupt
     */
    public static Arguments checkArguments(final String[] args) throws ArgumentException, IOException, ZipException {
        final Arguments mixArgs = new Arguments();
        int index = 0;
        // get file mode [char|byte]
        FileMode fileMode = findFileModeArgument(args, index);
        if (fileMode != null) {
            index++;
        } else {
            fileMode = FileMode.CHAR;
        }
        mixArgs.setFileMode(fileMode);
        // get rule & parameters
        Rule rule = findRuleArgument(args, index, fileMode);
        Map<RuleParam, ParamValue> ruleParams = null;
        if (rule != null) {
            index++;
            ruleParams = findRuleParameters(args, index, rule);
            index += ruleParams.size();
        } else {
            rule = Rule.ADD;
            ruleParams = Collections.emptyMap();
        }
        mixArgs.setRule(rule);
        mixArgs.setRuleParameters(ruleParams);
        // get selection
        final Set<Integer> selection = findSelectionArgument(args, index);
        mixArgs.setSelection(selection);
        // get input files
        final String zipOption = findZipOptionArgument(args, index);
        if (zipOption == null) {
            final List<File> files = findFilesArgument(args, index);
            files.stream().forEach(file -> mixArgs.addInput(InputResource.createFile(file)));
        } else {
            // NOTE(review): the ZipFile is never closed; the entry streams returned by
            // extractZipEntries are consumed later, so closing here would invalidate them.
            final ZipFile zipFile = new ZipFile(findZipFileArgument(args, ++index));
            final List<InputStream> inputs = extractZipEntries(zipFile);
            inputs.stream().forEach(input -> mixArgs.addInput(InputResource.createInputStream(input)));
        }
        // check input files count vs selection
        checkFileCount(mixArgs);
        return mixArgs;
    }

    /**
     * @return the file mode named by args[index], or null if absent/unrecognized.
     */
    private static FileMode findFileModeArgument(final String[] args, final int index) throws ArgumentException {
        if (args.length > index) {
            return FileMode.findByName(args[index]);
        }
        return null;
    }

    /**
     * Looks for a rule argument of the form "-name" (a leading "--" marks an option, not a rule).
     *
     * @return the matching rule, or null if args[index] is not a rule argument
     * @throws ArgumentException if the argument looks like a rule but names none
     */
    private static Rule findRuleArgument(final String[] args, final int index, final FileMode fileMode) throws ArgumentException {
        Rule rule = null;
        if (args.length > index) {
            final String ruleString = args[index];
            // Reconstructed condition: a rule is "-name", whereas "--zip"/"--jar" are options.
            if (ruleString.startsWith("-") && !ruleString.startsWith("--")) {
                rule = Rule.findByName(ruleString.substring(1), fileMode);
                if (rule == null) {
                    throw new ArgumentException("Rule argument is incorrect: " + ruleString);
                }
            }
        }
        return rule;
    }

    /**
     * Parses rule parameters written as "#value".
     *
     * NOTE(review): only the FIRST declared parameter of the rule is ever consumed here;
     * confirm that no rule declares more than one parameter.
     *
     * @return map of parsed parameter values (possibly empty)
     * @throws ArgumentException if a value is malformed or a mandatory parameter is missing
     */
    private static Map<RuleParam, ParamValue> findRuleParameters(final String[] args, final int index, final Rule rule) throws ArgumentException {
        final Map<RuleParam, ParamValue> map = new EnumMap<RuleParam, ParamValue>(RuleParam.class);
        final Iterator<RuleParam> iterator = rule.getParams().iterator();
        if (iterator.hasNext()) {
            final RuleParam param = iterator.next();
            if (args.length > index) {
                final String arg = args[index];
                // Reconstructed condition: parameters are prefixed with '#'.
                if (arg.startsWith("#")) {
                    final String paramString = arg.substring(1);
                    try {
                        final ParamValue value = param.createValue(paramString);
                        map.put(param, value);
                    } catch (NumberFormatException e) {
                        throw new ArgumentException("#" + param.getName() + " parameter is incorrect: " + paramString);
                    }
                }
            }
            if (param.isMandatory() && !map.containsKey(param)) {
                throw new ArgumentException("#" + param.getName() + " parameter is mandatory.");
            }
        }
        return map;
    }

    /**
     * Parses the input selection.
     *
     * TODO: selection parsing is not implemented yet; always returns an empty set.
     * (FIX: removed leftover debug printlns of args/index.)
     */
    private static Set<Integer> findSelectionArgument(final String[] args, int index) throws ArgumentException {
        final Set<Integer> selection = new LinkedHashSet<Integer>();
        return selection;
    }

    /**
     * Collects the remaining arguments as input files, checking existence and readability.
     *
     * @return the input files (at least two)
     * @throws ArgumentException if fewer than two files are given or a file is missing/unreadable
     */
    private static List<File> findFilesArgument(final String[] args, int index) throws ArgumentException {
        final List<File> files = new ArrayList<File>();
        while (args.length > index) {
            final String filepath = args[index++];
            final File file = new File(filepath);
            final Path path = file.toPath();
            if (Files.exists(path, LinkOption.NOFOLLOW_LINKS)) {
                if (Files.isReadable(path)) {
                    files.add(file);
                } else {
                    throw new ArgumentException("Input file cannot be read: " + filepath);
                }
            } else {
                throw new ArgumentException("Input file not found: " + filepath);
            }
        }
        // Both cases throw, so fall-through never happens.
        switch (files.size()) {
            case 0:
                throw new ArgumentException("First input file argument missing.");
            case 1:
                throw new ArgumentException("Second input file argument missing.");
        }
        return files;
    }

    /**
     * @return "zip" or "jar" if args[index] is "--zip"/"--jar", null otherwise.
     */
    private static String findZipOptionArgument(final String[] args, final int index) {
        if (args.length > index && (args[index].equals("--zip") || args[index].equals("--jar"))) {
            return args[index].substring(2);
        }
        return null;
    }

    /**
     * @return the zip/jar file at args[index], checked for existence and readability
     * @throws ArgumentException if the argument is absent or the file is missing/unreadable
     */
    private static File findZipFileArgument(final String[] args, final int index) throws ArgumentException {
        File file = null;
        if (args.length > index) {
            final String filepath = args[index];
            file = new File(filepath);
            final Path path = file.toPath();
            if (Files.exists(path, LinkOption.NOFOLLOW_LINKS)) {
                if (!Files.isReadable(path)) {
                    throw new ArgumentException("Zip/Jar file cannot be read: " + filepath);
                }
            } else {
                throw new ArgumentException("Zip/Jar file not found: " + filepath);
            }
        } else {
            throw new ArgumentException("Zip/Jar argument missing.");
        }
        return file;
    }

    /**
     * Opens a stream for every entry of the archive except META-INF ones.
     *
     * @return the entry streams (at least two)
     * @throws ArgumentException if the archive holds fewer than two usable entries
     */
    private static List<InputStream> extractZipEntries(final ZipFile zipFile) throws ArgumentException, IOException, ZipException {
        final List<InputStream> inputs = new ArrayList<InputStream>();
        // FIX: use the generic Enumeration instead of the raw type.
        final Enumeration<? extends ZipEntry> entries = zipFile.entries();
        while (entries.hasMoreElements()) {
            final ZipEntry entry = entries.nextElement();
            if (entry.getName().toUpperCase().startsWith("META-INF")) {
                continue;
            }
            inputs.add(zipFile.getInputStream(entry));
        }
        switch (inputs.size()) {
            case 0:
                throw new ArgumentException("First input entry missing.");
            case 1:
                throw new ArgumentException("Second input entry missing.");
        }
        return inputs;
    }

    /**
     * Verifies that every file index referenced by the rule parameters exists among the inputs.
     *
     * @throws ArgumentException if an index exceeds the input count
     */
    private static void checkFileCount(Arguments mixArgs) throws ArgumentException {
        switch (mixArgs.getRule()) {
            case FILE_K:
                int index = mixArgs.getRuleParameters().get(RuleParam.FILE_INDEX).asInt();
                if (index > mixArgs.getInputs().size()) {
                    throw new ArgumentException("#index is greater than input file count.");
                }
                break;
            case ADD:
                if (mixArgs.getRuleParameters().containsKey(RuleParam.FILE_LIST)) {
                    int[] indexes = mixArgs.getRuleParameters().get(RuleParam.FILE_LIST).asIntArray();
                    for (int i = 0; i < indexes.length; i++) {
                        // FIX: compare the index VALUE, not the loop counter.
                        if (indexes[i] > mixArgs.getInputs().size()) {
                            throw new ArgumentException("#files contains an index greater than input file count.");
                        }
                    }
                }
                break;
            default:
                // other rules have no file-index parameters to validate
                break;
        }
    }

    /** Prints command line usage, including every rule and its parameters. */
    public static void printUsage() {
        System.out.println(" ");
        System.out.println("Usage:");
        System.out.println(" ");
        System.out.println("  mix-them file1 file2... fileN");
        System.out.println("  (will generate any file based on file1 and file2 to fileN)");
        System.out.println(" ");
        System.out.println("  mix-them -[rule] file1 file2... fileN");
        System.out.println("  (will generate a file based on the rule)");
        System.out.println(" ");
        System.out.println("  Here are the list of rules");
        for (Rule rule : Rule.values()) {
            System.out.print("  - " + rule.getName());
            for (RuleParam param : rule.getParams()) {
                if (param.isMandatory()) {
                    System.out.print(" #" + param.getName());
                } else {
                    System.out.print(" [#" + param.getName() + "]");
                }
            }
            System.out.println(": " + rule.getDescription());
            for (RuleParam param : rule.getParams()) {
                System.out.println("      (#" + param.getName() + " " + param.getComment() + ")");
            }
        }
        System.out.println(" ");
        System.out.println("  mix-them --zip zipfile");
        System.out.println("  mix-them --jar jarfile");
        System.out.println("  (will generate any entry based on zip/jar file first and second to nth entries)");
        System.out.println(" ");
        System.out.println("  mix-them -[rule] --zip zipFile");
        System.out.println("  mix-them -[rule] --jar jarFile");
        System.out.println("  (will generate a file based on the rule)");
        System.out.println(" ");
    }
}
package intellimate.izou.system;

import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.Appender;
import org.apache.logging.log4j.core.Layout;
import org.apache.logging.log4j.core.appender.ConsoleAppender;
import org.apache.logging.log4j.core.appender.FileAppender;
import org.apache.logging.log4j.core.config.AppenderRef;
import org.apache.logging.log4j.core.config.Configuration;
import org.apache.logging.log4j.core.config.LoggerConfig;
import org.apache.logging.log4j.core.layout.PatternLayout;
import org.apache.logging.log4j.spi.ExtendedLogger;
import org.apache.logging.log4j.spi.LoggerContext;

import java.io.File;

/**
 * Programmatically configures log4j2 loggers that write both to a file under
 * {@code logs/} and to the console.
 */
public class LogController {
    private static final Logger rootLogger = LogManager.getRootLogger();

    public LogController() {
    }

    /**
     * Creates a logger for an add-on writing to {@code logs/<addOnName>.log} and the console.
     *
     * @param addOnName base name of the log file
     * @param level minimum level (e.g. "debug", "error"); unknown values fall back to log4j's default
     * @return the configured logger, or null if configuration failed
     */
    public static synchronized ExtendedLogger createFileLogger(String addOnName, String level) {
        return createLogger(addOnName + ".log", level);
    }

    /**
     * Creates the izou core logger writing to {@code logs/izou.log} and the console.
     *
     * @param level minimum level (e.g. "debug", "error")
     * @return the configured logger, or null if configuration failed
     */
    public static ExtendedLogger createIzouFileLogger(String level) {
        // FIX: the original was an unsynchronized near-verbatim copy of createFileLogger;
        // both now delegate to one synchronized helper.
        return createLogger("izou.log", level);
    }

    /**
     * Shared implementation: builds a file appender + console appender pair and
     * registers a logger config for them.
     *
     * @param logFileName file name under the logs/ directory
     * @param level minimum level name
     * @return the configured logger, or null on any failure (exception is printed)
     */
    private static synchronized ExtendedLogger createLogger(String logFileName, String level) {
        try {
            LoggerContext ctx = LogManager.getContext(false);
            Configuration config = ((org.apache.logging.log4j.core.LoggerContext) ctx).getConfiguration();
            Layout layout = PatternLayout.createLayout("%d %-5p [%t] %C{10} (%F:%L) - %m%n",
                    config, null, null, true, false, null, null);
            Appender fileAppender = FileAppender.createAppender("logs" + File.separator + logFileName,
                    "true", "false", "File", "true", "false", "false", "4000", layout, null, "false", null, config);
            fileAppender.start();
            config.addAppender(fileAppender);
            Appender consoleAppender = ConsoleAppender.createAppender(layout, null, "SYSTEM_OUT", "console", null, null);
            consoleAppender.start();
            config.addAppender(consoleAppender);
            AppenderRef fileRef = AppenderRef.createAppenderRef("File", null, null);
            AppenderRef consoleRef = AppenderRef.createAppenderRef("console", null, null);
            AppenderRef[] refs = new AppenderRef[]{fileRef, consoleRef};
            // NOTE(review): every call registers under the same fixed logger name
            // "org.apache.logging.log4j", so later calls replace earlier configurations —
            // confirm whether the name should instead be derived from the log file name.
            LoggerConfig loggerConfig = LoggerConfig.createLogger("false", Level.toLevel(level),
                    "org.apache.logging.log4j", "true", refs, null, config, null);
            loggerConfig.addAppender(fileAppender, null, null);
            loggerConfig.addAppender(consoleAppender, null, null);
            config.addLogger("org.apache.logging.log4j", loggerConfig);
            ((org.apache.logging.log4j.core.LoggerContext) ctx).updateLoggers();
            return ctx.getLogger("org.apache.logging.log4j");
        } catch (Exception e) {
            // Best-effort: configuration failure is reported but not fatal.
            e.printStackTrace();
            return null;
        }
    }

    /**
     * Logs a message on the root logger at the named level; unknown level names are ignored.
     *
     * @param info the message
     * @param level one of trace/debug/info/warn/error/fatal (case-insensitive)
     */
    public static void logToConsole(String info, String level) {
        switch (level.toLowerCase()) {
            case "trace":
                rootLogger.trace(info);
                break;
            case "debug":
                rootLogger.debug(info);
                break;
            case "info":
                rootLogger.info(info);
                break;
            case "warn":
                rootLogger.warn(info);
                break;
            case "error":
                rootLogger.error(info);
                break;
            case "fatal":
                rootLogger.fatal(info);
                break;
        }
    }

    /** Manual smoke test for the logger factories. */
    public static void main(String[] args) {
        ExtendedLogger logger1 = createFileLogger("test", "fatal");
        ExtendedLogger logger2 = createIzouFileLogger(("error"));
        logger1.debug("static file logger");
        logger2.debug("dynamic file logger");
        LogController.logToConsole("console logger", "info");
    }
}
package io.bdrc.xmltoldmigration;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;

import io.bdrc.xmltoldmigration.helpers.GitHelpers;
import io.bdrc.xmltoldmigration.xml2files.CommonMigration;

/**
 * Transfers the Turtle files produced by the rKTs migration into the "work" git repo,
 * merging edition files (named {@code RID_*.ttl}) into pre-loaded per-RID models.
 */
public class rKTsTransfer {

    // RIDs of the known editions, in registration order.
    public static List<String> RIDList = new ArrayList<>();
    // RID -> model loaded from the existing work file for that RID.
    public static Map<String, Model> RidModels = new HashMap<>();

    /** Registers all known edition RIDs and loads their current work models. */
    public static void initLists() {
        initListsForRID("W22084");
        initListsForRID("W30532");
        initListsForRID("W4CZ5369");
        initListsForRID("W1PD96682");
        initListsForRID("W4CZ7445");
        initListsForRID("W22703");
        initListsForRID("W26071");
        initListsForRID("W29468");
        initListsForRID("W1PD96685");
        initListsForRID("W22083");
        initListsForRID("W1GS66030");
        initListsForRID("W23703");
        initListsForRID("W22704");
        initListsForRID("W1KG13126");
        initListsForRID("W1PD95844");
        initListsForRID("W23702");
        initListsForRID("W1PD96684");
        initListsForRID("W1PD127393");
        initListsForRID("W1KG14700");
    }

    /**
     * Registers one RID and loads its existing work model.
     *
     * @param rid the work RID
     */
    public static void initListsForRID(String rid) {
        RIDList.add(rid);
        final String workFileName = MigrationApp.getDstFileName("work", rid);
        Model m = MigrationHelpers.modelFromFileName(workFileName);
        RidModels.put(rid, m);
    }

    /** Writes every accumulated edition model back to its work file. */
    public static void finishEditions() {
        for (String rid : RIDList) {
            Model m = RidModels.get(rid);
            final String workFileName = MigrationApp.getDstFileName("work", rid);
            MigrationHelpers.outputOneModel(m, rid, workFileName, "work");
        }
    }

    /**
     * Reads Turtle content from {@code child} into {@code m}.
     *
     * @return false if the file could not be opened (an error is printed), true otherwise
     */
    private static boolean readTurtleInto(final Model m, final File child) {
        // FIX: try-with-resources replaces the duplicated manual open/close blocks.
        try (InputStream in = new FileInputStream(child)) {
            m.read(in, null, "TTL");
            return true;
        } catch (FileNotFoundException e) {
            System.err.println("can't read from "+child.getName());
            return false;
        } catch (IOException e) {
            // Only close() can throw here; the read itself already succeeded.
            e.printStackTrace();
            return true;
        }
    }

    /**
     * Walks {@link MigrationApp#RKTS_DIR}, merging every {@code RID_*.ttl} into its
     * edition model and converting every other {@code *.ttl} into a standalone work file.
     */
    public static void doTransfer() {
        GitHelpers.ensureGitRepo("work");
        initLists();
        final File dir = new File(MigrationApp.RKTS_DIR);
        final File[] directoryListing = dir.listFiles();
        if (directoryListing != null) {
            // FIX: the original dereferenced directoryListing.length BEFORE the null
            // check, guaranteeing an NPE when RKTS_DIR is not a valid directory.
            System.out.println("transfering "+directoryListing.length+" works produced by rKTs migration");
            for (File child : directoryListing) {
                final String fileBaseName = child.getName();
                if (!fileBaseName.endsWith(".ttl"))
                    continue;
                final int underIndex = fileBaseName.indexOf('_');
                if (underIndex != -1) {
                    // "RID_*.ttl": merge into the pre-loaded edition model.
                    final String rid = fileBaseName.substring(0, underIndex);
                    final Model m = RidModels.get(rid);
                    if (m == null) {
                        System.err.println("hmm, I think I have a problem here...");
                        continue;
                    }
                    readTurtleInto(m, child);
                } else {
                    // standalone work: build a fresh model and output it directly.
                    final Model m = ModelFactory.createDefaultModel();
                    CommonMigration.setPrefixes(m);
                    if (!readTurtleInto(m, child))
                        continue;
                    final String workName = fileBaseName.substring(0, fileBaseName.length()-4);
                    final String workOutFileName = MigrationApp.getDstFileName("work", workName);
                    if (!workName.startsWith("W0R")) {
                        Model existingM = MigrationHelpers.modelFromFileName(workOutFileName);
                        if (existingM != null) {
                            // maybe the sa-x-ndia strings should be removed?
                            m.add(existingM);
                        }
                    }
                    MigrationHelpers.outputOneModel(m, workName, workOutFileName, "work");
                }
            }
            finishEditions();
        } else {
            System.err.println("The rKTs directory you provided is not a valid directory");
        }
    }
}
package io.skelp.verifier.type; import java.util.regex.Pattern; import io.skelp.verifier.VerifierException; import io.skelp.verifier.type.base.BaseComparableVerifier; import io.skelp.verifier.type.base.BaseTruthVerifier; import io.skelp.verifier.util.Function; import io.skelp.verifier.verification.Verification; /** * <p> * An implementation of {@link BaseComparableVerifier} and {@link BaseTruthVerifier} which can be used to verify a * {@code String} value. * </p> * <p> * All of the {@link BaseTruthVerifier} methods are implemented so that {@literal null} and {@literal "false"} (ignoring * case) are <b>always</b> considered to be falsy and {@literal "true"} (ignoring case) is <b>always</b> considered to * be truthy. * </p> * * @author Alasdair Mercer */ public final class StringVerifier extends BaseComparableVerifier<String, StringVerifier> implements BaseTruthVerifier<String, StringVerifier> { private static boolean containsIgnoreCase(final String value, final CharSequence other) { if (other == null) { return false; } final int length = other.length(); final int maximum = value.length() - length; for (int i = 0; i <= maximum; i++) { if (regionMatches(value, true, i, other, 0, length)) { return true; } } return false; } private static boolean endsWith(final String value, final CharSequence other, final boolean ignoreCase) { return other != null && regionMatches(value, ignoreCase, value.length() - other.length(), other, 0, other.length()); } private static boolean isEqualToIgnoreCase(final String value, final CharSequence other) { return other == null ? 
value == null : value != null && regionMatches(value, true, 0, other, 0, value.length()); } private static boolean matchCharacters(final String value, final Function<Boolean, Character> matcher) { if (value == null) { return false; } final int length = value.length(); for (int i = 0; i < length; i++) { if (!matcher.apply(value.charAt(i))) { return false; } } return true; } private static boolean regionMatches(final String value, final boolean ignoreCase, final int offset, final CharSequence charSequence, final int start, final int length) { if (value == null) { return false; } if (charSequence instanceof String) { return value.regionMatches(ignoreCase, offset, (String) charSequence, start, length); } int index1 = offset; int index2 = start; int tempLength = length; while (tempLength final char ch1 = value.charAt(index1++); final char ch2 = charSequence.charAt(index2++); if (ch1 == ch2) { continue; } if (!ignoreCase) { return false; } if (Character.toUpperCase(ch1) != Character.toUpperCase(ch2) && Character.toLowerCase(ch1) != Character.toLowerCase(ch2)) { return false; } } return true; } private static boolean startsWith(final String value, final CharSequence other, final boolean ignoreCase) { return other != null && regionMatches(value, ignoreCase, 0, other, 0, other.length()); } /** * <p> * Creates an instance of {@link StringVerifier} based on the {@code verification} provided. * </p> * * @param verification * the {@link Verification} to be used */ public StringVerifier(final Verification<String> verification) { super(verification); } /** * <p> * Verifies that the value contains only letters. * </p> * <pre> * Verifier.verify((String) null).alpha() => FAIL * Verifier.verify("\0\r\n").alpha() => FAIL * Verifier.verify("123").alpha() => FAIL * Verifier.verify("abc").alpha() => PASS * Verifier.verify("abc123").alpha() => FAIL * </pre> * * @return A reference to this {@link StringVerifier} for chaining purposes. 
* @throws VerifierException * If the verification fails while not negated or passes while negated. * @see #alphaSpace() */ public StringVerifier alpha() throws VerifierException { final String value = verification().getValue(); final boolean result = matchCharacters(value, new Function<Boolean, Character>() { @Override public Boolean apply(final Character character) { return Character.isLetter(character); } }); verification().check(result, "contain only letters"); return this; } /** * <p> * Verifies that the value contains only letters or space. * </p> * <pre> * Verifier.verify((String) null).alphaSpace() => FAIL * Verifier.verify("\0 \r \n").alphaSpace() => FAIL * Verifier.verify("1 2 3").alphaSpace() => FAIL * Verifier.verify("a b c").alphaSpace() => PASS * Verifier.verify("a b c 1 2 3").alphaSpace() => FAIL * </pre> * * @return A reference to this {@link StringVerifier} for chaining purposes. * @throws VerifierException * If the verification fails while not negated or passes while negated. * @see #alpha() */ public StringVerifier alphaSpace() throws VerifierException { final String value = verification().getValue(); final boolean result = matchCharacters(value, new Function<Boolean, Character>() { @Override public Boolean apply(final Character character) { return Character.isLetter(character) || character == ' '; } }); verification().check(result, "contain only letters or space"); return this; } /** * <p> * Verifies that the value contains only letters or digits. * </p> * <pre> * Verifier.verify((String) null).alphanumeric() => FAIL * Verifier.verify("\0\r\n").alphanumeric() => FAIL * Verifier.verify("123").alphanumeric() => PASS * Verifier.verify("abc").alphanumeric() => PASS * Verifier.verify("abc123").alphanumeric() => PASS * </pre> * * @return A reference to this {@link StringVerifier} for chaining purposes. * @throws VerifierException * If the verification fails while not negated or passes while negated. 
* @see #alphanumericSpace() */ public StringVerifier alphanumeric() throws VerifierException { final String value = verification().getValue(); final boolean result = matchCharacters(value, new Function<Boolean, Character>() { @Override public Boolean apply(final Character character) { return Character.isLetterOrDigit(character); } }); verification().check(result, "contain only letters or digits"); return this; } /** * <p> * Verifies that the value contains only letters or digits or space. * </p> * <pre> * Verifier.verify((String) null).alphanumericSpace() => FAIL * Verifier.verify("\0 \r \n").alphanumericSpace() => FAIL * Verifier.verify("1 2 3").alphanumericSpace() => PASS * Verifier.verify("a b c").alphanumericSpace() => PASS * Verifier.verify("a b c 1 2 3").alphanumericSpace() => PASS * </pre> * * @return A reference to this {@link StringVerifier} for chaining purposes. * @throws VerifierException * If the verification fails while not negated or passes while negated. * @see #alphanumeric() */ public StringVerifier alphanumericSpace() throws VerifierException { final String value = verification().getValue(); final boolean result = matchCharacters(value, new Function<Boolean, Character>() { @Override public Boolean apply(final Character character) { return Character.isLetterOrDigit(character) || character == ' '; } }); verification().check(result, "contain only letters or digits or space"); return this; } /** * TODO: Document * * @return * @throws VerifierException */ public StringVerifier asciiPrintable() throws VerifierException { final String value = verification().getValue(); final boolean result = matchCharacters(value, new Function<Boolean, Character>() { @Override public Boolean apply(final Character character) { return character >= 32 && character < 127; } }); verification().check(result, "contain only ASCII printable characters"); return this; } /** * TODO: Document * * @return * @throws VerifierException * @see #empty() */ public StringVerifier blank() 
throws VerifierException { final String value = verification().getValue(); final boolean result = value == null || value.trim().isEmpty(); verification().check(result, "be blank"); return this; } /** * TODO: Document * * @param other * @return * @throws VerifierException */ public StringVerifier contain(final CharSequence other) throws VerifierException { final String value = verification().getValue(); final boolean result = value != null && other != null && value.contains(other); verification().check(result, "contain '%s'", other); return this; } /** * TODO: Document * * @param others * @return * @throws VerifierException */ public StringVerifier containAll(final CharSequence... others) throws VerifierException { final String value = verification().getValue(); final boolean result = value != null && matchAll(others, new Function<Boolean, CharSequence>() { @Override public Boolean apply(final CharSequence input) { return input != null && value.contains(input); } }); verification().check(result, "contain all %s", verification().getMessageFormatter().formatArray(others)); return this; } /** * TODO: Document * * @param others * @return * @throws VerifierException */ public StringVerifier containAllIgnoreCase(final CharSequence... others) throws VerifierException { final String value = verification().getValue(); final boolean result = value != null && matchAll(others, new Function<Boolean, CharSequence>() { @Override public Boolean apply(final CharSequence input) { return containsIgnoreCase(value, input); } }); verification().check(result, "contain all %s (ignore case)", verification().getMessageFormatter().formatArray(others)); return this; } /** * TODO: Document * * @param others * @return * @throws VerifierException */ public StringVerifier containAny(final CharSequence... 
others) throws VerifierException { final String value = verification().getValue(); final boolean result = value != null && matchAny(others, new Function<Boolean, CharSequence>() { @Override public Boolean apply(final CharSequence input) { return input != null && value.contains(input); } }); verification().check(result, "contain any %s", verification().getMessageFormatter().formatArray(others)); return this; } /** * TODO: Document * * @param others * @return * @throws VerifierException */ public StringVerifier containAnyIgnoreCase(final CharSequence... others) throws VerifierException { final String value = verification().getValue(); final boolean result = value != null && matchAny(others, new Function<Boolean, CharSequence>() { @Override public Boolean apply(final CharSequence input) { return containsIgnoreCase(value, input); } }); verification().check(result, "contain any %s (ignore case)", verification().getMessageFormatter().formatArray(others)); return this; } /** * TODO: Document * * @param other * @return * @throws VerifierException */ public StringVerifier containIgnoreCase(final CharSequence other) throws VerifierException { final String value = verification().getValue(); final boolean result = value != null && containsIgnoreCase(value, other); verification().check(result, "contain '%s' (ignore case)", other); return this; } /** * TODO: Document * * @return * @throws VerifierException * @see #blank() */ public StringVerifier empty() throws VerifierException { final String value = verification().getValue(); final boolean result = value == null || value.isEmpty(); verification().check(result, "be empty"); return this; } /** * TODO: Document * * @param other * @return * @throws VerifierException */ public StringVerifier endWith(final CharSequence other) throws VerifierException { final String value = verification().getValue(); final boolean result = value != null && endsWith(value, other, false); verification().check(result, "end with '%s'", other); return 
this; } /** * TODO: Document * * @param others * @return * @throws VerifierException */ public StringVerifier endWithAny(final CharSequence... others) throws VerifierException { final String value = verification().getValue(); final boolean result = value != null && matchAny(others, new Function<Boolean, CharSequence>() { @Override public Boolean apply(final CharSequence input) { return endsWith(value, input, false); } }); verification().check(result, "end with any %s", verification().getMessageFormatter().formatArray(others)); return this; } /** * TODO: Document * * @param others * @return * @throws VerifierException */ public StringVerifier endWithAnyIgnoreCase(final CharSequence... others) throws VerifierException { final String value = verification().getValue(); final boolean result = value != null && matchAny(others, new Function<Boolean, CharSequence>() { @Override public Boolean apply(final CharSequence input) { return endsWith(value, input, true); } }); verification().check(result, "end with any %s (ignore case)", verification().getMessageFormatter().formatArray(others)); return this; } /** * TODO: Document * * @param other * @return * @throws VerifierException */ public StringVerifier endWithIgnoreCase(final CharSequence other) throws VerifierException { final String value = verification().getValue(); final boolean result = value != null && endsWith(value, other, true); verification().check(result, "end with '%s' (ignore case)", other); return this; } /** * TODO: Document * * @param others * @return * @throws VerifierException */ public StringVerifier equalToAnyIgnoreCase(final CharSequence... 
others) throws VerifierException { final String value = verification().getValue(); final boolean result = matchAny(others, new Function<Boolean, CharSequence>() { @Override public Boolean apply(final CharSequence input) { return isEqualToIgnoreCase(value, input); } }); verification().check(result, "be equal to any %s (ignore case)", verification().getMessageFormatter().formatArray(others)); return chain(); } /** * TODO: Document * * @param other * @return * @throws VerifierException */ public StringVerifier equalToIgnoreCase(final CharSequence other) throws VerifierException { final String value = verification().getValue(); final boolean result = isEqualToIgnoreCase(value, other); verification().check(result, "be equal to '%s' (ignore case)", other); return this; } @Override public StringVerifier falsy() throws VerifierException { final String value = verification().getValue(); final boolean result = value == null || value.isEmpty() || Boolean.FALSE.toString().equalsIgnoreCase(value); verification().check(result, FALSY_MESSAGE); return this; } /** * TODO: Document * * @return * @throws VerifierException * @see #upperCase() */ public StringVerifier lowerCase() throws VerifierException { final String value = verification().getValue(); final boolean result = matchCharacters(value, new Function<Boolean, Character>() { @Override public Boolean apply(final Character character) { return Character.isLowerCase(character); } }); verification().check(result, "be all lower case"); return this; } /** * TODO: Document * * @param regex * @return * @throws VerifierException * @see #match(Pattern) */ public StringVerifier match(final CharSequence regex) throws VerifierException { final String value = verification().getValue(); final boolean result = value != null && regex != null && value.matches(regex.toString()); verification().check(result, "match '%s'", regex); return this; } /** * TODO: Document * * @param pattern * @return * @throws VerifierException * @see 
#match(CharSequence) */ public StringVerifier match(final Pattern pattern) throws VerifierException { final String value = verification().getValue(); final boolean result = value != null && pattern != null && pattern.matcher(value).matches(); verification().check(result, "match '%s'", pattern); return this; } /** * TODO: Document * * @return * @throws VerifierException * @see #numericSpace() */ public StringVerifier numeric() throws VerifierException { final String value = verification().getValue(); final boolean result = matchCharacters(value, new Function<Boolean, Character>() { @Override public Boolean apply(final Character character) { return Character.isDigit(character); } }); verification().check(result, "contain only digits"); return this; } /** * TODO: Document * * @return * @throws VerifierException * @see #numeric() */ public StringVerifier numericSpace() throws VerifierException { final String value = verification().getValue(); final boolean result = matchCharacters(value, new Function<Boolean, Character>() { @Override public Boolean apply(final Character character) { return Character.isDigit(character) || character == ' '; } }); verification().check(result, "contain only digits or space"); return this; } /** * TODO: Document * * @param size * @return * @throws VerifierException */ public StringVerifier sizeOf(final int size) throws VerifierException { final String value = verification().getValue(); final boolean result = value == null ? 
size == 0 : value.length() == size; verification().check(result, "have a size of '%d'", size); return this; } /** * TODO: Document * * @param other * @return * @throws VerifierException */ public StringVerifier startWith(final CharSequence other) throws VerifierException { final String value = verification().getValue(); final boolean result = startsWith(value, other, false); verification().check(result, "start with '%s'", other); return this; } /** * TODO: Document * * @param others * @return * @throws VerifierException */ public StringVerifier startWithAny(final CharSequence... others) throws VerifierException { final String value = verification().getValue(); final boolean result = matchAny(others, new Function<Boolean, CharSequence>() { @Override public Boolean apply(final CharSequence input) { return startsWith(value, input, false); } }); verification().check(result, "start with any %s", verification().getMessageFormatter().formatArray(others)); return this; } /** * TODO: Document * * @param others * @return * @throws VerifierException */ public StringVerifier startWithAnyIgnoreCase(final CharSequence... 
others) throws VerifierException { final String value = verification().getValue(); final boolean result = matchAny(others, new Function<Boolean, CharSequence>() { @Override public Boolean apply(final CharSequence input) { return startsWith(value, input, true); } }); verification().check(result, "start with any %s (ignore case)", verification().getMessageFormatter().formatArray(others)); return this; } /** * TODO: Document * * @param other * @return * @throws VerifierException */ public StringVerifier startWithIgnoreCase(final CharSequence other) throws VerifierException { final String value = verification().getValue(); final boolean result = startsWith(value, other, true); verification().check(result, "start with '%s' (ignore case)", other); return this; } @Override public StringVerifier truthy() throws VerifierException { final String value = verification().getValue(); final boolean result = Boolean.TRUE.toString().equalsIgnoreCase(value); verification().check(result, TRUTHY_MESSAGE); return this; } /** * TODO: Document * * @return * @throws VerifierException * @see #lowerCase() */ public StringVerifier upperCase() throws VerifierException { final String value = verification().getValue(); final boolean result = matchCharacters(value, new Function<Boolean, Character>() { @Override public Boolean apply(final Character character) { return Character.isUpperCase(character); } }); verification().check(result, "be all upper case"); return this; } /** * TODO: Document * * @return * @throws VerifierException */ public StringVerifier whitespace() throws VerifierException { final String value = verification().getValue(); final boolean result = matchCharacters(value, new Function<Boolean, Character>() { @Override public Boolean apply(final Character character) { return Character.isWhitespace(character); } }); verification().check(result, "contain only whitespace"); return this; } }
package io.yawp.repository.query;

import io.yawp.commons.utils.EntityUtils;
import io.yawp.commons.utils.ObjectModel;
import io.yawp.repository.IdRef;
import io.yawp.repository.Repository;
import io.yawp.repository.query.condition.BaseCondition;
import io.yawp.repository.query.condition.Condition;
import io.yawp.repository.query.condition.FalsePredicateException;
import io.yawp.repository.query.condition.SimpleCondition;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

import com.google.appengine.api.datastore.Entity;
import com.google.appengine.api.datastore.EntityNotFoundException;
import com.google.appengine.api.datastore.Key;

/**
 * Fluent builder for datastore queries over entities of type {@code T}.
 *
 * <p>Conditions added via {@link #where} are AND-combined. {@code order(..)}
 * adds datastore-side ("pre") ordering; {@code sort(..)} adds in-memory
 * ("post") ordering applied by {@link #sortList(List)} after fetching.
 * Every terminal operation ({@link #list()}, {@link #first()}, {@link #only()},
 * {@link #ids()}) sets the repository namespace for the entity class and
 * resets it in a {@code finally} block.
 */
public class QueryBuilder<T> {

    private Class<T> clazz;

    private ObjectModel model;

    private Repository r;

    private IdRef<?> parentId;

    /** AND-accumulated filter; {@code null} until the first where(). */
    private BaseCondition condition;

    /** Orders executed by the datastore driver. */
    private List<DatastoreQueryOrder> preOrders = new ArrayList<DatastoreQueryOrder>();

    /** Orders applied in memory after the query returns. */
    private List<DatastoreQueryOrder> postOrders = new ArrayList<DatastoreQueryOrder>();

    private Integer limit;

    private String cursor;

    /**
     * Static factory.
     *
     * @param clazz entity class to query
     * @param r     repository providing driver, datastore and namespace
     * @return a new builder
     */
    public static <T> QueryBuilder<T> q(Class<T> clazz, Repository r) {
        return new QueryBuilder<T>(clazz, r);
    }

    private QueryBuilder(Class<T> clazz, Repository r) {
        this.clazz = clazz;
        this.r = r;
        this.model = new ObjectModel(clazz);
    }

    /**
     * Wraps this query in a transformer that maps results through the named
     * transform into type {@code N}.
     */
    public <N> DatastoreQueryTransformer<T, N> transform(String transformName) {
        return new DatastoreQueryTransformer<T, N>(this, transformName);
    }

    /**
     * Adds conditions given as flat (field, operator, value) triples.
     *
     * @deprecated use {@link #where(String, String, Object)} instead
     * @throws RuntimeException if the argument count is not a multiple of 3
     */
    @Deprecated
    public QueryBuilder<T> where(Object... values) {
        if (values.length % 3 != 0) {
            throw new RuntimeException("You must pass values 3 at a time.");
        }
        for (int i = 0; i < values.length; i += 3) {
            where(values[i].toString(), values[i + 1].toString(), values[i + 2]);
        }
        return this;
    }

    /** Alias of {@link #where(String, String, Object)}. */
    public QueryBuilder<T> and(String field, String operator, Object value) {
        return where(field, operator, value);
    }

    /** Adds a simple (field operator value) condition, AND-ed with any existing one. */
    public QueryBuilder<T> where(String field, String operator, Object value) {
        return where(Condition.c(field, operator, value));
    }

    /** Adds a condition, AND-ed with any existing one, and (re)initializes it. */
    public QueryBuilder<T> where(BaseCondition c) {
        if (condition == null) {
            condition = c;
        } else {
            condition = Condition.and(condition, c);
        }
        condition.init(r, clazz);
        return this;
    }

    /** Alias of {@link #where(BaseCondition)}. */
    public QueryBuilder<T> and(BaseCondition c) {
        return where(c);
    }

    /**
     * Restricts the query to children of the given parent id.
     * A {@code null} parent clears the restriction.
     */
    public QueryBuilder<T> from(IdRef<?> parentId) {
        // Assigning directly covers the null case as well; the original had a
        // redundant explicit null branch doing the exact same assignment.
        this.parentId = parentId;
        return this;
    }

    /** Adds an ascending datastore-side order on the given property. */
    public QueryBuilder<T> order(String property) {
        order(property, null);
        return this;
    }

    /** Adds a datastore-side order; direction may be null for the default. */
    public QueryBuilder<T> order(String property, String direction) {
        preOrders.add(new DatastoreQueryOrder(null, property, direction));
        return this;
    }

    /** Adds an in-memory sort on the given property (default direction). */
    public QueryBuilder<T> sort(String property) {
        sort(property, null);
        return this;
    }

    /** Adds an in-memory sort; direction may be null for the default. */
    public QueryBuilder<T> sort(String property, String direction) {
        sort(null, property, direction);
        return this;
    }

    /** Adds an in-memory sort scoped to a sub-entity name. */
    public QueryBuilder<T> sort(String entity, String property, String direction) {
        postOrders.add(new DatastoreQueryOrder(entity, property, direction));
        return this;
    }

    /** Caps the number of returned results. */
    public QueryBuilder<T> limit(int limit) {
        this.limit = limit;
        return this;
    }

    /** Sets the datastore cursor to resume from. */
    public QueryBuilder<T> cursor(String cursor) {
        this.cursor = cursor;
        return this;
    }

    public IdRef<?> getParentId() {
        return parentId;
    }

    public String getCursor() {
        return this.cursor;
    }

    public void setCursor(String cursor) {
        this.cursor = cursor;
    }

    public Integer getLimit() {
        return limit;
    }

    public List<DatastoreQueryOrder> getPreOrders() {
        return preOrders;
    }

    public BaseCondition getCondition() {
        return condition;
    }

    public Repository getRepository() {
        return this.r;
    }

    public ObjectModel getModel() {
        return model;
    }

    /** Merges externally-built options (condition, orders, limit) into this builder. */
    public QueryBuilder<T> options(DatastoreQueryOptions options) {
        if (options.getCondition() != null) {
            where(options.getCondition());
        }
        if (options.getPreOrders() != null) {
            preOrders.addAll(options.getPreOrders());
        }
        if (options.getPostOrders() != null) {
            postOrders.addAll(options.getPostOrders());
        }
        if (options.getLimit() != null) {
            limit(options.getLimit());
        }
        return this;
    }

    /** Runs the query inside the entity namespace, without post-sorting. */
    public List<T> executeQueryList() {
        r.namespace().set(getClazz());
        try {
            return executeQuery();
        } finally {
            r.namespace().reset();
        }
    }

    /** Runs the query and applies the in-memory sort orders. */
    public List<T> list() {
        List<T> list = executeQueryList();
        sortList(list);
        return list;
    }

    /** Returns the first result, or null if there is none. */
    public T first() {
        r.namespace().set(getClazz());
        try {
            if (isQueryById()) {
                return executeQueryById();
            }
            return executeQueryFirst();
        } finally {
            r.namespace().reset();
        }
    }

    private T executeQueryFirst() {
        limit(1);
        List<T> list = executeQuery();
        if (list.size() == 0) {
            return null;
        }
        return list.get(0);
    }

    /**
     * Returns the single result.
     *
     * @throws NoResultException          if nothing matches
     * @throws MoreThanOneResultException if more than one entity matches
     */
    public T only() throws NoResultException, MoreThanOneResultException {
        r.namespace().set(getClazz());
        try {
            T object = null;
            if (isQueryById()) {
                object = executeQueryById();
            } else {
                object = executeQueryOnly();
            }
            if (object == null) {
                throw new NoResultException();
            }
            return object;
        } finally {
            r.namespace().reset();
        }
    }

    private T executeQueryOnly() throws MoreThanOneResultException {
        List<T> list = executeQuery();
        if (list.size() == 0) {
            return null;
        }
        if (list.size() == 1) {
            return list.get(0);
        }
        throw new MoreThanOneResultException();
    }

    private List<T> executeQuery() {
        try {
            List<T> objects = r.driver().query().objects(this);
            return postFilter(objects);
        } catch (FalsePredicateException ex) {
            // A condition that can never match short-circuits to an empty result.
            return Collections.emptyList();
        }
    }

    private List<T> postFilter(List<T> objects) {
        if (condition == null || !condition.hasPostFilter()) {
            return objects;
        }
        return condition.applyPostFilter(objects);
    }

    /** Fast path: resolves an id-equality query with a direct datastore get. */
    private T executeQueryById() {
        try {
            SimpleCondition c = (SimpleCondition) condition;
            IdRef<?> idRef = (IdRef<?>) c.getWhereValue();
            Key key = idRef.asKey();
            Entity entity = r.datastore().get(key);
            return EntityUtils.toObject(r, entity, clazz);
        } catch (EntityNotFoundException e) {
            return null;
        }
    }

    /** True when the whole condition is a single "id == value" test. */
    private boolean isQueryById() {
        // instanceof already yields false for null, so no separate null check
        // is needed (the original tested condition == null redundantly).
        if (!(condition instanceof SimpleCondition)) {
            return false;
        }
        SimpleCondition c = (SimpleCondition) condition;
        return c.isIdField() && c.isEqualOperator();
    }

    /** Applies the in-memory (post) orders to an already-fetched list. */
    public void sortList(List<?> objects) {
        if (postOrders.size() == 0) {
            return;
        }
        Collections.sort(objects, new Comparator<Object>() {
            @Override
            public int compare(Object o1, Object o2) {
                // Lexicographic comparison over the configured orders.
                for (DatastoreQueryOrder order : postOrders) {
                    int compare = order.compare(o1, o2);
                    if (compare == 0) {
                        continue;
                    }
                    return compare;
                }
                return 0;
            }
        });
    }

    protected Class<T> getClazz() {
        return clazz;
    }

    /** Builds an id comparison scoped to the id's parent. */
    public QueryBuilder<T> whereById(String operator, IdRef<?> id) {
        return from(id.getParentId()).where(model.getIdField().getName(), operator, id);
    }

    /** Fetches exactly the entity with the given id. */
    public T fetch(IdRef<?> idRef) {
        return whereById("=", idRef).only();
    }

    /** Fetches by numeric id. */
    public T fetch(Long id) {
        IdRef<?> idRef = IdRef.create(r, clazz, id);
        return fetch(idRef);
    }

    /** Fetches by name id. */
    public T fetch(String name) {
        IdRef<?> idRef = IdRef.create(r, clazz, name);
        return fetch(idRef);
    }

    /** Returns only the matching ids (keys-only query). */
    public List<IdRef<T>> ids() {
        r.namespace().set(getClazz());
        try {
            List<IdRef<T>> ids = r.driver().query().ids(this);
            return ids;
        } catch (FalsePredicateException ex) {
            return Collections.emptyList();
        } finally {
            r.namespace().reset();
        }
    }

    /**
     * Returns the single matching id.
     *
     * @throws NoResultException          if nothing matches
     * @throws MoreThanOneResultException if more than one id matches
     */
    public IdRef<T> onlyId() throws NoResultException, MoreThanOneResultException {
        List<IdRef<T>> ids = ids();
        if (ids.size() == 0) {
            throw new NoResultException();
        }
        if (ids.size() > 1) {
            throw new MoreThanOneResultException();
        }
        return ids.get(0);
    }
}
package it.growbit.servlet;

import com.google.common.collect.Lists;
import it.growbit.flex.GAEFlexAutoScaler;
import it.growbit.matlab.Matlab;
import it.growbit.matlab.model.Last24HoursAvg;
import it.growbit.matlab.model.Last24HoursTrend;
import it.growbit.matlab.model.Next24HourAvg;
import it.growbit.model.trt.Trades_last_24;
import it.growbit.telegram.Telegram;
import it.growbit.telegram.model.SendMessage;

import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Task endpoint: boots the "matlab" Flex instance, feeds it the last 24 hourly
 * trade averages and asks {@code superCriptoOracleTrend} for a next-hour
 * trend forecast (+1 up / -1 down), then schedules the instance shutdown.
 */
public class TaskDailyHourTrend extends HttpServlet {

    private static final Logger log = Logger.getLogger(TaskDailyHourTrend.class.getName());

    private static final GAEFlexAutoScaler fas = GAEFlexAutoScaler.singleton("matlab", "20170709t110004");

    @Override
    public void doPost(HttpServletRequest req, HttpServletResponse resp) throws IOException {
        Future<Boolean> asyncMatlabStart = fas.start();

        List<Trades_last_24> l24avgs = Trades_last_24.list(25);
        // Reverse because the SQL view returns the rows in DESC order.
        Last24HoursAvg matlabModel = new Last24HoursTrend(Lists.reverse(l24avgs));

        Next24HourAvg forecast;
        try {
            asyncMatlabStart.get();
            forecast = Matlab.superCriptoOracleTrend(matlabModel);
        } catch (URISyntaxException | ExecutionException e) {
            // Log through the class logger instead of printStackTrace().
            log.log(Level.SEVERE, "next-hour trend forecast failed", e);
            return;
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers/containers can see it.
            Thread.currentThread().interrupt();
            log.log(Level.SEVERE, "interrupted while waiting for the matlab instance", e);
            return;
        } finally {
            // Always schedule the instance stop; previously a failure left the
            // Flex instance running because stop was only reached on success.
            fas.stop_as_task();
        }

        String telegramMessage = "";
        telegramMessage += "In base all'ultimo valore di " + l24avgs.get(0).getTf_price()
                + ", visto alle " + l24avgs.get(0).getTf_hour() + " UTC";
        telegramMessage += ", superCriptoOracleTrend dice(+1 sale, -1 scende) riguardo alla prossima ora: "
                + forecast.getAvg().toString();

        // Telegram.sendMessage(new SendMessage(Telegram.props.getProperty(Telegram.PROPERTY_SCALP_CAVERNA), telegramMessage));
    }
}
package jp.toastkid.gui.jfx.cssgen;

import java.io.IOException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.HashMap;
import java.util.Map;
import java.util.ResourceBundle;

import org.apache.commons.lang3.StringUtils;
import org.fxmisc.richtext.CodeArea;
import org.fxmisc.richtext.LineNumberFactory;

import javafx.animation.Transition;
import javafx.application.Application;
import javafx.collections.ObservableList;
import javafx.collections.ObservableMap;
import javafx.fxml.FXML;
import javafx.fxml.Initializable;
import javafx.scene.control.ColorPicker;
import javafx.scene.control.Label;
import javafx.scene.control.Slider;
import javafx.scene.control.TableColumn;
import javafx.scene.control.TableView;
import javafx.scene.control.TextField;
import javafx.scene.control.TreeItem;
import javafx.scene.control.TreeView;
import javafx.scene.control.cell.PropertyValueFactory;
import javafx.scene.input.KeyCode;
import javafx.scene.input.KeyCodeCombination;
import javafx.scene.input.KeyCombination;
import javafx.scene.layout.Pane;
import javafx.scene.paint.Color;
import javafx.stage.Stage;
import javafx.util.Duration;
import jp.toastkid.gui.jfx.cssgen.model.Person;

/**
 * Controller for the CSS generator window: picks colors, renders a CSS file
 * from the template, applies it live to the stage and lets the user save it.
 * @author Toast kid
 */
public class Controller implements Initializable {

    /** Path of the generated (temporary) CSS file. */
    private static final String TEMP_FILE_PATH = "generated.css";

    /** Apply-editor-content shortcut (Ctrl+Enter). */
    private static final KeyCombination APPLY_CONTENT
        = new KeyCodeCombination(KeyCode.ENTER, KeyCombination.CONTROL_DOWN);

    /** Save-as shortcut (F12). */
    private static final KeyCombination SAVE_AS = new KeyCodeCombination(KeyCode.F12);

    /** Toggle-editor shortcut (Ctrl+E). */
    private static final KeyCombination SWITCH_EDITOR
        = new KeyCodeCombination(KeyCode.E, KeyCombination.CONTROL_DOWN);

    /** Root pane. */
    @FXML
    private Pane root;

    /** Main color picker. */
    @FXML
    private ColorPicker mainColor;

    /** Sub color picker. */
    @FXML
    private ColorPicker subColor;

    /** Input (form control) color picker. */
    @FXML
    private ColorPicker inputColor;

    /** Sample view. */
    @FXML
    private TreeView<String> tree;

    /** Sample view. */
    @FXML
    private TableView<Person> table;

    /** Sample view column. */
    @FXML
    private TableColumn<Person, String> personId;

    /** Sample view column. */
    @FXML
    private TableColumn<Person, String> personName;

    /** Sample view column. */
    @FXML
    private TableColumn<Person, String> isActive;

    /** Opacity value slider. */
    @FXML
    private Slider opacity;

    /** Opacity value indicator. */
    @FXML
    private Label opacityValue;

    /** CSS file name input. */
    @FXML
    private TextField fileName;

    /** CSS text area. */
    @FXML
    private CodeArea cssArea;

    /** Stage, used to apply stylesheets and resize the window. */
    private Stage stage;

    /** Editor opening animation (lazily built). */
    private Transition open;

    /** Editor closing animation (lazily built). */
    private Transition close;

    /**
     * Save the current generated CSS under the user-provided file name.
     * Appends ".css" when missing; regenerates the temp file if absent.
     */
    @FXML
    private void saveAs() {
        final String text = fileName.getText();
        if (StringUtils.isBlank(text)) {
            return;
        }
        try {
            final Path source = Paths.get(TEMP_FILE_PATH);
            if (!Files.exists(source)) {
                change();
            }
            final Path path = Paths.get(text.endsWith(".css") ? text : text + ".css");
            Files.copy(source, path, StandardCopyOption.REPLACE_EXISTING);
        } catch (final IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Store the CSS content to the temp file and mirror it into the editor.
     *
     * @param content CSS text to persist
     */
    @FXML
    private void save(final String content) {
        try {
            // Explicit UTF-8: content.getBytes() without a charset depends on
            // the platform default and can corrupt non-ASCII CSS.
            Files.write(Paths.get(TEMP_FILE_PATH), content.getBytes(StandardCharsets.UTF_8));
        } catch (final IOException e) {
            e.printStackTrace();
        }
        cssArea.replaceText(content);
    }

    /**
     * Regenerate the CSS from the current picker/slider values and apply it.
     */
    @FXML
    private void change() {
        final Color main  = mainColor.getValue();
        final Color sub   = subColor.getValue();
        final Color input = inputColor.getValue();

        final Map<String, String> params = new HashMap<>();
        params.put("main_rgb", ColorUtils.makeRgbStr(main));
        params.put("sub_rgb",  ColorUtils.makeRgbStr(sub));
        params.put("main",     ColorUtils.toRgbCode(main));
        params.put("sub",      ColorUtils.toRgbCode(sub));
        params.put("main_dark", ColorUtils.toRgbCode(main.darker()));
        params.put("text",         ColorUtils.toRgbCode(Color.BLACK));
        params.put("text_focused", ColorUtils.toRgbCode(Color.WHITE));
        params.put("input",   ColorUtils.toRgbCode(input));
        params.put("opacity", Double.toString(opacity.getValue()));

        try {
            save(Utilities.bindArgs("base.css", params));
        } catch (final IOException e) {
            e.printStackTrace();
        }
        // Use the constant instead of repeating the "generated.css" literal.
        setStyle(TEMP_FILE_PATH);
    }

    /**
     * Apply the given stylesheet file to the stage's scene.
     *
     * @param styleName path of the CSS file to apply
     */
    private void setStyle(final String styleName) {
        final Path path = Paths.get(styleName);
        if (!Files.exists(path)) {
            return;
        }
        final ObservableList<String> stylesheets = resetStylesheets();
        stylesheets.add(path.toUri().toString());
    }

    /**
     * Reset the scene to the MODENA user-agent stylesheet with no overrides.
     * Shared by {@link #setStyle(String)} and {@link #backToDefault()}.
     *
     * @return the scene's (cleared) stylesheet list
     */
    private ObservableList<String> resetStylesheets() {
        final ObservableList<String> stylesheets = stage.getScene().getStylesheets();
        if (stylesheets != null) {
            stylesheets.clear();
        }
        Application.setUserAgentStylesheet("MODENA");
        return stylesheets;
    }

    /**
     * Back to the default look: drop generated styles and reset the pickers.
     */
    @FXML
    private void backToDefault() {
        resetStylesheets();
        mainColor.setValue(Color.WHITE);
        subColor.setValue(Color.WHITE);
        inputColor.setValue(Color.WHITE);
    }

    /**
     * Inject the Stage and register the keyboard shortcuts.
     *
     * @param stage primary stage of this window
     */
    protected void setStage(final Stage stage) {
        this.stage = stage;
        initCodeArea();
        final ObservableMap<KeyCombination, Runnable> accelerators
            = stage.getScene().getAccelerators();
        accelerators.put(SAVE_AS,       this::saveAs);
        accelerators.put(SWITCH_EDITOR, this::switchEditor);
        accelerators.put(APPLY_CONTENT, this::applyContent);
    }

    /**
     * Toggle the CSS editor area, animating the window width.
     */
    private void switchEditor() {
        if (cssArea.isVisible()) {
            cssArea.setManaged(false);
            cssArea.setVisible(false);
            stage.setWidth(600.0);
            if (close == null) {
                close = makeSimpleElasticTransition(1000.0, 600.0);
                close.setCycleCount(1);
            }
            close.play();
            return;
        }
        if (open == null) {
            // (fixed stray double semicolon from the original)
            open = makeSimpleElasticTransition(600.0, 1000.0);
            open.setCycleCount(1);
        }
        cssArea.setManaged(true);
        cssArea.setVisible(true);
        stage.setWidth(1000.0);
        open.play();
    }

    /**
     * Build a 1-second transition interpolating the stage width linearly.
     *
     * @param startWidth initial stage width
     * @param endWidth   final stage width
     */
    private Transition makeSimpleElasticTransition(
            final double startWidth, final double endWidth) {
        return new Transition() {
            {
                setCycleDuration(Duration.millis(1000));
            }

            @Override
            protected void interpolate(double frac) {
                final double gap = endWidth - startWidth;
                stage.setWidth(startWidth + gap * frac);
            }
        };
    }

    /** Persist the editor content and apply it as the active stylesheet. */
    private void applyContent() {
        save(cssArea.getText());
        setStyle(TEMP_FILE_PATH);
    }

    @SuppressWarnings("unchecked")
    @Override
    public void initialize(final URL location, final ResourceBundle resources) {
        final TreeItem<String> value = new TreeItem<String>("Root");
        value.setExpanded(true);
        value.getChildren().addAll(
                new TreeItem<String>("Item 1"),
                new TreeItem<String>("Item 2"),
                new TreeItem<String>("Item 3")
                );
        tree.setRoot(value);

        personId.setCellValueFactory(new PropertyValueFactory<Person, String>("id"));
        personName.setCellValueFactory(new PropertyValueFactory<Person, String>("name"));
        isActive.setCellValueFactory(new PropertyValueFactory<Person, String>("active"));
        // Distinct sample ids (the original reused 1L for all three rows).
        table.getItems().addAll(
                new Person.Builder().setId(1L).setName("Alice").setActive(true).build(),
                new Person.Builder().setId(2L).setName("Bob").setActive(false).build(),
                new Person.Builder().setId(3L).setName("Charlie").setActive(true).build()
                );

        opacity.setValue(1.0d);
        opacityValue.textProperty().bind(opacity.valueProperty().asString());

        // Remove the generated temp CSS on JVM exit.
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            final Path path = Paths.get(TEMP_FILE_PATH);
            if (!Files.exists(path)) {
                return;
            }
            try {
                Files.delete(path);
            } catch (final Exception e) {
                e.printStackTrace();
            }
        }));
    }

    /**
     * Initialize the CSS CodeArea (line numbers; hidden until toggled).
     */
    private void initCodeArea() {
        cssArea.setParagraphGraphicFactory(LineNumberFactory.get(cssArea));
        cssArea.setManaged(false);
        cssArea.setVisible(false);
    }

    /**
     * Close this app.
     */
    @FXML
    private void close() {
        this.stage.close();
    }
}
package net.sf.jabref.logic.l10n;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import java.nio.charset.Charset;
import java.util.*;

/**
 * Loads the localized resource bundles (messages, menu titles, integrity
 * messages) and resolves translation keys, expanding "%0".."%9" placeholders
 * and the escapes "%c" (colon) and "%e" (equals).
 */
public class Localization {

    private static final Log LOGGER = LogFactory.getLog(Localization.class);

    /** Encodings from {@link #ALL_ENCODINGS} actually supported on this JVM. */
    public static final String[] ENCODINGS;

    /** Java charset names mapped to their common (IANA-style) names. */
    public static final Map<String, String> ENCODING_NAMES_LOOKUP;

    private static final String RESOURCE_PREFIX = "l10n/JabRef";
    private static final String MENU_RESOURCE_PREFIX = "l10n/Menu";
    private static final String INTEGRITY_RESOURCE_PREFIX = "l10n/IntegrityMessage";

    public static final String[] ALL_ENCODINGS =
            // (String[]) Charset.availableCharsets().keySet().toArray(new String[]{});
            new String[] {"ISO8859_1", "UTF8", "UTF-16", "ASCII", "Cp1250", "Cp1251", "Cp1252",
                    "Cp1253", "Cp1254", "Cp1257", "SJIS",
                    "KOI8_R", // Cyrillic
                    "EUC_JP", // Added Japanese encodings.
                    "Big5", "Big5_HKSCS", "GBK", "ISO8859_2", "ISO8859_3", "ISO8859_4", "ISO8859_5",
                    "ISO8859_6", "ISO8859_7", "ISO8859_8", "ISO8859_9", "ISO8859_13", "ISO8859_15"};

    private static ResourceBundle messages;
    private static ResourceBundle menuTitles;
    private static ResourceBundle intMessages;

    static {
        // Build list of encodings, by filtering out all that are not supported
        // on this system:
        List<String> encodings = new ArrayList<String>();
        for (String encoding : Localization.ALL_ENCODINGS) {
            if (Charset.isSupported(encoding)) {
                encodings.add(encoding);
            }
        }
        ENCODINGS = encodings.toArray(new String[encodings.size()]);

        // Build a map for translating Java encoding names into common encoding names:
        ENCODING_NAMES_LOOKUP = new HashMap<String, String>();
        ENCODING_NAMES_LOOKUP.put("Cp1250", "windows-1250");
        ENCODING_NAMES_LOOKUP.put("Cp1251", "windows-1251");
        ENCODING_NAMES_LOOKUP.put("Cp1252", "windows-1252");
        ENCODING_NAMES_LOOKUP.put("Cp1253", "windows-1253");
        ENCODING_NAMES_LOOKUP.put("Cp1254", "windows-1254");
        ENCODING_NAMES_LOOKUP.put("Cp1257", "windows-1257");
        ENCODING_NAMES_LOOKUP.put("ISO8859_1", "ISO-8859-1");
        ENCODING_NAMES_LOOKUP.put("ISO8859_2", "ISO-8859-2");
        ENCODING_NAMES_LOOKUP.put("ISO8859_3", "ISO-8859-3");
        ENCODING_NAMES_LOOKUP.put("ISO8859_4", "ISO-8859-4");
        ENCODING_NAMES_LOOKUP.put("ISO8859_5", "ISO-8859-5");
        ENCODING_NAMES_LOOKUP.put("ISO8859_6", "ISO-8859-6");
        ENCODING_NAMES_LOOKUP.put("ISO8859_7", "ISO-8859-7");
        ENCODING_NAMES_LOOKUP.put("ISO8859_8", "ISO-8859-8");
        ENCODING_NAMES_LOOKUP.put("ISO8859_9", "ISO-8859-9");
        ENCODING_NAMES_LOOKUP.put("ISO8859_13", "ISO-8859-13");
        ENCODING_NAMES_LOOKUP.put("ISO8859_15", "ISO-8859-15");
        ENCODING_NAMES_LOOKUP.put("KOI8_R", "KOI8-R");
        ENCODING_NAMES_LOOKUP.put("UTF8", "UTF-8");
        ENCODING_NAMES_LOOKUP.put("UTF-16", "UTF-16");
        ENCODING_NAMES_LOOKUP.put("SJIS", "Shift_JIS");
        ENCODING_NAMES_LOOKUP.put("GBK", "GBK");
        ENCODING_NAMES_LOOKUP.put("Big5_HKSCS", "Big5-HKSCS");
        ENCODING_NAMES_LOOKUP.put("Big5", "Big5");
        ENCODING_NAMES_LOOKUP.put("EUC_JP", "EUC-JP");
        ENCODING_NAMES_LOOKUP.put("ASCII", "US-ASCII");
    }

    /**
     * Load the three resource bundles for the given language/country and make
     * that locale the JVM and Swing default.
     *
     * @param language ISO language code
     * @param country  ISO country code
     */
    public static void setLanguage(String language, String country) {
        Locale locale = new Locale(language, country);
        messages = ResourceBundle.getBundle(RESOURCE_PREFIX, locale, new EncodingControl("UTF-8"));
        menuTitles = ResourceBundle.getBundle(MENU_RESOURCE_PREFIX, locale, new EncodingControl("UTF-8"));
        intMessages = ResourceBundle.getBundle(INTEGRITY_RESOURCE_PREFIX, locale, new EncodingControl("UTF-8"));

        // these checks are required as when the requested resource bundle is NOT found,
        // the default locale is used as a fallback silently.
        if (!messages.getLocale().equals(locale)) {
            LOGGER.warn("tried loading <" + RESOURCE_PREFIX + "> for locale <" + locale
                    + "> but had to fall back on default locale <" + Locale.getDefault() + ">");
        }
        if (!menuTitles.getLocale().equals(locale)) {
            LOGGER.warn("tried loading <" + MENU_RESOURCE_PREFIX + "> for locale <" + locale
                    + "> but had to fall back on default locale <" + Locale.getDefault() + ">");
        }
        if (!intMessages.getLocale().equals(locale)) {
            LOGGER.warn("tried loading <" + INTEGRITY_RESOURCE_PREFIX + "> for locale <" + locale
                    + "> but had to fall back on default locale <" + Locale.getDefault() + ">");
        }

        Locale.setDefault(locale);
        javax.swing.JComponent.setDefaultLocale(locale);
    }

    /**
     * Translate a key and substitute "%0".."%9" placeholders with params.
     * "%c" expands to ':', "%e" to '='; any other "%x" keeps 'x' literally.
     * Falls back to the key itself when no translation exists.
     *
     * @param key    translation key (spaces are stored as underscores)
     * @param params positional substitution values; may be null
     * @return the expanded translation, or the key if untranslated/empty
     */
    public static String lang(String key, String... params) {
        String translation = null;
        try {
            if (messages != null) {
                translation = messages.getString(key.replaceAll(" ", "_"));
            }
        } catch (MissingResourceException ex) {
            LOGGER.warn("Warning: could not get translation for \"" + key
                    + "\" for locale " + Locale.getDefault());
        }
        if (translation == null) {
            translation = key;
        }

        if (!translation.isEmpty()) {
            translation = translation.replaceAll("_", " ");
            StringBuilder sb = new StringBuilder();
            boolean afterPercent = false;
            for (int i = 0; i < translation.length(); ++i) {
                char c = translation.charAt(i);
                if (c == '%') {
                    afterPercent = true;
                } else if (!afterPercent) {
                    sb.append(c);
                } else {
                    afterPercent = false;
                    try {
                        int index = Integer.parseInt(String.valueOf(c));
                        // Fixed off-by-one: the original allowed index == params.length,
                        // which threw ArrayIndexOutOfBoundsException.
                        if (params != null && index >= 0 && index < params.length) {
                            sb.append(params[index]);
                        }
                    } catch (NumberFormatException e) {
                        // append literally (for quoting) or insert special symbol
                        switch (c) {
                        case 'c': // colon
                            sb.append(':');
                            break;
                        case 'e': // equal
                            sb.append('=');
                            break;
                        default: // anything else, e.g. %
                            sb.append(c);
                        }
                    }
                }
            }
            return sb.toString();
        }
        return key;
    }

    /** Translate a key with no placeholder substitution. */
    public static String lang(String key) {
        return lang(key, (String[]) null);
    }

    /**
     * Translate a menu-title key; falls back to the key when untranslated.
     */
    public static String menuTitle(String key) {
        String translation = null;
        try {
            // Fixed: the null check previously tested 'messages' while
            // dereferencing 'menuTitles', risking an NPE.
            if (menuTitles != null) {
                translation = menuTitles.getString(key.replaceAll(" ", "_"));
            }
        } catch (MissingResourceException ex) {
            translation = key;
        }
        if (translation != null && !translation.isEmpty()) {
            return translation.replaceAll("_", " ");
        } else {
            return key;
        }
    }

    /**
     * Translate an integrity-check message key; falls back to the key.
     */
    public static String getIntegrityMessage(String key) {
        String translation = null;
        try {
            if (intMessages != null) {
                translation = intMessages.getString(key);
            }
        } catch (MissingResourceException ex) {
            translation = key;
            // Message corrected: this lookup is for integrity messages, not menu items.
            LOGGER.warn("Warning: could not get integrity message translation for \"" + key + "\"");
        }
        if (translation != null && !translation.isEmpty()) {
            return translation;
        } else {
            return key;
        }
    }
}
package net.snowflake.client.core; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Strings; import net.snowflake.client.jdbc.ErrorCode; import net.snowflake.client.jdbc.SnowflakeDriver; import net.snowflake.client.jdbc.SnowflakeReauthenticationRequest; import net.snowflake.client.jdbc.SnowflakeSQLException; import net.snowflake.client.jdbc.SnowflakeType; import net.snowflake.client.jdbc.SnowflakeUtil; import net.snowflake.client.log.ArgSupplier; import net.snowflake.client.jdbc.telemetryOOB.TelemetryService; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; import net.snowflake.common.core.ClientAuthnDTO; import net.snowflake.common.core.ClientAuthnParameter; import net.snowflake.common.core.SqlState; import org.apache.http.HttpHeaders; import org.apache.http.client.config.RequestConfig; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.utils.URIBuilder; import org.apache.http.entity.StringEntity; import org.apache.http.message.BasicHeader; import org.apache.http.message.HeaderGroup; import org.jsoup.Jsoup; import org.jsoup.nodes.Document; import org.jsoup.select.Elements; import java.io.IOException; import java.net.MalformedURLException; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.nio.charset.Charset; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.UUID; import java.util.stream.Collectors; import java.util.stream.Stream; import static net.snowflake.client.core.SFTrustManager.resetOCSPResponseCacherServerURL; /** * Low level session util */ public class SessionUtil { private static final SFLogger logger = SFLoggerFactory.getLogger(SessionUtil.class); // Response Field Name private 
// GS login-request query parameter names
static final String SF_QUERY_DATABASE = "databaseName";
private static final String SF_QUERY_SCHEMA = "schemaName";
private static final String SF_QUERY_WAREHOUSE = "warehouse";
private static final String SF_QUERY_ROLE = "roleName";

// Request path
private static final String SF_PATH_LOGIN_REQUEST =
    "/session/v1/login-request";
private static final String SF_PATH_TOKEN_REQUEST = "/session/token-request";
protected static final String SF_PATH_AUTHENTICATOR_REQUEST =
    "/session/authenticator-request";
public static final String SF_QUERY_SESSION_DELETE = "delete";

// Headers
public static final String SF_HEADER_AUTHORIZATION = HttpHeaders.AUTHORIZATION;

// Authentication type
public static final String SF_HEADER_BASIC_AUTHTYPE = "Basic";
public static final String SF_HEADER_SNOWFLAKE_AUTHTYPE = "Snowflake";
public static final String SF_HEADER_TOKEN_TAG = "Token";

// Session / telemetry parameter keys understood by the server
public static final String CLIENT_STORE_TEMPORARY_CREDENTIAL =
    "CLIENT_STORE_TEMPORARY_CREDENTIAL";
public static final String SERVICE_NAME = "SERVICE_NAME";
public static final String CLIENT_IN_BAND_TELEMETRY_ENABLED =
    "CLIENT_TELEMETRY_ENABLED";
public static final String CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED =
    "CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED";
public static final String CLIENT_RESULT_COLUMN_CASE_INSENSITIVE =
    "CLIENT_RESULT_COLUMN_CASE_INSENSITIVE";
public static final String JDBC_RS_COLUMN_CASE_INSENSITIVE =
    "JDBC_RS_COLUMN_CASE_INSENSITIVE";

// JVM system-property names and their server-side parameter equivalents
public static final String CLIENT_RESULT_CHUNK_SIZE_JVM =
    "net.snowflake.jdbc.clientResultChunkSize";
public static final String CLIENT_RESULT_CHUNK_SIZE =
    "CLIENT_RESULT_CHUNK_SIZE";
public static final String CLIENT_MEMORY_LIMIT_JVM =
    "net.snowflake.jdbc.clientMemoryLimit";
public static final String CLIENT_MEMORY_LIMIT = "CLIENT_MEMORY_LIMIT";
public static final String CLIENT_PREFETCH_THREADS_JVM =
    "net.snowflake.jdbc.clientPrefetchThreads";
public static final String CLIENT_PREFETCH_THREADS = "CLIENT_PREFETCH_THREADS";
public static final String CLIENT_ENABLE_CONSERVATIVE_MEMORY_USAGE_JVM =
    "net.snowflake.jdbc.clientEnableConservativeMemoryUsage";
public static final String CLIENT_ENABLE_CONSERVATIVE_MEMORY_USAGE =
    "CLIENT_ENABLE_CONSERVATIVE_MEMORY_USAGE";
public static final String CACHE_FILE_NAME = "temporary_credential.json";
public static final String OCSP_FAIL_OPEN_JVM =
    "net.snowflake.jdbc.ocspFailOpen";
public static final String OCSP_FAIL_OPEN = "ocspFailOpen";
public static final String CLIENT_SESSION_KEEP_ALIVE_HEARTBEAT_FREQUENCY =
    "CLIENT_SESSION_KEEP_ALIVE_HEARTBEAT_FREQUENCY";
public static final String CLIENT_SFSQL = "CLIENT_SFSQL";
static final String SF_HEADER_SERVICE_NAME = "X-Snowflake-Service";
private static final String SF_PATH_SESSION = "/session";

// Client-side defaults and bounds for result prefetching (MB / thread counts)
public static long DEFAULT_CLIENT_MEMORY_LIMIT = 1536;
public static int DEFAULT_CLIENT_PREFETCH_THREADS = 4;
public static int DEFAULT_CLIENT_CHUNK_SIZE = 160;
public static int MIN_CLIENT_CHUNK_SIZE = 48;
public static int MAX_CLIENT_CHUNK_SIZE = 160;

// Maps each JVM system-property name to its session-parameter name
public static Map<String, String> JVM_PARAMS_TO_PARAMS = Stream.of(
    new String[][]{
        {CLIENT_RESULT_CHUNK_SIZE_JVM, CLIENT_RESULT_CHUNK_SIZE},
        {CLIENT_MEMORY_LIMIT_JVM, CLIENT_MEMORY_LIMIT},
        {CLIENT_PREFETCH_THREADS_JVM, CLIENT_PREFETCH_THREADS},
        {OCSP_FAIL_OPEN_JVM, OCSP_FAIL_OPEN},
        {CLIENT_ENABLE_CONSERVATIVE_MEMORY_USAGE_JVM,
         CLIENT_ENABLE_CONSERVATIVE_MEMORY_USAGE}
    }).collect(Collectors.toMap(data -> data[0], data -> data[1]));

private static ObjectMapper mapper = ObjectMapperFactory.getObjectMapper();

// Default server health-check interval, in seconds
private static int DEFAULT_HEALTH_CHECK_INTERVAL = 45; // sec

// Session parameters normalized as strings
private static Set<String> STRING_PARAMS = new HashSet<>(Arrays.asList(
    "TIMEZONE",
    "TIMESTAMP_OUTPUT_FORMAT",
    "TIMESTAMP_NTZ_OUTPUT_FORMAT",
    "TIMESTAMP_LTZ_OUTPUT_FORMAT",
    "TIMESTAMP_TZ_OUTPUT_FORMAT",
    "DATE_OUTPUT_FORMAT",
    "TIME_OUTPUT_FORMAT",
    "BINARY_OUTPUT_FORMAT",
    "CLIENT_TIMESTAMP_TYPE_MAPPING",
    SERVICE_NAME));

// Session parameters normalized as integers.
// NOTE(review): "CLIENT_SESSION_KEEP_ALIVE_HEARTBEAT_FREQUENCY" also appears
// (via its constant) in BOOLEAN_PARAMS below — confirm which set is intended.
private static final Set<String> INT_PARAMS = new HashSet<>(Arrays.asList(
    CLIENT_PREFETCH_THREADS,
    CLIENT_MEMORY_LIMIT,
    CLIENT_RESULT_CHUNK_SIZE,
    "CLIENT_STAGE_ARRAY_BINDING_THRESHOLD",
    "CLIENT_SESSION_KEEP_ALIVE_HEARTBEAT_FREQUENCY"));

// Session parameters normalized as booleans
private static final Set<String> BOOLEAN_PARAMS = new HashSet<>(Arrays.asList(
    CLIENT_SESSION_KEEP_ALIVE_HEARTBEAT_FREQUENCY,
    "CLIENT_HONOR_CLIENT_TZ_FOR_TIMESTAMP_NTZ",
    "CLIENT_DISABLE_INCIDENTS",
    "CLIENT_SESSION_KEEP_ALIVE",
    CLIENT_IN_BAND_TELEMETRY_ENABLED,
    CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED,
    CLIENT_STORE_TEMPORARY_CREDENTIAL,
    "JDBC_USE_JSON_PARSER",
    "AUTOCOMMIT",
    "JDBC_EFFICIENT_CHUNK_STORAGE",
    JDBC_RS_COLUMN_CASE_INSENSITIVE,
    CLIENT_RESULT_COLUMN_CASE_INSENSITIVE,
    "CLIENT_METADATA_REQUEST_USE_CONNECTION_CTX",
    "JDBC_TREAT_DECIMAL_AS_INT",
    "JDBC_ENABLE_COMBINED_DESCRIBE",
    CLIENT_ENABLE_CONSERVATIVE_MEMORY_USAGE));

/**
 * Returns Authenticator type
 *
 * @param loginInput login information
 * @return Authenticator type derived from the configured authenticator name,
 * falling back to key-pair (JWT) vs. password auth when none is set
 */
static private ClientAuthnDTO.AuthenticatorType getAuthenticator(
    SFLoginInput loginInput)
{
  if (loginInput.getAuthenticator() != null)
  {
    if (loginInput.getAuthenticator().equalsIgnoreCase(
        ClientAuthnDTO.AuthenticatorType.EXTERNALBROWSER.name()))
    {
      // SAML 2.0 compliant service/application
      return ClientAuthnDTO.AuthenticatorType.EXTERNALBROWSER;
    }
    else if (loginInput.getAuthenticator().equalsIgnoreCase(
        ClientAuthnDTO.AuthenticatorType.OAUTH.name()))
    {
      // OAuth Authentication
      return ClientAuthnDTO.AuthenticatorType.OAUTH;
    }
    else if (loginInput.getAuthenticator().equalsIgnoreCase(
        ClientAuthnDTO.AuthenticatorType.SNOWFLAKE_JWT.name()))
    {
      return ClientAuthnDTO.AuthenticatorType.SNOWFLAKE_JWT;
    }
    else if (!loginInput.getAuthenticator().equalsIgnoreCase(
        ClientAuthnDTO.AuthenticatorType.SNOWFLAKE.name()))
    {
      // OKTA authenticator v1. This will be deprecated once externalbrowser
      // is in production.
      return ClientAuthnDTO.AuthenticatorType.OKTA;
    }
  }

  // authenticator is null, then jdbc will decide authenticator depends on
  // if privateKey is specified or not.
// If yes, authenticator type will be
// SNOWFLAKE_JWT, otherwise it will use SNOWFLAKE.
  return loginInput.getPrivateKey() != null ?
         ClientAuthnDTO.AuthenticatorType.SNOWFLAKE_JWT :
         ClientAuthnDTO.AuthenticatorType.SNOWFLAKE;
}

/**
 * Open a new session
 *
 * @param loginInput login information
 * @return information get after login such as token information
 * @throws SFException           if unexpected uri syntax
 * @throws SnowflakeSQLException if failed to establish connection with snowflake
 */
static public SFLoginOutput openSession(SFLoginInput loginInput)
    throws SFException, SnowflakeSQLException
{
  AssertUtil.assertTrue(loginInput.getServerUrl() != null,
                        "missing server URL for opening session");

  AssertUtil.assertTrue(loginInput.getAppId() != null,
                        "missing app id for opening session");

  AssertUtil.assertTrue(loginInput.getLoginTimeout() >= 0,
                        "negative login timeout for opening session");

  final ClientAuthnDTO.AuthenticatorType authenticator = getAuthenticator(
      loginInput);
  if (!authenticator.equals(ClientAuthnDTO.AuthenticatorType.OAUTH))
  {
    // OAuth does not require a username
    AssertUtil.assertTrue(loginInput.getUserName() != null,
                          "missing user name for opening session");
  }
  if (authenticator.equals(ClientAuthnDTO.AuthenticatorType.EXTERNALBROWSER))
  {
    // force to set the flag so the id token can be cached for SSO
    loginInput.getSessionParameters().put(CLIENT_STORE_TEMPORARY_CREDENTIAL, true);
  }
  else
  {
    // TODO: patch for now. We should update mergeProperties
    // to normalize all parameters using STRING_PARAMS, INT_PARAMS and
    // BOOLEAN_PARAMS.
    Object value = loginInput.getSessionParameters().get(
        CLIENT_STORE_TEMPORARY_CREDENTIAL);
    if (value != null)
    {
      loginInput.getSessionParameters().put(
          CLIENT_STORE_TEMPORARY_CREDENTIAL, asBoolean(value));
    }
  }

  boolean isClientStoreTemporaryCredential = asBoolean(
      loginInput.getSessionParameters().get(CLIENT_STORE_TEMPORARY_CREDENTIAL));
  if (isClientStoreTemporaryCredential &&
      CredentialManager.getInstance().fillCachedIdToken(loginInput))
  {
    // a cached id token exists; try to reuse it instead of a full login
    try
    {
      return issueSession(loginInput);
    }
    catch (SnowflakeReauthenticationRequest ex)
    {
      // NOTE(review): the stray "errorCode." fragment in this runtime log
      // message looks like an editing leftover; left unchanged here.
      logger.debug("The token expired. errorCode. Reauthenticating...");
    }
  }
  // cached-token path unavailable or failed: perform a full login
  return newSession(loginInput);
}

/**
 * Coerces a session-parameter value to boolean. Only Boolean and String
 * values are recognized; anything else (including null) yields false.
 *
 * @param value raw parameter value
 * @return the boolean interpretation of the value
 */
static private boolean asBoolean(Object value)
{
  if (value == null)
  {
    return false;
  }
  switch (value.getClass().getName())
  {
    case "java.lang.Boolean":
      return (Boolean) value;
    case "java.lang.String":
      return Boolean.valueOf((String) value);
  }
  return false;
}

/**
 * Performs a full login against the GS login endpoint and builds the
 * resulting session state.
 *
 * @param loginInput login information
 * @return login output (tokens, server version, session context, parameters)
 * @throws SFException           if unexpected uri syntax
 * @throws SnowflakeSQLException if failed to establish connection with snowflake
 */
static private SFLoginOutput newSession(SFLoginInput loginInput)
    throws SFException, SnowflakeSQLException
{
  // build URL for login request
  URIBuilder uriBuilder;
  URI loginURI;
  String tokenOrSamlResponse = null;
  String samlProofKey = null;
  boolean consentCacheIdToken = true;

  String sessionToken;
  String masterToken;
  String sessionDatabase;
  String sessionSchema;
  String sessionRole;
  String sessionWarehouse;
  long masterTokenValidityInSeconds;
  String idToken;
  String databaseVersion = null;
  int databaseMajorVersion = 0;
  int databaseMinorVersion = 0;
  String newClientForUpgrade = null;
  int healthCheckInterval = DEFAULT_HEALTH_CHECK_INTERVAL;
  int httpClientSocketTimeout = loginInput.getSocketTimeout();
  final ClientAuthnDTO.AuthenticatorType authenticator = getAuthenticator(
      loginInput);
  Map<String, Object> commonParams;

  try
  {
    uriBuilder = new URIBuilder(loginInput.getServerUrl());

    // add database name and schema name as query parameters
    if (loginInput.getDatabaseName() != null)
    {
      uriBuilder.addParameter(SF_QUERY_DATABASE, loginInput.getDatabaseName());
    }

    if (loginInput.getSchemaName() != null)
    {
      uriBuilder.addParameter(SF_QUERY_SCHEMA, loginInput.getSchemaName());
    }

    if (loginInput.getWarehouse() != null)
    {
      uriBuilder.addParameter(SF_QUERY_WAREHOUSE, loginInput.getWarehouse());
    }

    if (loginInput.getRole() != null)
    {
      uriBuilder.addParameter(SF_QUERY_ROLE, loginInput.getRole());
    }

    if (authenticator == ClientAuthnDTO.AuthenticatorType.EXTERNALBROWSER)
    {
      // SAML 2.0 compliant service/application: authenticate through the
      // user's browser and collect the SAML token / proof key
      SessionUtilExternalBrowser s =
          SessionUtilExternalBrowser.createInstance(loginInput);
      s.authenticate();
      tokenOrSamlResponse = s.getToken();
      samlProofKey = s.getProofKey();
      consentCacheIdToken = s.isConsentCacheIdToken();
    }
    else if (authenticator == ClientAuthnDTO.AuthenticatorType.OKTA)
    {
      // okta authenticator v1
      tokenOrSamlResponse = getSamlResponseUsingOkta(loginInput);
    }
    else if (authenticator == ClientAuthnDTO.AuthenticatorType.SNOWFLAKE_JWT)
    {
      // key-pair authentication: sign a short-lived JWT locally
      SessionUtilKeyPair s = new SessionUtilKeyPair(loginInput.getPrivateKey(),
                                                    loginInput.getAccountName(),
                                                    loginInput.getUserName());
      loginInput.setToken(s.issueJwtToken());
    }

    uriBuilder.addParameter(SFSession.SF_QUERY_REQUEST_ID,
                            UUID.randomUUID().toString());

    uriBuilder.setPath(SF_PATH_LOGIN_REQUEST);
    loginURI = uriBuilder.build();
  }
  catch (URISyntaxException ex)
  {
    logger.error("Exception when building URL", ex);

    throw new SFException(ex, ErrorCode.INTERNAL_ERROR,
                          "unexpected URI syntax exception:1");
  }

  if (loginInput.getServerUrl().indexOf(".privatelink.snowflakecomputing.com") > 0)
  {
    // Privatelink uses special OCSP Cache server
    try
    {
      URL url = new URL(loginInput.getServerUrl());
      String host = url.getHost();
      logger.debug("HOST: {}", host);
      String ocspCacheServerUrl = String.format(
          "http://ocsp.%s/%s",
          host, SFTrustManager.CACHE_FILE_NAME);
      logger.debug("OCSP Cache Server for Privatelink: {}",
                   ocspCacheServerUrl);
      resetOCSPResponseCacherServerURL(ocspCacheServerUrl);
    }
    catch (IOException ex)
    {
      throw new SFException(ex, ErrorCode.INTERNAL_ERROR,
                            "unexpected URL syntax exception");
    }
  }

  HttpPost postRequest = null;

  try
  {
    // build the login-request JSON payload
    ClientAuthnDTO authnData = new ClientAuthnDTO();
    Map<String, Object> data = new HashMap<>();
    data.put(ClientAuthnParameter.CLIENT_APP_ID.name(), loginInput.getAppId());

    /*
     * username is always included regardless of authenticator to identify
     * the user.
     */
    data.put(ClientAuthnParameter.LOGIN_NAME.name(),
             loginInput.getUserName());

    /*
     * only include password information in the request to GS if federated
     * authentication method is not specified.
     * When specified, this password information is really to be used to
     * authenticate with the IDP provider only, and GS should not have any
     * trace for this information.
     */
    if (authenticator == ClientAuthnDTO.AuthenticatorType.SNOWFLAKE)
    {
      data.put(ClientAuthnParameter.PASSWORD.name(), loginInput.getPassword());
    }
    else if (authenticator == ClientAuthnDTO.AuthenticatorType.EXTERNALBROWSER)
    {
      data.put(ClientAuthnParameter.AUTHENTICATOR.name(),
               ClientAuthnDTO.AuthenticatorType.EXTERNALBROWSER.name());
      data.put(ClientAuthnParameter.PROOF_KEY.name(), samlProofKey);
      data.put(ClientAuthnParameter.TOKEN.name(), tokenOrSamlResponse);
    }
    else if (authenticator == ClientAuthnDTO.AuthenticatorType.OKTA)
    {
      data.put(ClientAuthnParameter.RAW_SAML_RESPONSE.name(),
               tokenOrSamlResponse);
    }
    else if (authenticator == ClientAuthnDTO.AuthenticatorType.OAUTH ||
             authenticator == ClientAuthnDTO.AuthenticatorType.SNOWFLAKE_JWT)
    {
      data.put(ClientAuthnParameter.AUTHENTICATOR.name(), authenticator.name());
      data.put(ClientAuthnParameter.TOKEN.name(), loginInput.getToken());
    }

    // client environment info reported to the server
    Map<String, Object> clientEnv = new HashMap<String, Object>();
    clientEnv.put("OS", System.getProperty("os.name"));
    clientEnv.put("OS_VERSION", System.getProperty("os.version"));
    clientEnv.put("JAVA_VERSION", System.getProperty("java.version"));
    clientEnv.put("JAVA_RUNTIME", System.getProperty("java.runtime.name"));
    clientEnv.put("JAVA_VM", System.getProperty("java.vm.name"));
    clientEnv.put("OCSP_MODE", loginInput.getOCSPMode().name());

    if (loginInput.getApplication() != null)
    {
      clientEnv.put("APPLICATION", loginInput.getApplication());
    }
    else
    {
      // When you add new client environment info, please add new keys to
      // messages_en_US.src.json so that they can be displayed properly in UI
      // detect app name
      String appName = System.getProperty("sun.java.command");
      // remove the arguments
      if (appName != null)
      {
        if (appName.indexOf(" ") > 0)
        {
          appName = appName.substring(0, appName.indexOf(" "));
        }

        clientEnv.put("APPLICATION", appName);
      }
    }

    // add properties from client info
    Properties clientInfo = loginInput.getClientInfo();
    if (clientInfo != null)
    {
      for (Map.Entry<?, ?> property : clientInfo.entrySet())
      {
        if (property != null && property.getKey() != null &&
            property.getValue() != null)
        {
          clientEnv.put(property.getKey().toString(),
                        property.getValue().toString());
        }
      }
    }

    // SNOW-20103: track additional client info in session
    String clientInfoJSONStr = System.getProperty("snowflake.client.info");
    if (clientInfoJSONStr != null)
    {
      JsonNode clientInfoJSON = null;

      try
      {
        clientInfoJSON = mapper.readTree(clientInfoJSONStr);
      }
      catch (Throwable ex)
      {
        logger.debug(
            "failed to process snowflake.client.info property as JSON: {}"
            , clientInfoJSONStr, ex);
      }

      if (clientInfoJSON != null)
      {
        Iterator<Map.Entry<String, JsonNode>> fields = clientInfoJSON.fields();
        while (fields.hasNext())
        {
          Map.Entry<String, JsonNode> field = fields.next();
          clientEnv.put(field.getKey(), field.getValue().asText());
        }
      }
    }

    data.put(ClientAuthnParameter.CLIENT_ENVIRONMENT.name(), clientEnv);

    // Initialize the session parameters
    Map<String, Object> sessionParameter = loginInput.getSessionParameters();

    if (sessionParameter != null)
    {
      data.put(ClientAuthnParameter.SESSION_PARAMETERS.name(), loginInput
          .getSessionParameters());
    }

    if (loginInput.getAccountName() != null)
    {
      data.put(ClientAuthnParameter.ACCOUNT_NAME.name(),
               loginInput.getAccountName());
    }

    // Second Factor Authentication
    if (loginInput.isPasscodeInPassword())
    {
      data.put(ClientAuthnParameter.EXT_AUTHN_DUO_METHOD.name(), "passcode");
    }
    else if (loginInput.getPasscode() != null)
    {
      data.put(ClientAuthnParameter.EXT_AUTHN_DUO_METHOD.name(), "passcode");
      data.put(ClientAuthnParameter.PASSCODE.name(), loginInput.getPasscode());
    }
    else
    {
      data.put(ClientAuthnParameter.EXT_AUTHN_DUO_METHOD.name(), "push");
    }

    data.put(ClientAuthnParameter.CLIENT_APP_VERSION.name(),
             loginInput.getAppVersion());

    authnData.setData(data);
    String json = mapper.writeValueAsString(authnData);

    postRequest = new HttpPost(loginURI);

    // attach the login info json body to the post request
    StringEntity input = new StringEntity(json, Charset.forName("UTF-8"));
    input.setContentType("application/json");
    postRequest.setEntity(input);

    postRequest.addHeader("accept", "application/json");

    /*
     * HttpClient should take authorization header from char[] instead of
     * String.
     */
    postRequest.setHeader(SF_HEADER_AUTHORIZATION, SF_HEADER_BASIC_AUTHTYPE);

    setServiceNameHeader(loginInput, postRequest);

    String theString = HttpUtil.executeRequest(postRequest,
                                               loginInput.getLoginTimeout(), 0, null);

    // general method, same as with data binding
    JsonNode jsonNode = mapper.readTree(theString);

    // check the success field first
    if (!jsonNode.path("success").asBoolean())
    {
      logger.debug("response = {}", theString);

      String errorCode = jsonNode.path("code").asText();

      throw new SnowflakeSQLException(
          SqlState.SQLCLIENT_UNABLE_TO_ESTABLISH_SQLCONNECTION,
          ErrorCode.CONNECTION_ERROR.getMessageCode(),
          errorCode, jsonNode.path("message").asText());
    }

    // session token is in the data field of the returned json response
    sessionToken = jsonNode.path("data").path("token").asText();
    masterToken = jsonNode.path("data").path("masterToken").asText();
    idToken = nullStringAsEmptyString(
        jsonNode.path("data").path("idToken").asText());
    masterTokenValidityInSeconds = jsonNode.path("data").
        path("masterValidityInSeconds").asLong();
    String serverVersion =
        jsonNode.path("data").path("serverVersion").asText();

    // session context returned by the server; explicit isNull checks keep
    // absent values as null rather than the empty string
    JsonNode dbNode = jsonNode.path("data").path("sessionInfo").path("databaseName");
    sessionDatabase = dbNode.isNull() ? null : dbNode.asText();
    JsonNode schemaNode = jsonNode.path("data").path("sessionInfo").path("schemaName");
    sessionSchema = schemaNode.isNull() ? null : schemaNode.asText();
    JsonNode roleNode = jsonNode.path("data").path("sessionInfo").path("roleName");
    sessionRole = roleNode.isNull() ? null : roleNode.asText();
    JsonNode warehouseNode = jsonNode.path("data").path("sessionInfo").path("warehouseName");
    sessionWarehouse = warehouseNode.isNull() ? null : warehouseNode.asText();

    commonParams =
        SessionUtil.getCommonParams(jsonNode.path("data").path("parameters"));

    // parse "major.minor" out of the server version string
    if (serverVersion != null)
    {
      logger.debug("server version = {}", serverVersion);

      if (serverVersion.indexOf(" ") > 0)
      {
        databaseVersion = serverVersion.substring(0,
                                                  serverVersion.indexOf(" "));
      }
      else
      {
        databaseVersion = serverVersion;
      }
    }
    else
    {
      logger.debug("server version is null");
    }

    if (databaseVersion != null)
    {
      String[] components = databaseVersion.split("\\.");
      if (components != null && components.length >= 2)
      {
        try
        {
          databaseMajorVersion = Integer.parseInt(components[0]);
          databaseMinorVersion = Integer.parseInt(components[1]);
        }
        catch (Exception ex)
        {
          logger.error("Exception encountered when parsing server " +
                       "version: {} Exception: {}",
                       databaseVersion, ex.getMessage());
        }
      }
    }
    else
    {
      logger.debug("database version is null");
    }

    if (!jsonNode.path("data").path("newClientForUpgrade").isNull())
    {
      newClientForUpgrade =
          jsonNode.path("data").path("newClientForUpgrade").asText();

      logger.debug("new client: {}", newClientForUpgrade);
    }

    // get health check interval and adjust network timeouts if different
    int healthCheckIntervalFromGS =
        jsonNode.path("data").path("healthCheckInterval").asInt();

    logger.debug(
        "health check interval = {}", healthCheckIntervalFromGS);

    if (healthCheckIntervalFromGS > 0 &&
        healthCheckIntervalFromGS != healthCheckInterval)
    {
      // add health check interval to socket timeout
      httpClientSocketTimeout = loginInput.getSocketTimeout() +
                                (healthCheckIntervalFromGS * 1000);

      final RequestConfig requestConfig = RequestConfig
          .copy(HttpUtil.getRequestConfigWithoutcookies())
          .setConnectTimeout(loginInput.getConnectionTimeout())
          .setSocketTimeout(httpClientSocketTimeout)
          .build();

      HttpUtil.setRequestConfig(requestConfig);

      logger.debug(
          "adjusted connection timeout to = {}",
          loginInput.getConnectionTimeout());

      logger.debug(
          "adjusted socket timeout to = {}", httpClientSocketTimeout);
    }
  }
  catch (SnowflakeSQLException ex)
  {
    throw ex; // must catch here to avoid Throwable to get the exception
  }
  catch (IOException ex)
  {
    logger.error("IOException when creating session: " + postRequest, ex);

    throw new SnowflakeSQLException(ex, SqlState.IO_ERROR,
                                    ErrorCode.NETWORK_ERROR.getMessageCode(),
                                    "Exception encountered when opening connection: " +
                                    ex.getMessage());
  }
  catch (Throwable ex)
  {
    logger.error("Exception when creating session: " + postRequest, ex);

    // NOTE(review): CONNECTION_ERROR.getMessageCode() is passed twice here
    // (once as the error code, once as a message argument) — compare with the
    // IOException handler above; looks like a copy-paste slip. Left unchanged.
    throw new SnowflakeSQLException(ex,
                                    SqlState.SQLCLIENT_UNABLE_TO_ESTABLISH_SQLCONNECTION,
                                    ErrorCode.CONNECTION_ERROR.getMessageCode(),
                                    ErrorCode.CONNECTION_ERROR.getMessageCode(),
                                    ex.getMessage());
  }

  SFLoginOutput ret = new SFLoginOutput(sessionToken, masterToken,
                                        masterTokenValidityInSeconds, idToken,
                                        databaseVersion, databaseMajorVersion,
                                        databaseMinorVersion,
                                        httpClientSocketTimeout,
                                        sessionDatabase, sessionSchema,
                                        sessionRole, sessionWarehouse,
                                        commonParams);
  ret.setUpdatedByTokenRequest(false);

  if (consentCacheIdToken)
  {
    // persist the id token so later connections can skip the browser flow
    CredentialManager.getInstance().writeTemporaryCredential(loginInput, ret);
  }
  return ret;
}

/**
 * Adds the service-name routing header to the request when a service name
 * is known for this session.
 *
 * @param loginInput  login information carrying the service name
 * @param postRequest request to decorate
 */
private static void setServiceNameHeader(SFLoginInput loginInput, HttpPost postRequest)
{
  if (!Strings.isNullOrEmpty(loginInput.getServiceName()))
  {
    // service name is used to route a request to appropriate cluster.
    postRequest.setHeader(SF_HEADER_SERVICE_NAME,
                          loginInput.getServiceName());
  }
}

/**
 * Normalizes absent token values: both an empty/null string and the literal
 * text "null" (as serialized by the server) become the empty string.
 *
 * @param value raw token text from the response
 * @return the value, or "" when it represents "no token"
 */
static private String nullStringAsEmptyString(String value)
{
  if (Strings.isNullOrEmpty(value) || "null".equals(value))
  {
    return "";
  }
  return value;
}

/**
 * Delete the id token cache
 */
static public void deleteIdTokenCache()
{
  CredentialManager.getInstance().deleteIdTokenCache();
}

/**
 * Renew a session.
 * <p>
 * Use cases:
 * - Session and Master tokens are provided. No Id token:
 * - succeed in getting a new Session token.
 * - fail and raise SnowflakeReauthenticationRequest because Master
 * token expires. Since no id token exists, the exception is thrown
 * to the upstream.
 * - Session and Id tokens are provided. No Master token:
 * - fail and raise SnowflakeReauthenticationRequest and
 * issue a new Session token
 * - fail and raise SnowflakeReauthenticationRequest and fail
 * to issue a new Session token as the
 *
 * @param loginInput login information
 * @return login output
 * @throws SFException           if unexpected uri information
 * @throws SnowflakeSQLException if failed to renew the session
 */
static public SFLoginOutput renewSession(SFLoginInput loginInput)
    throws SFException, SnowflakeSQLException
{
  try
  {
    // first try a plain renewal against the master token
    return tokenRequest(loginInput, TokenRequestType.RENEW);
  }
  catch (SnowflakeReauthenticationRequest ex)
  {
    // renewal failed; fall back to the cached id token if one exists
    if (Strings.isNullOrEmpty(loginInput.getIdToken()))
    {
      throw ex;
    }
    return tokenRequest(loginInput, TokenRequestType.ISSUE);
  }
}

/**
 * Issue a session
 *
 * @param loginInput login information
 * @return login output
 * @throws SFException           if unexpected uri information
 * @throws SnowflakeSQLException if failed to renew the session
 */
static public SFLoginOutput issueSession(SFLoginInput loginInput)
    throws SFException, SnowflakeSQLException
{
  return tokenRequest(loginInput, TokenRequestType.ISSUE);
}

/**
 * Performs a token request against GS: either RENEW (exchange an old session
 * token using the master token) or ISSUE (mint a session token from a cached
 * id token).
 *
 * @param loginInput  login information
 * @param requestType RENEW or ISSUE
 * @return login output holding the fresh session/master tokens
 * @throws SFException           if unexpected uri information or network error
 * @throws SnowflakeSQLException if the server rejects the request
 */
static private SFLoginOutput tokenRequest(
    SFLoginInput loginInput, TokenRequestType requestType)
    throws SFException, SnowflakeSQLException
{
  AssertUtil.assertTrue(loginInput.getServerUrl() != null,
                        "missing server URL for tokenRequest");
  if (requestType == TokenRequestType.RENEW)
  {
    AssertUtil.assertTrue(loginInput.getMasterToken() != null,
                          "missing master token for tokenRequest");
    AssertUtil.assertTrue(loginInput.getSessionToken() != null,
                          "missing session token for tokenRequest");
  }
  else if (requestType == TokenRequestType.ISSUE)
  {
    AssertUtil.assertTrue(loginInput.getIdToken() != null,
                          "missing id token for tokenRequest");
  }

  AssertUtil.assertTrue(loginInput.getLoginTimeout() >= 0,
                        "negative login timeout for tokenRequest");

  // build URL for login request
  URIBuilder uriBuilder;
  HttpPost postRequest;
  String sessionToken;
  String masterToken;

  try
  {
    uriBuilder = new URIBuilder(loginInput.getServerUrl());
    uriBuilder.setPath(SF_PATH_TOKEN_REQUEST);

    uriBuilder.addParameter(SFSession.SF_QUERY_REQUEST_ID,
                            UUID.randomUUID().toString());

    postRequest = new HttpPost(uriBuilder.build());
  }
  catch (URISyntaxException ex)
  {
    logger.error("Exception when creating http request", ex);

    throw new SFException(ex, ErrorCode.INTERNAL_ERROR,
                          "unexpected URI syntax exception:3");
  }

  try
  {
    // input json with old session token and request type, notice the
    // session token needs to be quoted.
    Map<String, String> payload = new HashMap<>();
    String headerToken;
    if (requestType == TokenRequestType.RENEW)
    {
      // renewal authenticates with the master token
      headerToken = loginInput.getMasterToken();
      payload.put("oldSessionToken", loginInput.getSessionToken());
    }
    else
    {
      // issuance authenticates with the cached id token
      headerToken = loginInput.getIdToken();
      payload.put("idToken", loginInput.getIdToken());
    }
    payload.put("requestType", requestType.value);
    String json = mapper.writeValueAsString(payload);

    // attach the login info json body to the post request
    StringEntity input = new StringEntity(json, Charset.forName("UTF-8"));
    input.setContentType("application/json");
    postRequest.setEntity(input);

    postRequest.addHeader("accept", "application/json");

    postRequest.setHeader(
        SF_HEADER_AUTHORIZATION,
        SF_HEADER_SNOWFLAKE_AUTHTYPE + " "
        + SF_HEADER_TOKEN_TAG + "=\"" + headerToken + "\"");

    setServiceNameHeader(loginInput, postRequest);

    // ArgSuppliers defer the masking work until debug logging is enabled
    logger.debug(
        "request type: {}, old session token: {}, " +
        "master token: {}, id token: {}",
        requestType.value,
        (ArgSupplier) () -> loginInput.getSessionToken() != null ? "******" : null,
        (ArgSupplier) () -> loginInput.getMasterToken() != null ? "******" : null,
        (ArgSupplier) () -> loginInput.getIdToken() != null ? "******" : null);

    String theString = HttpUtil.executeRequest(postRequest,
                                               loginInput.getLoginTimeout(), 0, null);

    // general method, same as with data binding
    JsonNode jsonNode = mapper.readTree(theString);

    // check the success field first
    if (!jsonNode.path("success").asBoolean())
    {
      logger.debug("response = {}", theString);

      String errorCode = jsonNode.path("code").asText();
      String message = jsonNode.path("message").asText();

      EventUtil.triggerBasicEvent(
          Event.EventType.NETWORK_ERROR,
          "SessionUtil:renewSession failure, error code="
          + errorCode + ", message=" + message,
          true);

      SnowflakeUtil.checkErrorAndThrowExceptionIncludingReauth(jsonNode);
    }

    // session token is in the data field of the returned json response
    sessionToken = jsonNode.path("data").path("sessionToken").asText();
    masterToken = jsonNode.path("data").path("masterToken").asText();
  }
  catch (IOException ex)
  {
    logger.error("IOException when renewing session: " +
                 postRequest, ex);

    // Any EventType.NETWORK_ERRORs should have been triggered before
    // exception was thrown.
    throw new SFException(ex, ErrorCode.NETWORK_ERROR, ex.getMessage());
  }

  SFLoginOutput loginOutput = new SFLoginOutput();
  loginOutput
      .setSessionToken(sessionToken)
      .setMasterToken(masterToken)
      .setUpdatedByTokenRequest(true)
      .setUpdatedByTokenRequestIssue(requestType == TokenRequestType.ISSUE);

  return loginOutput;
}

/**
 * Close a session
 *
 * @param loginInput login information
 * @throws SnowflakeSQLException if failed to close session
 * @throws SFException           if failed to close session
 */
static public void closeSession(SFLoginInput loginInput)
    throws SFException, SnowflakeSQLException
{
  logger.debug(" public void close() throws SFException");

  // assert the following inputs are valid
  AssertUtil.assertTrue(loginInput.getServerUrl() != null,
                        "missing server URL for closing session");

  AssertUtil.assertTrue(loginInput.getSessionToken() != null,
                        "missing session token for closing session");

  AssertUtil.assertTrue(loginInput.getLoginTimeout() >= 0,
                        "missing login timeout for closing session");

  HttpPost postRequest = null;

  try
  {
    URIBuilder uriBuilder;

    uriBuilder = new URIBuilder(loginInput.getServerUrl());

    uriBuilder.addParameter(SF_QUERY_SESSION_DELETE, "true");
    uriBuilder.addParameter(SFSession.SF_QUERY_REQUEST_ID,
                            UUID.randomUUID().toString());

    uriBuilder.setPath(SF_PATH_SESSION);

    postRequest = new HttpPost(uriBuilder.build());

    postRequest.setHeader(SF_HEADER_AUTHORIZATION,
                          SF_HEADER_SNOWFLAKE_AUTHTYPE + " "
                          + SF_HEADER_TOKEN_TAG + "=\""
                          + loginInput.getSessionToken() + "\"");

    setServiceNameHeader(loginInput, postRequest);

    String theString = HttpUtil.executeRequest(postRequest,
                                               loginInput.getLoginTimeout(), 0, null);

    JsonNode rootNode;

    logger.debug(
        "connection close response: {}", theString);

    rootNode = mapper.readTree(theString);

    SnowflakeUtil.checkErrorAndThrowException(rootNode);
  }
  catch (URISyntaxException ex)
  {
    throw new RuntimeException("unexpected URI syntax exception", ex);
  }
  catch (IOException ex)
  {
    // close is best-effort: network failures are logged and swallowed so a
    // failed DELETE does not mask the caller's shutdown path
    logger.error("unexpected IO exception for: " + postRequest, ex);
} catch (SnowflakeSQLException ex) { // ignore exceptions for session expiration exceptions and for // sessions that no longer exist if (ex.getErrorCode() != Constants.SESSION_EXPIRED_GS_CODE && ex.getErrorCode() != Constants.SESSION_GONE) { throw ex; } } } /** * Given access token, query IDP URL snowflake app to get SAML response * We also need to perform important client side validation: * validate the post back url come back with the SAML response * contains the same prefix as the Snowflake's server url, which is the * intended destination url to Snowflake. * Explanation: * This emulates the behavior of IDP initiated login flow in the user * browser where the IDP instructs the browser to POST the SAML * assertion to the specific SP endpoint. This is critical in * preventing a SAML assertion issued to one SP from being sent to * another SP. * * @param loginInput Login Info for the request * @param ssoUrl URL to use for SSO * @param oneTimeToken The token used for SSO * @return The response in HTML form * @throws SnowflakeSQLException Will be thrown if the destination URL in * the SAML assertion does not match */ private static String federatedFlowStep4( SFLoginInput loginInput, String ssoUrl, String oneTimeToken) throws SnowflakeSQLException { String responseHtml = ""; try { final URL url = new URL(ssoUrl); URI oktaGetUri = new URIBuilder() .setScheme(url.getProtocol()) .setHost(url.getHost()) .setPath(url.getPath()) .setParameter("RelayState", "%2Fsome%2Fdeep%2Flink") .setParameter("onetimetoken", oneTimeToken).build(); HttpGet httpGet = new HttpGet(oktaGetUri); HeaderGroup headers = new HeaderGroup(); headers.addHeader(new BasicHeader(HttpHeaders.ACCEPT, "*/*")); httpGet.setHeaders(headers.getAllHeaders()); responseHtml = HttpUtil.executeRequest(httpGet, loginInput.getLoginTimeout(), 0, null); // step 5 String postBackUrl = getPostBackUrlFromHTML(responseHtml); if (!isPrefixEqual(postBackUrl, loginInput.getServerUrl())) { logger.debug("The specified 
authenticator {} and the destination URL " + "in the SAML assertion {} do not match.", loginInput.getAuthenticator(), postBackUrl); throw new SnowflakeSQLException( SqlState.SQLCLIENT_UNABLE_TO_ESTABLISH_SQLCONNECTION, ErrorCode.IDP_INCORRECT_DESTINATION.getMessageCode()); } } catch (IOException | URISyntaxException ex) { handleFederatedFlowError(loginInput, ex); } return responseHtml; } /** * Query IDP token url to authenticate and retrieve access token * * @param loginInput The login info for the request * @param tokenUrl The URL used to retrieve the access token * @return Returns the one time token * @throws SnowflakeSQLException Will be thrown if the execute request fails */ private static String federatedFlowStep3(SFLoginInput loginInput, String tokenUrl) throws SnowflakeSQLException { String oneTimeToken = ""; try { URL url = new URL(tokenUrl); URI tokenUri = url.toURI(); final HttpPost postRequest = new HttpPost(tokenUri); StringEntity params = new StringEntity("{\"username\":\"" + loginInput.getUserName() + "\",\"password\":\"" + loginInput.getPassword() + "\"}"); postRequest.setEntity(params); HeaderGroup headers = new HeaderGroup(); headers.addHeader(new BasicHeader(HttpHeaders.ACCEPT, "application/json")); headers.addHeader(new BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json")); postRequest.setHeaders(headers.getAllHeaders()); final String idpResponse = HttpUtil.executeRequestWithoutCookies(postRequest, loginInput.getLoginTimeout(), 0, null); logger.debug("user is authenticated against {}.", loginInput.getAuthenticator()); // session token is in the data field of the returned json response final JsonNode jsonNode = mapper.readTree(idpResponse); oneTimeToken = jsonNode.get("cookieToken").asText(); } catch (IOException | URISyntaxException ex) { handleFederatedFlowError(loginInput, ex); } return oneTimeToken; } /** * Perform important client side validation: * validate both token url and sso url contains same prefix * (protocol + host + port) as 
the given authenticator url.
 * Explanation:
 * This provides a way for the user to 'authenticate' the IDP it is
 * sending his/her credentials to. Without such a check, the user could
 * be coerced to provide credentials to an IDP impersonator.
 *
 * @param loginInput The login info for the request
 * @param tokenUrl The token URL
 * @param ssoUrl The SSO URL
 * @throws SnowflakeSQLException Will be thrown if the prefix for the
 * tokenUrl and ssoUrl do not match
 */
private static void federatedFlowStep2(
    SFLoginInput loginInput,
    String tokenUrl,
    String ssoUrl) throws SnowflakeSQLException
{
  try
  {
    // Both IDP endpoints must share scheme/host/port with the configured
    // authenticator; otherwise credentials could be sent to an impostor.
    if (!isPrefixEqual(loginInput.getAuthenticator(), tokenUrl) ||
        !isPrefixEqual(loginInput.getAuthenticator(), ssoUrl))
    {
      logger.debug("The specified authenticator {} is not supported.",
          loginInput.getAuthenticator());
      throw new SnowflakeSQLException(
          SqlState.SQLCLIENT_UNABLE_TO_ESTABLISH_SQLCONNECTION,
          ErrorCode.IDP_CONNECTION_ERROR.getMessageCode());
    }
  }
  catch (MalformedURLException ex)
  {
    handleFederatedFlowError(loginInput, ex);
  }
}

/**
 * Query Snowflake to obtain IDP token url and IDP SSO url
 *
 * @param loginInput The login info for the request
 * @return the "data" node of the server's JSON response (never consulted
 * on failure: a non-success response raises instead)
 * @throws SnowflakeSQLException Will be thrown if the execute request step
 * fails
 */
private static JsonNode federatedFlowStep1(SFLoginInput loginInput)
    throws SnowflakeSQLException
{
  JsonNode dataNode = null;
  try
  {
    URIBuilder fedUriBuilder = new URIBuilder(loginInput.getServerUrl());
    fedUriBuilder.setPath(SF_PATH_AUTHENTICATOR_REQUEST);
    URI fedUrlUri = fedUriBuilder.build();

    // Assemble the authenticator-request payload expected by the server.
    Map<String, Object> data = new HashMap<>();
    data.put(ClientAuthnParameter.ACCOUNT_NAME.name(),
        loginInput.getAccountName());
    data.put(ClientAuthnParameter.AUTHENTICATOR.name(),
        loginInput.getAuthenticator());
    data.put(ClientAuthnParameter.CLIENT_APP_ID.name(), loginInput.getAppId());
    data.put(ClientAuthnParameter.CLIENT_APP_VERSION.name(),
        loginInput.getAppVersion());

    ClientAuthnDTO authnData = new ClientAuthnDTO();
    authnData.setData(data);
    String json = mapper.writeValueAsString(authnData);

    // attach the login info json body to the post request
    StringEntity input = new StringEntity(json, Charset.forName("UTF-8"));
    input.setContentType("application/json");
    HttpPost postRequest = new HttpPost(fedUrlUri);
    postRequest.setEntity(input);
    postRequest.addHeader("accept", "application/json");

    final String gsResponse = HttpUtil.executeRequest(postRequest,
        loginInput.getLoginTimeout(), 0, null);
    logger.debug("authenticator-request response: {}", gsResponse);
    JsonNode jsonNode = mapper.readTree(gsResponse);

    // check the success field first
    if (!jsonNode.path("success").asBoolean())
    {
      logger.debug("response = {}", gsResponse);
      String errorCode = jsonNode.path("code").asText();
      throw new SnowflakeSQLException(
          SqlState.SQLCLIENT_UNABLE_TO_ESTABLISH_SQLCONNECTION,
          ErrorCode.CONNECTION_ERROR.getMessageCode(), errorCode,
          jsonNode.path("message").asText());
    }

    // session token is in the data field of the returned json response
    dataNode = jsonNode.path("data");
  }
  catch (IOException | URISyntaxException ex)
  {
    handleFederatedFlowError(loginInput, ex);
  }
  return dataNode;
}

/**
 * Logs an error generated during the federated authentication flow and
 * re-throws it as a SnowflakeSQLException.
 * Note that we separate IOExceptions since those tend to be network related.
* * @param loginInput The login info from the request * @param ex The exception to process * @throws SnowflakeSQLException Will be thrown for all calls to this method */ private static void handleFederatedFlowError(SFLoginInput loginInput, Exception ex) throws SnowflakeSQLException { if (ex instanceof IOException) { logger.error("IOException when authenticating with " + loginInput.getAuthenticator(), ex); throw new SnowflakeSQLException(ex, SqlState.IO_ERROR, ErrorCode.NETWORK_ERROR.getMessageCode(), "Exception encountered when opening connection: " + ex.getMessage()); } logger.error("Exception when authenticating with " + loginInput.getAuthenticator(), ex); throw new SnowflakeSQLException(ex, SqlState.SQLCLIENT_UNABLE_TO_ESTABLISH_SQLCONNECTION, ErrorCode.CONNECTION_ERROR.getMessageCode(), ErrorCode.CONNECTION_ERROR.getMessageCode(), ex.getMessage()); } /** * FEDERATED FLOW * See SNOW-27798 for additional details. * * @param loginInput The login info from the request * @return saml response * @throws SnowflakeSQLException Will be thrown if any of the federated * steps fail */ static private String getSamlResponseUsingOkta(SFLoginInput loginInput) throws SnowflakeSQLException { JsonNode dataNode = federatedFlowStep1(loginInput); String tokenUrl = dataNode.path("tokenUrl").asText(); String ssoUrl = dataNode.path("ssoUrl").asText(); federatedFlowStep2(loginInput, tokenUrl, ssoUrl); final String oneTimeToken = federatedFlowStep3(loginInput, tokenUrl); final String responseHtml = federatedFlowStep4( loginInput, ssoUrl, oneTimeToken); return responseHtml; } /** * Verify if two input urls have the same protocol, host, and port. * * @param aUrlStr a source URL string * @param bUrlStr a target URL string * @return true if matched otherwise false * @throws MalformedURLException raises if a URL string is not valid. 
 */
static boolean isPrefixEqual(String aUrlStr, String bUrlStr)
    throws MalformedURLException
{
  URL aUrl = new URL(aUrlStr);
  URL bUrl = new URL(bUrlStr);
  int aPort = aUrl.getPort();
  int bPort = bUrl.getPort();
  // getPort() returns -1 when the URL carries no explicit port; treat a
  // missing port on an https URL as the standard 443 so that
  // "https://host" and "https://host:443" compare equal.
  if (aPort == -1 && "https".equals(aUrl.getProtocol()))
  {
    // default port number for HTTPS
    aPort = 443;
  }
  if (bPort == -1 && "https".equals(bUrl.getProtocol()))
  {
    // default port number for HTTPS
    bPort = 443;
  }
  // no default port number for HTTP is supported.
  return aUrl.getHost().equalsIgnoreCase(bUrl.getHost()) &&
      aUrl.getProtocol().equalsIgnoreCase(bUrl.getProtocol()) &&
      aPort == bPort;
}

/**
 * Extracts post back url from the HTML returned by the IDP
 *
 * @param html The HTML that we are parsing to find the post back url
 * @return The post back url
 */
static private String getPostBackUrlFromHTML(String html)
{
  Document doc = Jsoup.parse(html);

  // Expects the IDP response to contain a form inside the body whose
  // "action" attribute is the post-back destination.
  // NOTE(review): throws if the HTML has no body/form element — presumably
  // the IDP always returns one; confirm before hardening.
  Elements e1 = doc.getElementsByTag("body");
  Elements e2 = e1.get(0).getElementsByTag("form");
  String postBackUrl = e2.first().attr("action");
  return postBackUrl;
}

/**
 * Helper function to parse a JsonNode from a GS response
 * containing CommonParameters, emitting an EnumMap of parameters
 *
 * @param paramsNode parameters in JSON form
 * @return map object including key and value pairs
 */
public static Map<String, Object> getCommonParams(JsonNode paramsNode)
{
  Map<String, Object> parameters = new HashMap<>();

  for (JsonNode child : paramsNode)
  {
    // If there isn't a name then the response from GS must be erroneous.
    if (!child.hasNonNull("name"))
    {
      logger.error("Common Parameter JsonNode encountered with " +
          "no parameter name!");
      continue;
    }

    // Look up the parameter based on the "name" attribute of the node.
    String paramName = child.path("name").asText();

    // What type of value is it and what's the value?
    if (!child.hasNonNull("value"))
    {
      logger.debug("No value found for Common Parameter {}",
          child.path("name").asText());
      continue;
    }

    // Coerce the JSON value into the Java type registered for this
    // parameter name; unknown names are logged and dropped.
    if (STRING_PARAMS.contains(paramName.toUpperCase()))
    {
      parameters.put(paramName, child.path("value").asText());
    }
    else if (INT_PARAMS.contains(paramName.toUpperCase()))
    {
      parameters.put(paramName, child.path("value").asInt());
    }
    else if (BOOLEAN_PARAMS.contains(paramName.toUpperCase()))
    {
      parameters.put(paramName, child.path("value").asBoolean());
    }
    else
    {
      logger.debug("Unknown Common Parameter: {}", paramName);
    }

    logger.debug("Parameter {}: {}", paramName, child.path("value").asText());
  }

  return parameters;
}

/**
 * Applies server-pushed parameter values to driver-global state and to the
 * given session. Session-scoped parameters are ignored when session is null.
 *
 * @param parameters parameter name/value pairs from the server
 * @param session the session to update, may be null
 */
public static void updateSfDriverParamValues(
    Map<String, Object> parameters,
    SFSession session)
{
  for (Map.Entry<String, Object> entry : parameters.entrySet())
  {
    logger.debug("processing parameter {}", entry.getKey());

    if ("CLIENT_DISABLE_INCIDENTS".equalsIgnoreCase(entry.getKey()))
    {
      // driver-global flag, applied even without a session
      SnowflakeDriver.setDisableIncidents((Boolean) entry.getValue());
    }
    else if (
        "CLIENT_SESSION_KEEP_ALIVE".equalsIgnoreCase(entry.getKey()))
    {
      if (session != null)
      {
        session.setEnableHeartbeat((Boolean) entry.getValue());
      }
    }
    else if (
        "AUTOCOMMIT".equalsIgnoreCase(entry.getKey()))
    {
      boolean autoCommit = (Boolean) entry.getValue();
      // only touch the session when the value actually changes
      if (session != null && session.getAutoCommit() != autoCommit)
      {
        session.setAutoCommit(autoCommit);
      }
    }
    else if (
        JDBC_RS_COLUMN_CASE_INSENSITIVE.equalsIgnoreCase(entry.getKey()) ||
        CLIENT_RESULT_COLUMN_CASE_INSENSITIVE.equalsIgnoreCase(entry.getKey()))
    {
      // once case-insensitivity is enabled it is not toggled back off here
      if (session != null && !session.isResultColumnCaseInsensitive())
      {
        session.setResultColumnCaseInsensitive((boolean) entry.getValue());
      }
    }
    else if ("CLIENT_METADATA_REQUEST_USE_CONNECTION_CTX".equalsIgnoreCase(entry.getKey()))
    {
      if (session != null)
      {
        session.setMetadataRequestUseConnectionCtx((boolean) entry.getValue());
      }
    }
    else if ("CLIENT_TIMESTAMP_TYPE_MAPPING".equalsIgnoreCase(entry.getKey()))
    {
      if (session != null)
      {
        session.setTimestampMappedType(SnowflakeType.valueOf(
            ((String) entry.getValue()).toUpperCase()));
      }
    }
    else if ("JDBC_TREAT_DECIMAL_AS_INT".equalsIgnoreCase(entry.getKey()))
    {
      if (session != null)
      {
        session.setJdbcTreatDecimalAsInt((boolean) entry.getValue());
      }
    }
    else if ("JDBC_ENABLE_COMBINED_DESCRIBE".equalsIgnoreCase(entry.getKey()))
    {
      if (session != null)
      {
        session.setEnableCombineDescribe((boolean) entry.getValue());
      }
    }
    else if (CLIENT_IN_BAND_TELEMETRY_ENABLED.equalsIgnoreCase(entry.getKey()))
    {
      if (session != null)
      {
        session.setClientTelemetryEnabled((boolean) entry.getValue());
      }
    }
    else if ("CLIENT_STAGE_ARRAY_BINDING_THRESHOLD".equalsIgnoreCase(entry.getKey()))
    {
      if (session != null)
      {
        session.setArrayBindStageThreshold((int) entry.getValue());
      }
    }
    else if (CLIENT_STORE_TEMPORARY_CREDENTIAL.equalsIgnoreCase(entry.getKey()))
    {
      if (session != null)
      {
        session.setStoreTemporaryCredential((boolean) entry.getValue());
      }
    }
    else if (SERVICE_NAME.equalsIgnoreCase(entry.getKey()))
    {
      if (session != null)
      {
        session.setServiceName((String) entry.getValue());
      }
    }
    else if (CLIENT_ENABLE_CONSERVATIVE_MEMORY_USAGE.equalsIgnoreCase(entry.getKey()))
    {
      if (session != null)
      {
        session.setEnableConservativeMemoryUsage((boolean) entry.getValue());
      }
    }
    else if (CLIENT_MEMORY_LIMIT.equalsIgnoreCase(entry.getKey()))
    {
      if (session != null)
      {
        session.setClientMemoryLimit((int) entry.getValue());
      }
    }
    else if (CLIENT_RESULT_CHUNK_SIZE.equalsIgnoreCase(entry.getKey()))
    {
      if (session != null)
      {
        session.setClientResultChunkSize((int) entry.getValue());
      }
    }
    else if (CLIENT_PREFETCH_THREADS.equalsIgnoreCase(entry.getKey()))
    {
      if (session != null)
      {
        session.setClientPrefetchThreads((int) entry.getValue());
      }
    }
    else if (CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED.equalsIgnoreCase(entry.getKey()))
    {
      // process-wide telemetry switch, independent of any session
      if ((boolean) entry.getValue())
      {
        TelemetryService.getInstance().enable();
      }
      else
      {
        TelemetryService.getInstance().disable();
      }
    }
  }
}

/**
 * Token request types sent to the server when renewing, cloning or
 * issuing session tokens.
 */
enum TokenRequestType
{
  RENEW("RENEW"),
  CLONE("CLONE"),
  ISSUE("ISSUE");

  // wire value sent to the server for this request type
  private String value;

  TokenRequestType(String value)
  {
    this.value = value;
  }
}
}
package net.spy.memcached; import net.spy.memcached.compat.SpyThread; import net.spy.memcached.compat.log.Logger; import net.spy.memcached.compat.log.LoggerFactory; import net.spy.memcached.internal.OperationFuture; import net.spy.memcached.metrics.MetricCollector; import net.spy.memcached.metrics.MetricType; import net.spy.memcached.ops.GetOperation; import net.spy.memcached.ops.KeyedOperation; import net.spy.memcached.ops.NoopOperation; import net.spy.memcached.ops.Operation; import net.spy.memcached.ops.OperationCallback; import net.spy.memcached.ops.OperationException; import net.spy.memcached.ops.OperationState; import net.spy.memcached.ops.OperationStatus; import net.spy.memcached.ops.TapOperation; import net.spy.memcached.ops.VBucketAware; import net.spy.memcached.protocol.binary.BinaryOperationFactory; import net.spy.memcached.protocol.binary.MultiGetOperationImpl; import net.spy.memcached.protocol.binary.TapAckOperationImpl; import net.spy.memcached.util.StringUtils; import java.io.IOException; import java.net.ConnectException; import java.net.InetSocketAddress; import java.net.SocketAddress; import java.net.SocketException; import java.nio.ByteBuffer; import java.nio.channels.CancelledKeyException; import java.nio.channels.ClosedChannelException; import java.nio.channels.ClosedSelectorException; import java.nio.channels.SelectionKey; import java.nio.channels.Selector; import java.nio.channels.SocketChannel; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.ConcurrentModificationException; import java.util.HashSet; import java.util.IdentityHashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.SortedMap; import java.util.TreeMap; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; /** * Main class for 
handling connections to a memcached cluster.
 */
public class MemcachedConnection extends SpyThread {

  /**
   * The number of empty selects we'll allow before assuming we may have
   * missed one and should check the current selectors. This generally
   * indicates a bug, but we'll check it nonetheless.
   */
  private static final int DOUBLE_CHECK_EMPTY = 256;

  /**
   * The number of empty selects we'll allow before blowing up. It's too
   * easy to write a bug that causes it to loop uncontrollably. This helps
   * find those bugs and often works around them.
   */
  private static final int EXCESSIVE_EMPTY = 0x1000000;

  /**
   * The default wakeup delay if not overridden by a system property.
   */
  private static final int DEFAULT_WAKEUP_DELAY = 1000;

  /**
   * By default, do not bound the retry queue.
   */
  private static final int DEFAULT_RETRY_QUEUE_SIZE = -1;

  /**
   * If an operation gets cloned more than this ceiling, cancel it for
   * safety reasons.
   */
  private static final int MAX_CLONE_COUNT = 100;

  // Names under which the various metrics are registered with the
  // MetricCollector.
  private static final String RECON_QUEUE_METRIC =
    "[MEM] Reconnecting Nodes (ReconnectQueue)";
  private static final String SHUTD_QUEUE_METRIC =
    "[MEM] Shutting Down Nodes (NodesToShutdown)";
  private static final String OVERALL_REQUEST_METRIC =
    "[MEM] Request Rate: All";
  private static final String OVERALL_AVG_BYTES_WRITE_METRIC =
    "[MEM] Average Bytes written to OS per write";
  private static final String OVERALL_AVG_BYTES_READ_METRIC =
    "[MEM] Average Bytes read from OS per read";
  private static final String OVERALL_AVG_TIME_ON_WIRE_METRIC =
    "[MEM] Average Time on wire for operations (µs)";
  private static final String OVERALL_RESPONSE_METRIC =
    "[MEM] Response Rate: All (Failure + Success + Retry)";
  private static final String OVERALL_RESPONSE_RETRY_METRIC =
    "[MEM] Response Rate: Retry";
  private static final String OVERALL_RESPONSE_FAIL_METRIC =
    "[MEM] Response Rate: Failure";
  private static final String OVERALL_RESPONSE_SUCC_METRIC =
    "[MEM] Response Rate: Success";

  /**
   * If the connection is already shut down or shutting down.
   */
  protected volatile boolean shutDown = false;

  /**
   * If true, optimization will collapse multiple sequential get ops.
   */
  private final boolean shouldOptimize;

  /**
   * Holds the current {@link Selector} to use.
   */
  protected Selector selector = null;

  /**
   * The {@link NodeLocator} to use for this connection.
   */
  protected final NodeLocator locator;

  /**
   * The configured {@link FailureMode}.
   */
  protected final FailureMode failureMode;

  /**
   * Maximum amount of time to wait between reconnect attempts.
   */
  private final long maxDelay;

  /**
   * Contains the current number of empty select() calls, which could indicate
   * bugs.
   */
  private int emptySelects = 0;

  /**
   * The buffer size that will be used when reading from the server.
   */
  private final int bufSize;

  /**
   * The connection factory to create {@link MemcachedNode}s from.
   */
  private final ConnectionFactory connectionFactory;

  /**
   * AddedQueue is used to track the QueueAttachments for which operations
   * have recently been queued.
   */
  protected final ConcurrentLinkedQueue<MemcachedNode> addedQueue;

  /**
   * reconnectQueue contains the attachments that need to be reconnected.
   * The key is the time at which they are eligible for reconnect.
   */
  private final SortedMap<Long, MemcachedNode> reconnectQueue;

  /**
   * True if not shutting down or shut down.
   */
  protected volatile boolean running = true;

  /**
   * Holds all connection observers that get notified on connection status
   * changes.
   */
  private final Collection<ConnectionObserver> connObservers =
    new ConcurrentLinkedQueue<ConnectionObserver>();

  /**
   * The {@link OperationFactory} to clone or create operations.
   */
  private final OperationFactory opFact;

  /**
   * The threshold for timeout exceptions.
   */
  private final int timeoutExceptionThreshold;

  /**
   * Holds operations that need to be retried.
   */
  private final List<Operation> retryOps;

  /**
   * Holds all nodes that are scheduled for shutdown.
   */
  protected final ConcurrentLinkedQueue<MemcachedNode> nodesToShutdown;

  /**
   * If set to true, a proper check after finish connecting is done to see
   * if the node is not responding but really alive.
   */
  private final boolean verifyAliveOnConnect;

  /**
   * The {@link ExecutorService} to use for callbacks.
   */
  private final ExecutorService listenerExecutorService;

  /**
   * The {@link MetricCollector} to accumulate metrics (or dummy).
   */
  protected final MetricCollector metrics;

  /**
   * The current type of metrics to collect.
   */
  protected final MetricType metricType;

  /**
   * The selector wakeup delay, defaults to 1000ms.
   */
  private final int wakeupDelay;

  /**
   * Optionally bound the retry queue if set via system property.
   */
  private final int retryQueueSize;

  /**
   * Construct a {@link MemcachedConnection}.
   *
   * @param bufSize the size of the buffer used for reading from the server.
   * @param f the factory that will provide an operation queue.
   * @param a the addresses of the servers to connect to.
   * @param obs the initial observers to add.
   * @param fm the failure mode to use.
   * @param opfactory the operation factory.
 * @throws IOException if a connection attempt fails early
 */
public MemcachedConnection(final int bufSize, final ConnectionFactory f,
    final List<InetSocketAddress> a, final Collection<ConnectionObserver> obs,
    final FailureMode fm, final OperationFactory opfactory) throws IOException {
  connObservers.addAll(obs);
  reconnectQueue = new TreeMap<Long, MemcachedNode>();
  addedQueue = new ConcurrentLinkedQueue<MemcachedNode>();
  failureMode = fm;
  shouldOptimize = f.shouldOptimize();
  maxDelay = TimeUnit.SECONDS.toMillis(f.getMaxReconnectDelay());
  opFact = opfactory;
  timeoutExceptionThreshold = f.getTimeoutExceptionThreshold();
  selector = Selector.open();
  retryOps = Collections.synchronizedList(new ArrayList<Operation>());
  nodesToShutdown = new ConcurrentLinkedQueue<MemcachedNode>();
  listenerExecutorService = f.getListenerExecutorService();
  this.bufSize = bufSize;
  this.connectionFactory = f;

  // Optional liveness verification on connect, enabled via system property.
  String verifyAlive = System.getProperty("net.spy.verifyAliveOnConnect");
  if(verifyAlive != null && verifyAlive.equals("true")) {
    verifyAliveOnConnect = true;
  } else {
    verifyAliveOnConnect = false;
  }

  wakeupDelay = Integer.parseInt( System.getProperty("net.spy.wakeupDelay",
    Integer.toString(DEFAULT_WAKEUP_DELAY)));

  retryQueueSize = Integer.parseInt(System.getProperty("net.spy.retryQueueSize",
    Integer.toString(DEFAULT_RETRY_QUEUE_SIZE)));
  getLogger().info("Setting retryQueueSize to " + retryQueueSize);

  List<MemcachedNode> connections = createConnections(a);
  locator = f.createLocator(connections);

  metrics = f.getMetricCollector();
  metricType = f.enableMetrics();
  registerMetrics();

  // NOTE(review): the constructor starts the IO thread itself, which
  // publishes a partially-constructed 'this' to that thread — confirm this
  // ordering is intentional before changing field initialization order.
  setName("Memcached IO over " + this);
  setDaemon(f.isDaemon());
  start();
}

/**
 * Register Metrics for collection.
 *
 * Note that these Metrics may or may not take effect, depending on the
 * {@link MetricCollector} implementation. This can be controlled from
 * the {@link DefaultConnectionFactory}.
 */
protected void registerMetrics() {
  // PERFORMANCE registers the core histograms/meters; DEBUG additionally
  // registers queue counters and per-outcome response meters.
  if (metricType.equals(MetricType.DEBUG)
    || metricType.equals(MetricType.PERFORMANCE)) {
    metrics.addHistogram(OVERALL_AVG_BYTES_READ_METRIC);
    metrics.addHistogram(OVERALL_AVG_BYTES_WRITE_METRIC);
    metrics.addHistogram(OVERALL_AVG_TIME_ON_WIRE_METRIC);
    metrics.addMeter(OVERALL_RESPONSE_METRIC);
    metrics.addMeter(OVERALL_REQUEST_METRIC);

    if (metricType.equals(MetricType.DEBUG)) {
      metrics.addCounter(RECON_QUEUE_METRIC);
      metrics.addCounter(SHUTD_QUEUE_METRIC);
      metrics.addMeter(OVERALL_RESPONSE_RETRY_METRIC);
      metrics.addMeter(OVERALL_RESPONSE_SUCC_METRIC);
      metrics.addMeter(OVERALL_RESPONSE_FAIL_METRIC);
    }
  }
}

/**
 * Create connections for the given list of addresses.
 *
 * @param addrs the list of addresses to connect to.
 * @return addrs list of {@link MemcachedNode}s.
 * @throws IOException if connecting was not successful.
 */
protected List<MemcachedNode> createConnections(
  final Collection<InetSocketAddress> addrs) throws IOException {
  List<MemcachedNode> connections =
    new ArrayList<MemcachedNode>(addrs.size());

  for (SocketAddress sa : addrs) {
    SocketChannel ch = SocketChannel.open();
    ch.configureBlocking(false);
    MemcachedNode qa =
      connectionFactory.createMemcachedNode(sa, ch, bufSize);
    qa.setConnection(this);
    int ops = 0;
    ch.socket().setTcpNoDelay(!connectionFactory.useNagleAlgorithm());

    try {
      // Non-blocking connect: either it completes immediately or we
      // register interest in OP_CONNECT and finish later in handleIO.
      if (ch.connect(sa)) {
        getLogger().info("Connected to %s immediately", qa);
        connected(qa);
      } else {
        getLogger().info("Added %s to connect queue", qa);
        ops = SelectionKey.OP_CONNECT;
      }
      selector.wakeup();
      qa.setSk(ch.register(selector, ops, qa));
      assert ch.isConnected()
        || qa.getSk().interestOps() == SelectionKey.OP_CONNECT
        : "Not connected, and not wanting to connect";
    } catch (SocketException e) {
      getLogger().warn("Socket error on initial connect", e);
      queueReconnect(qa);
    }
    connections.add(qa);
  }

  return connections;
}

/**
 * Make sure that the current selectors make sense.
 *
 * @return true if they do.
 */
private boolean selectorsMakeSense() {
  // Assertion helper only: verifies every valid selection key's interest
  // set matches the node's pending read/write work. Always returns true so
  // it can be used inside an assert.
  for (MemcachedNode qa : locator.getAll()) {
    if (qa.getSk() != null && qa.getSk().isValid()) {
      if (qa.getChannel().isConnected()) {
        int sops = qa.getSk().interestOps();
        int expected = 0;
        if (qa.hasReadOp()) {
          expected |= SelectionKey.OP_READ;
        }
        if (qa.hasWriteOp()) {
          expected |= SelectionKey.OP_WRITE;
        }
        if (qa.getBytesRemainingToWrite() > 0) {
          expected |= SelectionKey.OP_WRITE;
        }
        assert sops == expected : "Invalid ops: " + qa + ", expected "
          + expected + ", got " + sops;
      } else {
        int sops = qa.getSk().interestOps();
        assert sops == SelectionKey.OP_CONNECT
          : "Not connected, and not watching for connect: " + sops;
      }
    }
  }
  getLogger().debug("Checked the selectors.");
  return true;
}

/**
 * Handle all IO that flows through the connection.
 *
 * This method is called in an endless loop, listens on NIO selectors and
 * dispatches the underlying read/write calls if needed.
 */
public void handleIO() throws IOException {
  if (shutDown) {
    getLogger().debug("No IO while shut down.");
    return;
  }

  handleInputQueue();
  getLogger().debug("Done dealing with queue.");

  // Shorten the select timeout so we wake in time for the next scheduled
  // reconnect attempt, if any.
  long delay = wakeupDelay;
  if (!reconnectQueue.isEmpty()) {
    long now = System.currentTimeMillis();
    long then = reconnectQueue.firstKey();
    delay = Math.max(then - now, 1);
  }
  getLogger().debug("Selecting with delay of %sms", delay);
  assert selectorsMakeSense() : "Selectors don't make sense.";
  int selected = selector.select(delay);

  if (shutDown) {
    return;
  } else if (selected == 0 && addedQueue.isEmpty()) {
    handleWokenUpSelector();
  } else if (selector.selectedKeys().isEmpty()) {
    handleEmptySelects();
  } else {
    getLogger().debug("Selected %d, selected %d keys", selected,
      selector.selectedKeys().size());
    emptySelects = 0;

    Iterator<SelectionKey> iterator = selector.selectedKeys().iterator();
    while(iterator.hasNext()) {
      SelectionKey sk = iterator.next();
      handleIO(sk);
      iterator.remove();
    }
  }

  handleOperationalTasks();
}

/**
 * Helper method which gets called if the selector is woken up because of the
 * timeout setting, if has been interrupted or if happens during regular
 * write operation phases.
 *
 * <p>This method can be overriden by child implementations to handle custom
 * behavior on a manually woken selector, like sending pings through the
 * channels to make sure they are alive.</p>
 *
 * <p>Note that there is no guarantee that this method is at all or in the
 * regular interval called, so all overriding implementations need to take
 * that into account. Also, it needs to take into account that it may be
 * called very often under heavy workloads, so it should not perform extensive
 * tasks in the same thread.</p>
 */
protected void handleWokenUpSelector() { }

/**
 * Helper method for {@link #handleIO()} to encapsulate everything that
 * needs to be checked on a regular basis that has nothing to do directly
 * with reading and writing data.
 *
 * @throws IOException if an error happens during shutdown queue handling.
 */
private void handleOperationalTasks() throws IOException {
  checkPotentiallyTimedOutConnection();

  if (!shutDown && !reconnectQueue.isEmpty()) {
    attemptReconnects();
  }

  // Snapshot-and-clear so retries queued while redistributing are not lost.
  if (!retryOps.isEmpty()) {
    ArrayList<Operation> operations = new ArrayList<Operation>(retryOps);
    retryOps.clear();
    redistributeOperations(operations);
  }

  handleShutdownQueue();
}

/**
 * Helper method for {@link #handleIO()} to handle empty select calls.
 */
private void handleEmptySelects() {
  getLogger().debug("No selectors ready, interrupted: "
    + Thread.interrupted());

  // After too many consecutive empty selects, manually inspect each key;
  // a node with no ready ops is treated as a lost connection.
  if (++emptySelects > DOUBLE_CHECK_EMPTY) {
    for (SelectionKey sk : selector.keys()) {
      getLogger().debug("%s has %s, interested in %s", sk, sk.readyOps(),
        sk.interestOps());
      if (sk.readyOps() != 0) {
        getLogger().debug("%s has a ready op, handling IO", sk);
        handleIO(sk);
      } else {
        lostConnection((MemcachedNode) sk.attachment());
      }
    }
    assert emptySelects < EXCESSIVE_EMPTY : "Too many empty selects";
  }
}

/**
 * Check if nodes need to be shut down and do so if needed.
 *
 * @throws IOException if the channel could not be closed properly.
 */
private void handleShutdownQueue() throws IOException {
  for (MemcachedNode qa : nodesToShutdown) {
    // Only shut a node down once it has no freshly queued work pending.
    if (!addedQueue.contains(qa)) {
      nodesToShutdown.remove(qa);
      metrics.decrementCounter(SHUTD_QUEUE_METRIC);
      Collection<Operation> notCompletedOperations = qa.destroyInputQueue();
      if (qa.getChannel() != null) {
        qa.getChannel().close();
        qa.setSk(null);
        if (qa.getBytesRemainingToWrite() > 0) {
          getLogger().warn("Shut down with %d bytes remaining to write",
            qa.getBytesRemainingToWrite());
        }
        getLogger().debug("Shut down channel %s", qa.getChannel());
      }
      // Hand unfinished operations to the remaining nodes.
      redistributeOperations(notCompletedOperations);
    }
  }
}

/**
 * Check if one or more nodes exceeded the timeout Threshold.
 */
private void checkPotentiallyTimedOutConnection() {
  // selector.keys() may be mutated concurrently; retry the whole scan on
  // ConcurrentModificationException until it completes cleanly.
  boolean stillCheckingTimeouts = true;
  while (stillCheckingTimeouts) {
    try {
      for (SelectionKey sk : selector.keys()) {
        MemcachedNode mn = (MemcachedNode) sk.attachment();
        if (mn.getContinuousTimeout() > timeoutExceptionThreshold) {
          getLogger().warn("%s exceeded continuous timeout threshold", sk);
          lostConnection(mn);
        }
      }
      stillCheckingTimeouts = false;
    } catch(ConcurrentModificationException e) {
      getLogger().warn("Retrying selector keys after "
        + "ConcurrentModificationException caught", e);
      continue;
    }
  }
}

/**
 * Handle any requests that have been made against the client.
 */
private void handleInputQueue() {
  if (!addedQueue.isEmpty()) {
    getLogger().debug("Handling queue");
    // Drain the added queue into a local set; inactive nodes are collected
    // in toAdd and re-queued at the end so they are revisited later.
    Collection<MemcachedNode> toAdd = new HashSet<MemcachedNode>();
    Collection<MemcachedNode> todo = new HashSet<MemcachedNode>();

    MemcachedNode qaNode;
    while ((qaNode = addedQueue.poll()) != null) {
      todo.add(qaNode);
    }

    for (MemcachedNode node : todo) {
      boolean readyForIO = false;
      if (node.isActive()) {
        if (node.getCurrentWriteOp() != null) {
          readyForIO = true;
          getLogger().debug("Handling queued write %s", node);
        }
      } else {
        toAdd.add(node);
      }
      node.copyInputQueue();
      if (readyForIO) {
        try {
          if (node.getWbuf().hasRemaining()) {
            handleWrites(node);
          }
        } catch (IOException e) {
          getLogger().warn("Exception handling write", e);
          lostConnection(node);
        }
      }
      node.fixupOps();
    }
    addedQueue.addAll(toAdd);
  }
}

/**
 * Add a connection observer.
 *
 * @return whether the observer was successfully added.
 */
public boolean addObserver(final ConnectionObserver obs) {
  return connObservers.add(obs);
}

/**
 * Remove a connection observer.
 *
 * @return true if the observer existed and now doesn't.
 */
public boolean removeObserver(final ConnectionObserver obs) {
  return connObservers.remove(obs);
}

/**
 * Indicate a successful connect to the given node.
 *
 * @param node the node which was successfully connected.
 */
private void connected(final MemcachedNode node) {
  assert node.getChannel().isConnected() : "Not connected.";
  // Capture the reconnect count before it is reset by node.connected().
  int rt = node.getReconnectCount();
  node.connected();

  for (ConnectionObserver observer : connObservers) {
    observer.connectionEstablished(node.getSocketAddress(), rt);
  }
}

/**
 * Indicate a lost connection to the given node.
 *
 * @param node the node where the connection was lost.
 */
private void lostConnection(final MemcachedNode node) {
  queueReconnect(node);
  for (ConnectionObserver observer : connObservers) {
    observer.connectionLost(node.getSocketAddress());
  }
}

/**
 * Makes sure that the given node belongs to the current cluster.
 *
 * Before trying to connect to a node, make sure it actually belongs to the
 * currently connected cluster.
 */
boolean belongsToCluster(final MemcachedNode node) {
  for (MemcachedNode n : locator.getAll()) {
    if (n.getSocketAddress().equals(node.getSocketAddress())) {
      return true;
    }
  }
  return false;
}

/**
 * Handle IO for a specific selector.
 *
 * Any IOException will cause a reconnect. Note that this code makes sure
 * that the corresponding node is not only able to connect, but also able to
 * respond in a correct fashion (if verifyAliveOnConnect is set to true
 * through a property). This is handled by issuing a dummy
 * version/noop call and making sure it returns in a correct and timely
 * fashion.
 *
 * @param sk the selector to handle IO against.
 */
private void handleIO(final SelectionKey sk) {
  MemcachedNode node = (MemcachedNode) sk.attachment();

  try {
    getLogger().debug("Handling IO for:  %s (r=%s, w=%s, c=%s, op=%s)", sk,
      sk.isReadable(), sk.isWritable(), sk.isConnectable(),
      sk.attachment());
    if (sk.isConnectable() && belongsToCluster(node)) {
      getLogger().debug("Connection state changed for %s", sk);
      final SocketChannel channel = node.getChannel();
      if (channel.finishConnect()) {
        finishConnect(sk, node);
      } else {
        assert !channel.isConnected() : "connected";
      }
    } else {
      handleReadsAndWrites(sk, node);
    }
  } catch (ClosedChannelException e) {
    // Only queue a reconnect when the close was not part of a shutdown.
    if (!shutDown) {
      getLogger().info("Closed channel and not shutting down. Queueing"
        + " reconnect on %s", node, e);
      lostConnection(node);
    }
  } catch (ConnectException e) {
    getLogger().info("Reconnecting due to failure to connect to %s", node, e);
    queueReconnect(node);
  } catch (OperationException e) {
    node.setupForAuth();
    getLogger().info("Reconnection due to exception handling a memcached "
      + "operation on %s. This may be due to an authentication failure.",
      node, e);
    lostConnection(node);
  } catch (Exception e) {
    // Any other exception also forces re-authentication and a reconnect.
    node.setupForAuth();
    getLogger().info("Reconnecting due to exception on %s", node, e);
    lostConnection(node);
  }
  node.fixupOps();
}

/**
 * A helper method for {@link #handleIO(java.nio.channels.SelectionKey)} to
 * handle reads and writes if appropriate.
 *
 * @param sk the selection key to use.
 * @param node the node to read write from.
 * @throws IOException if an error occurs during read/write.
 */
private void handleReadsAndWrites(final SelectionKey sk,
  final MemcachedNode node) throws IOException {
  if (sk.isValid() && sk.isReadable()) {
    handleReads(node);
  }
  if (sk.isValid() && sk.isWritable()) {
    handleWrites(node);
  }
}

/**
 * Finish the connect phase and potentially verify its liveness.
 *
 * @param sk the selection key for the node.
 * @param node the actual node.
 * @throws IOException if something goes wrong during reading/writing.
 */
private void finishConnect(final SelectionKey sk, final MemcachedNode node)
  throws IOException {
  if (verifyAliveOnConnect) {
    // Send a noop and busy-loop on read/write until it completes (or the
    // operation timeout elapses) to prove the node actually responds.
    final CountDownLatch latch = new CountDownLatch(1);
    final OperationFuture<Boolean> rv = new OperationFuture<Boolean>("noop",
      latch, 2500, listenerExecutorService);

    NoopOperation testOp = opFact.noop(new OperationCallback() {
      public void receivedStatus(OperationStatus status) {
        rv.set(status.isSuccess(), status);
      }

      @Override
      public void complete() {
        latch.countDown();
      }
    });

    testOp.setHandlingNode(node);
    testOp.initialize();

    checkState();
    insertOperation(node, testOp);
    node.copyInputQueue();

    boolean done = false;
    if (sk.isValid()) {
      long timeout = TimeUnit.MILLISECONDS.toNanos(
        connectionFactory.getOperationTimeout());

      long stop = System.nanoTime() + timeout;
      while (stop > System.nanoTime()) {
        handleWrites(node);
        handleReads(node);
        if(done = (latch.getCount() == 0)) {
          break;
        }
      }
    }

    if (!done || testOp.isCancelled() || testOp.hasErrored()
      || testOp.isTimedOut()) {
      throw new ConnectException("Could not send noop upon connect! "
        + "This may indicate a running, but not responding memcached "
        + "instance.");
    }
  }

  connected(node);
  addedQueue.offer(node);
  if (node.getWbuf().hasRemaining()) {
    handleWrites(node);
  }
}

/**
 * Handle pending writes for the given node.
 *
 * @param node the node to handle writes for.
 * @throws IOException can be raised during writing failures.
 */
private void handleWrites(final MemcachedNode node) throws IOException {
  node.fillWriteBuffer(shouldOptimize);
  boolean canWriteMore = node.getBytesRemainingToWrite() > 0;
  while (canWriteMore) {
    int wrote = node.writeSome();
    metrics.updateHistogram(OVERALL_AVG_BYTES_WRITE_METRIC, wrote);
    node.fillWriteBuffer(shouldOptimize);
    // Stop once the OS accepted nothing or there is nothing left to write.
    canWriteMore = wrote > 0 && node.getBytesRemainingToWrite() > 0;
  }
}

/**
 * Handle pending reads for the given node.
 *
 * @param node the node to handle reads for.
 * @throws IOException can be raised during reading failures.
 */
private void handleReads(final MemcachedNode node) throws IOException {
  Operation currentOp = node.getCurrentReadOp();
  // If it's a tap ack there is no response -- tap acks are pseudo-ops.
  if (currentOp instanceof TapAckOperationImpl) {
    node.removeCurrentReadOp();
    return;
  }

  ByteBuffer rbuf = node.getRbuf();
  final SocketChannel channel = node.getChannel();
  int read = channel.read(rbuf);
  metrics.updateHistogram(OVERALL_AVG_BYTES_READ_METRIC, read);
  if (read < 0) {
    currentOp = handleReadsWhenChannelEndOfStream(currentOp, node, rbuf);
  }

  while (read > 0) {
    getLogger().debug("Read %d bytes", read);
    rbuf.flip();
    while (rbuf.remaining() > 0) {
      if (currentOp == null) {
        throw new IllegalStateException("No read operation.");
      }

      long timeOnWire =
        System.nanoTime() - currentOp.getWriteCompleteTimestamp();
      metrics.updateHistogram(OVERALL_AVG_TIME_ON_WIRE_METRIC,
        (int)(timeOnWire / 1000));
      metrics.markMeter(OVERALL_RESPONSE_METRIC);
      synchronized(currentOp) {
        readBufferAndLogMetrics(currentOp, rbuf, node);
      }

      currentOp = node.getCurrentReadOp();
    }
    rbuf.clear();
    read = channel.read(rbuf);
    node.completedRead();
  }
}

/**
 * Read from the buffer and add metrics
information. * * @param currentOp the current operation to read. * @param rbuf the read buffer to read from. * @param node the node to read from. * @throws IOException if reading was not successful. */ private void readBufferAndLogMetrics(final Operation currentOp, final ByteBuffer rbuf, final MemcachedNode node) throws IOException { currentOp.readFromBuffer(rbuf); if (currentOp.getState() == OperationState.COMPLETE) { getLogger().debug("Completed read op: %s and giving the next %d " + "bytes", currentOp, rbuf.remaining()); Operation op = node.removeCurrentReadOp(); assert op == currentOp : "Expected to pop " + currentOp + " got " + op; if (op.hasErrored()) { metrics.markMeter(OVERALL_RESPONSE_FAIL_METRIC); } else { metrics.markMeter(OVERALL_RESPONSE_SUCC_METRIC); } } else if (currentOp.getState() == OperationState.RETRY) { handleRetryInformation(currentOp.getErrorMsg()); getLogger().debug("Reschedule read op due to NOT_MY_VBUCKET error: " + "%s ", currentOp); ((VBucketAware) currentOp).addNotMyVbucketNode( currentOp.getHandlingNode()); Operation op = node.removeCurrentReadOp(); assert op == currentOp : "Expected to pop " + currentOp + " got " + op; retryOperation(currentOp); metrics.markMeter(OVERALL_RESPONSE_RETRY_METRIC); } } /** * Deal with an operation where the channel reached the end of a stream. * * @param currentOp the current operation to read. * @param node the node for that operation. * @param rbuf the read buffer. * * @return the next operation on the node to read. * @throws IOException if disconnect while reading. 
*/ private Operation handleReadsWhenChannelEndOfStream(final Operation currentOp, final MemcachedNode node, final ByteBuffer rbuf) throws IOException { if (currentOp instanceof TapOperation) { currentOp.getCallback().complete(); ((TapOperation) currentOp).streamClosed(OperationState.COMPLETE); getLogger().debug("Completed read op: %s and giving the next %d bytes", currentOp, rbuf.remaining()); Operation op = node.removeCurrentReadOp(); assert op == currentOp : "Expected to pop " + currentOp + " got " + op; return node.getCurrentReadOp(); } else { throw new IOException("Disconnected unexpected, will reconnect."); } } /** * Convert the {@link ByteBuffer} into a string for easier debugging. * * @param b the buffer to debug. * @param size the size of the buffer. * @return the stringified {@link ByteBuffer}. */ static String dbgBuffer(ByteBuffer b, int size) { StringBuilder sb = new StringBuilder(); byte[] bytes = b.array(); for (int i = 0; i < size; i++) { char ch = (char) bytes[i]; if (Character.isWhitespace(ch) || Character.isLetterOrDigit(ch)) { sb.append(ch); } else { sb.append("\\x"); sb.append(Integer.toHexString(bytes[i] & 0xff)); } } return sb.toString(); } /** * Optionally handle retry (NOT_MY_VBUKET) responses. * * This method can be overridden in subclasses to handle the content * of the retry message appropriately. * * @param retryMessage the body of the retry message. */ protected void handleRetryInformation(final byte[] retryMessage) { getLogger().debug("Got RETRY message: " + new String(retryMessage) + ", but not handled."); } /** * Enqueue the given {@link MemcachedNode} for reconnect. * * @param node the node to reconnect. 
*/ protected void queueReconnect(final MemcachedNode node) { if (shutDown) { return; } getLogger().warn("Closing, and reopening %s, attempt %d.", node, node.getReconnectCount()); if (node.getSk() != null) { node.getSk().cancel(); assert !node.getSk().isValid() : "Cancelled selection key is valid"; } node.reconnecting(); try { if (node.getChannel() != null && node.getChannel().socket() != null) { node.getChannel().socket().close(); } else { getLogger().info("The channel or socket was null for %s", node); } } catch (IOException e) { getLogger().warn("IOException trying to close a socket", e); } node.setChannel(null); long delay = (long) Math.min(maxDelay, Math.pow(2, node.getReconnectCount()) * 1000); long reconnectTime = System.currentTimeMillis() + delay; while (reconnectQueue.containsKey(reconnectTime)) { reconnectTime++; } reconnectQueue.put(reconnectTime, node); metrics.incrementCounter(RECON_QUEUE_METRIC); node.setupResend(); if (failureMode == FailureMode.Redistribute) { redistributeOperations(node.destroyInputQueue()); } else if (failureMode == FailureMode.Cancel) { cancelOperations(node.destroyInputQueue()); } } /** * Cancel the given collection of operations. * * @param ops the list of operations to cancel. */ private void cancelOperations(final Collection<Operation> ops) { for (Operation op : ops) { op.cancel(); } } /** * Redistribute the given list of operations to (potentially) other nodes. * * Note that operations can only be redistributed if they have not been * cancelled already, timed out already or do not have definite targets * (a key). * * @param ops the operations to redistribute. */ public void redistributeOperations(final Collection<Operation> ops) { for (Operation op : ops) { redistributeOperation(op); } } /** * Redistribute the given operation to (potentially) other nodes. * * Note that operations can only be redistributed if they have not been * cancelled already, timed out already or do not have definite targets * (a key). 
* * @param op the operation to redistribute. */ public void redistributeOperation(Operation op) { if (op.isCancelled() || op.isTimedOut()) { return; } if (op.getCloneCount() >= MAX_CLONE_COUNT) { getLogger().warn("Cancelling operation " + op + "because it has been " + "retried (cloned) more than " + MAX_CLONE_COUNT + "times."); op.cancel(); return; } // The operation gets redistributed but has never been actually written, // it we just straight re-add it without cloning. if (op.getState() == OperationState.WRITE_QUEUED && op.getHandlingNode() != null) { addOperation(op.getHandlingNode(), op); return; } if (op instanceof MultiGetOperationImpl) { for (String key : ((MultiGetOperationImpl) op).getRetryKeys()) { addOperation(key, opFact.get(key, (GetOperation.Callback) op.getCallback())); } } else if (op instanceof KeyedOperation) { KeyedOperation ko = (KeyedOperation) op; int added = 0; for (Operation newop : opFact.clone(ko)) { if (newop instanceof KeyedOperation) { KeyedOperation newKeyedOp = (KeyedOperation) newop; for (String k : newKeyedOp.getKeys()) { addOperation(k, newop); op.addClone(newop); newop.setCloneCount(op.getCloneCount()+1); } } else { newop.cancel(); getLogger().warn("Could not redistribute cloned non-keyed " + "operation", newop); } added++; } assert added > 0 : "Didn't add any new operations when redistributing"; } else { op.cancel(); } } /** * Attempt to reconnect {@link MemcachedNode}s in the reconnect queue. * * If the {@link MemcachedNode} does not belong to the cluster list anymore, * the reconnect attempt is cancelled. If it does, the code tries to * reconnect immediately and if this is not possible it waits until the * connection information arrives. * * Note that if a socket error arises during reconnect, the node is scheduled * for re-reconnect immediately. 
   */
  private void attemptReconnects() {
    final long now = System.currentTimeMillis();
    final Map<MemcachedNode, Boolean> seen =
      new IdentityHashMap<MemcachedNode, Boolean>();
    final List<MemcachedNode> rereQueue = new ArrayList<MemcachedNode>();
    SocketChannel ch = null;
    // headMap(now) selects only the nodes whose scheduled reconnect time
    // has already passed.
    Iterator<MemcachedNode> i =
      reconnectQueue.headMap(now).values().iterator();
    while(i.hasNext()) {
      final MemcachedNode node = i.next();
      i.remove();
      metrics.decrementCounter(RECON_QUEUE_METRIC);

      try {
        if (!belongsToCluster(node)) {
          getLogger().debug("Node does not belong to cluster anymore, "
            + "skipping reconnect: %s", node);
          continue;
        }

        if (!seen.containsKey(node)) {
          seen.put(node, Boolean.TRUE);
          getLogger().info("Reconnecting %s", node);

          ch = SocketChannel.open();
          ch.configureBlocking(false);
          ch.socket().setTcpNoDelay(!connectionFactory.useNagleAlgorithm());

          int ops = 0;
          if (ch.connect(node.getSocketAddress())) {
            // Connection completed synchronously.
            connected(node);
            addedQueue.offer(node);
            getLogger().info("Immediately reconnected to %s", node);
            assert ch.isConnected();
          } else {
            // Otherwise wait for the selector to report OP_CONNECT.
            ops = SelectionKey.OP_CONNECT;
          }
          node.registerChannel(ch, ch.register(selector, ops, node));
          assert node.getChannel() == ch : "Channel was lost.";
        } else {
          getLogger().debug("Skipping duplicate reconnect request for %s",
            node);
        }
      } catch (SocketException e) {
        getLogger().warn("Error on reconnect", e);
        rereQueue.add(node);
      } catch (Exception e) {
        getLogger().error("Exception on reconnect, lost node %s", node, e);
      } finally {
        potentiallyCloseLeakingChannel(ch, node);
      }
    }

    // Nodes that hit a socket error get re-queued for another attempt.
    for (MemcachedNode n : rereQueue) {
      queueReconnect(n);
    }
  }

  /**
   * Make sure channel connections are not leaked and properly close under
   * faulty reconnect circumstances.
   *
   * @param ch the channel to potentially close.
   * @param node the node to which the channel should be bound to.
   */
  private void potentiallyCloseLeakingChannel(final SocketChannel ch,
    final MemcachedNode node) {
    if (ch != null && !ch.isConnected() && !ch.isConnectionPending()) {
      try {
        ch.close();
      } catch (IOException e) {
        getLogger().error("Exception closing channel: %s", node, e);
      }
    }
  }

  /**
   * Returns the {@link NodeLocator} in use for this connection.
   *
   * @return the current {@link NodeLocator}.
   */
  public NodeLocator getLocator() {
    return locator;
  }

  /**
   * Enqueue the given {@link Operation} with the used key.
   *
   * @param key the key to use.
   * @param o the {@link Operation} to enqueue.
   */
  public void enqueueOperation(final String key, final Operation o) {
    checkState();
    StringUtils.validateKey(key, opFact instanceof BinaryOperationFactory);
    addOperation(key, o);
  }

  /**
   * Add an operation to a connection identified by the given key.
   *
   * If the {@link MemcachedNode} is active or the {@link FailureMode} is set
   * to retry, the primary node will be used for that key. If the primary
   * node is not available and the {@link FailureMode} cancel is used, the
   * operation will be cancelled without further retry.
   *
   * For any other {@link FailureMode} mechanisms (Redistribute), another
   * possible node is used (only if its active as well). If no other active
   * node could be identified, the original primary node is used and retried.
   *
   * @param key the key the operation is operating upon.
   * @param o the operation to add.
   */
  protected void addOperation(final String key, final Operation o) {
    MemcachedNode placeIn = null;
    MemcachedNode primary = locator.getPrimary(key);

    if (primary.isActive() || failureMode == FailureMode.Retry) {
      placeIn = primary;
    } else if (failureMode == FailureMode.Cancel) {
      o.cancel();
    } else {
      // Redistribute: pick the first active node in the locator's sequence.
      Iterator<MemcachedNode> i = locator.getSequence(key);
      while (placeIn == null && i.hasNext()) {
        MemcachedNode n = i.next();
        if (n.isActive()) {
          placeIn = n;
        }
      }

      // No active alternative found: fall back to the (inactive) primary
      // and let the reconnect logic retry it.
      if (placeIn == null) {
        placeIn = primary;
        this.getLogger().warn("Could not redistribute to another node, "
          + "retrying primary node for %s.", key);
      }
    }

    assert o.isCancelled() || placeIn != null : "No node found for key " + key;
    if (placeIn != null) {
      addOperation(placeIn, o);
    } else {
      assert o.isCancelled() : "No node found for " + key + " (and not "
        + "immediately cancelled)";
    }
  }

  /**
   * Insert an operation on the given node to the beginning of the queue.
   *
   * @param node the node where to insert the {@link Operation}.
   * @param o the operation to insert.
   */
  public void insertOperation(final MemcachedNode node, final Operation o) {
    // NOTE(review): unlike addOperation(node, o) this does not check
    // node.isAuthenticated() — presumably intentional since it is used for
    // the connect-verification noop; confirm before reusing elsewhere.
    o.setHandlingNode(node);
    o.initialize();
    node.insertOp(o);
    addedQueue.offer(node);
    metrics.markMeter(OVERALL_REQUEST_METRIC);

    Selector s = selector.wakeup();
    assert s == selector : "Wakeup returned the wrong selector.";
    getLogger().debug("Added %s to %s", o, node);
  }

  /**
   * Enqueue an operation on the given node.
   *
   * @param node the node where to enqueue the {@link Operation}.
   * @param o the operation to add.
   */
  protected void addOperation(final MemcachedNode node, final Operation o) {
    if (!node.isAuthenticated()) {
      // Not authenticated yet; park the op in the retry queue for later.
      retryOperation(o);
      return;
    }
    o.setHandlingNode(node);
    o.initialize();
    node.addOp(o);
    addedQueue.offer(node);
    metrics.markMeter(OVERALL_REQUEST_METRIC);

    Selector s = selector.wakeup();
    assert s == selector : "Wakeup returned the wrong selector.";
    getLogger().debug("Added %s to %s", o, node);
  }

  /**
   * Enqueue the given list of operations on each handling node.
   *
   * @param ops the operations for each node.
   */
  public void addOperations(final Map<MemcachedNode, Operation> ops) {
    for (Map.Entry<MemcachedNode, Operation> me : ops.entrySet()) {
      addOperation(me.getKey(), me.getValue());
    }
  }

  /**
   * Broadcast an operation to all nodes.
   *
   * @return a {@link CountDownLatch} that will be counted down when the
   *         operations are complete.
   */
  public CountDownLatch broadcastOperation(final BroadcastOpFactory of) {
    return broadcastOperation(of, locator.getAll());
  }

  /**
   * Broadcast an operation to a collection of nodes.
   *
   * @return a {@link CountDownLatch} that will be counted down when the
   *         operations are complete.
   */
  public CountDownLatch broadcastOperation(final BroadcastOpFactory of,
    final Collection<MemcachedNode> nodes) {
    final CountDownLatch latch = new CountDownLatch(nodes.size());
    for (MemcachedNode node : nodes) {
      getLogger().debug("broadcast Operation: node = " + node);
      Operation op = of.newOp(node, latch);
      op.initialize();
      node.addOp(op);
      op.setHandlingNode(node);
      addedQueue.offer(node);
      metrics.markMeter(OVERALL_REQUEST_METRIC);
    }

    Selector s = selector.wakeup();
    assert s == selector : "Wakeup returned the wrong selector.";
    return latch;
  }

  /**
   * Shut down all connections and do not accept further incoming ops.
   */
  public void shutdown() throws IOException {
    shutDown = true;
    try {
      Selector s = selector.wakeup();
      assert s == selector : "Wakeup returned the wrong selector.";
      for (MemcachedNode node : locator.getAll()) {
        if (node.getChannel() != null) {
          node.getChannel().close();
          node.setSk(null);
          if (node.getBytesRemainingToWrite() > 0) {
            getLogger().warn("Shut down with %d bytes remaining to write",
              node.getBytesRemainingToWrite());
          }
          getLogger().debug("Shut down channel %s", node.getChannel());
        }
      }
      selector.close();
      getLogger().debug("Shut down selector %s", selector);
    } finally {
      // Stop the IO thread's run() loop even if closing threw.
      running = false;
    }
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("{MemcachedConnection to");
    for (MemcachedNode qa : locator.getAll()) {
      sb.append(" ").append(qa.getSocketAddress());
    }
    sb.append("}");
    return sb.toString();
  }

  /**
   * Construct a String containing information about all nodes and their state.
   *
   * @return a stringified representation of the connection status.
   */
  public String connectionsStatus() {
    StringBuilder connStatus = new StringBuilder();
    connStatus.append("Connection Status {");
    for (MemcachedNode node : locator.getAll()) {
      connStatus
        .append(" ")
        .append(node.getSocketAddress())
        .append(" active: ")
        .append(node.isActive())
        .append(", authed: ")
        .append(node.isAuthenticated())
        .append(MessageFormat.format(", last read: {0} ms ago",
          node.lastReadDelta()));
    }
    connStatus.append(" }");
    return connStatus.toString();
  }

  /**
   * Increase the timeout counter for the given handling node.
   *
   * @param op the operation to grab the node from.
   */
  public static void opTimedOut(final Operation op) {
    MemcachedConnection.setTimeout(op, true);
  }

  /**
   * Reset the timeout counter for the given handling node.
   *
   * @param op the operation to grab the node from.
   */
  public static void opSucceeded(final Operation op) {
    MemcachedConnection.setTimeout(op, false);
  }

  /**
   * Set the continuous timeout on an operation.
   *
   * Ignore operations which have no handling nodes set yet (which may happen
   * before nodes are properly authenticated).
   *
   * @param op the operation to use.
   * @param isTimeout is timed out or not.
   */
  private static void setTimeout(final Operation op, final boolean isTimeout) {
    Logger logger = LoggerFactory.getLogger(MemcachedConnection.class);
    try {
      if (op == null || op.isTimedOutUnsent()) {
        return;
      }

      MemcachedNode node = op.getHandlingNode();
      if (node != null) {
        node.setContinuousTimeout(isTimeout);
      }
    } catch (Exception e) {
      // Best-effort bookkeeping only; never propagate from here.
      logger.error(e.getMessage());
    }
  }

  /**
   * Check that the connection can accept new operations.
   *
   * @throws IllegalStateException if the connection is shutting down.
   */
  protected void checkState() {
    if (shutDown) {
      throw new IllegalStateException("Shutting down");
    }
    assert isAlive() : "IO Thread is not running.";
  }

  /**
   * Handle IO as long as the application is running.
   */
  @Override
  public void run() {
    while (running) {
      try {
        handleIO();
      } catch (IOException e) {
        logRunException(e);
      } catch (CancelledKeyException e) {
        logRunException(e);
      } catch (ClosedSelectorException e) {
        logRunException(e);
      } catch (IllegalStateException e) {
        logRunException(e);
      } catch (ConcurrentModificationException e) {
        logRunException(e);
      }
    }
    getLogger().info("Shut down memcached client");
  }

  /**
   * Log an exception to different levels depending on the state.
   *
   * Exceptions get logged at debug level when happening during shutdown, but
   * at warning level when operating normally.
   *
   * @param e the exception to log.
   */
  private void logRunException(final Exception e) {
    if (shutDown) {
      getLogger().debug("Exception occurred during shutdown", e);
    } else {
      getLogger().warn("Problem handling memcached IO", e);
    }
  }

  /**
   * Returns whether the connection is shut down or not.
   *
   * @return true if the connection is shut down, false otherwise.
   */
  public boolean isShutDown() {
    return shutDown;
  }

  /**
   * Add a operation to the retry queue.
   *
   * If the retry queue size is bounded and the size of the queue is reaching
   * that boundary, the operation is cancelled rather than added to the
   * retry queue.
* * @param op the operation to retry. */ public void retryOperation(Operation op) { if (retryQueueSize >= 0 && retryOps.size() >= retryQueueSize) { if (!op.isCancelled()) { op.cancel(); } } retryOps.add(op); } }
package net.spy.memcached;

import java.io.IOException;
import java.net.ConnectException;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.net.SocketException;
import java.nio.ByteBuffer;
import java.nio.channels.CancelledKeyException;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.ClosedSelectorException;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.SocketChannel;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CountDownLatch;

import net.spy.memcached.compat.SpyThread;
import net.spy.memcached.compat.log.LoggerFactory;
import net.spy.memcached.ops.KeyedOperation;
import net.spy.memcached.ops.Operation;
import net.spy.memcached.ops.OperationException;
import net.spy.memcached.ops.OperationState;
import net.spy.memcached.ops.TapOperation;
import net.spy.memcached.ops.VBucketAware;
import net.spy.memcached.protocol.binary.TapAckOperationImpl;
import net.spy.memcached.util.StringUtils;

/**
 * Connection to a cluster of memcached servers.
 */
public class MemcachedConnection extends SpyThread {

  // The number of empty selects we'll allow before assuming we may have
  // missed one and should check the current selectors. This generally
  // indicates a bug, but we'll check it nonetheless.
  private static final int DOUBLE_CHECK_EMPTY = 256;

  // The number of empty selects we'll allow before blowing up. It's too
  // easy to write a bug that causes it to loop uncontrollably. This helps
  // find those bugs and often works around them.
  private static final int EXCESSIVE_EMPTY = 0x1000000;

  // Set by shutdown(); read by the IO thread to stop accepting work.
  protected volatile boolean shutDown = false;

  // If true, optimization will collapse multiple sequential get ops.
  private final boolean shouldOptimize;
  protected Selector selector = null;
  protected final NodeLocator locator;
  protected final FailureMode failureMode;
  // maximum amount of time to wait between reconnect attempts
  private final long maxDelay;
  private int emptySelects = 0;
  private final int bufSize;
  private final ConnectionFactory connectionFactory;

  // AddedQueue is used to track the QueueAttachments for which operations
  // have recently been queued.
  protected final ConcurrentLinkedQueue<MemcachedNode> addedQueue;
  // reconnectQueue contains the attachments that need to be reconnected.
  // The key is the time at which they are eligible for reconnect.
  private final SortedMap<Long, MemcachedNode> reconnectQueue;

  // Flipped to false by shutdown() to terminate run().
  protected volatile boolean running = true;

  private final Collection<ConnectionObserver> connObservers =
    new ConcurrentLinkedQueue<ConnectionObserver>();

  private final OperationFactory opFact;
  // Continuous-timeout count above which a node is considered lost.
  private final int timeoutExceptionThreshold;
  // Operations awaiting redistribution on the next handleIO() pass.
  private final Collection<Operation> retryOps;
  protected final ConcurrentLinkedQueue<MemcachedNode> nodesToShutdown;

  /**
   * Construct a memcached connection.
   *
   * @param bufSize the size of the buffer used for reading from the server
   * @param f the factory that will provide an operation queue
   * @param a the addresses of the servers to connect to
   *
   * @throws IOException if a connection attempt fails early
   */
  public MemcachedConnection(int bufSize, ConnectionFactory f,
      List<InetSocketAddress> a, Collection<ConnectionObserver> obs,
      FailureMode fm, OperationFactory opfactory) throws IOException {
    connObservers.addAll(obs);
    reconnectQueue = new TreeMap<Long, MemcachedNode>();
    addedQueue = new ConcurrentLinkedQueue<MemcachedNode>();
    failureMode = fm;
    shouldOptimize = f.shouldOptimize();
    maxDelay = f.getMaxReconnectDelay();
    opFact = opfactory;
    timeoutExceptionThreshold = f.getTimeoutExceptionThreshold();
    selector = Selector.open();
    retryOps = new ArrayList<Operation>();
    nodesToShutdown = new ConcurrentLinkedQueue<MemcachedNode>();
    this.bufSize = bufSize;
    this.connectionFactory = f;
    List<MemcachedNode> connections = createConnections(a);
    locator = f.createLocator(connections);
    setName("Memcached IO over " + this);
    setDaemon(f.isDaemon());
    // The IO thread starts in the constructor; everything above must be
    // fully initialized before this point.
    start();
  }

  /**
   * Open a (non-blocking) channel for each address and register it with the
   * selector, connecting immediately where possible.
   *
   * @param a the addresses to connect to.
   * @return the created nodes.
   * @throws IOException if opening or registering a channel fails.
   */
  protected List<MemcachedNode> createConnections(
      final Collection<InetSocketAddress> a) throws IOException {
    List<MemcachedNode> connections = new ArrayList<MemcachedNode>(a.size());
    for (SocketAddress sa : a) {
      SocketChannel ch = SocketChannel.open();
      ch.configureBlocking(false);
      MemcachedNode qa =
          this.connectionFactory.createMemcachedNode(sa, ch, bufSize);
      int ops = 0;
      ch.socket().setTcpNoDelay(!this.connectionFactory.useNagleAlgorithm());
      // Initially I had attempted to skirt this by queueing every
      // connect, but it considerably slowed down start time.
      try {
        if (ch.connect(sa)) {
          getLogger().info("Connected to %s immediately", qa);
          connected(qa);
        } else {
          getLogger().info("Added %s to connect queue", qa);
          ops = SelectionKey.OP_CONNECT;
        }
        qa.setSk(ch.register(selector, ops, qa));
        assert ch.isConnected()
            || qa.getSk().interestOps() == SelectionKey.OP_CONNECT
            : "Not connected, and not wanting to connect";
      } catch (SocketException e) {
        getLogger().warn("Socket error on initial connect", e);
        queueReconnect(qa);
      }
      connections.add(qa);
    }
    return connections;
  }

  // Sanity check (assertion-only): each node's selector interest ops should
  // match what its queue state implies.
  private boolean selectorsMakeSense() {
    for (MemcachedNode qa : locator.getAll()) {
      if (qa.getSk() != null && qa.getSk().isValid()) {
        if (qa.getChannel().isConnected()) {
          int sops = qa.getSk().interestOps();
          int expected = 0;
          if (qa.hasReadOp()) {
            expected |= SelectionKey.OP_READ;
          }
          if (qa.hasWriteOp()) {
            expected |= SelectionKey.OP_WRITE;
          }
          if (qa.getBytesRemainingToWrite() > 0) {
            expected |= SelectionKey.OP_WRITE;
          }
          assert sops == expected : "Invalid ops: " + qa + ", expected "
              + expected + ", got " + sops;
        } else {
          int sops = qa.getSk().interestOps();
          assert sops == SelectionKey.OP_CONNECT
              : "Not connected, and not watching for connect: " + sops;
        }
      }
    }
    getLogger().debug("Checked the selectors.");
    return true;
  }

  /**
   * MemcachedClient calls this method to handle IO over the connections.
   */
  public void handleIO() throws IOException {
    if (shutDown) {
      throw new IOException("No IO while shut down");
    }

    // Deal with all of the stuff that's been added, but may not be marked
    // writable.
    handleInputQueue();
    getLogger().debug("Done dealing with queue.");

    // Bound the select() wait so pending reconnects fire on time.
    long delay = 0;
    if (!reconnectQueue.isEmpty()) {
      long now = System.currentTimeMillis();
      long then = reconnectQueue.firstKey();
      delay = Math.max(then - now, 1);
    }
    getLogger().debug("Selecting with delay of %sms", delay);
    assert selectorsMakeSense() : "Selectors don't make sense.";
    int selected = selector.select(delay);
    Set<SelectionKey> selectedKeys = selector.selectedKeys();

    if (selectedKeys.isEmpty() && !shutDown) {
      // Guard against select() spinning without results; after enough empty
      // selects, manually inspect every key.
      getLogger().debug("No selectors ready, interrupted: "
          + Thread.interrupted());
      if (++emptySelects > DOUBLE_CHECK_EMPTY) {
        for (SelectionKey sk : selector.keys()) {
          getLogger().info("%s has %s, interested in %s", sk, sk.readyOps(),
              sk.interestOps());
          if (sk.readyOps() != 0) {
            getLogger().info("%s has a ready op, handling IO", sk);
            handleIO(sk);
          } else {
            lostConnection((MemcachedNode) sk.attachment());
          }
        }
        assert emptySelects < EXCESSIVE_EMPTY : "Too many empty selects";
      }
    } else {
      getLogger().debug("Selected %d, selected %d keys", selected,
          selectedKeys.size());
      emptySelects = 0;

      for (SelectionKey sk : selectedKeys) {
        handleIO(sk);
      }
      selectedKeys.clear();
    }

    // see if any connections blew up with large number of timeouts
    for (SelectionKey sk : selector.keys()) {
      MemcachedNode mn = (MemcachedNode) sk.attachment();
      if (mn.getContinuousTimeout() > timeoutExceptionThreshold) {
        getLogger().warn("%s exceeded continuous timeout threshold", sk);
        lostConnection(mn);
      }
    }

    if (!shutDown && !reconnectQueue.isEmpty()) {
      attemptReconnects();
    }

    // rehash operations that are in retry state
    redistributeOperations(retryOps);
    retryOps.clear();

    // try to shutdown odd nodes
    for (MemcachedNode qa : nodesToShutdown) {
      if (!addedQueue.contains(qa)) {
        nodesToShutdown.remove(qa);
        Collection<Operation> notCompletedOperations = qa.destroyInputQueue();
        if (qa.getChannel() != null) {
          qa.getChannel().close();
          qa.setSk(null);
          if (qa.getBytesRemainingToWrite() > 0) {
            getLogger().warn("Shut down with %d bytes remaining to write",
                qa.getBytesRemainingToWrite());
          }
          getLogger().debug("Shut down channel %s", qa.getChannel());
        }
        redistributeOperations(notCompletedOperations);
      }
    }
  }

  // Handle any requests that have been made against the client.
  private void handleInputQueue() {
    if (!addedQueue.isEmpty()) {
      getLogger().debug("Handling queue");
      // If there's stuff in the added queue. Try to process it.
      Collection<MemcachedNode> toAdd = new HashSet<MemcachedNode>();
      // Transfer the queue into a hashset. There are very likely more
      // additions than there are nodes.
      Collection<MemcachedNode> todo = new HashSet<MemcachedNode>();
      MemcachedNode qaNode = null;
      while ((qaNode = addedQueue.poll()) != null) {
        todo.add(qaNode);
      }

      // Now process the queue.
      for (MemcachedNode qa : todo) {
        boolean readyForIO = false;
        if (qa.isActive()) {
          if (qa.getCurrentWriteOp() != null) {
            readyForIO = true;
            getLogger().debug("Handling queued write %s", qa);
          }
        } else {
          // Inactive node: defer by re-adding it to the queue below.
          toAdd.add(qa);
        }
        qa.copyInputQueue();
        if (readyForIO) {
          try {
            if (qa.getWbuf().hasRemaining()) {
              handleWrites(qa.getSk(), qa);
            }
          } catch (IOException e) {
            getLogger().warn("Exception handling write", e);
            lostConnection(qa);
          }
        }
        qa.fixupOps();
      }
      addedQueue.addAll(toAdd);
    }
  }

  /**
   * Add a connection observer.
   *
   * @return whether the observer was successfully added
   */
  public boolean addObserver(ConnectionObserver obs) {
    return connObservers.add(obs);
  }

  /**
   * Remove a connection observer.
   *
   * @return true if the observer existed and now doesn't
   */
  public boolean removeObserver(ConnectionObserver obs) {
    return connObservers.remove(obs);
  }

  // Mark the node connected and notify observers.
  private void connected(MemcachedNode qa) {
    assert qa.getChannel().isConnected() : "Not connected.";
    int rt = qa.getReconnectCount();
    qa.connected();
    for (ConnectionObserver observer : connObservers) {
      observer.connectionEstablished(qa.getSocketAddress(), rt);
    }
  }

  // Queue a reconnect for the node and notify observers of the loss.
  private void lostConnection(MemcachedNode qa) {
    queueReconnect(qa);
    for (ConnectionObserver observer : connObservers) {
      observer.connectionLost(qa.getSocketAddress());
    }
  }

  // Handle IO for a specific selector. Any IOException will cause a
  // reconnect
  private void handleIO(SelectionKey sk) {
    MemcachedNode qa = (MemcachedNode) sk.attachment();
    try {
      getLogger().debug("Handling IO for: %s (r=%s, w=%s, c=%s, op=%s)", sk,
          sk.isReadable(), sk.isWritable(), sk.isConnectable(),
          sk.attachment());
      if (sk.isConnectable()) {
        getLogger().info("Connection state changed for %s", sk);
        final SocketChannel channel = qa.getChannel();
        if (channel.finishConnect()) {
          connected(qa);
          addedQueue.offer(qa);
          if (qa.getWbuf().hasRemaining()) {
            handleWrites(sk, qa);
          }
        } else {
          assert !channel.isConnected() : "connected";
        }
      } else {
        if (sk.isValid() && sk.isReadable()) {
          handleReads(sk, qa);
        }
        if (sk.isValid() && sk.isWritable()) {
          handleWrites(sk, qa);
        }
      }
    } catch (ClosedChannelException e) {
      // Note, not all channel closes end up here
      if (!shutDown) {
        getLogger().info("Closed channel and not shutting down. Queueing"
            + " reconnect on %s", qa, e);
        lostConnection(qa);
      }
    } catch (ConnectException e) {
      // Failures to establish a connection should attempt a reconnect
      // without signaling the observers.
      getLogger().info("Reconnecting due to failure to connect to %s", qa, e);
      queueReconnect(qa);
    } catch (OperationException e) {
      qa.setupForAuth(); // noop if !shouldAuth
      getLogger().info("Reconnection due to exception handling a memcached "
          + "operation on %s. This may be due to an authentication failure.",
          qa, e);
      lostConnection(qa);
    } catch (Exception e) {
      // Any particular error processing an item should simply
      // cause us to reconnect to the server.
      //
      // One cause is just network oddness or servers
      // restarting, which lead here with IOException
      qa.setupForAuth(); // noop if !shouldAuth
      getLogger().info("Reconnecting due to exception on %s", qa, e);
      lostConnection(qa);
    }
    qa.fixupOps();
  }

  // Drain the node's write buffer until nothing more can be written.
  private void handleWrites(SelectionKey sk, MemcachedNode qa)
      throws IOException {
    qa.fillWriteBuffer(shouldOptimize);
    boolean canWriteMore = qa.getBytesRemainingToWrite() > 0;
    while (canWriteMore) {
      int wrote = qa.writeSome();
      qa.fillWriteBuffer(shouldOptimize);
      canWriteMore = wrote > 0 && qa.getBytesRemainingToWrite() > 0;
    }
  }

  // Read responses from the channel and feed them to the pending read ops.
  private void handleReads(SelectionKey sk, MemcachedNode qa)
      throws IOException {
    Operation currentOp = qa.getCurrentReadOp();
    // If it's a tap ack there is no response
    if (currentOp instanceof TapAckOperationImpl) {
      qa.removeCurrentReadOp();
      return;
    }
    ByteBuffer rbuf = qa.getRbuf();
    final SocketChannel channel = qa.getChannel();
    int read = channel.read(rbuf);
    if (read < 0) {
      if (currentOp instanceof TapOperation) {
        // If were doing tap then we won't throw an exception
        currentOp.getCallback().complete();
        ((TapOperation) currentOp).streamClosed(OperationState.COMPLETE);
        getLogger().debug("Completed read op: %s and giving the next %d bytes",
            currentOp, rbuf.remaining());
        Operation op = qa.removeCurrentReadOp();
        assert op == currentOp : "Expected to pop " + currentOp + " got " + op;
        currentOp = qa.getCurrentReadOp();
      } else {
        // our model is to keep the connection alive for future ops
        // so we'll queue a reconnect if disconnected via an IOException
        throw new IOException("Disconnected unexpected, will reconnect.");
      }
    }
    while (read > 0) {
      getLogger().debug("Read %d bytes", read);
      rbuf.flip();
      while (rbuf.remaining() > 0) {
        if (currentOp == null) {
          throw new IllegalStateException("No read operation.");
        }
        synchronized(currentOp) {
          currentOp.readFromBuffer(rbuf);
          if (currentOp.getState() == OperationState.COMPLETE) {
            getLogger().debug("Completed read op: %s and giving the next %d "
                + "bytes", currentOp, rbuf.remaining());
            Operation op = qa.removeCurrentReadOp();
            assert op == currentOp : "Expected to pop " + currentOp + " got "
                + op;
          } else if (currentOp.getState() == OperationState.RETRY) {
            getLogger().debug("Reschedule read op due to NOT_MY_VBUCKET error: "
                + "%s ", currentOp);
            ((VBucketAware) currentOp).addNotMyVbucketNode(
                currentOp.getHandlingNode());
            Operation op = qa.removeCurrentReadOp();
            assert op == currentOp : "Expected to pop " + currentOp + " got "
                + op;
            retryOps.add(currentOp);
          }
        }
        currentOp=qa.getCurrentReadOp();
      }
      rbuf.clear();
      read = channel.read(rbuf);
    }
  }

  // Make a debug string out of the given buffer's values
  static String dbgBuffer(ByteBuffer b, int size) {
    StringBuilder sb = new StringBuilder();
    byte[] bytes = b.array();
    for (int i = 0; i < size; i++) {
      char ch = (char) bytes[i];
      if (Character.isWhitespace(ch) || Character.isLetterOrDigit(ch)) {
        sb.append(ch);
      } else {
        sb.append("\\x");
        sb.append(Integer.toHexString(bytes[i] & 0xff));
      }
    }
    return sb.toString();
  }

  // Close the node's socket and schedule it for a delayed reconnect.
  protected void queueReconnect(MemcachedNode qa) {
    if (!shutDown) {
      getLogger().warn("Closing, and reopening %s, attempt %d.", qa,
          qa.getReconnectCount());
      if (qa.getSk() != null) {
        qa.getSk().cancel();
        assert !qa.getSk().isValid() : "Cancelled selection key is valid";
      }
      qa.reconnecting();
      try {
        if (qa.getChannel() != null && qa.getChannel().socket() != null) {
          qa.getChannel().socket().close();
        } else {
          getLogger().info("The channel or socket was null for %s", qa);
        }
      } catch (IOException e) {
        getLogger().warn("IOException trying to close a socket", e);
      }
      qa.setChannel(null);
      // NOTE(review): the min() cap is applied BEFORE the *1000 scaling, so
      // the effective delay can exceed maxDelay by up to 1000x; the newer
      // version of this method caps after scaling — looks like a bug here,
      // confirm before relying on maxDelay.
      long delay = (long) Math.min(maxDelay,
          Math.pow(2, qa.getReconnectCount())) * 1000;
      long reconTime = System.currentTimeMillis() + delay;
      // Avoid potential condition where two connections are scheduled
      // for reconnect
at the exact same time. This is expected to be // a rare situation. while (reconnectQueue.containsKey(reconTime)) { reconTime++; } reconnectQueue.put(reconTime, qa); // Need to do a little queue management. qa.setupResend(); if (failureMode == FailureMode.Redistribute) { redistributeOperations(qa.destroyInputQueue()); } else if (failureMode == FailureMode.Cancel) { cancelOperations(qa.destroyInputQueue()); } } } private void cancelOperations(Collection<Operation> ops) { for (Operation op : ops) { op.cancel(); } } private void redistributeOperations(Collection<Operation> ops) { for (Operation op : ops) { if (op.isCancelled() || op.isTimedOut()) { continue; } if (op instanceof KeyedOperation) { KeyedOperation ko = (KeyedOperation) op; int added = 0; for (String k : ko.getKeys()) { for (Operation newop : opFact.clone(ko)) { addOperation(k, newop); added++; } } assert added > 0 : "Didn't add any new operations when redistributing"; } else { // Cancel things that don't have definite targets. 
op.cancel(); } } } private void attemptReconnects() throws IOException { final long now = System.currentTimeMillis(); final Map<MemcachedNode, Boolean> seen = new IdentityHashMap<MemcachedNode, Boolean>(); final List<MemcachedNode> rereQueue = new ArrayList<MemcachedNode>(); SocketChannel ch = null; for (Iterator<MemcachedNode> i = reconnectQueue.headMap(now).values().iterator(); i.hasNext();) { final MemcachedNode qa = i.next(); i.remove(); try { if (!seen.containsKey(qa)) { seen.put(qa, Boolean.TRUE); getLogger().info("Reconnecting %s", qa); ch = SocketChannel.open(); ch.configureBlocking(false); int ops = 0; if (ch.connect(qa.getSocketAddress())) { getLogger().info("Immediately reconnected to %s", qa); assert ch.isConnected(); } else { ops = SelectionKey.OP_CONNECT; } qa.registerChannel(ch, ch.register(selector, ops, qa)); assert qa.getChannel() == ch : "Channel was lost."; } else { getLogger().debug("Skipping duplicate reconnect request for %s", qa); } } catch (SocketException e) { getLogger().warn("Error on reconnect", e); rereQueue.add(qa); } catch (Exception e) { getLogger().error("Exception on reconnect, lost node %s", qa, e); } finally { // it's possible that above code will leak file descriptors under // abnormal // conditions (when ch.open() fails and throws IOException. // always close non connected channel if (ch != null && !ch.isConnected() && !ch.isConnectionPending()) { try { ch.close(); } catch (IOException x) { getLogger().error("Exception closing channel: %s", qa, x); } } } } // Requeue any fast-failed connects. for (MemcachedNode n : rereQueue) { queueReconnect(n); } } /** * Get the node locator used by this connection. */ public NodeLocator getLocator() { return locator; } public void enqueueOperation(String key, Operation o) { StringUtils.validateKey(key); checkState(); addOperation(key, o); } /** * Add an operation to the given connection. 
* * @param key the key the operation is operating upon * @param o the operation */ protected void addOperation(final String key, final Operation o) { MemcachedNode placeIn = null; MemcachedNode primary = locator.getPrimary(key); if (primary.isActive() || failureMode == FailureMode.Retry) { placeIn = primary; } else if (failureMode == FailureMode.Cancel) { o.cancel(); } else { // Look for another node in sequence that is ready. for (Iterator<MemcachedNode> i = locator.getSequence(key); placeIn == null && i.hasNext();) { MemcachedNode n = i.next(); if (n.isActive()) { placeIn = n; } } // If we didn't find an active node, queue it in the primary node // and wait for it to come back online. if (placeIn == null) { placeIn = primary; this.getLogger().warn( "Could not redistribute " + "to another node, retrying primary node for %s.", key); } } assert o.isCancelled() || placeIn != null : "No node found for key " + key; if (placeIn != null) { addOperation(placeIn, o); } else { assert o.isCancelled() : "No node found for " + key + " (and not immediately cancelled)"; } } public void insertOperation(final MemcachedNode node, final Operation o) { o.setHandlingNode(node); o.initialize(); node.insertOp(o); addedQueue.offer(node); Selector s = selector.wakeup(); assert s == selector : "Wakeup returned the wrong selector."; getLogger().debug("Added %s to %s", o, node); } protected void addOperation(final MemcachedNode node, final Operation o) { o.setHandlingNode(node); o.initialize(); node.addOp(o); addedQueue.offer(node); Selector s = selector.wakeup(); assert s == selector : "Wakeup returned the wrong selector."; getLogger().debug("Added %s to %s", o, node); } public void addOperations(final Map<MemcachedNode, Operation> ops) { for (Map.Entry<MemcachedNode, Operation> me : ops.entrySet()) { final MemcachedNode node = me.getKey(); Operation o = me.getValue(); o.setHandlingNode(node); o.initialize(); node.addOp(o); addedQueue.offer(node); } Selector s = selector.wakeup(); assert s 
== selector : "Wakeup returned the wrong selector."; } /** * Broadcast an operation to all nodes. */ public CountDownLatch broadcastOperation(BroadcastOpFactory of) { return broadcastOperation(of, locator.getAll()); } /** * Broadcast an operation to a specific collection of nodes. */ public CountDownLatch broadcastOperation(final BroadcastOpFactory of, Collection<MemcachedNode> nodes) { final CountDownLatch latch = new CountDownLatch(locator.getAll().size()); for (MemcachedNode node : nodes) { Operation op = of.newOp(node, latch); op.initialize(); node.addOp(op); op.setHandlingNode(node); addedQueue.offer(node); } Selector s = selector.wakeup(); assert s == selector : "Wakeup returned the wrong selector."; return latch; } /** * Shut down all of the connections. */ public void shutdown() throws IOException { shutDown = true; Selector s = selector.wakeup(); assert s == selector : "Wakeup returned the wrong selector."; for (MemcachedNode qa : locator.getAll()) { if (qa.getChannel() != null) { qa.getChannel().close(); qa.setSk(null); if (qa.getBytesRemainingToWrite() > 0) { getLogger().warn("Shut down with %d bytes remaining to write", qa.getBytesRemainingToWrite()); } getLogger().debug("Shut down channel %s", qa.getChannel()); } } running = false; selector.close(); getLogger().debug("Shut down selector %s", selector); } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{MemcachedConnection to"); for (MemcachedNode qa : locator.getAll()) { sb.append(" "); sb.append(qa.getSocketAddress()); } sb.append("}"); return sb.toString(); } /** * helper method: increase timeout count on node attached to this op. * * @param op */ public static void opTimedOut(Operation op) { MemcachedConnection.setTimeout(op, true); } /** * helper method: reset timeout counter. * * @param op */ public static void opSucceeded(Operation op) { MemcachedConnection.setTimeout(op, false); } /** * helper method: do some error checking and set timeout boolean. 
* * @param op * @param isTimeout */ private static void setTimeout(Operation op, boolean isTimeout) { try { if (op == null || op.isTimedOutUnsent()) { return; // op may be null in some cases, e.g. flush } MemcachedNode node = op.getHandlingNode(); if (node == null) { LoggerFactory.getLogger(MemcachedConnection.class).warn( "handling node for operation is not set"); } else { node.setContinuousTimeout(isTimeout); } } catch (Exception e) { LoggerFactory.getLogger(MemcachedConnection.class).error(e.getMessage()); } } protected void checkState() { if (shutDown) { throw new IllegalStateException("Shutting down"); } assert isAlive() : "IO Thread is not running."; } /** * Infinitely loop processing IO. */ @Override public void run() { while (running) { try { handleIO(); } catch (IOException e) { logRunException(e); } catch (CancelledKeyException e) { logRunException(e); } catch (ClosedSelectorException e) { logRunException(e); } catch (IllegalStateException e) { logRunException(e); } } getLogger().info("Shut down memcached client"); } private void logRunException(Exception e) { if (shutDown) { // There are a couple types of errors that occur during the // shutdown sequence that are considered OK. Log at debug. getLogger().debug("Exception occurred during shutdown", e); } else { getLogger().warn("Problem handling memcached IO", e); } } }
package org.almibe.multipage; import javafx.beans.property.ObjectProperty; import javafx.beans.property.ReadOnlyListProperty; import javafx.beans.property.SimpleObjectProperty; import javafx.collections.FXCollections; import javafx.collections.ObservableList; import javafx.scene.control.Control; import javafx.scene.control.Skin; import org.almibe.multipage.skins.MultiPageDisplaySkin; public class MultiPageDisplay extends Control { private final ObjectProperty<Page> selectedPage = new SimpleObjectProperty<>(); private final DefaultPageFactory defaultPageFactory; private final MultiPageDisplaySkin multiPageDisplaySkin; public MultiPageDisplay(DefaultPageFactory defaultPageFactory) { multiPageDisplaySkin = new MultiPageDisplaySkin(this); this.defaultPageFactory = defaultPageFactory; } @Override protected Skin<?> createDefaultSkin() { return multiPageDisplaySkin; } public ReadOnlyListProperty<Page> getPages() { return multiPageDisplaySkin.getPages(); } public void addPage(Page page) { multiPageDisplaySkin.addPage(page); } public void removePage(Page page) { multiPageDisplaySkin.removePage(page); } public Page getSelectedPage() { return selectedPage.get(); } public ObjectProperty<Page> selectedPageProperty() { return selectedPage; } public DefaultPageFactory getDefaultPageFactory() { return defaultPageFactory; } public void setSelectedPage(Page selectedPage) { this.selectedPage.set(selectedPage); } }
package org.bitvector.microservice2;

import akka.actor.UntypedActor;
import akka.event.Logging;
import akka.event.LoggingAdapter;
import io.undertow.Handlers;
import io.undertow.Undertow;
import io.undertow.server.HttpHandler;
import io.undertow.server.HttpServerExchange;
import io.undertow.server.RoutingHandler;
import io.undertow.util.Headers;
import io.undertow.util.Methods;

import java.io.Serializable;

/**
 * Actor that owns an embedded Undertow HTTP server. The server is started
 * when a {@link Start} message is received and stopped on {@link Stop}.
 */
public class HttpActor extends UntypedActor {
    private final LoggingAdapter log = Logging.getLogger(getContext().system(), this);
    private Undertow server = null;

    /** Build the routing table and start listening on 0.0.0.0:8080. */
    private void start() {
        RoutingHandler rootHandler = Handlers.routing()
                .add(Methods.GET, "/foo", new HttpHandler() {
                    @Override
                    public void handleRequest(HttpServerExchange exchange) throws Exception {
                        exchange.getResponseHeaders().put(Headers.CONTENT_TYPE, "text/plain");
                        exchange.getResponseSender().send("foo");
                    }
                })
                .add(Methods.GET, "/foo/{id}", new HttpHandler() {
                    @Override
                    public void handleRequest(HttpServerExchange exchange) throws Exception {
                        exchange.getResponseHeaders().put(Headers.CONTENT_TYPE, "text/plain");
                        // FIX: getQueryParameters().get("id") returns a
                        // Deque<String>; appending the deque itself rendered
                        // the response as e.g. "foo[5]". Send the first
                        // value of the {id} path-template parameter instead.
                        exchange.getResponseSender().send("foo" + exchange.getQueryParameters().get("id").getFirst());
                    }
                });

        server = Undertow.builder()
                .addHttpListener(8080, "0.0.0.0", rootHandler)
                .build();
        server.start();
        log.info("HttpActor Started");
    }

    /** Stop the embedded server. Assumes start() has run first. */
    private void stop() {
        server.stop();
        log.info("HttpActor Stopped");
    }

    /** Dispatch Start/Stop control messages; anything else is unhandled. */
    public void onReceive(Object message) throws Exception {
        if (message instanceof Start) {
            this.start();
        } else if (message instanceof Stop) {
            this.stop();
        } else {
            unhandled(message);
        }
    }

    public static class Start implements Serializable {
    }

    public static class Stop implements Serializable {
    }
}
package org.bitvector.microservice2;

import akka.actor.AbstractActor;
import akka.event.Logging;
import akka.event.LoggingAdapter;
import akka.japi.pf.ReceiveBuilder;
import io.jsonwebtoken.Claims;
import io.jsonwebtoken.Jwts;
import io.jsonwebtoken.SignatureAlgorithm;
import io.undertow.Handlers;
import io.undertow.Undertow;
import io.undertow.UndertowOptions;
import io.undertow.server.HttpServerExchange;
import io.undertow.server.RoutingHandler;
import io.undertow.server.handlers.Cookie;
import io.undertow.util.Cookies;
import io.undertow.util.Headers;
import io.undertow.util.Methods;
import io.undertow.util.StatusCodes;
import org.apache.shiro.SecurityUtils;
import org.apache.shiro.authc.UsernamePasswordToken;
import org.apache.shiro.config.IniSecurityManagerFactory;
import org.apache.shiro.mgt.SecurityManager;
import org.apache.shiro.session.Session;
import org.apache.shiro.subject.Subject;
import org.apache.shiro.util.Factory;

import java.io.Serializable;
import java.nio.charset.Charset;
import java.util.Base64;
import java.util.Date;
import java.util.Objects;

/**
 * Actor that owns an embedded Undertow HTTP server with Shiro-backed
 * authentication. Login exchanges HTTP Basic credentials for a JWT cookie
 * tied to a server-side session; logout validates the JWT and destroys the
 * session. Starts on {@link Start}, stops on {@link Stop}.
 */
public class HttpActor extends AbstractActor {
    private LoggingAdapter log = Logging.getLogger(getContext().system(), this);
    private SettingsImpl settings = Settings.get(getContext().system());
    private Undertow server;
    private Factory<SecurityManager> factory = new IniSecurityManagerFactory("classpath:shiro.ini");
    private SecurityManager securityManager = factory.getInstance();

    public HttpActor() {
        receive(ReceiveBuilder
                        .match(Start.class, this::doStart)
                        .match(Stop.class, this::doStop)
                        .matchAny(obj -> log.error("HttpActor received unknown message " + obj.toString()))
                        .build()
        );
    }

    /** Wire up routes and start the listener; stops self on startup failure. */
    private void doStart(Start msg) {
        log.info("HttpActor received start");
        SecurityUtils.setSecurityManager(securityManager);
        ProductCtrl productCtrl = new ProductCtrl(getContext());
        RoutingHandler rootHandler = Handlers.routing()
                .add(Methods.GET, "/logout", exchange -> exchange.dispatch(this::doLogout))
                .add(Methods.GET, "/login", exchange -> exchange.dispatch(this::doLogin))
                .addAll(productCtrl.getRoutingHandler());
        server = Undertow.builder()
                .addHttpListener(settings.LISTEN_PORT(), settings.LISTEN_ADDRESS(), rootHandler)
                .setServerOption(UndertowOptions.ENABLE_HTTP2, true)
                .build();
        try {
            server.start();
        } catch (RuntimeException e) {
            log.error("Failed to create HTTP actor: " + e.getMessage());
            getContext().stop(self());
        }
    }

    /** Stop the embedded server. */
    private void doStop(Stop msg) {
        log.info("HttpActor received stop");
        server.stop();
    }

    /**
     * Authenticate via HTTP Basic, create a Shiro session, and hand the
     * client a signed JWT in an HttpOnly cookie. Any failure yields 401
     * with a WWW-Authenticate challenge.
     */
    private void doLogin(HttpServerExchange exchange) {
        try {
            // Collect the subject's username and password via HTTP basic authentication.
            String[] schemeAndValue = exchange.getRequestHeaders().getFirst(Headers.AUTHORIZATION).split(" ");
            if (!Objects.equals(schemeAndValue[0].toLowerCase().trim(), Headers.BASIC.toString().toLowerCase())) {
                throw new Exception("Bad authentication scheme");
            }
            byte[] buffer = Base64.getDecoder().decode(schemeAndValue[1]);
            String[] usernameAndPassword = new String(buffer, Charset.forName("utf-8")).split(":");

            // Verify the subject's username and password
            Subject currentUser = SecurityUtils.getSubject();
            if (!currentUser.isAuthenticated()) {
                UsernamePasswordToken token = new UsernamePasswordToken(usernameAndPassword[0].trim(), usernameAndPassword[1].trim());
                token.setRememberMe(true);
                currentUser.login(token);
            }

            // Create a server side session to remember the subject
            Session currentSession = currentUser.getSession(true);
            currentSession.setTimeout(3600 * 1000); // 1 hour in-activity timeout

            // Build a cookie with a JWT value both having 24 hr lifespan.
            Date jwtExpireAt = new Date(System.currentTimeMillis() + (24 * 3600 * 1000));
            Date cookieExpireAt = new Date(System.currentTimeMillis() + (24 * 3600 * 1000));
            String jwt = Jwts.builder()
                    .setId(currentSession.getId().toString())
                    .setSubject(currentUser.getPrincipal().toString())
                    .setExpiration(jwtExpireAt)
                    .setIssuer(this.getClass().getPackage().getName())
                    .signWith(SignatureAlgorithm.HS512, Base64.getDecoder().decode(settings.SECRET_KEY()))
                    .compact();
            Cookie accessTokenCookie = Cookies.parseSetCookieHeader("access_token" + "=" + jwt + ";")
                    .setExpires(cookieExpireAt)
                    .setHttpOnly(true);

            // Respond to subject with cookie
            // NOTE(review): the response-cookie map key is "0" rather than the
            // cookie's name — confirm this is intentional for Undertow's
            // Set-Cookie emission.
            exchange.getResponseCookies().put("0", accessTokenCookie);
            exchange.setStatusCode(StatusCodes.OK);
            exchange.getResponseSender().close();
        } catch (Exception e) {
            // Anything goes wrong then reject the subject.
            // FIX: route the failure through the actor's logger instead of
            // printStackTrace() so it reaches the configured log output.
            log.error(e, "Login failed; rejecting subject");
            exchange.setStatusCode(StatusCodes.UNAUTHORIZED);
            exchange.getResponseHeaders().put(Headers.WWW_AUTHENTICATE, Headers.BASIC.toString() + " " + Headers.REALM + "=" + "Login");
            exchange.getResponseSender().close();
        }
    }

    /**
     * Validate the JWT cookie, rebuild the subject from its session id, and
     * log it out. Any failure yields 403.
     */
    private void doLogout(HttpServerExchange exchange) {
        try {
            // Get the cookie back from subject
            Cookie accessTokenCookie = exchange.getRequestCookies().get("access_token");

            // Get the claims back from JWT
            Claims claims = Jwts.parser()
                    .setSigningKey(Base64.getDecoder().decode(settings.SECRET_KEY()))
                    .parseClaimsJws(accessTokenCookie.getValue())
                    .getBody();

            // Load subject from server side session
            Serializable sessionId = claims.getId();
            Subject currentUser = new Subject.Builder()
                    .sessionId(sessionId)
                    .buildSubject();
            if (!Objects.equals(currentUser.getPrincipal(), claims.getSubject())) {
                throw new Exception("No matching subject");
            }

            // Logout subject and destroy server side session
            currentUser.logout();
        } catch (Exception e) {
            // Anything goes wrong then reject the subject.
            // FIX: route the failure through the actor's logger instead of
            // printStackTrace() so it reaches the configured log output.
            log.error(e, "Logout failed; rejecting subject");
            exchange.setStatusCode(StatusCodes.FORBIDDEN);
            exchange.getResponseSender().close();
        }
    }

    public static class Start implements Serializable {
    }

    public static class Stop implements Serializable {
    }
}
package org.fcrepo.binary;

import javax.jcr.Node;

import java.util.ArrayList;
import java.util.List;

import org.slf4j.Logger;

import static org.slf4j.LoggerFactory.getLogger;

/**
 * Service that evaluates a set of storage policies for an object and provides storage hints
 * for a binary stream
 * @author cbeer
 * @date Apr 25, 2013
 */
public class PolicyDecisionPoint {

    // FIX: the logger was created for MimeTypePolicy.class, so all output from
    // this class was attributed to the wrong logger; use this class instead.
    private static final Logger logger = getLogger(PolicyDecisionPoint.class);

    private List<Policy> policies;

    /**
     * Create a decision point with an empty, mutable policy list.
     */
    public PolicyDecisionPoint() {
        logger.debug("Initializing binary PolicyDecisionPoint");
        policies = new ArrayList<Policy>();
    }

    /**
     * Add a new storage policy
     * @param p the policy to append; policies are evaluated in insertion order
     */
    public void addPolicy(final Policy p) {
        policies.add(p);
    }

    /**
     * Given a JCR node (likely a jcr:content node), determine which storage
     * policy should apply
     * @param n the node to evaluate
     * @return the first non-null hint produced by a policy, or null if no
     *         policy applies
     */
    public String evaluatePolicies(final Node n) {
        for (Policy p : policies) {
            String h = p.evaluatePolicy(n);
            if (h != null) {
                return h;
            }
        }
        return null;
    }

    /**
     * Replace the entire policy list.
     * @param policies the new list of policies to evaluate
     */
    public void setPolicies(final List<Policy> policies) {
        // FIX: pass the list itself to the parameterized logger rather than
        // calling toString() eagerly — avoids an NPE on a null argument and
        // defers formatting until debug is actually enabled.
        logger.debug("Adding policies to binary PolicyDecisionPoint: {}", policies);
        this.policies = policies;
    }
}
package org.gbif.api.util.iterables;

import org.gbif.api.model.common.paging.PagingConstants;
import org.gbif.api.model.registry.Dataset;
import org.gbif.api.model.registry.Node;
import org.gbif.api.model.registry.Organization;
import org.gbif.api.service.registry.DatasetService;
import org.gbif.api.service.registry.InstallationService;
import org.gbif.api.service.registry.NetworkService;
import org.gbif.api.service.registry.NodeService;
import org.gbif.api.service.registry.OrganizationService;
import org.gbif.api.vocabulary.Country;
import org.gbif.api.vocabulary.DatasetType;

import java.util.UUID;
import javax.annotation.Nullable;

import com.google.common.collect.ImmutableList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Factory constructing registry entity iterables using specific pagers under the hood.
 */
public class Iterables {

    private static final Logger LOG = LoggerFactory.getLogger(Iterables.class);

    /**
     * Iterates over datasets related to the given (optional) registry key,
     * using the default page size.
     *
     * @see #datasets(UUID, DatasetType, DatasetService, OrganizationService,
     *      InstallationService, NetworkService, NodeService, int)
     */
    public static Iterable<Dataset> datasets(@Nullable UUID key, @Nullable DatasetType type,
                                             DatasetService ds, OrganizationService os, InstallationService is,
                                             NetworkService ns, NodeService nos) {
        return datasets(key, type, ds, os, is, ns, nos, PagingConstants.DEFAULT_PARAM_LIMIT);
    }

    /**
     * Resolves the given key against each registry entity type in turn
     * (dataset, organization, installation, node, network) and returns the
     * matching dataset iterable; with a null key all datasets are iterated.
     *
     * @param key  an optional registry entity key of any supported type
     * @param type an optional filter to just include the given dataset type
     * @throws IllegalArgumentException if the key matches no registry entity
     */
    public static Iterable<Dataset> datasets(@Nullable UUID key, @Nullable DatasetType type,
                                             DatasetService ds, OrganizationService os, InstallationService is,
                                             NetworkService ns, NodeService nos, int pageSize) {
        if (key == null) {
            LOG.info("Iterate over all {} datasets", type == null ? "" : type);
            return new DatasetPager(ds, type, pageSize);
        } else if (isDataset(key, ds)) {
            LOG.info("Iterate over dataset {}", key);
            return ImmutableList.of(ds.get(key));
        } else if (isOrganization(key, os)) {
            LOG.info("Iterate over all {} datasets published by {}", type == null ? "" : type, key);
            return new OrgPublishingPager(os, key, type, pageSize);
        } else if (isInstallation(key, is)) {
            LOG.info("Iterate over all {} datasets hosted by installation {}", type == null ? "" : type, key);
            return new InstallationPager(is, key, type, pageSize);
        } else if (isNode(key, nos)) {
            LOG.info("Iterate over all {} datasets endorsed by node {}", type == null ? "" : type, key);
            // FIX: node keys were paged with NetworkPager (and network keys
            // with NodeDatasetPager) — the pagers were swapped relative to
            // both the log messages and endorsedDatasets()/networkDatasets().
            return new NodeDatasetPager(nos, key, type, pageSize);
        } else if (isNetwork(key, ns)) {
            LOG.info("Iterate over all {} datasets belonging to network {}", type == null ? "" : type, key);
            return new NetworkPager(ns, key, type, pageSize);
        }
        throw new IllegalArgumentException("Given key is no valid GBIF registry key: " + key);
    }

    /**
     * @param type an optional filter to just include the given dataset type
     */
    public static Iterable<Dataset> datasets(@Nullable DatasetType type, DatasetService service) {
        LOG.info("Iterate over all {} datasets", type == null ? "" : type);
        return new DatasetPager(service, type, PagingConstants.DEFAULT_PARAM_LIMIT);
    }

    /**
     * @param key a valid organization key
     * @param type an optional filter to just include the given dataset type
     */
    public static Iterable<Dataset> publishedDatasets(UUID key, @Nullable DatasetType type,
                                                      OrganizationService service) {
        LOG.info("Iterate over all {} datasets published by {}", type == null ? "" : type, key);
        return new OrgPublishingPager(service, key, type, PagingConstants.DEFAULT_PARAM_LIMIT);
    }

    /**
     * @param key a valid organization key
     * @param type an optional filter to just include the given dataset type
     */
    public static Iterable<Dataset> hostedDatasets(UUID key, @Nullable DatasetType type,
                                                   OrganizationService service) {
        LOG.info("Iterate over all {} datasets hosted by organization {}", type == null ? "" : type, key);
        return new OrgHostingPager(service, key, type, PagingConstants.DEFAULT_PARAM_LIMIT);
    }

    /**
     * @param key a valid installation key
     * @param type an optional filter to just include the given dataset type
     */
    public static Iterable<Dataset> hostedDatasets(UUID key, @Nullable DatasetType type,
                                                   InstallationService service) {
        LOG.info("Iterate over all {} datasets hosted by installation {}", type == null ? "" : type, key);
        return new InstallationPager(service, key, type, PagingConstants.DEFAULT_PARAM_LIMIT);
    }

    /**
     * @param key a valid network key
     * @param type an optional filter to just include the given dataset type
     */
    public static Iterable<Dataset> networkDatasets(UUID key, @Nullable DatasetType type,
                                                    NetworkService service) {
        LOG.info("Iterate over all {} datasets belonging to network {}", type == null ? "" : type, key);
        return new NetworkPager(service, key, type, PagingConstants.DEFAULT_PARAM_LIMIT);
    }

    /**
     * @param nodeKey a valid endorsing node key
     * @param type an optional filter to just include the given dataset type
     */
    public static Iterable<Dataset> endorsedDatasets(UUID nodeKey, @Nullable DatasetType type,
                                                     NodeService service) {
        LOG.info("Iterate over all {} datasets endorsed by node {}", type == null ? "" : type, nodeKey);
        return new NodeDatasetPager(service, nodeKey, type, PagingConstants.DEFAULT_PARAM_LIMIT);
    }

    /**
     * @param country an optional country filter
     */
    public static Iterable<Organization> organizations(@Nullable Country country, OrganizationService service) {
        LOG.info("Iterate over all organizations {}", country == null ? "" : "from country "+country);
        return new OrganizationPager(service, country, PagingConstants.DEFAULT_PARAM_LIMIT);
    }

    /**
     * @param nodeKey a valid endorsing node key
     */
    public static Iterable<Organization> endorsedOrganizations(UUID nodeKey, NodeService service) {
        LOG.info("Iterate over all organizations endorsed by node {}", nodeKey);
        return new NodeOrganizationPager(service, nodeKey, PagingConstants.DEFAULT_PARAM_LIMIT);
    }

    /**
     * Iterate over all endorsing nodes
     */
    public static Iterable<Node> nodes(NodeService service) {
        LOG.info("Iterate over all nodes");
        return new NodePager(service, PagingConstants.DEFAULT_PARAM_LIMIT);
    }

    // Existence probes: a key belongs to an entity type iff the corresponding
    // service can resolve it.
    private static boolean isDataset(UUID key, DatasetService ds) {
        return ds.get(key) != null;
    }

    private static boolean isOrganization(UUID key, OrganizationService os) {
        return os.get(key) != null;
    }

    private static boolean isInstallation(UUID key, InstallationService is) {
        return is.get(key) != null;
    }

    private static boolean isNetwork(UUID key, NetworkService ns) {
        return ns.get(key) != null;
    }

    private static boolean isNode(UUID key, NodeService ns) {
        return ns.get(key) != null;
    }
}
package org.jtrfp.trcl.beh; import org.jtrfp.trcl.core.TR; import org.jtrfp.trcl.math.Vect3D; import org.jtrfp.trcl.obj.WorldObject; public class RotateAroundObject extends Behavior { private WorldObject target; private double distance=TR.mapSquareSize; private double angularVelocityRPS = .25; final double [] delta = new double[3]; private double []offset = new double[]{0,0,0}; @Override public void _tick(long tickTimeMillis){ if(target!=null){ final WorldObject parent = getParent(); final double [] tPos = target.getPosition(); final double [] pPos = parent.getPosition(); //Theta = [0,2]pi final double theta = (((angularVelocityRPS*tickTimeMillis) / 1000.)%1.)*2*Math.PI; delta[0]=Math.sin(theta); delta[2]=Math.cos(theta); delta[1]=0; Vect3D.scalarMultiply(delta, distance, delta); Vect3D.add(delta, offset, delta); Vect3D.add(tPos, delta, pPos); parent.setPosition(pPos[0],pPos[1],pPos[2]); parent.notifyPositionChange();//TODO: Remove and see if it still works }//end if(!null) }//end _tick(...) /** * @return the target */ public WorldObject getTarget() { return target; } /** * @param target the target to set */ public RotateAroundObject setTarget(WorldObject target) { this.target = target; return this; } /** * @return the distance */ public double getDistance() { return distance; } /** * @param distance the distance to set */ public RotateAroundObject setDistance(double distance) { this.distance = distance; return this; } /** * @return the angularVelocityRPS */ public double getAngularVelocityRPS() { return angularVelocityRPS; } /** * @param angularVelocityRPS the angularVelocityRPS to set */ public RotateAroundObject setAngularVelocityRPS(double angularVelocityRPS) { this.angularVelocityRPS = angularVelocityRPS; return this; } public void setOffset(double[] ds) { offset = ds; } }//end RotateAroundObject
package org.lantern;

import static org.jboss.netty.channel.Channels.pipeline;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.URI;
import java.net.URISyntaxException;
import java.security.KeyManagementException;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.Security;
import java.security.UnrecoverableKeyException;
import java.security.cert.CertificateException;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
import java.util.Queue;
import java.util.Scanner;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;

import javax.net.ServerSocketFactory;
import javax.net.SocketFactory;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;

import org.apache.commons.codec.binary.Base64;
import org.apache.commons.lang.StringUtils;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelPipelineFactory;
import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
import org.jboss.netty.channel.group.ChannelGroup;
import org.jboss.netty.handler.codec.http.HttpRequestDecoder;
import org.jivesoftware.smack.Chat;
import org.jivesoftware.smack.ChatManager;
import org.jivesoftware.smack.MessageListener;
import org.jivesoftware.smack.Roster;
import org.jivesoftware.smack.RosterEntry;
import org.jivesoftware.smack.RosterListener;
import org.jivesoftware.smack.SmackConfiguration;
import org.jivesoftware.smack.XMPPConnection;
import org.jivesoftware.smack.XMPPException;
import org.jivesoftware.smack.packet.Message;
import org.jivesoftware.smack.packet.Presence;
import org.lastbamboo.common.p2p.P2PConstants;
import org.lastbamboo.jni.JLibTorrent;
import org.littleshoot.commom.xmpp.XmppP2PClient;
import org.littleshoot.p2p.P2P;
import org.littleshoot.proxy.KeyStoreManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Factory for creating pipelines for incoming requests to our listening
 * socket.
 *
 * Besides building Netty pipelines, this class maintains two pools of
 * relay endpoints discovered over XMPP: centralized proxies
 * ({@code proxySet}/{@code proxies}) and peer proxies identified by JID
 * ({@code peerProxySet}/{@code peerProxies}). Sets are used for membership
 * checks; the queues are cycled round-robin when picking a relay.
 */
public class HttpServerPipelineFactory implements ChannelPipelineFactory,
    ProxyStatusListener {

    private final Logger log = LoggerFactory.getLogger(getClass());

    // Google/XMPP credentials read from lantern.properties at construction.
    private final String user;
    private final String pwd;

    final Map<String, HttpRequestHandler> hashCodesToHandlers =
        new ConcurrentHashMap<String, HttpRequestHandler>();

    final ConcurrentHashMap<Chat, Collection<String>> chatsToHashCodes =
        new ConcurrentHashMap<Chat, Collection<String>>();

    // Centralized proxies: set for membership, queue for round-robin cycling.
    // NOTE(review): proxySet/peerProxySet are plain HashSets guarded by
    // synchronized blocks at most -- but not all -- access sites.
    private final Set<InetSocketAddress> proxySet =
        new HashSet<InetSocketAddress>();
    private final Queue<InetSocketAddress> proxies =
        new ConcurrentLinkedQueue<InetSocketAddress>();

    // Peer proxies, keyed by the peer's full JID expressed as a URI.
    private final Set<URI> peerProxySet = new HashSet<URI>();
    private final Queue<URI> peerProxies = new ConcurrentLinkedQueue<URI>();

    static {
        // Give XMPP IQ packets a generous 30 second reply timeout.
        SmackConfiguration.setPacketReplyTimeout(30 * 1000);
    }

    private final XmppP2PClient client;

    /**
     * Listener for messages from the Lantern hub (comma-separated proxy
     * addresses in the body) and for typed P2P messages (INFO request/response
     * identified by the MESSAGE_TYPE property).
     */
    private final MessageListener typedListener = new MessageListener() {
        public void processMessage(final Chat ch, final Message msg) {
            final String part = ch.getParticipant();
            if (part.startsWith("lanternxmpp@appspot.com")) {
                log.info("Lantern controlling agent response");
                final String body = msg.getBody();
                // Hub responses are comma-delimited host:port entries.
                final Scanner scan = new Scanner(body);
                scan.useDelimiter(",");
                while (scan.hasNext()) {
                    final String ip = scan.next();
                    addProxy(ip, scan, ch);
                }
            }
            final Integer type =
                (Integer) msg.getProperty(P2PConstants.MESSAGE_TYPE);
            if (type != null) {
                log.info("Processing typed message");
                processTypedMessage(msg, type, ch);
            }
        }
    };

    // Resource tag appended to the XMPP login; used to recognize Lantern JIDs.
    private static final String ID = "-la-";

    private final KeyStoreManager keyStoreManager;
    private final int sslProxyRandomPort;

    // JIDs we have seen presence from and therefore accept typed messages from.
    private Collection<String> trustedPeers = new HashSet<String>();

    /**
     * Creates a new pipeline factory with the specified class for processing
     * proxy authentication.
     *
     * @param channelGroup The group that keeps track of open channels.
     * @param sslProxyRandomPort The port of the HTTP proxy that other peers
     * will relay to.
     * @param plainTextProxyRandomPort The port of the HTTP proxy running
     * only locally and accepting plain-text sockets.
     */
    public HttpServerPipelineFactory(final ChannelGroup channelGroup,
        final KeyStoreManager keyStoreManager, final int sslProxyRandomPort,
        final int plainTextProxyRandomPort) {
        this.keyStoreManager = keyStoreManager;
        this.sslProxyRandomPort = sslProxyRandomPort;
        final Properties props = new Properties();
        final File file =
            new File(LanternUtils.configDir(), "lantern.properties");
        try {
            // NOTE(review): this FileInputStream is never closed -- leaks a
            // file handle on every construction.
            props.load(new FileInputStream(file));
            this.user = props.getProperty("google.user");
            this.pwd = props.getProperty("google.pwd");
            if (StringUtils.isBlank(this.user)) {
                log.error("No user name");
                throw new IllegalStateException("No user name in: " + file);
            }

            if (StringUtils.isBlank(this.pwd)) {
                log.error("No password.");
                throw new IllegalStateException("No password in: " + file);
            }
        } catch (final IOException e) {
            final String msg = "Error loading props file at: " + file;
            log.error(msg, e);
            throw new RuntimeException(msg, e);
        }

        try {
            // Load the native libtorrent library from either the parent or
            // the current directory.
            final String libName = System.mapLibraryName("jnltorrent");
            final JLibTorrent libTorrent =
                new JLibTorrent(Arrays.asList(new File(new File(".."), libName),
                    new File(libName)), true);

            final SocketFactory socketFactory = newTlsSocketFactory();
            final ServerSocketFactory serverSocketFactory =
                newTlsServerSocketFactory();

            final InetSocketAddress plainTextProxyRelayAddress =
                new InetSocketAddress("127.0.0.1", plainTextProxyRandomPort);

            this.client = P2P.newXmppP2PHttpClient("shoot", libTorrent,
                libTorrent, new InetSocketAddress(this.sslProxyRandomPort),
                socketFactory, serverSocketFactory, plainTextProxyRelayAddress);

            // This is a global, backup listener added to the client. We might
            // get notifications of messages twice in some cases, but that's
            // better than the alternative of sometimes not being notified
            // at all.
            this.client.addMessageListener(typedListener);
            this.client.login(this.user, this.pwd, ID);
            configureRoster();
        } catch (final IOException e) {
            final String msg = "Could not log in!!";
            log.warn(msg, e);
            throw new Error(msg, e);
        } catch (final XMPPException e) {
            final String msg = "Could not configure roster!!";
            log.warn(msg, e);
            throw new Error(msg, e);
        }
    }

    /**
     * Builds a TLS server socket factory backed by our own key store, so
     * peers connecting to us present/verify our certificate.
     */
    private ServerSocketFactory newTlsServerSocketFactory() {
        log.info("Creating TLS server socket factory");
        String algorithm =
            Security.getProperty("ssl.KeyManagerFactory.algorithm");
        if (algorithm == null) {
            algorithm = "SunX509";
        }
        try {
            final KeyStore ks = KeyStore.getInstance("JKS");
            ks.load(this.keyStoreManager.keyStoreAsInputStream(),
                this.keyStoreManager.getKeyStorePassword());

            // Set up key manager factory to use our key store
            final KeyManagerFactory kmf =
                KeyManagerFactory.getInstance(algorithm);
            kmf.init(ks, this.keyStoreManager.getCertificatePassword());

            // Initialize the SSLContext to work with our key managers.
            final SSLContext serverContext = SSLContext.getInstance("TLS");
            serverContext.init(kmf.getKeyManagers(), null, null);
            return serverContext.getServerSocketFactory();
        } catch (final KeyStoreException e) {
            throw new Error("Could not create SSL server socket factory.", e);
        } catch (final NoSuchAlgorithmException e) {
            throw new Error("Could not create SSL server socket factory.", e);
        } catch (final CertificateException e) {
            throw new Error("Could not create SSL server socket factory.", e);
        } catch (final IOException e) {
            throw new Error("Could not create SSL server socket factory.", e);
        } catch (final UnrecoverableKeyException e) {
            throw new Error("Could not create SSL server socket factory.", e);
        } catch (final KeyManagementException e) {
            throw new Error("Could not create SSL server socket factory.", e);
        }
    }

    /**
     * Builds a TLS client socket factory that trusts the certificates managed
     * by our {@link KeyStoreManager}.
     */
    private SocketFactory newTlsSocketFactory() {
        log.info("Creating TLS socket factory");
        try {
            final SSLContext clientContext = SSLContext.getInstance("TLS");
            clientContext.init(null, this.keyStoreManager.getTrustManagers(),
                null);
            return clientContext.getSocketFactory();
        } catch (final NoSuchAlgorithmException e) {
            log.error("No TLS?", e);
            throw new Error("No TLS?", e);
        } catch (final KeyManagementException e) {
            log.error("Key managmement issue?", e);
            throw new Error("Key managmement issue?", e);
        }
    }

    /**
     * Picks a relay strategy for each new inbound connection.
     * NOTE(review): the {@code if (true)} makes every request use the GAE
     * proxy; the peer/centralized branches below it are currently dead code.
     */
    public ChannelPipeline getPipeline() {
        log.info("Getting pipeline...");
        // We randomly use peers and centralized proxies.
        if (true) {
            return appEngineProxy();
        }
        synchronized (peerProxySet) {
            if (usePeerProxies()) {
                return peerProxy();
            }
        }
        synchronized (proxySet) {
            return centralizedProxy();
        }
    }

    /** Pipeline that relays requests through the Google App Engine proxy. */
    private ChannelPipeline appEngineProxy() {
        log.info("Using GAE proxy connection...");
        final InetSocketAddress proxy =
            new InetSocketAddress("freelantern.appspot.com", 443);
        final SimpleChannelUpstreamHandler handler =
            new GaeProxyRelayHandler(proxy, this);
        final ChannelPipeline pipeline = pipeline();
        pipeline.addLast("decoder", new HttpRequestDecoder());
        pipeline.addLast("handler", handler);
        return pipeline;
    }

    /**
     * Pipeline that relays through the next peer proxy, cycling the queue.
     * NOTE(review): {@code poll()} can return null if the queue empties
     * between the caller's check and this call -- confirm callers hold the
     * peerProxySet lock.
     */
    private ChannelPipeline peerProxy() {
        log.info("Using PEER proxy connection...");
        final URI uri = peerProxies.poll();
        peerProxies.add(uri);
        final SimpleChannelUpstreamHandler handler =
            new PeerProxyRelayHandler(uri, this, client);
        final ChannelPipeline pipeline = pipeline();
        pipeline.addLast("handler", handler);
        return pipeline;
    }

    /** Pipeline that relays through the next centralized proxy, cycling the queue. */
    private ChannelPipeline centralizedProxy() {
        log.info("Using DIRECT proxy connection...");
        // We just use it as a cyclic queue.
        if (proxies.isEmpty()) {
            log.info("No centralized proxies!!");
            return pipeline();
        }
        final InetSocketAddress proxy = proxies.poll();
        log.info("Using proxy: {}", proxy);
        proxies.add(proxy);
        final SimpleChannelUpstreamHandler handler =
            new ProxyRelayHandler(proxy, this, this.keyStoreManager);
        final ChannelPipeline pipeline = pipeline();
        pipeline.addLast("handler", handler);
        return pipeline;
    }

    /**
     * Decides whether to relay through a peer: always if there are no
     * centralized proxies, otherwise with 75% probability.
     */
    private boolean usePeerProxies() {
        if (peerProxySet.isEmpty()) {
            log.info("No peer proxies, so not using peers");
            return false;
        }
        if (proxySet.isEmpty()) {
            log.info("Using peer proxies since there are no centralized ones");
            return true;
        }
        // TODO: We currently just roll the dice, but ideally we'd be smarter
        // about this based on the performance of the peer proxies.
        final double rand = Math.random();
        if (rand > 0.25) {
            log.info("Using peer proxies - random was "+rand);
            return true;
        }
        log.info("Not using peer proxies -- random was "+rand);
        return false;
    }

    /**
     * Subscribes to the Lantern hub, installs a roster listener for presence
     * changes, and processes the presence of everyone already online.
     */
    private void configureRoster() throws XMPPException {
        final XMPPConnection xmpp = this.client.getXmppConnection();

        final Roster roster = xmpp.getRoster();
        // Make sure we look for MG packets.
        //roster.createEntry("mglittleshoot@gmail.com", "MG", null);
        //roster.createEntry("bravenewsoftware@appspot.com", "MG", null);
        roster.createEntry("lanternxmpp@appspot.com", "Lantern", null);

        roster.addRosterListener(new RosterListener() {
            public void entriesDeleted(final Collection<String> addresses) {
                log.info("Entries deleted");
            }
            public void entriesUpdated(final Collection<String> addresses) {
                log.info("Entries updated: {}", addresses);
            }
            public void presenceChanged(final Presence presence) {
                processPresence(presence, xmpp);
            }
            public void entriesAdded(final Collection<String> addresses) {
                log.info("Entries added: "+addresses);
            }
        });

        // Now we add all the existing entries to get people who are already
        // online.
        final Collection<RosterEntry> entries = roster.getEntries();
        for (final RosterEntry entry : entries) {
            //log.info("Got entry: {}", entry);
            final String jid = entry.getUser();
            //log.info("Roster entry user: {}",jid);
            final Iterator<Presence> presences =
                roster.getPresences(entry.getUser());
            while (presences.hasNext()) {
                final Presence p = presences.next();
                processPresence(p, xmpp);
            }
        }
        log.info("Finished adding listeners");
    }

    /**
     * Routes a presence update: the Lantern hub gets an "/info" request for
     * proxy data; any Lantern peer JID is marked trusted and added/removed
     * from the peer pool based on availability.
     */
    private void processPresence(final Presence p, final XMPPConnection xmpp) {
        final String from = p.getFrom();
        //log.info("Got presence with from: {}", from);
        if (isLanternHub(from)) {
            log.info("Got lantern proxy!!");
            final ChatManager chatManager = xmpp.getChatManager();
            final Chat chat = chatManager.createChat(from, typedListener);

            // Send an "info" message to gather proxy data.
            final Message msg = new Message();
            msg.setBody("/info");
            try {
                log.info("Sending info message to Lantern Hub");
                chat.sendMessage(msg);
            } catch (final XMPPException e) {
                log.error("Could not send INFO message", e);
            }
        } else if (isLanternJid(from)) {
            this.trustedPeers.add(from);
            addOrRemovePeer(p, from, xmpp);
        }
    }

    /**
     * On peer availability, sends an INFO request carrying our MAC and
     * certificate; on unavailability, removes the peer from the pool.
     */
    private void addOrRemovePeer(final Presence p, final String from,
        final XMPPConnection xmpp) {
        final URI uri;
        try {
            uri = new URI(from);
        } catch (final URISyntaxException e) {
            log.error("Could not create URI from: {}", from);
            return;
        }
        if (p.isAvailable()) {
            log.info("Adding from to peer JIDs: {}", from);
            final Message msg = new Message();
            msg.setProperty(P2PConstants.MESSAGE_TYPE,
                XmppMessageConstants.INFO_REQUEST_TYPE);
            // Set our certificate in the request as well -- we want to make
            // extra sure these get through!
            msg.setProperty(P2PConstants.MAC, LanternUtils.getMacAddress());
            msg.setProperty(P2PConstants.CERT,
                this.keyStoreManager.getBase64Cert());
            final ChatManager cm = xmpp.getChatManager();
            final Chat chat = cm.createChat(from, typedListener);
            try {
                log.info("Sending INFO request to: {}", from);
                chat.sendMessage(msg);
            } catch (final XMPPException e) {
                log.info("Could not send message to peer", e);
            }
        } else {
            log.info("Removing JID for peer '"+from+"' with presence: {}", p);
            removePeerUri(uri);
        }
    }

    /** @return true if the JID belongs to the Lantern controlling hub. */
    private boolean isLanternHub(final String from) {
        //return from.startsWith("mglittleshoot");
        return from.startsWith("lanternxmpp@appspot.com");
    }

    /** Sends a typed ERROR message about the given host back over the chat. */
    private void sendErrorMessage(final Chat chat, final InetSocketAddress isa,
        final String message) {
        final Message msg = new Message();
        msg.setProperty(P2PConstants.MESSAGE_TYPE,
            XmppMessageConstants.ERROR_TYPE);
        final String errorMessage = "Error: "+message+" with host: "+isa;
        msg.setProperty(XmppMessageConstants.MESSAGE, errorMessage);
        try {
            chat.sendMessage(msg);
        } catch (final XMPPException e) {
            log.error("Error sending message", e);
        }
    }

    /**
     * Dispatches a typed message from a trusted peer. INFO requests are
     * answered with our own info; INFO responses just get processed.
     * Messages from untrusted JIDs are dropped.
     */
    private void processTypedMessage(final Message msg, final Integer type,
        final Chat chat) {
        final String from = chat.getParticipant();
        log.info("Processing typed message from {}", from);
        if (!this.trustedPeers.contains(from)) {
            log.warn("Ignoring message from untrusted peer: {}", from);
            log.warn("Peer not in: {}", this.trustedPeers);
            return;
        }
        switch (type) {
            case (XmppMessageConstants.INFO_REQUEST_TYPE):
                log.info("Handling INFO request from {}", from);
                processInfoData(msg, chat);
                sendInfoResponse(chat);
                break;
            case (XmppMessageConstants.INFO_RESPONSE_TYPE):
                log.info("Handling INFO response from {}", from);
                processInfoData(msg, chat);
                break;
            default:
                log.warn("Did not understand type: "+type);
                break;
        }
    }

    /**
     * Extracts proxy addresses and the sender's MAC/certificate from an INFO
     * message, registering the sender's cert before adding them as a peer.
     */
    private void processInfoData(final Message msg, final Chat chat) {
        log.info("Processing INFO data from request or response.");
        final String proxyString =
            (String) msg.getProperty(XmppMessageConstants.PROXIES);
        if (StringUtils.isNotBlank(proxyString)) {
            log.info("Got proxies: {}", proxyString);
            final Scanner scan = new Scanner(proxyString);
            while (scan.hasNext()) {
                final String cur = scan.next();
                addProxy(cur, scan, chat);
            }
        }
        final String mac = (String) msg.getProperty(P2PConstants.MAC);
        final String base64Cert = (String) msg.getProperty(P2PConstants.CERT);
        log.info("Base 64 cert: {}", base64Cert);
        if (StringUtils.isNotBlank(base64Cert)) {
            log.info("Got certificate:\n"+
                new String(Base64.decodeBase64(base64Cert)));
            // First we need to add this certificate to the trusted
            // certificates on the proxy. Then we can add it to our list of
            // peers.
            final URI uri;
            try {
                uri = new URI(chat.getParticipant());
            } catch (final URISyntaxException e) {
                log.error("Could not create URI from: {}",
                    chat.getParticipant());
                return;
            }
            try {
                // Add the peer if we're able to add the cert.
                this.keyStoreManager.addBase64Cert(mac, base64Cert);
                synchronized (peerProxySet) {
                    if (!peerProxySet.contains(uri)) {
                        peerProxies.add(uri);
                        peerProxySet.add(uri);
                    }
                }
            } catch (final IOException e) {
                log.error("Could not add cert??", e);
            }
        }
    }

    /**
     * Verifies a "host:port" proxy entry by opening a test socket, then adds
     * it to the centralized pool. On connect failure, reports the error back
     * over the chat and, if this was the last candidate, notifies
     * {@link #onCouldNotConnect(InetSocketAddress)}.
     */
    private void addProxy(final String cur, final Scanner scan,
        final Chat chat) {
        log.info("Adding proxy: {}", cur);
        final String hostname = StringUtils.substringBefore(cur, ":");
        final int port =
            Integer.parseInt(StringUtils.substringAfter(cur, ":"));
        final InetSocketAddress isa = new InetSocketAddress(hostname, port);
        if (proxySet.contains(isa)) {
            log.info("We already know about this proxy");
            return;
        }
        final Socket sock = new Socket();
        try {
            // 60 second connect timeout for the reachability probe.
            sock.connect(isa, 60*1000);
            synchronized (proxySet) {
                if (!proxySet.contains(isa)) {
                    proxySet.add(isa);
                    proxies.add(isa);
                }
            }
        } catch (final IOException e) {
            log.error("Could not connect to: {}", isa);
            sendErrorMessage(chat, isa, e.getMessage());
            // If we don't have any more proxies to connect to,
            // revert to XMPP relay mode.
            if (!scan.hasNext()) {
                onCouldNotConnect(isa);
            }
        } finally {
            try {
                sock.close();
            } catch (final IOException e) {
                log.info("Exception closing", e);
            }
        }
    }

    /** Replies to an INFO request with our MAC and certificate. */
    private void sendInfoResponse(final Chat ch) {
        final Message msg = new Message();
        msg.setProperty(P2PConstants.MESSAGE_TYPE,
            XmppMessageConstants.INFO_RESPONSE_TYPE);
        // We want to separate out direct friend proxies here from the
        // proxies that are friends of friends. We only want to notify our
        // friends of other direct friend proxies, not friends of friends.
        msg.setProperty(XmppMessageConstants.PROXIES, "");
        msg.setProperty(P2PConstants.MAC, LanternUtils.getMacAddress());
        msg.setProperty(P2PConstants.CERT,this.keyStoreManager.getBase64Cert());
        try {
            ch.sendMessage(msg);
        } catch (final XMPPException e) {
            log.error("Could not send info message", e);
        }
    }

    /** @return true if the JID's resource marks it as a Lantern instance. */
    protected boolean isLanternJid(final String from) {
        // Here's the format we're looking for: "-la-"
        if (from.contains("/"+ID)) {
            log.info("Returning Lantern TRUE for from: {}", from);
            return true;
        }
        return false;
    }

    /** Drops a centralized proxy from the pool after a failed connection. */
    public void onCouldNotConnect(final InetSocketAddress proxyAddress) {
        log.info("COULD NOT CONNECT!! Proxy address: {}", proxyAddress);
        synchronized (this.proxySet) {
            this.proxySet.remove(proxyAddress);
            this.proxies.remove(proxyAddress);
        }
    }

    /** Drops a peer proxy after a failed connection. */
    public void onCouldNotConnectToPeer(final URI peerUri) {
        removePeerUri(peerUri);
    }

    /** Drops a peer proxy after a relay error. */
    public void onError(final URI peerUri) {
        removePeerUri(peerUri);
    }

    /** Removes a peer from both the membership set and the cycling queue. */
    private void removePeerUri(final URI peerUri) {
        log.info("Removing peer with URI: {}", peerUri);
        synchronized (this.peerProxySet) {
            this.peerProxySet.remove(peerUri);
            this.peerProxies.remove(peerUri);
        }
    }
}
package org.lemsml.jlems.core.type; import org.lemsml.jlems.core.expression.ParseError; import org.lemsml.jlems.core.logging.E; import org.lemsml.jlems.core.sim.ContentError; public class ParamValue implements Named { public FinalParam r_finalParam; public double value; public ParamValue(FinalParam dp) { r_finalParam = dp; value = 0; } public ParamValue(FinalParam dp, double d) { r_finalParam = dp; value = d; } public String getName() { return r_finalParam.getName(); } public FinalParam getFinalParam() { return r_finalParam; } @Override public String toString() { return "ParamValue: "+r_finalParam +" = "+value; } public void parseValue(String s) { E.missing(); } public void setValue(String atval, LemsCollection<Unit> units) throws ContentError, ParseError { DimensionalQuantity dq = QuantityReader.parseValue(atval, units); Dimension dtgt = r_finalParam.getDimension(); if (dtgt == null) { //throw new ContentError("No dimension for param " + r_finalParam); // Assume dimension="none" value = dq.getDoubleValue(); } else if (dtgt.isAny()) { value = dq.getDoubleValue(); } else if (dq.dimensionsMatch(dtgt)) { value = dq.getDoubleValue(); } else { throw new ContentError("Can't set parameter: "+getName()+" with dimensions " + r_finalParam.getDimension() + " with string " + atval + ", " + dq); } } public void copyFrom(ParamValue pv) throws ContentError { if (r_finalParam.equals(pv.r_finalParam)) { value = pv.value; } else { throw new ContentError("can't copy: different dimParams?"); } } public String stringValue() { return "" + value; } public String getDimensionName() { return r_finalParam.getDimension().getName(); } public double getDoubleValue() { return value; } public double getDoubleValue(Dimension d) throws ContentError { double ret = 0; if (r_finalParam.getDimension().matches(d)) { ret = value; } else { throw new ContentError("Wrong dimension for " + this + ": need " + d); } return ret; } public void setDoubleValue(double v) { value = v; } }
package org.oucs.gaboto.entities;

import java.lang.reflect.Method;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;

import org.apache.log4j.Logger;
import org.oucs.gaboto.entities.pool.EntityExistsCallback;
import org.oucs.gaboto.entities.pool.GabotoEntityPool;
import org.oucs.gaboto.entities.pool.PassiveEntitiesRequest;
import org.oucs.gaboto.entities.time.GabotoTimeBasedEntity;
import org.oucs.gaboto.entities.utils.GabotoEntityUtils;
import org.oucs.gaboto.entities.utils.SimpleLiteralProperty;
import org.oucs.gaboto.entities.utils.SimpleURIProperty;
import org.oucs.gaboto.exceptions.GabotoRuntimeException;
import org.oucs.gaboto.exceptions.IllegalAnnotationException;
import org.oucs.gaboto.model.Gaboto;
import org.oucs.gaboto.model.GabotoSnapshot;
import org.oucs.gaboto.reflection.RDFContainer;
import org.oucs.gaboto.reflection.RDFContainerTriplesGeneratorImpl;
import org.oucs.gaboto.timedim.TimeSpan;
import org.oucs.gaboto.vocabulary.OxPointsVocab;

import com.hp.hpl.jena.graph.Graph;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.graph.Triple;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.Property;
import com.hp.hpl.jena.rdf.model.Resource;

/**
 * Base class for all entities managed by Gaboto: a URI-identified,
 * time-scoped object whose properties are read and written via reflection
 * and which can serialize itself to RDF triples.
 */
abstract public class GabotoEntity implements RDFContainer {

    private static Logger logger =
        Logger.getLogger(GabotoEntity.class.getName());

    /**
     * Stores the timespan in which this entity is valid. If it is null, then
     * the entity is valid indefinitely.
     */
    private TimeSpan timespan = TimeSpan.EXISTENCE;

    /**
     * Stores the entity's URI.
     */
    private String uri;

    /**
     * Stores the pool this entity was created from.
     */
    private GabotoEntityPool createFromPool = null;

    /**
     * Tells whether or not passive entities were loaded.
     */
    private boolean passiveEntitiesLoaded = false;

    // Unresolved references to other entities, keyed by URI, together with
    // the callbacks to fire once each referenced entity becomes available.
    protected Map<String, Resource> missingEntityReferences =
        new HashMap<String, Resource>();
    protected Map<String, Collection<EntityExistsCallback>> missingEntityReferenceCallbacks =
        new HashMap<String, Collection<EntityExistsCallback>>();

    /**
     * Instantiate the GabotoEntity.
     *
     * <p>
     * OxPointsEntities should always use the default constructor and provide
     * methods for all their fields in order for Gaboto to be able to
     * automatically load and serialize them.
     * </p>
     */
    public GabotoEntity(){
    }

    /** Assigns a freshly generated URI to a new entity and returns it. */
    final static public <T extends GabotoEntity> T createNew(Gaboto gaboto, T newEntity){
        newEntity.setUri(gaboto.generateID());
        return newEntity;
    }

    /** Records the pool this entity originated from (used for lazy resolution). */
    final public void setCreatedFromPool(GabotoEntityPool pool){
        this.createFromPool = pool;
    }

    /**
     * Registers an as-yet-unresolved reference to another entity plus a
     * callback to invoke when that entity turns up in the pool.
     */
    final protected void addMissingReference(Resource res, EntityExistsCallback callback){
        missingEntityReferences.put(res.getURI(), res);
        if(! missingEntityReferenceCallbacks.containsKey(res.getURI())){
            missingEntityReferenceCallbacks.put(res.getURI(), new HashSet<EntityExistsCallback>());
        }
        missingEntityReferenceCallbacks.get(res.getURI()).add(callback);
    }

    /** Drops a pending reference once it has been resolved. */
    final protected void removeMissingReference(String uriToRemove){
        missingEntityReferences.remove(uriToRemove);
        //missingEntityReferenceCallbacks.remove(uri);
    }

    /**
     * Returns the entity's URI.
     *
     * @see #setUri(String)
     * @return the entity's URI.
     */
    final public String getUri() {
        return uri;
    }

    /**
     * Sets the entity's URI.
     *
     * @see #getUri()
     * @param uri this entity's new URI.
     */
    final public void setUri(String uri) {
        this.uri = uri;
    }

    /**
     * Stores the lifespan of this GabotoEntity.
     *
     * <p>
     * The lifespan does not necessary correspond to when the entity came into
     * being and was destroyed. It is the span in which the entity is valid in
     * the form at it is represented by this object.
     * </p>
     *
     * @see #getTimeSpan()
     *
     * @param ts The lifespan
     */
    final public void setTimeSpan(TimeSpan ts){
        // null normalizes to "exists indefinitely".
        if(ts == null)
            this.timespan = TimeSpan.EXISTENCE;
        else
            this.timespan = ts.canonicalize();
    }

    /**
     * Returns the entity's lifespan.
     *
     * @see #setTimeSpan(TimeSpan)
     * @return Returns the entity's lifespan.
     */
    final public TimeSpan getTimeSpan(){
        return this.timespan;
    }

    /**
     * Every entity has a defined type.
     *
     * @see OxPointsVocab
     *
     * @return The entity's type (ontology class).
     */
    abstract public String getType();

    /**
     * Hook for subclasses to expose indirect property accessors.
     *
     * @param propertyURI unused, overridden
     */
    protected List<Method> getIndirectMethodsForProperty(String propertyURI){
        return null;
    }

    /**
     * Tells whether direct references have been resolved for this entity.
     *
     * @return true if direct references have been resolved.
     */
    public boolean isDirectReferencesResolved() {
        return missingEntityReferences.isEmpty();
    }

    /**
     * Tries to resolve the direct references from the entity's pool (the pool
     * it was last added to).
     */
    public void resolveDirectReferences() {
        resolveDirectReferences(createFromPool);
    }

    /**
     * Tries to resolve the entity's direct references from the passed pool.
     * @param pool The pool to load the direct references from.
     */
    public void resolveDirectReferences(GabotoEntityPool pool) {
        if(isDirectReferencesResolved())
            return;
        if(pool == null)
            throw new IllegalStateException("The GabotoEntity was not provided with a pool object to resolve its references from.");

        // add missing references
        pool.addMissingReferencesForEntity(missingEntityReferences.values(), missingEntityReferenceCallbacks);
    }

    /**
     * Tells the entity that all passive properties were loaded.
     */
    public void setPassiveEntitiesLoaded(){
        this.passiveEntitiesLoaded = true;
    }

    /**
     *
     * @return true if passive entities were loaded.
     */
    public boolean isPassiveEntitiesLoaded(){
        return passiveEntitiesLoaded;
    }

    /**
     * Tries to load passive entities from the entity's pool.
     */
    public void loadPassiveEntities() {
        loadPassiveEntities(createFromPool);
    }

    /**
     * Tries to load passive entities from the passed pool.
     * @param pool The pool to load the passive entities from.
     */
    public void loadPassiveEntities(GabotoEntityPool pool) {
        if(passiveEntitiesLoaded)
            return;
        if(pool == null)
            throw new IllegalStateException("The GabotoEntity was not provided with a pool object to load the passive properties from.");

        pool.addPassiveEntitiesFor(this);
    }

    /**
     * Is called by subclasses to ask for passive entities that they claim
     * belong to them.
     *
     * @return null
     */
    public Collection<PassiveEntitiesRequest> getPassiveEntitiesRequest(){
        return null;
    }

    /**
     * The entity adds itself to the supplied Jena Model.
     *
     * @param model the JenaModel
     */
    public void addToModel(Model model) {
        Graph g = model.getGraph();

        List<Triple> triples = getTriplesFor();
        for(Triple t : triples)
            g.add(t);
    }

    /**
     * Returns the value of a property via reflection (searching in direct and
     * indirect properties, but not in passive properties)
     *
     * @param prop The property
     * @return The property's value (or null).
     */
    public Object getPropertyValue(Property prop){
        return getPropertyValue(prop.getURI(), false, true);
    }

    /**
     * Returns the value of a property via reflection
     *
     * @param prop The property
     * @param searchInPassiveProperties Search in passive properties
     * @param searchInIndirectProperties Search in indirect properties
     */
    public Object getPropertyValue(Property prop, boolean searchInPassiveProperties, boolean searchInIndirectProperties){
        return getPropertyValue(prop.getURI(), searchInPassiveProperties, searchInIndirectProperties);
    }

    /**
     * Returns the value of a property via reflection (searching in direct and
     * indirect properties, but not in passive properties)
     *
     * @param propURI The property's URI
     */
    public Object getPropertyValue(String propURI){
        return getPropertyValue(propURI, false, true);
    }

    /**
     * Returns the value of a property.
     *
     * <p>Lookup order: direct getter on this entity, then (optionally) the
     * passive getter, then (optionally) recursion through entities reachable
     * via indirect accessors, returning the first non-null value found.</p>
     *
     * @param propURI The property's URI.
     * @param searchInPassiveProperties True to search in passive properties.
     * @param searchInIndirectProperties True to search in indirect properties.
     */
    @SuppressWarnings("unchecked")
    public Object getPropertyValue(String propURI, boolean searchInPassiveProperties, boolean searchInIndirectProperties){
        Method directMethod = GabotoEntityUtils.getGetMethodFor(this, propURI);
        if(directMethod != null){
            try {
                Object value = directMethod.invoke(this, (Object[])null);
                if(value != null)
                    return value;
            } catch (Exception e) {
                throw new GabotoRuntimeException(e);
            }
        }

        // search in passive
        if(searchInPassiveProperties){
            Object value = getPassivePropertyValue(propURI);
            if(value != null)
                return value;
        }

        // search in indirect properties?
        if(! searchInIndirectProperties)
            return null;

        // look for indirect Method
        List<Method> indirectMethods = getIndirectMethodsForProperty(propURI);
        if(indirectMethods == null)
            return null;

        for(Method indirectMethod : indirectMethods){
            try {
                Object obj = indirectMethod.invoke(this, (Object[])null);
                if(obj != null){
                    // Indirect accessors must yield an entity or a collection
                    // of entities; anything else is a mis-annotation.
                    if(! (obj instanceof GabotoEntity) && !(obj instanceof Collection))
                        throw new IllegalAnnotationException(getClass());

                    if(obj instanceof GabotoEntity){
                        // cast
                        GabotoEntity entity = (GabotoEntity) obj;

                        // try to find answer at entity
                        Object value = entity.getPropertyValue(propURI, searchInPassiveProperties, searchInIndirectProperties);
                        if(value != null)
                            return value;
                    } else if(obj instanceof Collection){
                        // try to cast
                        try{
                            Collection<GabotoEntity> entityCollection = (Collection<GabotoEntity>) obj;
                            for(GabotoEntity entityInCollection : entityCollection){
                                // try to find answer at entity
                                Object value = entityInCollection.getPropertyValue(propURI, searchInPassiveProperties, searchInIndirectProperties);
                                if(null != value)
                                    return value;
                            }
                        } catch(ClassCastException e){
                            IllegalAnnotationException iae = new IllegalAnnotationException(getClass());
                            iae.initCause(e);
                            throw iae;
                        }
                    }
                }
            } catch (Exception e) {
                throw new GabotoRuntimeException(e);
            }
        }

        return null;
    }

    /**
     * Returns the value of a passive property.
     * @param prop The passive property.
     * @return The property's value
     */
    public Object getPassivePropertyValue(Property prop){
        return getPassivePropertyValue(prop.getURI());
    }

    /**
     * Returns the value of a passive property.
     * @param propURI The URI of the passive property.
     * @return The property's value
     */
    public Object getPassivePropertyValue(String propURI){
        Method m = GabotoEntityUtils.getPassiveGetMethodFor(this.getClass(), propURI);
        if(m != null){
            try {
                //System.err.println("For class " + this.getClass() +
                //    " found passive method " + m.getName() + ":" + m.invoke(this, (Object[])null));
                return m.invoke(this, (Object[])null);
            } catch (Exception e) {
                throw new GabotoRuntimeException(e);
            }
        }

        return null;
    }

    /**
     * Creates a map with all properties: direct (including static and
     * unstored), indirect and passive
     */
    public Map<String, Object> getAllProperties(){
        Map<String, Object> properties = getAllDirectProperties();
        properties.putAll(getAllPassiveProperties());
        properties.putAll(getAllIndirectProperties());

        return properties;
    }

    /**
     * Creates a map with property value pairs for all direct properties
     * (including static and unstored).
     * @return a map with property value pairs
     */
    public Map<String, Object> getAllDirectProperties(){
        Map<String, Object> properties = new HashMap<String, Object>();

        for(String prop : GabotoEntityUtils.getAllDirectProperties(this.getClass())){
            properties.put(prop, getPropertyValue(prop));
        }

        return properties;
    }

    /**
     * Creates a map with property value pairs for all passive properties.
     * @return a map with property value pairs.
     */
    public Map<String, Object> getAllPassiveProperties(){
        Map<String, Object> properties = new HashMap<String, Object>();

        for(String prop : GabotoEntityUtils.getAllPassiveProperties(this.getClass())){
            Object value = getPassivePropertyValue(prop);
            properties.put(prop, value);
            //System.err.println("Adding " + prop + " : (" + value.getClass() + ") " + value);
        }

        return properties;
    }

    /**
     * Creates a map with property value pairs for all indirect properties.
     * @return a map with property value pairs.
     */
    public Map<String, Object> getAllIndirectProperties(){
        Map<String, Object> properties = new HashMap<String, Object>();

        for(String prop : GabotoEntityUtils.getAllIndirectProperties(this.getClass())){
            properties.put(prop, getPropertyValue(prop, false, true));
        }

        return properties;
    }

    /**
     * Creates the java representation from an GabotoSnapshot and a resource
     * object..
     *
     * <p>
     * The method uses reflection mechanisms to find appropriate setter
     * methods.
     * </p>
     *
     * @param res An RDF Resource.
     * @param snapshot The GabotoSnapshot containing all the data.
     * @param pool The entity pool that is currently created (used for
     * references to other entities).
     *
     */
    public void loadFromSnapshot(Resource res, GabotoSnapshot snapshot, GabotoEntityPool pool) {
        // set uri
        this.setUri(res.getURI());

        // log
        logger.debug("Load entity " + this.getUri() + " from Snapshot.");

        // try to set time span
        this.setTimeSpan(snapshot.getTimeSpanForEntity(res));

        // load entity
        //RDFContainerLoaderImpl.getInstance().loadFromSnapshot(this, res, snapshot, pool);
    }

    /**
     * Creates a list of RDF triples that represent this {@link GabotoEntity}.
     *
     * <p>
     * Same as: entity.getTriplesFor(true);
     * </p>
     *
     * @see #getTriplesFor(boolean)
     * @return a list of triples that represent this entity.
     */
    public List<Triple> getTriplesFor() {
        return getTriplesFor(true);
    }

    /**
     * Creates a list of RDF triples that represent this {@link GabotoEntity}.
     *
     * @param includeType Whether or not a triple denoting the entities type
     * should be added to the list of triples.
     *
     * @return a list of triples that represent this entity.
     *
     */
    public List<Triple> getTriplesFor(boolean includeType) {
        // if no uri
        if(this.getUri() == null)
            throw new IllegalArgumentException("Entities need to have a defined uri");

        List<Triple> triples = RDFContainerTriplesGeneratorImpl.getInstance().getTriplesFor(this, Node.createURI(getUri()), includeType);

        return triples;
    }

    @Override
    public String toString(){
        TimeSpan ts = getTimeSpan();
        if(ts == null)
            ts = TimeSpan.EXISTENCE;
        return getUri() + " " + this.getClass().getSimpleName() + " : " + ts + "";
    }

    /**
     * Two entities are equal when class, URI and (if both are set) timespan
     * agree; falls back to identity when URIs are unavailable.
     */
    @Override
    public boolean equals(Object obj) {
        if(! (obj instanceof GabotoEntity))
            return false;
        GabotoEntity entity = (GabotoEntity) obj;

        if(getTimeSpan() != null && getUri() != null &&
           entity.getTimeSpan() != null && entity.getUri() != null)
            return getTimeSpan().equals(entity.getTimeSpan()) &&
                   getUri().equals(entity.getUri()) &&
                   getClass().equals(entity.getClass());

        if(getUri() != null && entity.getUri() != null)
            return getUri().equals(entity.getUri()) &&
                   getClass().equals(entity.getClass());

        return super.equals(obj);
    }

    // Derived from toString() so it stays consistent with equals(), which
    // compares URI, class and timespan -- the same fields toString() renders.
    @Override
    public int hashCode(){
        return toString().hashCode();
    }
}
package org.restheart.handlers;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
import org.restheart.db.DBCursorPool.EAGER_CURSOR_ALLOCATION_POLICY;
import org.restheart.utils.URLUtils;
import io.undertow.server.HttpServerExchange;
import io.undertow.util.HeaderValues;
import io.undertow.util.Headers;
import io.undertow.util.HttpString;
import io.undertow.util.Methods;
import java.io.File;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Deque;
import java.util.List;
import org.restheart.Bootstrapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Holds the state of a single request: the mongodb resource type addressed by
 * the (possibly url-rewritten) request path, the HTTP method, parsed query
 * parameters, db/collection properties and the request/response content.
 *
 * @author Andrea Di Cesare <andrea@softinstigate.com>
 */
public class RequestContext {
    private static final Logger LOGGER = LoggerFactory.getLogger(RequestContext.class);

    public enum TYPE {
        ERROR, ROOT, DB, COLLECTION, DOCUMENT, COLLECTION_INDEXES,
        INDEX, FILES_BUCKET, FILE, FILE_BINARY, AGGREGATION,
        SCHEMA, SCHEMA_STORE
    };

    public enum METHOD {
        GET, POST, PUT, DELETE, PATCH, OPTIONS, OTHER
    };

    public enum DOC_ID_TYPE {
        OID,        // ObjectId
        STRING_OID, // String eventually converted to ObjectId in case ObjectId.isValid() is true
        STRING,     // String
        NUMBER,     // any Number (including mongodb NumberLong)
        DATE,       // Date
        MINKEY,     // org.bson.types.MinKey;
        MAXKEY      // org.bson.types.MaxKey
    }

    public enum HAL_MODE {
        FULL,    // full mode
        F,       // alias for full
        COMPACT, // new compact mode
        C        // alias for compact
    }

    public enum ETAG_CHECK_POLICY {
        REQUIRED,            // always requires the etag, return PRECONDITION FAILED if missing
        REQUIRED_FOR_DELETE, // only requires the etag for DELETE, return PRECONDITION FAILED if missing
        OPTIONAL             // checks the etag only if provided by client via If-Match header
    }

    public static final String PAGE_QPARAM_KEY = "page";
    public static final String PAGESIZE_QPARAM_KEY = "pagesize";
    public static final String COUNT_QPARAM_KEY = "count";
    public static final String SORT_BY_QPARAM_KEY = "sort_by";
    public static final String FILTER_QPARAM_KEY = "filter";
    public static final String AGGREGATION_VARIABLES_QPARAM_KEY = "avars";
    public static final String KEYS_QPARAM_KEY = "keys";
    public static final String EAGER_CURSOR_ALLOCATION_POLICY_QPARAM_KEY = "eager";
    public static final String DOC_ID_TYPE_KEY = "id_type";
    public static final String SLASH = "/";
    public static final String PATCH = "PATCH";
    public static final String UNDERSCORE = "_";
    public static final String SYSTEM = "system.";
    public static final String LOCAL = "local";
    public static final String ADMIN = "admin";
    public static final String FS_CHUNKS_SUFFIX = ".chunks";
    public static final String FS_FILES_SUFFIX = ".files";
    public static final String _INDEXES = "_indexes";
    public static final String AGGREGATIONS_QPARAM_KEY = "aggrs";
    public static final String _SCHEMAS = "_schemas";
    public static final String _AGGREGATIONS = "_aggrs";
    public static final String BINARY_CONTENT = "binary";
    public static final String HAL_QPARAM_KEY = "hal";
    public static final String MAX_KEY_ID = "_MaxKey";
    public static final String MIN_KEY_ID = "_MinKey";
    public static final String ETAG_DOC_POLICY_METADATA_KEY = "etagDocPolicy";
    public static final String ETAG_POLICY_METADATA_KEY = "etagPolicy";
    public static final String ETAG_CHECK_QPARAM_KEY = "checkEtag";

    private final String whereUri;
    private final String whatUri;

    private final TYPE type;
    private final METHOD method;
    private final String[] pathTokens;

    private DBObject dbProps;
    private DBObject collectionProps;

    private DBObject content;

    private File file;

    private DBObject responseContent;

    private final List<String> warnings = new ArrayList<>();

    private int page = 1;
    private int pagesize = 100;
    private boolean count = false;
    private EAGER_CURSOR_ALLOCATION_POLICY cursorAllocationPolicy;
    private Deque<String> filter = null;
    private BasicDBObject aggregationVars = null; // aggregation vars
    private Deque<String> keys = null;
    private Deque<String> sortBy = null;
    private DOC_ID_TYPE docIdType = DOC_ID_TYPE.STRING_OID;
    private Object documentId;

    private String mappedUri = null;
    private String unmappedUri = null;

    private static final String NUL = Character.toString('\0');

    private final String etag;
    private boolean forceEtagCheck = false;

    /**
     * the HAL mode
     */
    private HAL_MODE halMode = HAL_MODE.FULL;

    /**
     *
     * @param exchange the url rewriting feature is implemented by the whatUri
     * and whereUri parameters.
     *
     * the exchange request path (mapped uri) is rewritten replacing the
     * whereUri string with the whatUri string the special whatUri value *
     * means any resource: the whereUri is replaced with /
     *
     * example 1
     *
     * whatUri = /mydb/mycollection whereUri = /
     *
     * then the requestPath / is rewritten to /mydb/mycollection
     *
     * example 2
     *
     * whatUri = * whereUri = /data
     *
     * then the requestPath /data is rewritten to /
     *
     * @param whereUri the uri to map to
     * @param whatUri the uri to map
     */
    public RequestContext(HttpServerExchange exchange, String whereUri, String whatUri) {
        // normalize both mapping uris to a leading-slash, no-trailing-slash form
        // ("*" is kept verbatim since it means "any resource")
        this.whereUri = URLUtils.removeTrailingSlashes(whereUri == null ? null
                : whereUri.startsWith("/") ? whereUri
                : "/" + whereUri);

        this.whatUri = URLUtils.removeTrailingSlashes(whatUri == null ? null
                : whatUri.startsWith("/") || "*".equals(whatUri) ? whatUri
                : "/" + whatUri);

        this.mappedUri = exchange.getRequestPath();
        this.unmappedUri = unmapUri(exchange.getRequestPath());

        // "/db/collection/document" --> { "", "mappedDbName", "collection", "document" }
        this.pathTokens = this.unmappedUri.split(SLASH);
        this.type = selectRequestType(pathTokens);

        this.method = selectRequestMethod(exchange.getRequestMethod());

        // etag from the If-Match request header, if any
        HeaderValues etagHvs = exchange.getRequestHeaders() == null
                ? null : exchange.getRequestHeaders().get(Headers.IF_MATCH);

        this.etag = etagHvs == null || etagHvs.getFirst() == null
                ? null : etagHvs.getFirst();

        this.forceEtagCheck = exchange.getQueryParameters().get(ETAG_CHECK_QPARAM_KEY) != null;
    }

    /**
     * Maps the undertow request method to a {@link METHOD}.
     *
     * @param _method the undertow request method
     * @return the corresponding METHOD (OTHER when unrecognized)
     */
    protected static METHOD selectRequestMethod(HttpString _method) {
        METHOD method;
        if (Methods.GET.equals(_method)) {
            method = METHOD.GET;
        } else if (Methods.POST.equals(_method)) {
            method = METHOD.POST;
        } else if (Methods.PUT.equals(_method)) {
            method = METHOD.PUT;
        } else if (Methods.DELETE.equals(_method)) {
            method = METHOD.DELETE;
        } else if (PATCH.equals(_method.toString())) {
            method = METHOD.PATCH;
        } else if (Methods.OPTIONS.equals(_method)) {
            method = METHOD.OPTIONS;
        } else {
            method = METHOD.OTHER;
        }
        return method;
    }

    /**
     * Determines the addressed resource type from the unmapped path tokens
     * (pathTokens[0] is always the empty string before the leading slash).
     *
     * @param pathTokens the tokens of the unmapped request path
     * @return the resource TYPE
     */
    protected static TYPE selectRequestType(String[] pathTokens) {
        TYPE type;
        if (pathTokens.length < 2) {
            type = TYPE.ROOT;
        } else if (pathTokens.length < 3) {
            type = TYPE.DB;
        } else if (pathTokens.length >= 3 && pathTokens[2].endsWith(FS_FILES_SUFFIX)) {
            // GridFS bucket resources (db/bucket.files[/...])
            if (pathTokens.length == 3) {
                type = TYPE.FILES_BUCKET;
            } else if (pathTokens.length == 4
                    && pathTokens[3].equalsIgnoreCase(_INDEXES)) {
                type = TYPE.COLLECTION_INDEXES;
            } else if (pathTokens.length == 4
                    && !pathTokens[3].equalsIgnoreCase(_INDEXES)) {
                type = TYPE.FILE;
            } else if (pathTokens.length > 4
                    && pathTokens[3].equalsIgnoreCase(_INDEXES)) {
                type = TYPE.INDEX;
            } else if (pathTokens.length > 4
                    && !pathTokens[3].equalsIgnoreCase(_INDEXES)
                    && !pathTokens[4].equalsIgnoreCase(BINARY_CONTENT)) {
                type = TYPE.FILE;
            } else if (pathTokens.length == 5
                    && pathTokens[4].equalsIgnoreCase(BINARY_CONTENT)) {
                // URL: <host>/db/bucket.file/xxx/binary
                type = TYPE.FILE_BINARY;
            } else {
                type = TYPE.DOCUMENT;
            }
        } else if (pathTokens.length >= 3 && pathTokens[2].endsWith(_SCHEMAS)) {
            if (pathTokens.length == 3) {
                type = TYPE.SCHEMA_STORE;
            } else {
                type = TYPE.SCHEMA;
            }
        } else if (pathTokens.length < 4) {
            type = TYPE.COLLECTION;
        } else if (pathTokens.length == 4 && pathTokens[3].equalsIgnoreCase(_INDEXES)) {
            type = TYPE.COLLECTION_INDEXES;
        } else if (pathTokens.length > 4 && pathTokens[3].equalsIgnoreCase(_INDEXES)) {
            type = TYPE.INDEX;
        } else if (pathTokens.length > 4 && pathTokens[3].equalsIgnoreCase(_AGGREGATIONS)) {
            type = TYPE.AGGREGATION;
        } else {
            type = TYPE.DOCUMENT;
        }
        return type;
    }

    /**
     * given a mapped uri (/some/mapping/coll) returns the canonical uri
     * (/db/coll) URLs are mapped to mongodb resources by using the mongo-mounts
     * configuration properties
     *
     * @param mappedUri
     * @return
     */
    public final String unmapUri(String mappedUri) {
        String ret = URLUtils.removeTrailingSlashes(mappedUri);

        if (whatUri.equals("*")) {
            if (!this.whereUri.equals(SLASH)) {
                ret = ret.replaceFirst("^" + this.whereUri, "");
            }
        } else if (!this.whereUri.equals(SLASH)) {
            ret = URLUtils.removeTrailingSlashes(
                    ret.replaceFirst("^" + this.whereUri, this.whatUri));
        } else {
            ret = URLUtils.removeTrailingSlashes(
                    URLUtils.removeTrailingSlashes(this.whatUri) + ret);
        }

        if (ret.isEmpty()) {
            ret = SLASH;
        }

        return ret;
    }

    /**
     * given a canonical uri (/db/coll) returns the mapped uri
     * (/some/mapping/uri) relative to this context. URLs are mapped to mongodb
     * resources via the mongo-mounts configuration properties
     *
     * @param unmappedUri
     * @return
     */
    public final String mapUri(String unmappedUri) {
        String ret = URLUtils.removeTrailingSlashes(unmappedUri);

        if (whatUri.equals("*")) {
            if (!this.whereUri.equals(SLASH)) {
                return this.whereUri + unmappedUri;
            }
        } else {
            ret = URLUtils.removeTrailingSlashes(
                    ret.replaceFirst("^" + this.whatUri, this.whereUri));
        }

        if (ret.isEmpty()) {
            ret = SLASH;
        }

        return ret;
    }

    /**
     * check if the parent of the requested resource is accessible in this
     * request context
     *
     * for instance if /mydb/mycollection is mapped to /coll then:
     *
     * the db is accessible from the collection the root is not accessible from
     * the collection (since / is actually mapped to the db)
     *
     * @return true if parent of the requested resource is accessible
     */
    public final boolean isParentAccessible() {
        return type == TYPE.DB
                ? mappedUri.split(SLASH).length > 1
                : mappedUri.split(SLASH).length > 2;
    }

    /**
     * @return type
     */
    public TYPE getType() {
        return type;
    }

    /**
     * @return DB Name
     */
    public String getDBName() {
        return getPathTokenAt(1);
    }

    /**
     * @return collection name
     */
    public String getCollectionName() {
        return getPathTokenAt(2);
    }

    /**
     * @return document id
     */
    public String getDocumentIdRaw() {
        return getPathTokenAt(3);
    }

    /**
     * @return index id
     */
    public String getIndexId() {
        return getPathTokenAt(4);
    }

    /**
     * @return the aggregation operation name
     */
    public String getAggregationOperation() {
        return getPathTokenAt(4);
    }

    /**
     * @return URI
     * @throws URISyntaxException
     */
    public URI getUri() throws URISyntaxException {
        // NOTE(review): the reduce seeds with SLASH and also joins with SLASH,
        // so the result starts with "//" (pathTokens[0] is ""); preserved as-is
        // since callers may rely on it — confirm intended.
        return new URI(Arrays.asList(pathTokens).stream()
                .reduce(SLASH, (t1, t2) -> t1 + SLASH + t2));
    }

    /**
     * @return method
     */
    public METHOD getMethod() {
        return method;
    }

    /**
     * @param dbName
     * @return true if the dbName is a reserved resource
     */
    public static boolean isReservedResourceDb(String dbName) {
        return dbName.equals(ADMIN)
                || dbName.equals(LOCAL)
                || dbName.startsWith(SYSTEM)
                || dbName.startsWith(UNDERSCORE);
    }

    /**
     * @param collectionName
     * @return true if the collectionName is a reserved resource
     */
    public static boolean isReservedResourceCollection(String collectionName) {
        return collectionName != null
                && !collectionName.equalsIgnoreCase(_SCHEMAS)
                && (collectionName.startsWith(SYSTEM)
                || collectionName.startsWith(UNDERSCORE)
                || collectionName.endsWith(FS_CHUNKS_SUFFIX));
    }

    /**
     * @param type
     * @param documentIdRaw
     * @return true if the documentIdRaw is a reserved resource
     */
    public static boolean isReservedResourceDocument(TYPE type, String documentIdRaw) {
        if (documentIdRaw == null) {
            return false;
        }

        // BUG FIX: the condition previously required documentIdRaw to equal
        // _AGGREGATIONS ("(type != TYPE.AGGREGATION &&
        // _AGGREGATIONS.equalsIgnoreCase(documentIdRaw)) && !(type ==
        // TYPE.AGGREGATION)"), which made almost every underscore-prefixed id
        // non-reserved. The intent (mirroring isReservedResourceDb/Collection):
        // underscore-prefixed ids are reserved, except the special _indexes,
        // _MinKey and _MaxKey ids, and except when the request addresses an
        // aggregation (whose ids are legitimately underscore-prefixed).
        return documentIdRaw.startsWith(UNDERSCORE)
                && !documentIdRaw.equalsIgnoreCase(_INDEXES)
                && !documentIdRaw.equalsIgnoreCase(MIN_KEY_ID)
                && !documentIdRaw.equalsIgnoreCase(MAX_KEY_ID)
                && !(type == TYPE.AGGREGATION);
    }

    /**
     * @return isReservedResource
     */
    public boolean isReservedResource() {
        if (type == TYPE.ROOT) {
            return false;
        }

        return isReservedResourceDb(getDBName())
                || isReservedResourceCollection(getCollectionName())
                || isReservedResourceDocument(type, getDocumentIdRaw());
    }

    /**
     * @return the whereUri
     */
    public String getUriPrefix() {
        return whereUri;
    }

    /**
     * @return the whatUri
     */
    public String getMappingUri() {
        return whatUri;
    }

    /**
     * @return the page
     */
    public int getPage() {
        return page;
    }

    /**
     * @param page the page to set
     */
    public void setPage(int page) {
        this.page = page;
    }

    /**
     * @return the pagesize
     */
    public int getPagesize() {
        return pagesize;
    }

    /**
     * @param pagesize the pagesize to set
     */
    public void setPagesize(int pagesize) {
        this.pagesize = pagesize;
    }

    /**
     * @return the count
     */
    public boolean isCount() {
        return count;
    }

    /**
     * @param count the count to set
     */
    public void setCount(boolean count) {
        this.count = count;
    }

    /**
     * @return the filter
     */
    public Deque<String> getFilter() {
        return filter;
    }

    /**
     * @param filter the filter to set
     */
    public void setFilter(Deque<String> filter) {
        this.filter = filter;
    }

    /**
     * @return the aggregationVars
     * @deprecated misspelled; use {@link #getAggregationVars()} — kept for
     * backward compatibility with existing callers
     */
    public BasicDBObject getAggreationVars() {
        return aggregationVars;
    }

    /**
     * @return the aggregationVars
     */
    public BasicDBObject getAggregationVars() {
        return aggregationVars;
    }

    /**
     * @param aggregationVars the aggregationVars to set
     */
    public void setAggregationVars(BasicDBObject aggregationVars) {
        this.aggregationVars = aggregationVars;
    }

    /**
     * @return the sortBy
     */
    public Deque<String> getSortBy() {
        return sortBy;
    }

    /**
     * @param sortBy the sortBy to set
     */
    public void setSortBy(Deque<String> sortBy) {
        this.sortBy = sortBy;
    }

    /**
     * @return the collectionProps
     */
    public DBObject getCollectionProps() {
        return collectionProps;
    }

    /**
     * @param collectionProps the collectionProps to set
     */
    public void setCollectionProps(DBObject collectionProps) {
        this.collectionProps = collectionProps;
    }

    /**
     * @return the dbProps
     */
    public DBObject getDbProps() {
        return dbProps;
    }

    /**
     * @param dbProps the dbProps to set
     */
    public void setDbProps(DBObject dbProps) {
        this.dbProps = dbProps;
    }

    /**
     * @return the content
     */
    public DBObject getContent() {
        return content;
    }

    /**
     * @param content the content to set
     */
    public void setContent(DBObject content) {
        this.content = content;
    }

    /**
     * @return the warnings
     */
    public List<String> getWarnings() {
        return warnings;
    }

    /**
     * @param warning
     */
    public void addWarning(String warning) {
        warnings.add(warning);
    }

    /**
     * The unmapped uri is the cononical uri of a mongodb resource (e.g.
     * /db/coll).
     *
     * @return the unmappedUri
     */
    public String getUnmappedRequestUri() {
        return unmappedUri;
    }

    /**
     * The mapped uri is the exchange request uri. This is "mapped" by the
     * mongo-mounts mapping paramenters.
     *
     * @return the mappedUri
     */
    public String getMappedRequestUri() {
        return mappedUri;
    }

    /**
     * @param index
     * @return pathTokens[index] if pathTokens.length > index, else null
     */
    private String getPathTokenAt(int index) {
        return pathTokens.length > index ? pathTokens[index] : null;
    }

    /**
     * @return the cursorAllocationPolicy
     */
    public EAGER_CURSOR_ALLOCATION_POLICY getCursorAllocationPolicy() {
        return cursorAllocationPolicy;
    }

    /**
     * @param cursorAllocationPolicy the cursorAllocationPolicy to set
     */
    public void setCursorAllocationPolicy(EAGER_CURSOR_ALLOCATION_POLICY cursorAllocationPolicy) {
        this.cursorAllocationPolicy = cursorAllocationPolicy;
    }

    /**
     * @return the docIdType
     */
    public DOC_ID_TYPE getDocIdType() {
        return docIdType;
    }

    /**
     * @param docIdType the docIdType to set
     */
    public void setDocIdType(DOC_ID_TYPE docIdType) {
        this.docIdType = docIdType;
    }

    /**
     * @param documentId the documentId to set
     */
    public void setDocumentId(Object documentId) {
        this.documentId = documentId;
    }

    /**
     * @return the documentId
     */
    public Object getDocumentId() {
        return documentId;
    }

    /**
     * @return the responseContent
     */
    public DBObject getResponseContent() {
        return responseContent;
    }

    /**
     * @param responseContent the responseContent to set
     */
    public void setResponseContent(DBObject responseContent) {
        this.responseContent = responseContent;
    }

    /**
     * @return the file
     */
    public File getFile() {
        return file;
    }

    /**
     * @param file the file to set
     */
    public void setFile(File file) {
        this.file = file;
    }

    /**
     * @return keys
     */
    public Deque<String> getKeys() {
        return keys;
    }

    /**
     * @param keys keys to set
     */
    public void setKeys(Deque<String> keys) {
        this.keys = keys;
    }

    /**
     * @return the halMode
     */
    public HAL_MODE getHalMode() {
        return halMode;
    }

    public boolean isFullHalMode() {
        return halMode == HAL_MODE.FULL || halMode == HAL_MODE.F;
    }

    /**
     * @param halMode the halMode to set
     */
    public void setHalMode(HAL_MODE halMode) {
        this.halMode = halMode;
    }

    public boolean isDbNameInvalid() {
        return isDbNameInvalid(getDBName());
    }

    public boolean isDbNameInvalid(String dbName) {
        return (dbName == null
                || dbName.contains(NUL)
                || dbName.contains(" ")
                || dbName.contains("/")
                || dbName.contains("\\")
                || dbName.contains(".")
                || dbName.contains("\"")
                || dbName.contains("$")
                || dbName.length() > 64
                || dbName.length() == 0);
    }

    public boolean isDbNameInvalidOnWindows() {
        return isDbNameInvalidOnWindows(getDBName());
    }

    public boolean isDbNameInvalidOnWindows(String dbName) {
        // BUG FIX: this called the no-arg isDbNameInvalid(), validating the
        // context's own db name instead of the dbName argument.
        return (isDbNameInvalid(dbName)
                || dbName.contains("*")
                || dbName.contains("<")
                || dbName.contains(">")
                || dbName.contains(":")
                || dbName.contains(".")
                || dbName.contains("|")
                || dbName.contains("?"));
    }

    public boolean isCollectionNameInvalid() {
        return isCollectionNameInvalid(getCollectionName());
    }

    public boolean isCollectionNameInvalid(String collectionName) {
        // collection starting with system. will return FORBIDDEN
        // NOTE(review): 'length() == 64' looks suspicious — a 65-char name
        // passes while a 64-char one fails (cf. isDbNameInvalid, which uses
        // '> 64'). Preserved as-is; confirm the intended limit.
        return (collectionName == null
                || collectionName.contains(NUL)
                || collectionName.contains("$")
                || collectionName.length() == 64);
    }

    public String getETag() {
        return etag;
    }

    /**
     * Parses an etag check policy value read from db/collection metadata.
     *
     * @param raw the raw metadata value
     * @return the policy, or null when the value is missing or not a valid
     * ETAG_CHECK_POLICY name
     */
    private static ETAG_CHECK_POLICY parseEtagCheckPolicy(Object raw) {
        if (raw instanceof String) {
            try {
                return ETAG_CHECK_POLICY.valueOf((String) raw);
            } catch (IllegalArgumentException iae) {
                return null;
            }
        }
        return null;
    }

    /**
     * Applies a non-null policy to this request's method: DELETE requires the
     * etag unless the policy is OPTIONAL; other methods require it only when
     * the policy is REQUIRED.
     *
     * @param policy the policy to apply (must not be null)
     * @return true if the etag check is required
     */
    private boolean applyEtagCheckPolicy(ETAG_CHECK_POLICY policy) {
        if (method == METHOD.DELETE) {
            return policy != ETAG_CHECK_POLICY.OPTIONAL;
        }
        return policy == ETAG_CHECK_POLICY.REQUIRED;
    }

    public boolean isETagCheckRequired() {
        // if client specifies the If-Match header, than check it
        if (getETag() != null) {
            return true;
        }

        // if client requires the check via qparam return true
        if (forceEtagCheck) {
            return true;
        }

        // for documents consider db and coll etagDocPolicy metadata
        if (type == TYPE.DOCUMENT || type == TYPE.FILE) {
            // check the coll metadata
            Object _policy = collectionProps != null
                    ? collectionProps.get(ETAG_DOC_POLICY_METADATA_KEY)
                    : null;

            LOGGER.trace("collection etag policy (from coll properties) {}", _policy);

            if (_policy == null) {
                // check the db metadata
                _policy = dbProps != null ? dbProps.get(ETAG_DOC_POLICY_METADATA_KEY) : null;
                LOGGER.trace("collection etag policy (from db properties) {}", _policy);
            }

            ETAG_CHECK_POLICY policy = parseEtagCheckPolicy(_policy);
            if (policy != null) {
                return applyEtagCheckPolicy(policy);
            }
        }

        // for db consider db etagPolicy metadata
        if (type == TYPE.DB && dbProps != null) {
            Object _policy = dbProps.get(ETAG_POLICY_METADATA_KEY);
            LOGGER.trace("db etag policy (from db properties) {}", _policy);

            ETAG_CHECK_POLICY policy = parseEtagCheckPolicy(_policy);
            if (policy != null) {
                return applyEtagCheckPolicy(policy);
            }
        }

        // for collection consider coll etagPolicy metadata
        // BUG FIX: this branch tested 'type == TYPE.DB', which made it
        // unreachable for collection requests (and dead code for dbs, already
        // handled by the previous branch).
        if (type == TYPE.COLLECTION && collectionProps != null) {
            Object _policy = collectionProps.get(ETAG_POLICY_METADATA_KEY);
            LOGGER.trace("coll etag policy (from coll properties) {}", _policy);

            ETAG_CHECK_POLICY policy = parseEtagCheckPolicy(_policy);
            if (policy != null) {
                return applyEtagCheckPolicy(policy);
            }
        }

        // apply the default policy from configuration
        ETAG_CHECK_POLICY dbP = Bootstrapper.getConfiguration().getDbEtagCheckPolicy();
        ETAG_CHECK_POLICY collP = Bootstrapper.getConfiguration().getCollEtagCheckPolicy();
        ETAG_CHECK_POLICY docP = Bootstrapper.getConfiguration().getDocEtagCheckPolicy();

        if (LOGGER.isTraceEnabled()) {
            LOGGER.trace("default etag db check (from conf) {}", dbP);
            LOGGER.trace("default etag coll check (from conf) {}", collP);
            LOGGER.trace("default etag doc check (from conf) {}", docP);
        }

        ETAG_CHECK_POLICY policy = null;

        if (null != type) {
            switch (type) {
                case DB:
                    policy = dbP;
                    break;
                case COLLECTION:
                case FILES_BUCKET:
                case SCHEMA_STORE:
                    policy = collP;
                    break;
                default:
                    policy = docP;
            }
        }

        if (policy != null) {
            return applyEtagCheckPolicy(policy);
        }

        return false;
    }
}
package org.testng.internal;

import org.testng.IClass;
import org.testng.IInstanceInfo;
import org.testng.IObjectFactory;
import org.testng.ITestContext;
import org.testng.ITestObjectFactory;
import org.testng.TestNGException;
import org.testng.annotations.IAnnotation;
import org.testng.collections.Lists;
import org.testng.collections.Maps;
import org.testng.internal.annotations.AnnotationHelper;
import org.testng.internal.annotations.IAnnotationFinder;
import org.testng.xml.XmlClass;
import org.testng.xml.XmlTest;

import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * This class creates an ITestClass from a test class.
 *
 * @author <a href="mailto:cedric@beust.com">Cedric Beust</a>
 */
public class TestNGClassFinder extends BaseClassFinder {
  private ITestContext m_testContext = null;
  private Map<Class, List<Object>> m_instanceMap = Maps.newHashMap();

  public TestNGClassFinder(ClassInfoMap cim,
                           Map<Class, List<Object>> instanceMap,
                           XmlTest xmlTest,
                           IConfiguration configuration,
                           ITestContext testContext)
  {
    m_testContext = testContext;

    if(null == instanceMap) {
      instanceMap= Maps.newHashMap();
    }

    IAnnotationFinder annotationFinder = configuration.getAnnotationFinder();
    ITestObjectFactory objectFactory = configuration.getObjectFactory();

    // Find all the new classes and their corresponding instances
    Set<Class<?>> allClasses= cim.getClasses();

    // Very first pass is to find an ObjectFactory; nothing else can be
    // created until then.
    if(objectFactory == null) {
      objectFactory = new ObjectFactoryImpl();
      outer:
      for (Class cls : allClasses) {
        try {
          if (null != cls) {
            for (Method m : cls.getMethods()) {
              IAnnotation a = annotationFinder.findAnnotation(m,
                  org.testng.annotations.IObjectFactoryAnnotation.class);
              if (null != a) {
                if (!ITestObjectFactory.class.isAssignableFrom(m.getReturnType())) {
                  throw new TestNGException("Return type of " + m + " is not IObjectFactory");
                }
                try {
                  Object instance = cls.newInstance();
                  // The factory method optionally takes the ITestContext as
                  // its single argument.
                  if (m.getParameterTypes().length > 0
                      && m.getParameterTypes()[0].equals(ITestContext.class)) {
                    objectFactory = (ITestObjectFactory) m.invoke(instance, testContext);
                  } else {
                    objectFactory = (ITestObjectFactory) m.invoke(instance);
                  }
                  break outer;
                }
                catch (Exception ex) {
                  throw new TestNGException("Error creating object factory", ex);
                }
              }
            }
          }
        } catch (NoClassDefFoundError e) {
          Utils.log("[TestNGClassFinder]", 1,
              "Unable to read methods on class " + cls.getName()
              + " - unable to resolve class reference " + e.getMessage());
          // Only fatal when the unresolvable class was explicitly requested
          // by the suite.
          for (XmlClass xmlClass : xmlTest.getXmlClasses()) {
            if (xmlClass.loadClasses() && xmlClass.getName().equals(cls.getName())) {
              throw e;
            }
          }
        }
      }
    }

    for(Class cls : allClasses) {
      if((null == cls)) {
        ppp("FOUND NULL CLASS IN FOLLOWING ARRAY:");
        int i= 0;
        for(Class c : allClasses) {
          // BUG FIX: the index was never incremented, so every entry was
          // printed as "0".
          ppp(" " + i++ + ": " + c);
        }

        continue;
      }

      if(isTestNGClass(cls, annotationFinder)) {
        List allInstances= instanceMap.get(cls);
        // BUG FIX: guard against an empty (non-null) instance list, which
        // previously threw IndexOutOfBoundsException on get(0).
        Object thisInstance= (null != allInstances && !allInstances.isEmpty())
            ? allInstances.get(0) : null;

        // If annotation class and instances are abstract, skip them
        if ((null == thisInstance) && Modifier.isAbstract(cls.getModifiers())) {
          Utils.log("", 5, "[WARN] Found an abstract class with no valid instance attached: " + cls);
          continue;
        }

        IClass ic= findOrCreateIClass(m_testContext, cls, cim.getXmlClass(cls), thisInstance,
            xmlTest, annotationFinder, objectFactory);
        if(null != ic) {
          // NOTE(review): theseInstances[0] assumes at least one instance is
          // always returned — confirm IClass.getInstances(true) guarantees it.
          Object[] theseInstances = ic.getInstances(false);
          if (theseInstances.length == 0) {
            theseInstances = ic.getInstances(true);
          }
          Object instance= theseInstances[0];
          putIClass(cls, ic);

          ConstructorOrMethod factoryMethod =
              ClassHelper.findDeclaredFactoryMethod(cls, annotationFinder);
          if (null != factoryMethod) {
            FactoryMethod fm = new FactoryMethod( /* cls, */
              factoryMethod,
              instance,
              xmlTest,
              annotationFinder,
              m_testContext);
            ClassInfoMap moreClasses = new ClassInfoMap();

            {
//            ppp("INVOKING FACTORY " + fm + " " + this.hashCode());
              Object[] instances= fm.invoke();

              //
              // If the factory returned IInstanceInfo, get the class from it,
              // otherwise, just call getClass() on the returned instances
              //
              if (instances.length > 0) {
                if (instances[0] != null) {
                  Class elementClass = instances[0].getClass();
                  if(IInstanceInfo.class.isAssignableFrom(elementClass)) {
                    for(Object o : instances) {
                      IInstanceInfo ii = (IInstanceInfo) o;
                      addInstance(ii.getInstanceClass(), ii.getInstance());
                      moreClasses.addClass(ii.getInstanceClass());
                    }
                  }
                  else {
                    for (int i = 0; i < instances.length; i++) {
                      Object o = instances[i];
                      if (o == null) {
                        // BUG FIX: the two literals concatenated without a
                        // space, producing "null instanceat index".
                        throw new TestNGException("The factory " + fm
                            + " returned a null instance at index " + i);
                      }
                      else {
                        addInstance(o.getClass(), o);
                        if(!classExists(o.getClass())) {
                          moreClasses.addClass(o.getClass());
                        }
                      }
                    }
                  }
                }
              }
            }

            if(moreClasses.getSize() > 0) {
              TestNGClassFinder finder=
                new TestNGClassFinder(moreClasses, m_instanceMap, xmlTest, configuration,
                    m_testContext);

              IClass[] moreIClasses= finder.findTestClasses();
              for(IClass ic2 : moreIClasses) {
                putIClass(ic2.getRealClass(), ic2);
              }
            } // if moreClasses.size() > 0
          }
        } // null != ic
      }
      else {
        Utils.log("TestNGClassFinder", 3, "SKIPPING CLASS " + cls
            + " no TestNG annotations found");
      } // if not TestNG class
    } // for

    // Add all the instances we found to their respective IClasses
    for(Class c : m_instanceMap.keySet()) {
      List<Object> instances= m_instanceMap.get(c);
      for(Object instance : instances) {
        IClass ic= getIClass(c);
        if(null != ic) {
          ic.addInstance(instance);
        }
      }
    }
  }

  /**
   * @return true if this class contains TestNG annotations (either on itself
   * or on a superclass).
   */
  public static boolean isTestNGClass(Class c, IAnnotationFinder annotationFinder) {
    Class[] allAnnotations= AnnotationHelper.getAllAnnotations();
    Class cls = c;

    try {
      for(Class annotation : allAnnotations) {
        // Walk up the superclass chain
        for (cls = c; cls != null; cls = cls.getSuperclass()) {
          // Try on the methods
          for (Method m : cls.getMethods()) {
            IAnnotation ma= annotationFinder.findAnnotation(m, annotation);
            if(null != ma) {
              return true;
            }
          }

          // Try on the class
          IAnnotation a= annotationFinder.findAnnotation(cls, annotation);
          if(null != a) {
            return true;
          }

          // Try on the constructors
          for (Constructor ctor : cls.getConstructors()) {
            IAnnotation ca= annotationFinder.findAnnotation(ctor, annotation);
            if(null != ca) {
              return true;
            }
          }
        }
      }

      return false;
    }
    catch (NoClassDefFoundError e) {
      Utils.log("[TestNGClassFinder]", 1,
          "Unable to read methods on class " + cls.getName()
          + " - unable to resolve class reference " + e.getMessage());
      return false;
    }
  }

  // Registers an instance under its class, creating the list lazily.
  private void addInstance(Class clazz, Object o) {
    List<Object> list= m_instanceMap.get(clazz);

    if(null == list) {
      list= Lists.newArrayList();
      m_instanceMap.put(clazz, list);
    }

    list.add(o);
  }

  public static void ppp(String s) {
    System.out.println("[TestNGClassFinder] " + s);
  }
}
package oshi.hardware.common;

import java.net.NetworkInterface;
import java.net.SocketException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.List;

import javax.json.Json;
import javax.json.JsonArrayBuilder;
import javax.json.JsonBuilderFactory;
import javax.json.JsonObject;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import oshi.hardware.NetworkIF;
import oshi.hardware.Networks;
import oshi.json.NullAwareJsonObjectBuilder;

/**
 * Network interfaces implementation.
 *
 * @author enrico[dot]bianchi[at]gmail[dot]com
 */
public abstract class AbstractNetworks implements Networks {

    private static final Logger LOG = LoggerFactory.getLogger(AbstractNetworks.class);

    // Assigned once; safe to mark final (private, never reassigned).
    private final JsonBuilderFactory jsonFactory = Json.createBuilderFactory(null);

    /**
     * {@inheritDoc}
     */
    @Override
    public NetworkIF[] getNetworks() {
        List<NetworkIF> result = new ArrayList<>();

        try {
            Enumeration<NetworkInterface> interfaces = NetworkInterface.getNetworkInterfaces();
            // BUG FIX: getNetworkInterfaces() is documented to return null
            // when no interfaces are found; Collections.list(null) would NPE.
            if (interfaces != null) {
                for (NetworkInterface netint : Collections.list(interfaces)) {
                    // Only report non-loopback interfaces that have a hardware
                    // (MAC) address.
                    if (!netint.isLoopback() && netint.getHardwareAddress() != null) {
                        NetworkIF netIF = new NetworkIF();
                        netIF.setNetworkInterface(netint);
                        this.updateNetworkStats(netIF);
                        result.add(netIF);
                    }
                }
            }
        } catch (SocketException ex) {
            // Parameterized SLF4J logging instead of string concatenation;
            // the rendered message is unchanged.
            LOG.error("Socket exception when retrieving network interfaces: {}", ex.getMessage());
        }

        return result.toArray(new NetworkIF[result.size()]);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public JsonObject toJSON() {
        JsonArrayBuilder netArray = jsonFactory.createArrayBuilder();
        for (NetworkIF store : getNetworks()) {
            netArray.add(store.toJSON());
        }
        return NullAwareJsonObjectBuilder.wrap(jsonFactory.createObjectBuilder())
                .add("networks", netArray).build();
    }
}
package se.kth.csc.controller; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; import org.joda.time.DateTime; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Controller; import org.springframework.transaction.annotation.Transactional; import org.springframework.web.bind.annotation.ModelAttribute; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.servlet.ModelAndView; import se.kth.csc.auth.Role; import se.kth.csc.model.Account; import se.kth.csc.model.Queue; import se.kth.csc.model.QueuePosition; import se.kth.csc.payload.QueueCreationInfo; import se.kth.csc.persist.AccountStore; import se.kth.csc.persist.QueuePositionStore; import se.kth.csc.persist.QueueStore; import javax.servlet.http.HttpServletRequest; import java.io.NotActiveException; import java.security.Principal; import java.util.List; @Controller @RequestMapping(value = "/queue") public class QueueController { private static final Logger log = LoggerFactory.getLogger(QueueController.class); private final ObjectMapper objectMapper; private final QueueStore queueStore; private final AccountStore accountStore; private final QueuePositionStore queuePositionStore; protected QueueController() { // Needed for injection objectMapper = null; queueStore = null; accountStore = null; queuePositionStore = null; } @Autowired public QueueController( ObjectMapper objectMapper, QueueStore queueStore, AccountStore accountStore, QueuePositionStore queuePositionStore) { this.objectMapper = objectMapper; this.queueStore = queueStore; this.accountStore = accountStore; this.queuePositionStore = queuePositionStore; } @RequestMapping(value = 
"/list", method = RequestMethod.GET) public ModelAndView list(HttpServletRequest request) throws JsonProcessingException { // Get all available queues List<Queue> queues = queueStore.fetchAllQueues(); if (!request.isUserInRole("admin")) { List<Queue> userQueue = new ArrayList<Queue>(); for (Queue q :queues) { if (q.isActive()) { userQueue.add(q); } } queues = userQueue; } String queuesJson = objectMapper.writerWithView(Queue.class).writeValueAsString(queues); return new ModelAndView("queue/list", ImmutableMap.of("queues", queues, "queuesJson", queuesJson)); } private Account getCurrentAccount(Principal principal) { return accountStore.fetchAccountWithPrincipalName(principal.getName()); } @Transactional @RequestMapping(value = "/create", method = RequestMethod.POST) public String create(@ModelAttribute("queueCreationInfo") QueueCreationInfo queueCreationInfo, HttpServletRequest request, Principal principal) throws ForbiddenException { if (request.isUserInRole(Role.SUPER_ADMIN.getAuthority())) { Queue queue = new Queue(); queue.setName(queueCreationInfo.getName()); queue.setOwner(getCurrentAccount(principal)); queue.setActive(true); queueStore.storeQueue(queue); return "redirect:/queue/list"; } else { throw new ForbiddenException(); } } @RequestMapping(value = "/{id}", method = RequestMethod.GET) public ModelAndView show(@PathVariable("id") int id, Principal principal) throws NotFoundException, JsonProcessingException { Queue queue = queueStore.fetchQueueWithId(id); if (queue == null) { throw new NotFoundException(); } String queueJson = objectMapper.writerWithView(Queue.class).writeValueAsString(queue); return new ModelAndView("queue/show", ImmutableMap.of("queue", queue, "queueJson", queueJson)); } @Transactional @RequestMapping(value = "/{id}/remove", method = RequestMethod.POST) public String remove(@PathVariable("id") int id, HttpServletRequest request) throws NotFoundException, ForbiddenException { if (request.isUserInRole(Role.SUPER_ADMIN.getAuthority())) { 
Queue queue = queueStore.fetchQueueWithId(id); if (queue == null) { throw new NotFoundException(); } queueStore.removeQueue(queue); return "redirect:/queue/list"; } else { throw new ForbiddenException(); } } @Transactional @RequestMapping(value = "/{id}/position/create", method = RequestMethod.POST) public String createPosition(@PathVariable("id") int id, Principal principal) throws Exception { Queue queue = queueStore.fetchQueueWithId(id); if (queue == null) { throw new NotFoundException(); } if (!queue.isActive()) { throw new Exception("Tried to join queue which is not active!"); } QueuePosition queuePosition = new QueuePosition(); queuePosition.setQueue(queue); queuePosition.setAccount(getCurrentAccount(principal)); queuePosition.setStartTime(DateTime.now()); queuePositionStore.storeQueuePosition(queuePosition); queue.getPositions().add(queuePosition); return "redirect:/queue/" + id; } @Transactional @RequestMapping(value = "/{id}/position/{positionId}/remove", method = {RequestMethod.POST}) public String deletePosition(@PathVariable("id") int id, @PathVariable("positionId") int positionId, HttpServletRequest request, Principal principal) throws Exception { Account account = getCurrentAccount(principal); QueuePosition queuePosition = queuePositionStore.fetchQueuePositionWithId(positionId); if (queuePosition == null) { throw new NotFoundException(); } if (request.isUserInRole(Role.ADMIN.getAuthority()) || queuePosition.getAccount().equals(account)) { Queue queue = queueStore.fetchQueueWithId(id); if (queue == null) { throw new NotFoundException(); } queue.getPositions().remove(queuePosition); queuePositionStore.removeQueuePosition(queuePosition); return "redirect:/queue/" + id; } else { throw new ForbiddenException(); } } @Transactional @RequestMapping(value = "/{id}/position/{positionId}/comment", method = {RequestMethod.POST}) public String updateComment(@PathVariable("id") int id, @PathVariable("positionId") int positionId, String comment) throws 
NotFoundException { QueuePosition queuePosition = queuePositionStore.fetchQueuePositionWithId(positionId); Queue queue = queueStore.fetchQueueWithId(id); if (queuePosition == null || queue == null) { throw new NotFoundException(); } queuePosition.setComment(comment); return "redirect:/queue/" + id; } @Transactional @RequestMapping(value = "/{id}/close", method = {RequestMethod.POST}) public String closeQueue(@PathVariable("id") int id, HttpServletRequest request) throws Exception { if (request.isUserInRole("admin")) { Queue queue = queueStore.fetchQueueWithId(id); queue.setActive(false); for (QueuePosition pos : queue.getPositions ()) { queuePositionStore.removeQueuePosition(queuePositionStore.fetchQueuePositionWithId(pos.getId())); } queue.getPositions().clear(); return "redirect:/queue/list"; } else { throw new NotOwnerException(); } } @Transactional @RequestMapping(value = "/{id}/open", method = {RequestMethod.POST}) public String openQueue(@PathVariable("id") int id, HttpServletRequest request) throws Exception { if (request.isUserInRole("admin")) { Queue queue = queueStore.fetchQueueWithId(id); queue.setActive(true); return "redirect:/queue/list"; } else { throw new NotOwnerException(); } } }
package seedu.address.model.tag; import static java.util.Objects.requireNonNull; import static seedu.address.commons.util.CollectionUtil.requireAllNonNull; import java.util.Collection; import java.util.HashSet; import java.util.Iterator; import java.util.Set; import javafx.collections.FXCollections; import javafx.collections.ObservableList; import seedu.address.commons.core.UnmodifiableObservableList; import seedu.address.commons.exceptions.DuplicateDataException; import seedu.address.commons.util.CollectionUtil; /** * A list of tags that enforces no nulls and uniqueness between its elements. * * Supports minimal set of list operations for the app's features. * * @see Tag#equals(Object) * @see CollectionUtil#elementsAreUnique(Collection) */ public class UniqueTagList implements Iterable<Tag> { private final ObservableList<Tag> internalList = FXCollections.observableArrayList(); /** * Constructs empty TagList. */ public UniqueTagList() {} /** * Creates a UniqueTagList using given tags. * Enforces no nulls. */ public UniqueTagList(Set<Tag> tags) { requireAllNonNull(tags); internalList.addAll(tags); assert CollectionUtil.elementsAreUnique(internalList); } /** * Returns all tags in this list as a Set. * This set is mutable and change-insulated against the internal list. */ public Set<Tag> toSet() { assert CollectionUtil.elementsAreUnique(internalList); return new HashSet<>(internalList); } /** * Replaces the Tags in this list with those in the argument tag list. */ public void setTags(UniqueTagList replacement) { this.internalList.setAll(replacement.internalList); assert CollectionUtil.elementsAreUnique(internalList); } public void setTags(Collection<Tag> tags) throws DuplicateTagException { requireAllNonNull(tags); if (!CollectionUtil.elementsAreUnique(tags)) { throw new DuplicateTagException(); } internalList.setAll(tags); assert CollectionUtil.elementsAreUnique(internalList); } /** * Ensures every tag in the argument list exists in this object. 
*/ public void mergeFrom(UniqueTagList from) { final Set<Tag> alreadyInside = this.toSet(); from.internalList.stream() .filter(tag -> !alreadyInside.contains(tag)) .forEach(internalList::add); assert CollectionUtil.elementsAreUnique(internalList); } /** * Returns true if the list contains an equivalent Tag as the given argument. */ public boolean contains(Tag toCheck) { requireNonNull(toCheck); return internalList.contains(toCheck); } /** * Adds a Tag to the list. * * @throws DuplicateTagException if the Tag to add is a duplicate of an existing Tag in the list. */ public void add(Tag toAdd) throws DuplicateTagException { requireNonNull(toAdd); if (contains(toAdd)) { throw new DuplicateTagException(); } internalList.add(toAdd); assert CollectionUtil.elementsAreUnique(internalList); } @Override public Iterator<Tag> iterator() { assert CollectionUtil.elementsAreUnique(internalList); return internalList.iterator(); } public UnmodifiableObservableList<Tag> asObservableList() { assert CollectionUtil.elementsAreUnique(internalList); return new UnmodifiableObservableList<>(internalList); } @Override public boolean equals(Object other) { assert CollectionUtil.elementsAreUnique(internalList); return other == this // short circuit if same object || (other instanceof UniqueTagList // instanceof handles nulls && this.internalList.equals(((UniqueTagList) other).internalList)); } public boolean equalsOrderInsensitive(UniqueTagList other) { assert CollectionUtil.elementsAreUnique(internalList); assert CollectionUtil.elementsAreUnique(other.internalList); return this == other || new HashSet<>(this.internalList).equals(new HashSet<>(other.internalList)); } @Override public int hashCode() { assert CollectionUtil.elementsAreUnique(internalList); return internalList.hashCode(); } /** * Signals that an operation would have violated the 'no duplicates' property of the list. 
*/ public static class DuplicateTagException extends DuplicateDataException { protected DuplicateTagException() { super("Operation would result in duplicate tags"); } } }
package seedu.doit.logic.commands;

import java.util.Set;

/**
 * Finds and lists all tasks in task manager whose name contains any of the argument keywords.
 * Keyword matching is case sensitive.
 */
public class FindCommand extends Command {

    public static final String COMMAND_WORD = "find";

    public static final String MESSAGE_USAGE = COMMAND_WORD + ": Finds all tasks whose names contain any of "
            + "the specified keywords (case-sensitive) and displays them as a list with index numbers.\n"
            + "Parameters: KEYWORD [MORE_KEYWORDS]...\n"
            + "Example: " + COMMAND_WORD + " Task A Project 3 Print 10 pages";

    private final Set<String> namekeywords;
    private final Set<String> startTimekeywords;
    private final Set<String> endTimekeywords;
    private final Set<String> prioritykeywords;
    private final Set<String> tagskeywords;
    private final Set<String> desckeywords;

    /**
     * Creates a FindCommand matching on the given keyword sets.
     *
     * BUG FIX: the original constructor assigned {@code namekeywords} to all
     * six fields, silently discarding the start-time, end-time, priority,
     * tag and description keywords. Each field now receives its own argument.
     */
    public FindCommand(Set<String> namekeywords, Set<String> startTimekeywords, Set<String> endTimekeywords,
                       Set<String> prioritykeywords, Set<String> tagskeywords, Set<String> desckeywords) {
        this.namekeywords = namekeywords;
        this.startTimekeywords = startTimekeywords;
        this.endTimekeywords = endTimekeywords;
        this.prioritykeywords = prioritykeywords;
        this.tagskeywords = tagskeywords;
        this.desckeywords = desckeywords;
    }

    @Override
    public CommandResult execute() {
        // Only name, priority and description keywords participate in filtering;
        // the time and tag keyword sets are currently unused by the model API.
        model.updateFilteredTaskList(namekeywords, prioritykeywords, desckeywords);
        return new CommandResult(getMessageForTaskListShownSummary(model.getFilteredTaskList().size()));
    }
}
package seedu.tache.logic.commands; import java.util.HashSet; import java.util.Optional; import java.util.Set; import seedu.tache.commons.exceptions.IllegalValueException; import seedu.tache.logic.commands.exceptions.CommandException; import seedu.tache.model.tag.Tag; import seedu.tache.model.tag.UniqueTagList; import seedu.tache.model.task.Date; import seedu.tache.model.task.DetailedTask; import seedu.tache.model.task.Name; import seedu.tache.model.task.Task; import seedu.tache.model.task.Time; import seedu.tache.model.task.UniqueTaskList; /** * Adds a task to the task manager. */ public class AddCommand extends Command { public static final String COMMAND_WORD = "add"; public static final String MESSAGE_USAGE = COMMAND_WORD + ": Adds a task to the task manager. " + "Parameters: NAME [;START DATE & TIME] [;END DATE & TIME] [;TAG...]\n" + "Example: " + COMMAND_WORD + " Orientation week camp; 25/7/16 0800; 28/7/2016 0900; HighPriority; Events"; public static final String MESSAGE_SUCCESS = "New task added: %1$s"; public static final String MESSAGE_DUPLICATE_TASK = "This task already exists in the task manager"; private final Task toAdd; public AddCommand(String name, Set<String> tags) throws IllegalValueException { final Set<Tag> tagSet = new HashSet<>(); for (String tagName : tags) { tagSet.add(new Tag(tagName)); } this.toAdd = new Task( new Name(name), new UniqueTagList(tagSet) ); } public AddCommand(String name, String startDate, String startTime, Optional<String> endDate, Optional<String> endTime, Set<String> tags) throws IllegalValueException { final Set<Tag> tagSet = new HashSet<>(); for (String tagName : tags) { tagSet.add(new Tag(tagName)); } Name _name = new Name(name); Date _startDate = new Date(startDate); Time _startTime = new Time(startTime); Date _endDate = null; if (endDate.isPresent()) { _endDate = new Date(endDate.get()); } Time _endTime = null; if (endTime.isPresent()) { _endTime = new Time(endTime.get()); } UniqueTagList _tagList = new 
UniqueTagList(tagSet); this.toAdd = new DetailedTask(_name, _startDate, _endDate, _startTime, _endTime, _tagList); } @Override public CommandResult execute() throws CommandException { assert model != null; try { model.addTask(toAdd); return new CommandResult(String.format(MESSAGE_SUCCESS, toAdd)); } catch (UniqueTaskList.DuplicateTaskException e) { throw new CommandException(MESSAGE_DUPLICATE_TASK); } } }
package seedu.taskell.model.task;

import seedu.taskell.commons.exceptions.IllegalValueException;

/**
 * Represents a Task's priority in the task manager.
 * Guarantees: immutable; is valid as declared in {@link #isValidPriority(String)}
 */
public class TaskPriority {

    public static final String MESSAGE_TASK_PRIORITY_CONSTRAINTS = "Task priority should range from 0-3"
            + "\nThere should not be more than 1 priority level for a task";

    // NOTE(review): this regex matches any non-empty string and is NOT used by
    // isValidPriority below; it appears to be dead, kept only because it is
    // part of the public API — confirm before removing.
    public static final String TASK_PRIORITY_VALIDATION_REGEX = ".+";

    // Command-line prefix used to mark a priority argument, e.g. "p/2".
    public static final String PREFIX = "p/";

    // The four accepted priority levels, encoded as single-digit strings.
    public static final String NO_PRIORITY = "0";
    public static final String LOW_PRIORITY = "1";
    public static final String MEDIUM_PRIORITY = "2";
    public static final String HIGH_PRIORITY = "3";

    public static final String DEFAULT_PRIORITY = NO_PRIORITY;

    // Immutable priority value; always one of the four constants above.
    public final String taskPriority;

    /**
     * Creates a TaskPriority from the given string.
     *
     * @throws IllegalValueException if the given string is not one of "0"-"3".
     */
    public TaskPriority(String priority) throws IllegalValueException {
        assert priority != null;
        if (!isValidPriority(priority)) {
            throw new IllegalValueException(MESSAGE_TASK_PRIORITY_CONSTRAINTS);
        }
        this.taskPriority = priority;
    }

    /**
     * Returns true if a given string is a valid task priority
     * (exactly one of the four defined priority levels).
     */
    public static boolean isValidPriority(String priority) {
        return priority.equals(NO_PRIORITY) || priority.equals(LOW_PRIORITY)
                || priority.equals(MEDIUM_PRIORITY) || priority.equals(HIGH_PRIORITY);
    }

    @Override
    public String toString() {
        return taskPriority;
    }

    @Override
    public boolean equals(Object other) {
        return other == this // short circuit if same object
                || (other instanceof TaskPriority // instanceof handles nulls
                && this.taskPriority.equals(((TaskPriority) other).taskPriority)); // state check
    }

    @Override
    public int hashCode() {
        return taskPriority.hashCode();
    }
}
package skadistats.clarity.examples.test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import skadistats.clarity.decoder.BitStream; import skadistats.clarity.decoder.s2.FieldOpType; import skadistats.clarity.decoder.s2.HuffmanTree; import skadistats.clarity.decoder.unpacker.Unpacker; import skadistats.clarity.model.FieldPath; import skadistats.clarity.model.StringTable; import skadistats.clarity.model.s2.S2DTClass; import skadistats.clarity.model.s2.field.FieldProperties; import skadistats.clarity.model.s2.field.FieldType; import skadistats.clarity.processor.reader.OnTickStart; import skadistats.clarity.processor.runner.Context; import skadistats.clarity.processor.runner.SimpleRunner; import skadistats.clarity.processor.sendtables.DTClasses; import skadistats.clarity.processor.sendtables.UsesDTClasses; import skadistats.clarity.processor.stringtables.StringTables; import skadistats.clarity.processor.stringtables.UsesStringTable; import skadistats.clarity.source.MappedFileSource; import skadistats.clarity.util.TextTable; import java.io.*; import java.util.ArrayList; import java.util.List; @UsesDTClasses @UsesStringTable("instancebaseline") public class Main { public static final HuffmanTree HUFFMAN_TREE = new HuffmanTree(); private final Logger log = LoggerFactory.getLogger(Main.class.getPackage().getClass()); @OnTickStart public void onTickStart(Context ctx, boolean synthetic) throws InterruptedException, FileNotFoundException, UnsupportedEncodingException { if (ctx.getTick() == 30000) { //System.out.println(new HuffmanGraph(FieldPathDecoder.HUFFMAN_TREE).generate()); StringTables stringTables = ctx.getProcessor(StringTables.class); DTClasses dtClasses = ctx.getProcessor(DTClasses.class); StringTable baseline = stringTables.forName("instancebaseline"); File dir = new File("baselines/" + ctx.getBuildNumber()); if (!dir.exists()) { dir.mkdirs(); } PrintStream[] ps = new PrintStream[] { System.out, null, }; List<String> onlyThese = new ArrayList<>(); 
//onlyThese = Arrays.asList("CBaseAnimating"); Exception exx; for (int idx = 0; idx < baseline.getEntryCount(); idx++) { int clsId = Integer.valueOf(baseline.getNameByIndex(idx)); if (baseline.getValueByIndex(idx) != null) { S2DTClass dtClass = (S2DTClass) dtClasses.forClassId(clsId); if (onlyThese.size() != 0 && !onlyThese.contains(dtClass.getDtName())) { continue; } ps[0] = new PrintStream(new FileOutputStream("baselines/" + ctx.getBuildNumber() + "/" + dtClass.getDtName() + ".txt"), true, "UTF-8"); TextTable.Builder b = new TextTable.Builder(); b.setTitle(dtClass.getDtName()); b.setFrame(TextTable.FRAME_COMPAT); b.setPadding(0, 0); b.addColumn("FP"); b.addColumn("Name"); b.addColumn("L", TextTable.Alignment.RIGHT); b.addColumn("H", TextTable.Alignment.RIGHT); b.addColumn("BC", TextTable.Alignment.RIGHT); b.addColumn("Flags", TextTable.Alignment.RIGHT); b.addColumn("Decoder"); b.addColumn("Type"); b.addColumn("Value"); b.addColumn("#", TextTable.Alignment.RIGHT); b.addColumn("read"); TextTable t = b.build(); BitStream bs = new BitStream(baseline.getValueByIndex(idx)); exx = null; try { List<FieldPath> fieldPaths = new ArrayList<>(); FieldPath fp = new FieldPath(); while (true) { FieldOpType op = HUFFMAN_TREE.decodeOp(bs); op.execute(fp, bs); if (op == FieldOpType.FieldPathEncodeFinish) { break; } fieldPaths.add(fp); fp = new FieldPath(fp); } for (int r = 0; r < fieldPaths.size(); r++) { fp = fieldPaths.get(r); FieldProperties f = dtClass.getFieldForFieldPath(fp).getProperties(); FieldType ft = dtClass.getTypeForFieldPath(fp); t.setData(r, 0, fp); t.setData(r, 1, dtClass.getNameForFieldPath(fp)); t.setData(r, 2, f.getLowValue()); t.setData(r, 3, f.getHighValue()); t.setData(r, 4, f.getBitCount()); t.setData(r, 5, f.getEncodeFlags() != null ? Integer.toHexString(f.getEncodeFlags()) : "-"); t.setData(r, 7, String.format("%s%s", ft.toString(true), f.getEncoder() != null ? 
String.format(" {%s}", f.getEncoder()) : "")); int offsBefore = bs.pos(); Unpacker unpacker = dtClass.getUnpackerForFieldPath(fp); if (unpacker == null) { System.out.format("no unpacker for field %s with type %s!", f.getName(), f.getType()); System.exit(1); } Object data = unpacker.unpack(bs); t.setData(r, 6, unpacker.getClass().getSimpleName().toString()); t.setData(r, 8, data); t.setData(r, 9, bs.pos() - offsBefore); t.setData(r, 10, bs.toString(offsBefore, bs.pos())); } } catch (Exception e) { exx = e; } finally { for (PrintStream s : ps) { if (s == null) { continue; } t.print(s); s.format("%s/%s remaining: %s\n", bs.remaining(), bs.len(), bs.toString(bs.pos(), bs.len())); if (exx != null) { exx.printStackTrace(s); } s.format("\n\n\n"); } } } } } } public void run(String[] args) throws Exception { long tStart = System.currentTimeMillis(); new SimpleRunner(new MappedFileSource(args[0])).runWith(this); long tMatch = System.currentTimeMillis() - tStart; log.info("total time taken: {}s", (tMatch) / 1000.0); } public static void main(String[] args) throws Exception { new Main().run(args); } }
package uk.ac.manchester.libchebi;

import java.io.*;
import java.text.*;
import java.util.*;

/**
 * Represents a single ChEBI compound entry, looked up by numeric id.
 *
 * Most getters follow the same fallback pattern: try this id, then the parent
 * id, then every related (child-or-parent) id, returning the first hit.
 *
 * @author neilswainston
 */
public class ChebiEntity
{
	// Sentinel returned by the parsers when a numeric value is absent.
	public static final short UNDEFINED_VALUE = Short.MIN_VALUE;

	// Index of the first element; used when only one formula is wanted.
	private static final int FIRST = 0;

	private final int id;

	// Lazily-populated cache of all related ids; see getAllIds().
	private int[] allIds = null;

	/**
	 * @throws ChebiException if the id does not resolve to a named ChEBI entry
	 * @throws ParseException
	 * @throws IOException
	 */
	public ChebiEntity( final int id ) throws IOException, ParseException, ChebiException
	{
		this.id = id;

		// A null name means the id is unknown to the ChEBI data files.
		if( getName() == null )
		{
			throw new ChebiException( "ChEBI id " + id + " invalid" ); //$NON-NLS-1$ //$NON-NLS-2$
		}
	}

	/**
	 *
	 * @return id
	 */
	public int getId()
	{
		return id;
	}

	/**
	 *
	 * @return parent id
	 * @throws IOException
	 * @throws ParseException
	 */
	public int getParentId() throws IOException, ParseException
	{
		return CompoundsParser.getInstance().getParentId( id );
	}

	/**
	 *
	 * @return formulae
	 * @throws IOException
	 * @throws ParseException
	 */
	public List<Formula> getFormulae() throws IOException, ParseException
	{
		return ChemicalDataParser.getInstance().getFormulae( getAllIds() );
	}

	/**
	 *
	 * @return formula, or null if no formulae are recorded
	 * @throws IOException
	 * @throws ParseException
	 */
	public String getFormula() throws IOException, ParseException
	{
		final List<Formula> formulae = getFormulae();
		return formulae.size() == 0 ? null : formulae.get( FIRST ).getFormula();
	}

	/**
	 * Returns the mass, falling back to the parent and then related ids.
	 * May return UNDEFINED_VALUE if no id in the hierarchy has a mass.
	 *
	 * @return mass
	 * @throws IOException
	 * @throws ParseException
	 */
	public float getMass() throws IOException, ParseException
	{
		float mass = ChemicalDataParser.getInstance().getMass( id );

		// Float == comparison is safe here: UNDEFINED_VALUE is an exact sentinel.
		if( mass == ChebiEntity.UNDEFINED_VALUE )
		{
			mass = ChemicalDataParser.getInstance().getMass( getParentId() );

			if( mass != ChebiEntity.UNDEFINED_VALUE )
			{
				return mass;
			}

			for( final int childOrParentId : getAllIds() )
			{
				mass = ChemicalDataParser.getInstance().getMass( childOrParentId );

				if( mass != ChebiEntity.UNDEFINED_VALUE )
				{
					return mass;
				}
			}
		}

		return mass;
	}

	/**
	 * Returns the charge, falling back to the parent and then related ids.
	 * May return UNDEFINED_VALUE if no id in the hierarchy has a charge.
	 *
	 * @return charge
	 * @throws IOException
	 * @throws ParseException
	 */
	public int getCharge() throws IOException, ParseException
	{
		int charge = ChemicalDataParser.getInstance().getCharge( id );

		if( charge == ChebiEntity.UNDEFINED_VALUE )
		{
			charge = ChemicalDataParser.getInstance().getCharge( getParentId() );

			if( charge != ChebiEntity.UNDEFINED_VALUE )
			{
				return charge;
			}

			for( final int childOrParentId : getAllIds() )
			{
				charge = ChemicalDataParser.getInstance().getCharge( childOrParentId );

				if( charge != ChebiEntity.UNDEFINED_VALUE )
				{
					return charge;
				}
			}
		}

		return charge;
	}

	/**
	 *
	 * @return comments
	 * @throws IOException
	 * @throws ParseException
	 */
	public List<Comment> getComments() throws IOException, ParseException
	{
		return CommentsParser.getInstance().getComments( getAllIds() );
	}

	/**
	 *
	 * @return source
	 * @throws IOException
	 * @throws ParseException
	 */
	public String getSource() throws IOException, ParseException
	{
		return CompoundsParser.getInstance().getSource( id );
	}

	/**
	 * Returns the name, falling back to the parent and then related ids;
	 * null if no id in the hierarchy has a name.
	 *
	 * @return name
	 * @throws IOException
	 * @throws ParseException
	 */
	public String getName() throws IOException, ParseException
	{
		String name = CompoundsParser.getInstance().getName( id );

		if( name == null )
		{
			name = CompoundsParser.getInstance().getName( getParentId() );

			if( name != null )
			{
				return name;
			}

			for( final int childOrParentId : getAllIds() )
			{
				name = CompoundsParser.getInstance().getName( childOrParentId );

				if( name != null )
				{
					return name;
				}
			}
		}

		return name;
	}

	/**
	 * Returns the definition, falling back to the parent and then related ids;
	 * null if none found.
	 *
	 * @return definition
	 * @throws IOException
	 * @throws ParseException
	 */
	public String getDefinition() throws IOException, ParseException
	{
		String definition = CompoundsParser.getInstance().getDefinition( id );

		if( definition == null )
		{
			definition = CompoundsParser.getInstance().getDefinition( getParentId() );

			if( definition != null )
			{
				return definition;
			}

			for( final int childOrParentId : getAllIds() )
			{
				definition = CompoundsParser.getInstance().getDefinition( childOrParentId );

				if( definition != null )
				{
					return definition;
				}
			}
		}

		return definition;
	}

	/**
	 *
	 * @return last-modified date across all related ids
	 * @throws IOException
	 * @throws ParseException
	 */
	public Date getModifiedOn() throws IOException, ParseException
	{
		return CompoundsParser.getInstance().getModifiedOn( getAllIds() );
	}

	/**
	 * Returns the creator, falling back to the parent and then related ids;
	 * null if none found.
	 *
	 * @return creator
	 * @throws IOException
	 * @throws ParseException
	 */
	public String getCreatedBy() throws IOException, ParseException
	{
		String createdBy = CompoundsParser.getInstance().getCreatedBy( id );

		if( createdBy == null )
		{
			createdBy = CompoundsParser.getInstance().getCreatedBy( getParentId() );

			if( createdBy != null )
			{
				return createdBy;
			}

			for( final int childOrParentId : getAllIds() )
			{
				createdBy = CompoundsParser.getInstance().getCreatedBy( childOrParentId );

				if( createdBy != null )
				{
					return createdBy;
				}
			}
		}

		return createdBy;
	}

	/**
	 *
	 * @return star
	 * @throws IOException
	 * @throws ParseException
	 */
	public short getStar() throws IOException, ParseException
	{
		return CompoundsParser.getInstance().getStar( id );
	}

	/**
	 *
	 * @return database accessions
	 * @throws IOException
	 * @throws ParseException
	 */
	public List<DatabaseAccession> getDatabaseAccessions() throws IOException, ParseException
	{
		return DatabaseAccessionParser.getInstance().getDatabaseAccessions( getAllIds() );
	}

	/**
	 * Returns the InChI string, falling back to the parent and then related
	 * ids; null if none found.
	 *
	 * @return InChI string
	 * @throws IOException
	 * @throws ParseException
	 */
	public String getInchi() throws IOException, ParseException
	{
		String inchi = InchiParser.getInstance().getInchi( id );

		if( inchi == null )
		{
			inchi = InchiParser.getInstance().getInchi( getParentId() );

			if( inchi != null )
			{
				return inchi;
			}

			for( final int childOrParentId : getAllIds() )
			{
				inchi = InchiParser.getInstance().getInchi( childOrParentId );

				if( inchi != null )
				{
					return inchi;
				}
			}
		}

		return inchi;
	}

	/**
	 * Returns the InChI key, falling back to the parent and then related ids;
	 * null if none found.
	 *
	 * @return InChI key string
	 * @throws IOException
	 * @throws ParseException
	 */
	public String getInchiKey() throws IOException, ParseException
	{
		Structure structure = StructuresParser.getInstance().getInchiKey( id );

		if( structure != null )
		{
			return structure.getStructure();
		}

		structure = StructuresParser.getInstance().getInchiKey( getParentId() );

		if( structure != null )
		{
			return structure.getStructure();
		}

		for( final int childOrParentId : getAllIds() )
		{
			structure = StructuresParser.getInstance().getInchiKey( childOrParentId );

			if( structure != null )
			{
				return structure.getStructure();
			}
		}

		return null;
	}

	/**
	 * Returns the SMILES string, falling back to the parent and then related
	 * ids; null if none found.
	 *
	 * @return SMILES string
	 * @throws IOException
	 * @throws ParseException
	 */
	public String getSmiles() throws IOException, ParseException
	{
		Structure structure = StructuresParser.getInstance().getSmiles( id );

		if( structure != null )
		{
			return structure.getStructure();
		}

		structure = StructuresParser.getInstance().getSmiles( getParentId() );

		if( structure != null )
		{
			return structure.getStructure();
		}

		for( final int childOrParentId : getAllIds() )
		{
			structure = StructuresParser.getInstance().getSmiles( childOrParentId );

			if( structure != null )
			{
				return structure.getStructure();
			}
		}

		return null;
	}

	/**
	 * Returns the mol block, falling back to the parent and then related ids;
	 * null if none found.
	 *
	 * @return mol string
	 * @throws IOException
	 * @throws ParseException
	 */
	public String getMol() throws IOException, ParseException
	{
		Structure structure = StructuresParser.getInstance().getMol( id );

		if( structure != null )
		{
			return structure.getStructure();
		}

		structure = StructuresParser.getInstance().getMol( getParentId() );

		if( structure != null )
		{
			return structure.getStructure();
		}

		for( final int childOrParentId : getAllIds() )
		{
			structure = StructuresParser.getInstance().getMol( childOrParentId );

			if( structure != null )
			{
				return structure.getStructure();
			}
		}

		return null;
	}

	/**
	 * Returns the mol file, falling back to the parent and then related ids;
	 * null if none found.
	 *
	 * @return mol file
	 * @throws ParseException
	 * @throws IOException
	 */
	public File getMolFile() throws IOException, ParseException
	{
		File molFile = StructuresParser.getInstance().getMolFile( id );

		if( molFile != null )
		{
			return molFile;
		}

		molFile = StructuresParser.getInstance().getMolFile( getParentId() );

		if( molFile != null )
		{
			return molFile;
		}

		for( final int childOrParentId : getAllIds() )
		{
			molFile = StructuresParser.getInstance().getMolFile( childOrParentId );

			if( molFile != null )
			{
				return molFile;
			}
		}

		return null;
	}

	/**
	 *
	 * @return names
	 * @throws IOException
	 * @throws ParseException
	 */
	public List<Name> getNames() throws IOException, ParseException
	{
		return NamesParser.getInstance().getNames( getAllIds() );
	}

	/**
	 *
	 * @return references
	 * @throws IOException
	 * @throws ParseException
	 */
	public List<Reference> getReferences() throws IOException, ParseException
	{
		return ReferenceParser.getInstance().getReferences( getAllIds() );
	}

	/**
	 *
	 * @return compound origins
	 * @throws IOException
	 * @throws ParseException
	 */
	public List<CompoundOrigin> getCompoundOrigins() throws IOException, ParseException
	{
		return CompoundOriginsParser.getInstance().getCompoundOrigins( getAllIds() );
	}

	/**
	 *
	 * @return outgoing Relations
	 * @throws IOException
	 * @throws ParseException
	 */
	public List<Relation> getOutgoings() throws IOException, ParseException
	{
		return RelationParser.getInstance().getOutgoings( getAllIds() );
	}

	/**
	 *
	 * @return incoming Relations
	 * @throws IOException
	 * @throws ParseException
	 */
	public List<Relation> getIncomings() throws IOException, ParseException
	{
		return RelationParser.getInstance().getIncomings( getAllIds() );
	}

	/**
	 *
	 * @return status
	 * @throws IOException
	 * @throws ParseException
	 */
	String getStatus() throws IOException, ParseException
	{
		return CompoundsParser.getInstance().getStatus( id );
	}

	/**
	 * Lazily resolves and caches the full set of related ids, rooted at the
	 * parent id when one exists (never returns null; empty array if unknown).
	 *
	 * @return allIds
	 * @throws IOException
	 * @throws ParseException
	 */
	int[] getAllIds() throws IOException, ParseException
	{
		if( allIds == null )
		{
			final int parentId = getParentId();
			allIds = CompoundsParser.getInstance().getAllIds( parentId == ChebiEntity.UNDEFINED_VALUE ? id : parentId );

			if( allIds == null )
			{
				allIds = new int[ 0 ];
			}
		}

		return allIds;
	}

	/**
	 * Example code, showing the instantiation of a ChebiEntity, a call to getNames(),
	 * and the calling of a number of methods of the returned Names objects.
	 *
	 * @param args
	 * @throws IOException
	 * @throws ParseException
	 * @throws ChebiException
	 */
	@SuppressWarnings("nls")
	public static void main( final String[] args ) throws IOException, ParseException, ChebiException
	{
		ChebiEntity chebiEntity = new ChebiEntity( 17634 );

		for( Name name : chebiEntity.getNames() )
		{
			System.out.println( name.getName() + "\t" + name.getSource() + "\t" + name.getLanguage() );
		}
	}
}
package net.wigle.wigleandroid; import java.util.Map; import org.andnav.osm.util.GeoPoint; import org.andnav.osm.views.OpenStreetMapView; import android.content.Context; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Paint; import android.graphics.Point; import android.util.AttributeSet; /** * wrap the open street map view, to allow setting overlays */ public class OpenStreetMapViewWrapper extends OpenStreetMapView { private Paint trailPaint = new Paint(); /** * XML Constructor (uses default Renderer) */ public OpenStreetMapViewWrapper(Context context, AttributeSet attrs) { super( context, attrs ); int color = Color.argb( 200, 200, 128, 200 ); trailPaint.setColor( color ); } @Override public void onDraw( Canvas c ) { super.onDraw( c ); synchronized( WigleAndroid.lameStatic.trail ) { for ( Map.Entry<GeoPoint,Integer> entry : WigleAndroid.lameStatic.trail.entrySet() ) { GeoPoint geoPoint = entry.getKey(); int nets = entry.getValue(); // WigleAndroid.info( "nets: " + nets + " point: " + geoPoint ); if ( nets > 0 ) { final Point point = this.getProjection().toMapPixels( geoPoint, null ); c.drawCircle(point.x, point.y, nets, trailPaint); } } } // draw center crosshairs final GeoPoint center = this.getMapCenter(); final Point centerPoint = this.getProjection().toMapPixels( center, null ); c.drawLine( centerPoint.x, centerPoint.y - 9, centerPoint.x, centerPoint.y + 9, mPaint ); c.drawLine( centerPoint.x - 9, centerPoint.y, centerPoint.x + 9, centerPoint.y, mPaint ); } }
package net.zephyrizing.http_server_test; import java.io.IOException; import java.util.ArrayList; import java.util.List; import net.zephyrizing.http_server.HttpRequest; import net.zephyrizing.http_server.HttpServer; import net.zephyrizing.http_server.HttpServerSocket; import org.junit.Before; import org.junit.Test; import static org.junit.Assert.*; public class HttpServerTest { MockHttpServerSocket serverSocket; int port = 7070; HttpServer server; @Before public void initialize() { serverSocket = new MockHttpServerSocket(); server = new HttpServer(serverSocket, port); } public class MockHttpServerSocket implements HttpServerSocket { public int bindCallCount = 0; public List<Integer> portList = new ArrayList<Integer>(); @Override public void bind(int port) throws IOException { bindCallCount++; portList.add(port); } @Override public HttpRequest accept() { String[] requestLines = new String[] {"GET / HTTP/1.1\r\n"}; return new HttpRequest(requestLines); } @Override public void close() {} } @Test public void serverListensAtPort() throws Exception { server.listen(); assertEquals(1, serverSocket.bindCallCount); assertEquals(port, (int)serverSocket.portList.get(0)); } @Test public void serverReceivesARequest() throws Exception { server.listen(); HttpRequest request = server.acceptRequest(); assertEquals("GET", request.method()); } public class AcceptMockedHttpServer extends HttpServer { public int timesAcceptRequestCalled = 0; private int acceptThreshold = 1; public AcceptMockedHttpServer(HttpServerSocket socket, int port) { super(socket, port); } public void setNumberOfAccepts(int threshold) { acceptThreshold = threshold; } @Override public boolean acceptingRequests() { return timesAcceptRequestCalled < acceptThreshold; } @Override public HttpRequest acceptRequest() { timesAcceptRequestCalled++; return null; } } @Test public void serverAcceptsMultipleRequests() { AcceptMockedHttpServer server = new AcceptMockedHttpServer(serverSocket, port); int numCalls = 3; 
server.setNumberOfAccepts(numCalls); server.serve(); assertEquals(numCalls, server.timesAcceptRequestCalled); } }
package nu.validator.servlet; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStream; import java.net.SocketTimeoutException; import java.util.Arrays; import java.util.Deque; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import nu.validator.checker.XmlPiChecker; import nu.validator.checker.jing.CheckerSchema; import nu.validator.gnu.xml.aelfred2.SAXDriver; import nu.validator.htmlparser.common.DoctypeExpectation; import nu.validator.htmlparser.common.DocumentMode; import nu.validator.htmlparser.common.DocumentModeHandler; import nu.validator.htmlparser.common.Heuristics; import nu.validator.htmlparser.common.XmlViolationPolicy; import nu.validator.htmlparser.io.ChangingEncodingException; import nu.validator.htmlparser.sax.CannotRecoverException; import nu.validator.htmlparser.sax.HtmlParser; import nu.validator.htmlparser.sax.HtmlSerializer; import nu.validator.htmlparser.sax.XmlSerializer; import nu.validator.io.BoundedInputStream; import nu.validator.io.DataUri; import nu.validator.io.StreamBoundException; import nu.validator.localentities.LocalCacheEntityResolver; import nu.validator.messages.GnuMessageEmitter; import nu.validator.messages.JsonMessageEmitter; import nu.validator.messages.MessageEmitterAdapter; import nu.validator.messages.TextMessageEmitter; import nu.validator.messages.TooManyErrorsException; import nu.validator.messages.XhtmlMessageEmitter; import nu.validator.messages.XmlMessageEmitter; import nu.validator.servlet.imagereview.ImageCollector; import 
nu.validator.servlet.OutlineBuildingXMLReaderWrapper.Section; import nu.validator.source.SourceCode; import nu.validator.spec.Spec; import nu.validator.spec.html5.Html5SpecBuilder; import nu.validator.xml.AttributesImpl; import nu.validator.xml.AttributesPermutingXMLReaderWrapper; import nu.validator.xml.BaseUriTracker; import nu.validator.xml.CharacterUtil; import nu.validator.xml.CombineContentHandler; import nu.validator.xml.ContentTypeParser; import nu.validator.xml.DataUriEntityResolver; import nu.validator.xml.IdFilter; import nu.validator.xml.NamespaceDroppingXMLReaderWrapper; import nu.validator.xml.NullEntityResolver; import nu.validator.xml.PrudentHttpEntityResolver; import nu.validator.xml.SystemErrErrorHandler; import nu.validator.xml.TypedInputSource; import nu.validator.xml.WiretapXMLReaderWrapper; import nu.validator.xml.XhtmlSaxEmitter; import nu.validator.xml.dataattributes.DataAttributeDroppingSchemaWrapper; import nu.validator.xml.langattributes.XmlLangAttributeDroppingSchemaWrapper; import nu.validator.xml.roleattributes.RoleAttributeFilteringSchemaWrapper; import org.xml.sax.ContentHandler; import org.xml.sax.EntityResolver; import org.xml.sax.ErrorHandler; import org.xml.sax.Locator; import org.xml.sax.SAXException; import org.xml.sax.SAXNotRecognizedException; import org.xml.sax.SAXNotSupportedException; import org.xml.sax.SAXParseException; import org.xml.sax.XMLReader; import org.xml.sax.ext.LexicalHandler; import com.thaiopensource.relaxng.impl.CombineValidator; import com.thaiopensource.util.PropertyMap; import com.thaiopensource.util.PropertyMapBuilder; import com.thaiopensource.validate.IncorrectSchemaException; import com.thaiopensource.validate.Schema; import com.thaiopensource.validate.SchemaReader; import com.thaiopensource.validate.SchemaResolver; import com.thaiopensource.validate.ValidateProperty; import com.thaiopensource.validate.Validator; import com.thaiopensource.validate.auto.AutoSchemaReader; import 
com.thaiopensource.validate.prop.rng.RngProperty;
import com.thaiopensource.validate.prop.wrap.WrapProperty;
import com.thaiopensource.validate.rng.CompactSchemaReader;

import org.apache.http.conn.ConnectTimeoutException;
import org.apache.log4j.Logger;

import com.ibm.icu.text.Normalizer;

import org.apache.stanbol.enhancer.engines.langdetect.LanguageIdentifier;

/**
 * One HTTP request/response cycle of the validator service: reads request
 * parameters, sets up parsers and schemas, runs validation over the input
 * document and emits messages in the requested output format.
 *
 * @version $Id: VerifierServletTransaction.java,v 1.10 2005/07/24 07:32:48
 *          hsivonen Exp $
 * @author hsivonen
 */
class VerifierServletTransaction implements DocumentModeHandler, SchemaResolver {

    /** Supported values of the "out" request parameter. */
    private enum OutputFormat {
        HTML, XHTML, TEXT, XML, JSON, RELAXED, SOAP, UNICORN, GNU
    }

    private static final Logger log4j = Logger.getLogger(VerifierServletTransaction.class);

    private static final Pattern SPACE = Pattern.compile("\\s+");

    // ECMA 262 IdentifierName; used in service() to vet the JSONP "callback"
    // parameter.
    private static final Pattern JS_IDENTIFIER = Pattern.compile("[\\p{Lu}\\p{Ll}\\p{Lt}\\p{Lm}\\p{Lo}\\p{Nl}_\\$][\\p{Lu}\\p{Ll}\\p{Lt}\\p{Lm}\\p{Lo}\\p{Nl}_\\$\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}]*");

    // NOTE: must stay lexicographically sorted -- service() probes this with
    // Arrays.binarySearch().
    private static final String[] JS_RESERVED_WORDS = { "abstract", "boolean",
            "break", "byte", "case", "catch", "char", "class", "const",
            "continue", "debugger", "default", "delete", "do", "double",
            "else", "enum", "export", "extends", "final", "finally", "float",
            "for", "function", "goto", "if", "implements", "import", "in",
            "instanceof", "int", "interface", "long", "native", "new",
            "package", "private", "protected", "public", "return", "short",
            "static", "super", "switch", "synchronized", "this", "throw",
            "throws", "transient", "try", "typeof", "var", "void", "volatile",
            "while", "with" };

    // Charset menu values; CHARSET_DESCRIPTIONS below is parallel to this
    // array and must stay in the same order.
    private static final String[] CHARSETS = { "UTF-8", "UTF-16",
            "Windows-1250", "Windows-1251", "Windows-1252", "Windows-1253",
            "Windows-1254", "Windows-1255", "Windows-1256", "Windows-1257",
            "Windows-1258", "ISO-8859-1", "ISO-8859-2", "ISO-8859-3",
            "ISO-8859-4", "ISO-8859-5", "ISO-8859-6", "ISO-8859-7",
            "ISO-8859-8", "ISO-8859-9", "ISO-8859-13", "ISO-8859-15",
            "KOI8-R", "TIS-620", "GBK", "GB18030", "Big5", "Big5-HKSCS",
            "Shift_JIS", "ISO-2022-JP", "EUC-JP", "ISO-2022-KR", "EUC-KR" };

    // Human-readable labels for CHARSETS, same order.
    private static final char[][] CHARSET_DESCRIPTIONS = {
            "UTF-8 (Global)".toCharArray(), "UTF-16 (Global)".toCharArray(),
            "Windows-1250 (Central European)".toCharArray(),
            "Windows-1251 (Cyrillic)".toCharArray(),
            "Windows-1252 (Western)".toCharArray(),
            "Windows-1253 (Greek)".toCharArray(),
            "Windows-1254 (Turkish)".toCharArray(),
            "Windows-1255 (Hebrew)".toCharArray(),
            "Windows-1256 (Arabic)".toCharArray(),
            "Windows-1257 (Baltic)".toCharArray(),
            "Windows-1258 (Vietnamese)".toCharArray(),
            "ISO-8859-1 (Western)".toCharArray(),
            "ISO-8859-2 (Central European)".toCharArray(),
            "ISO-8859-3 (South European)".toCharArray(),
            "ISO-8859-4 (Baltic)".toCharArray(),
            "ISO-8859-5 (Cyrillic)".toCharArray(),
            "ISO-8859-6 (Arabic)".toCharArray(),
            "ISO-8859-7 (Greek)".toCharArray(),
            "ISO-8859-8 (Hebrew)".toCharArray(),
            "ISO-8859-9 (Turkish)".toCharArray(),
            "ISO-8859-13 (Baltic)".toCharArray(),
            "ISO-8859-15 (Western)".toCharArray(),
            "KOI8-R (Russian)".toCharArray(), "TIS-620 (Thai)".toCharArray(),
            "GBK (Chinese, simplified)".toCharArray(),
            "GB18030 (Chinese, simplified)".toCharArray(),
            "Big5 (Chinese, traditional)".toCharArray(),
            "Big5-HKSCS (Chinese, traditional)".toCharArray(),
            "Shift_JIS (Japanese)".toCharArray(),
            "ISO-2022-JP (Japanese)".toCharArray(),
            "EUC-JP (Japanese)".toCharArray(),
            "ISO-2022-KR (Korean)".toCharArray(),
            "EUC-KR (Korean)".toCharArray() };

    // Preset schema indices; presumably these match positions in the presets
    // config read by the static initializer -- TODO confirm against presets file.
    protected static final int HTML5_SCHEMA = 3;

    protected static final int XHTML1STRICT_SCHEMA = 2;

    protected static final int XHTML1TRANSITIONAL_SCHEMA = 1;

    protected static final int XHTML5_SCHEMA = 7;

    // Assigned in the static initializer from system properties / misc.properties.
    private static final char[] SERVICE_TITLE;

    private static final char[] LIVING_VERSION = "Living Validator".toCharArray();

    private static final char[] VERSION;

    private static final char[] RESULTS_TITLE;

    private static final char[] FOR = " for ".toCharArray();

    private static final char[] ABOUT_THIS_SERVICE = "About this Service".toCharArray();

    private static final char[] SIMPLE_UI = "Simplified Interface".toCharArray();

    private static final String USER_AGENT;

    private static Spec html5spec;

    private static int[] presetDoctypes;

    private static String[] presetLabels;

    private static String[] presetUrls;

    private static String[] presetNamespaces;

    private static LanguageIdentifier languageIdentifier;

    // XXX SVG!!!
    private static final String[] KNOWN_CONTENT_TYPES = {
            "application/atom+xml", "application/docbook+xml",
            "application/xhtml+xml", "application/xv+xml", "image/svg+xml" };

    // NOTE(review): the next three array initializers are visibly truncated in
    // this copy of the file (bare "http: fragments, missing commas and closing
    // braces). They cannot compile as-is and must be restored from the
    // upstream validator source; left byte-for-byte as found.
    private static final String[] NAMESPACES_FOR_KNOWN_CONTENT_TYPES = {
            "http: "http: "http:

    private static final String[] ALL_CHECKERS = {
            "http: "http://c.validator.nu/text-content/",
            "http://c.validator.nu/unchecked/", "http: "http:
            "http://c.validator.nu/microdata/" };

    private static final String[] ALL_CHECKERS_HTML4 = { "http: "http:

    // Per-transaction state below this point.
    private long start = System.currentTimeMillis();

    protected final HttpServletRequest request;

    private final HttpServletResponse response;

    protected String document = null;

    private ParserMode parser = ParserMode.AUTO;

    private String profile = "";

    private boolean laxType = false;

    protected ContentHandler contentHandler;

    protected XhtmlSaxEmitter emitter;

    protected MessageEmitterAdapter errorHandler;

    protected final AttributesImpl attrs = new AttributesImpl();

    private OutputStream out;

    private PropertyMap jingPropertyMap;

    protected LocalCacheEntityResolver entityResolver;

    private static long lastModified;

    private static String[] preloadedSchemaUrls;

    private static Schema[] preloadedSchemas;

    private final static String ABOUT_PAGE = System.getProperty(
            "nu.validator.servlet.about-page", "https://about.validator.nu/");

    private final static String HTML5_FACET = (VerifierServlet.HTML5_HOST.isEmpty() ?
"" : ("//" + VerifierServlet.HTML5_HOST)) + VerifierServlet.HTML5_PATH; private final static String STYLE_SHEET = System.getProperty( "nu.validator.servlet.style-sheet", "style.css"); private final static String ICON = System.getProperty( "nu.validator.servlet.icon", "icon.png"); private final static String SCRIPT = System.getProperty( "nu.validator.servlet.script", "script.js"); private static final long SIZE_LIMIT = Integer.parseInt(System.getProperty( "nu.validator.servlet.max-file-size", "2097152")); private String schemaUrls = null; protected Validator validator = null; private BufferingRootNamespaceSniffer bufferingRootNamespaceSniffer = null; private String contentType = null; protected HtmlParser htmlParser = null; protected SAXDriver xmlParser = null; protected XMLReader reader; protected TypedInputSource documentInput; protected PrudentHttpEntityResolver httpRes; protected DataUriEntityResolver dataRes; protected ContentTypeParser contentTypeParser; private Set<String> loadedValidatorUrls = new HashSet<>(); private boolean checkNormalization = false; private boolean rootNamespaceSeen = false; private OutputFormat outputFormat; private String postContentType; private boolean methodIsGet; private SourceCode sourceCode = new SourceCode(); private Deque<Section> outline; private boolean showSource; private boolean showOutline; private boolean schemaIsDefault; private String userAgent; private BaseUriTracker baseUriTracker = null; private String charsetOverride = null; private Set<String> filteredNamespaces = new LinkedHashSet<>(); // linked private LexicalHandler lexicalHandler; // for // stability protected ImageCollector imageCollector; private boolean externalSchema = false; private boolean externalSchematron = false; private String schemaListForStats = null; static { try { log4j.debug("Starting static initializer."); lastModified = 0; BufferedReader r = new BufferedReader(new InputStreamReader(LocalCacheEntityResolver.getPresetsAsStream(), "UTF-8")); 
String line; List<String> doctypes = new LinkedList<>(); List<String> namespaces = new LinkedList<>(); List<String> labels = new LinkedList<>(); List<String> urls = new LinkedList<>(); Properties props = new Properties(); log4j.debug("Reading miscellaneous properties."); props.load(VerifierServlet.class.getClassLoader().getResourceAsStream( "nu/validator/localentities/files/misc.properties")); SERVICE_TITLE = (System.getProperty( "nu.validator.servlet.service-name", props.getProperty("nu.validator.servlet.service-name", "Validator.nu")) + " ").toCharArray(); RESULTS_TITLE = (System.getProperty( "nu.validator.servlet.results-title", props.getProperty( "nu.validator.servlet.results-title", "Validation results"))).toCharArray(); VERSION = (System.getProperty("nu.validator.servlet.version", props.getProperty("nu.validator.servlet.version", "Living Validator"))).toCharArray(); USER_AGENT= (System.getProperty("nu.validator.servlet.user-agent", props.getProperty("nu.validator.servlet.user-agent", "Validator.nu/LV"))); log4j.debug("Starting to loop over config file lines."); while ((line = r.readLine()) != null) { if ("".equals(line.trim())) { break; } String s[] = line.split("\t"); doctypes.add(s[0]); namespaces.add(s[1]); labels.add(s[2]); urls.add(s[3]); } log4j.debug("Finished reading config."); String[] presetDoctypesAsStrings = doctypes.toArray(new String[0]); presetNamespaces = namespaces.toArray(new String[0]); presetLabels = labels.toArray(new String[0]); presetUrls = urls.toArray(new String[0]); log4j.debug("Converted config to arrays."); for (int i = 0; i < presetNamespaces.length; i++) { String str = presetNamespaces[i]; if ("-".equals(str)) { presetNamespaces[i] = null; } else { presetNamespaces[i] = presetNamespaces[i].intern(); } } log4j.debug("Prepared namespace array."); presetDoctypes = new int[presetDoctypesAsStrings.length]; for (int i = 0; i < presetDoctypesAsStrings.length; i++) { presetDoctypes[i] = Integer.parseInt(presetDoctypesAsStrings[i]); } 
log4j.debug("Parsed doctype numbers into ints."); String prefix = System.getProperty("nu.validator.servlet.cachepathprefix"); log4j.debug("The cache path prefix is: " + prefix); ErrorHandler eh = new SystemErrErrorHandler(); LocalCacheEntityResolver er = new LocalCacheEntityResolver(new NullEntityResolver()); er.setAllowRnc(true); PropertyMapBuilder pmb = new PropertyMapBuilder(); pmb.put(ValidateProperty.ERROR_HANDLER, eh); pmb.put(ValidateProperty.ENTITY_RESOLVER, er); pmb.put(ValidateProperty.XML_READER_CREATOR, new VerifierServletXMLReaderCreator(eh, er)); RngProperty.CHECK_ID_IDREF.add(pmb); PropertyMap pMap = pmb.toPropertyMap(); log4j.debug("Parsing set up. Starting to read schemas."); SortedMap<String, Schema> schemaMap = new TreeMap<>(); schemaMap.put("http://c.validator.nu/table/", CheckerSchema.TABLE_CHECKER); schemaMap.put("http://hsivonen.iki.fi/checkers/table/", CheckerSchema.TABLE_CHECKER); schemaMap.put("http://c.validator.nu/nfc/", CheckerSchema.NORMALIZATION_CHECKER); schemaMap.put("http://hsivonen.iki.fi/checkers/nfc/", CheckerSchema.NORMALIZATION_CHECKER); schemaMap.put("http://c.validator.nu/debug/", CheckerSchema.DEBUG_CHECKER); schemaMap.put("http://hsivonen.iki.fi/checkers/debug/", CheckerSchema.DEBUG_CHECKER); schemaMap.put("http://c.validator.nu/text-content/", CheckerSchema.TEXT_CONTENT_CHECKER); schemaMap.put("http://hsivonen.iki.fi/checkers/text-content/", CheckerSchema.TEXT_CONTENT_CHECKER); schemaMap.put("http://c.validator.nu/usemap/", CheckerSchema.USEMAP_CHECKER); schemaMap.put("http://n.validator.nu/checkers/usemap/", CheckerSchema.USEMAP_CHECKER); schemaMap.put("http://c.validator.nu/unchecked/", CheckerSchema.UNCHECKED_SUBTREE_WARNER); schemaMap.put("http://s.validator.nu/html5/assertions.sch", CheckerSchema.ASSERTION_SCH); schemaMap.put("http://s.validator.nu/html4/assertions.sch", CheckerSchema.HTML4ASSERTION_SCH); schemaMap.put("http://c.validator.nu/obsolete/", CheckerSchema.CONFORMING_BUT_OBSOLETE_WARNER); 
schemaMap.put("http://c.validator.nu/xml-pi/", CheckerSchema.XML_PI_CHECKER); schemaMap.put("http://c.validator.nu/unsupported/", CheckerSchema.UNSUPPORTED_CHECKER); schemaMap.put("http://c.validator.nu/microdata/", CheckerSchema.MICRODATA_CHECKER); schemaMap.put("http://c.validator.nu/rdfalite/", CheckerSchema.RDFALITE_CHECKER); for (String presetUrl : presetUrls) { for (String url : SPACE.split(presetUrl)) { if (schemaMap.get(url) == null && !isCheckerUrl(url)) { Schema sch = schemaByUrl(url, er, pMap); schemaMap.put(url, sch); } } } log4j.debug("Schemas read."); preloadedSchemaUrls = new String[schemaMap.size()]; preloadedSchemas = new Schema[schemaMap.size()]; int i = 0; for (Map.Entry<String, Schema> entry : schemaMap.entrySet()) { preloadedSchemaUrls[i] = entry.getKey().intern(); Schema s = entry.getValue(); String u = entry.getKey(); if (isDataAttributeDroppingSchema(u)) { s = new DataAttributeDroppingSchemaWrapper( s); } if (isXmlLangAllowingSchema(u)) { s = new XmlLangAttributeDroppingSchemaWrapper(s); } if (isRoleAttributeFilteringSchema(u)) { s = new RoleAttributeFilteringSchemaWrapper(s); } preloadedSchemas[i] = s; i++; } log4j.debug("Reading spec."); html5spec = Html5SpecBuilder.parseSpec(LocalCacheEntityResolver.getHtml5SpecAsStream()); log4j.debug("Spec read."); log4j.debug("Initializing language identifier."); languageIdentifier = new LanguageIdentifier(); log4j.debug("Initialization complete."); } catch (Exception e) { throw new RuntimeException(e); } } protected static String scrub(CharSequence s) { return Normalizer.normalize( CharacterUtil.prudentlyScrubCharacterData(s), Normalizer.NFC); } private static boolean isDataAttributeDroppingSchema(String key) { return ("http://s.validator.nu/xhtml5.rnc".equals(key) || "http://s.validator.nu/html5.rnc".equals(key) || "http://s.validator.nu/html5-all.rnc".equals(key) || "http://s.validator.nu/xhtml5-all.rnc".equals(key) || "http://s.validator.nu/html5-its.rnc".equals(key) || 
"http://s.validator.nu/xhtml5-rdfalite.rnc".equals(key) || "http://s.validator.nu/html5-rdfalite.rnc".equals(key)); } private static boolean isXmlLangAllowingSchema(String key) { return ("http://s.validator.nu/xhtml5.rnc".equals(key) || "http://s.validator.nu/html5.rnc".equals(key) || "http://s.validator.nu/html5-all.rnc".equals(key) || "http://s.validator.nu/xhtml5-all.rnc".equals(key) || "http://s.validator.nu/html5-its.rnc".equals(key) || "http://s.validator.nu/xhtml5-rdfalite.rnc".equals(key) || "http://s.validator.nu/html5-rdfalite.rnc".equals(key)); } private static boolean isRoleAttributeFilteringSchema(String key) { return ("http://s.validator.nu/xhtml5.rnc".equals(key) || "http://s.validator.nu/html5.rnc".equals(key) || "http://s.validator.nu/html5-all.rnc".equals(key) || "http://s.validator.nu/xhtml5-all.rnc".equals(key) || "http://s.validator.nu/html5-its.rnc".equals(key) || "http://s.validator.nu/xhtml5-rdfalite.rnc".equals(key) || "http://s.validator.nu/html5-rdfalite.rnc".equals(key)); } private static boolean isCheckerUrl(String url) { if ("http://c.validator.nu/all/".equals(url) || "http://hsivonen.iki.fi/checkers/all/".equals(url)) { return true; } else if ("http://c.validator.nu/all-html4/".equals(url) || "http://hsivonen.iki.fi/checkers/all-html4/".equals(url)) { return true; } else if ("http://c.validator.nu/base/".equals(url)) { return true; } else if ("http://c.validator.nu/rdfalite/".equals(url)) { return true; } for (String checker : ALL_CHECKERS) { if (checker.equals(url)) { return true; } } return false; } /** * @param request * @param response */ VerifierServletTransaction(HttpServletRequest request, HttpServletResponse response) { this.request = request; this.response = response; } protected boolean willValidate() { if (methodIsGet) { return document != null; } else { // POST return true; } } void service() throws ServletException, IOException { this.methodIsGet = "GET".equals(request.getMethod()) || "HEAD".equals(request.getMethod()); 
this.out = response.getOutputStream(); try { request.setCharacterEncoding("utf-8"); } catch (NoSuchMethodError e) { log4j.debug("Vintage Servlet API doesn't support setCharacterEncoding().", e); } if (!methodIsGet) { postContentType = request.getContentType(); if (postContentType == null) { response.sendError(HttpServletResponse.SC_BAD_REQUEST, "Content-Type missing"); return; } else if (postContentType.trim().toLowerCase().startsWith( "application/x-www-form-urlencoded")) { response.sendError( HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE, "application/x-www-form-urlencoded not supported. Please use multipart/form-data."); return; } } String outFormat = request.getParameter("out"); if (outFormat == null) { outputFormat = OutputFormat.HTML; } else { if ("html".equals(outFormat)) { outputFormat = OutputFormat.HTML; } else if ("xhtml".equals(outFormat)) { outputFormat = OutputFormat.XHTML; } else if ("text".equals(outFormat)) { outputFormat = OutputFormat.TEXT; } else if ("gnu".equals(outFormat)) { outputFormat = OutputFormat.GNU; } else if ("xml".equals(outFormat)) { outputFormat = OutputFormat.XML; } else if ("json".equals(outFormat)) { outputFormat = OutputFormat.JSON; } else { response.sendError(HttpServletResponse.SC_BAD_REQUEST, "Unsupported output format"); return; } } if (!methodIsGet) { document = request.getHeader("Content-Location"); } if (document == null) { document = request.getParameter("doc"); } if (document == null) { document = request.getParameter("file"); } document = ("".equals(document)) ? 
null : document; String callback = null; if (outputFormat == OutputFormat.JSON) { callback = request.getParameter("callback"); if (callback != null) { Matcher m = JS_IDENTIFIER.matcher(callback); if (m.matches()) { if (Arrays.binarySearch(JS_RESERVED_WORDS, callback) >= 0) { response.sendError(HttpServletResponse.SC_BAD_REQUEST, "Callback is a reserved word."); return; } } else { response.sendError(HttpServletResponse.SC_BAD_REQUEST, "Callback is not a valid ECMA 262 IdentifierName."); return; } } } if (willValidate()) { response.setDateHeader("Expires", 0); response.setHeader("Cache-Control", "no-cache"); } else if (outputFormat == OutputFormat.HTML || outputFormat == OutputFormat.XHTML) { response.setDateHeader("Last-Modified", lastModified); } else { response.sendError(HttpServletResponse.SC_BAD_REQUEST, "No input document"); return; } setup(); if (request.getParameter("useragent") != null) { userAgent = scrub(request.getParameter("useragent")); } else { userAgent = USER_AGENT; } Object inputType = request.getAttribute("nu.validator.servlet.MultipartFormDataFilter.type"); showSource = (request.getParameter("showsource") != null); showSource = (showSource || "textarea".equals(inputType)); showOutline = (request.getParameter("showoutline") != null); if (request.getParameter("showimagereport") != null) { imageCollector = new ImageCollector(sourceCode); } String charset = request.getParameter("charset"); if (charset != null) { charset = scrub(charset.trim()); if (!"".equals(charset)) { charsetOverride = charset; } } String nsfilter = request.getParameter("nsfilter"); if (nsfilter != null) { for (String ns : SPACE.split(nsfilter)) { if (ns.length() > 0) { filteredNamespaces.add(ns); } } } boolean errorsOnly = ("error".equals(request.getParameter("level"))); boolean asciiQuotes = (request.getParameter("asciiquotes") != null); int lineOffset = 0; String lineOffsetStr = request.getParameter("lineoffset"); if (lineOffsetStr != null) { try { lineOffset = 
Integer.parseInt(lineOffsetStr); } catch (NumberFormatException e) { } } try { if (outputFormat == OutputFormat.HTML || outputFormat == OutputFormat.XHTML) { if (outputFormat == OutputFormat.HTML) { response.setContentType("text/html; charset=utf-8"); contentHandler = new HtmlSerializer(out); } else { response.setContentType("application/xhtml+xml"); contentHandler = new XmlSerializer(out); } emitter = new XhtmlSaxEmitter(contentHandler); errorHandler = new MessageEmitterAdapter(sourceCode, showSource, imageCollector, lineOffset, false, new XhtmlMessageEmitter(contentHandler)); PageEmitter.emit(contentHandler, this); } else { if (outputFormat == OutputFormat.TEXT) { response.setContentType("text/plain; charset=utf-8"); errorHandler = new MessageEmitterAdapter(sourceCode, showSource, null, lineOffset, false, new TextMessageEmitter(out, asciiQuotes)); } else if (outputFormat == OutputFormat.GNU) { response.setContentType("text/plain; charset=utf-8"); errorHandler = new MessageEmitterAdapter(sourceCode, showSource, null, lineOffset, false, new GnuMessageEmitter(out, asciiQuotes)); } else if (outputFormat == OutputFormat.XML) { response.setContentType("application/xml"); errorHandler = new MessageEmitterAdapter(sourceCode, showSource, null, lineOffset, false, new XmlMessageEmitter(new XmlSerializer(out))); } else if (outputFormat == OutputFormat.JSON) { if (callback == null) { response.setContentType("application/json; charset=utf-8"); } else { response.setContentType("application/javascript; charset=utf-8"); } errorHandler = new MessageEmitterAdapter(sourceCode, showSource, null, lineOffset, false, new JsonMessageEmitter( new nu.validator.json.Serializer(out), callback)); } else { throw new RuntimeException("Unreachable."); } errorHandler.setErrorsOnly(errorsOnly); validate(); } } catch (SAXException e) { throw new ServletException(e); } } /** * @throws ServletException */ protected void setup() throws ServletException { String preset = request.getParameter("preset"); 
if (preset != null && !"".equals(preset)) { schemaUrls = preset; } else { schemaUrls = request.getParameter("schema"); } if (schemaUrls == null) { schemaUrls = ""; } String parserStr = request.getParameter("parser"); if ("html".equals(parserStr)) { parser = ParserMode.HTML_AUTO; } else if ("xmldtd".equals(parserStr)) { parser = ParserMode.XML_EXTERNAL_ENTITIES_NO_VALIDATION; } else if ("xml".equals(parserStr)) { parser = ParserMode.XML_NO_EXTERNAL_ENTITIES; } else if ("html5".equals(parserStr)) { parser = ParserMode.HTML; } else if ("html4".equals(parserStr)) { parser = ParserMode.HTML401_STRICT; } else if ("html4tr".equals(parserStr)) { parser = ParserMode.HTML401_TRANSITIONAL; } // else auto laxType = (request.getParameter("laxtype") != null); } private boolean useHtml5Schema() { if ("".equals(schemaUrls)) { return false; } return (schemaUrls.contains("http://s.validator.nu/html5.rnc") || schemaUrls.contains("http://s.validator.nu/html5-all.rnc") || schemaUrls.contains("http://s.validator.nu/html5-its.rnc") || schemaUrls.contains("http://s.validator.nu/html5-rdfalite.rnc")); } private boolean isHtmlUnsafePreset() { if ("".equals(schemaUrls)) { return false; } boolean preset = false; for (String presetUrl : presetUrls) { if (presetUrl.equals(schemaUrls)) { preset = true; break; } } if (!preset) { return false; } return !(schemaUrls.startsWith("http://s.validator.nu/xhtml10/xhtml-basic.rnc") || schemaUrls.startsWith("http://s.validator.nu/xhtml10/xhtml-strict.rnc") || schemaUrls.startsWith("http://s.validator.nu/xhtml10/xhtml-transitional.rnc") || schemaUrls.startsWith("http://s.validator.nu/xhtml10/xhtml-frameset.rnc") || schemaUrls.startsWith("http://s.validator.nu/html5.rnc") || schemaUrls.startsWith("http://s.validator.nu/html5-all.rnc") || schemaUrls.startsWith("http://s.validator.nu/html5-its.rnc") || schemaUrls.startsWith("http://s.validator.nu/html5-rdfalite.rnc")); } /** * @throws SAXException */ @SuppressWarnings({ "deprecation", "unchecked" }) void 
// NOTE(review): the signature continues from the previous line of the file
// (modifiers/return type are outside this span).
// Runs the whole validation transaction: wires up the entity-resolver chain,
// builds the Jing property map, selects a parser and validator, parses the
// document, and finally emits results and usage statistics.
validate() throws SAXException {
    if (!willValidate()) {
        return;
    }
    boolean isHtmlOrXhtml = (outputFormat == OutputFormat.HTML
            || outputFormat == OutputFormat.XHTML);
    if (isHtmlOrXhtml) {
        // Flush buffered output before streaming validation results.
        try {
            out.flush();
        } catch (IOException e1) {
            throw new SAXException(e1);
        }
    }
    // Resolver chain: HTTP -> data: URIs -> local cache.
    httpRes = new PrudentHttpEntityResolver(SIZE_LIMIT, laxType,
            errorHandler);
    httpRes.setUserAgent(userAgent);
    dataRes = new DataUriEntityResolver(httpRes, laxType, errorHandler);
    contentTypeParser = new ContentTypeParser(errorHandler, laxType);
    entityResolver = new LocalCacheEntityResolver(dataRes);
    // RNC is only acceptable while fetching schemas, not the document.
    setAllowRnc(true);
    try {
        this.errorHandler.start(document);
        PropertyMapBuilder pmb = new PropertyMapBuilder();
        pmb.put(ValidateProperty.ERROR_HANDLER, errorHandler);
        pmb.put(ValidateProperty.ENTITY_RESOLVER, entityResolver);
        pmb.put(ValidateProperty.XML_READER_CREATOR,
                new VerifierServletXMLReaderCreator(errorHandler,
                        entityResolver));
        pmb.put(ValidateProperty.SCHEMA_RESOLVER, this);
        RngProperty.CHECK_ID_IDREF.add(pmb);
        jingPropertyMap = pmb.toPropertyMap();
        tryToSetupValidator();
        setAllowRnc(false);
        loadDocAndSetupParser();
        setErrorProfile();
        reader.setErrorHandler(errorHandler);
        contentType = documentInput.getType();
        sourceCode.initialize(documentInput);
        // Without a schema-selected validator we still check normalization.
        if (validator == null) {
            checkNormalization = true;
        }
        if (checkNormalization) {
            reader.setFeature(
                    "http://xml.org/sax/features/unicode-normalization-checking",
                    true);
        }
        // Wiretap records source locations (and base URIs when images are
        // collected) while SAX events flow on to the validator.
        WiretapXMLReaderWrapper wiretap = new WiretapXMLReaderWrapper(
                reader);
        ContentHandler recorder = sourceCode.getLocationRecorder();
        if (baseUriTracker == null) {
            wiretap.setWiretapContentHander(recorder);
        } else {
            wiretap.setWiretapContentHander(new CombineContentHandler(
                    recorder, baseUriTracker));
        }
        wiretap.setWiretapLexicalHandler((LexicalHandler) recorder);
        reader = wiretap;
        if (htmlParser != null) {
            htmlParser.addCharacterHandler(sourceCode);
            htmlParser.setMappingLangToXmlLang(true);
            htmlParser.setErrorHandler(errorHandler.getExactErrorHandler());
            htmlParser.setTreeBuilderErrorHandlerOverride(errorHandler);
            errorHandler.setHtml(true);
        } else if (xmlParser != null) {
            // this must be after wiretap!
            if (!filteredNamespaces.isEmpty()) {
                reader = new NamespaceDroppingXMLReaderWrapper(reader,
                        filteredNamespaces);
            }
            xmlParser.setErrorHandler(errorHandler.getExactErrorHandler());
            xmlParser.lockErrorHandler();
        } else {
            throw new RuntimeException("Bug. Unreachable.");
        }
        reader = new AttributesPermutingXMLReaderWrapper(reader); // make
                                                                  // RNG
                                                                  // validation
                                                                  // better
        if (charsetOverride != null) {
            String charset = documentInput.getEncoding();
            if (charset == null) {
                errorHandler.warning(new SAXParseException(
                        "Overriding document character encoding from none to \u201C"
                                + charsetOverride + "\u201D.", null));
            } else {
                errorHandler.warning(new SAXParseException(
                        "Overriding document character encoding from \u201C"
                                + charset + "\u201D to \u201C"
                                + charsetOverride + "\u201D.", null));
            }
            documentInput.setEncoding(charsetOverride);
        }
        if (showOutline) {
            reader = new OutlineBuildingXMLReaderWrapper(reader, request);
        }
        reader = new LanguageDetectingXMLReaderWrapper(reader,
                languageIdentifier);
        // NOTE(review): self-assignment of the content handler is a no-op —
        // confirm whether a different handler was intended here.
        reader.setContentHandler(reader.getContentHandler());
        reader.parse(documentInput);
        if (showOutline) {
            outline = (Deque<Section>) request.getAttribute(
                    "http://validator.nu/properties/document-outline");
        }
    } catch (CannotRecoverException e) {
        // Already reported downstream; nothing further to do.
    } catch (ChangingEncodingException e) {
        // Parse restarted with the new encoding; nothing further to do.
    } catch (SocketTimeoutException e) {
        // NOTE(review): wrapping with a null cause discards the original
        // stack trace — consider passing e as the cause instead.
        errorHandler.ioError(new IOException(e.getMessage(), null));
    } catch (ConnectTimeoutException e) {
        // NOTE(review): same null-cause wrapping as above.
        errorHandler.ioError(new IOException(e.getMessage(), null));
    } catch (TooManyErrorsException e) {
        errorHandler.fatalError(e);
    } catch (SAXException e) {
        // Already surfaced via the error handler; log for diagnostics only.
        log4j.debug("SAXException", e);
    } catch (IOException e) {
        isHtmlOrXhtml = false;
        errorHandler.ioError(e);
    } catch (IncorrectSchemaException e) {
        log4j.debug("IncorrectSchemaException", e);
        errorHandler.schemaError(e);
    } catch (RuntimeException e) {
        isHtmlOrXhtml = false;
        log4j.error("RuntimeException, doc: " + document + " schema: "
                + schemaUrls + " lax: " + laxType, e);
        errorHandler.internalError(
                e,
                "Oops. That was not supposed to happen. A bug manifested itself in the application internals. Unable to continue. Sorry. The admin was notified.");
    } catch (Error e) {
        isHtmlOrXhtml = false;
        log4j.error("Error, doc: " + document + " schema: " + schemaUrls
                + " lax: " + laxType, e);
        errorHandler.internalError(
                e,
                "Oops. That was not supposed to happen. A bug manifested itself in the application internals. Unable to continue. Sorry. The admin was notified.");
    } finally {
        errorHandler.end(successMessage(), failureMessage());
        gatherStatistics();
    }
    if (isHtmlOrXhtml) {
        XhtmlOutlineEmitter outlineEmitter = new XhtmlOutlineEmitter(
                contentHandler, outline);
        outlineEmitter.emitHeadings();
        outlineEmitter.emit();
        emitDetails();
        StatsEmitter.emit(contentHandler, this);
    }
}

// Records per-request usage statistics into the global Statistics
// singleton (parser mode, input method, output format, schema choice).
private void gatherStatistics() {
    Statistics stats = Statistics.STATISTICS;
    if (stats == null) {
        return;
    }
    synchronized (stats) {
        stats.incrementTotal();
        if (charsetOverride != null) {
            stats.incrementField(Statistics.Field.CUSTOM_ENC);
        }
        switch (parser) {
            case HTML401_STRICT:
            case HTML401_TRANSITIONAL:
                stats.incrementField(Statistics.Field.PARSER_HTML4);
                break;
            case XML_EXTERNAL_ENTITIES_NO_VALIDATION:
                stats.incrementField(Statistics.Field.PARSER_XML_EXTERNAL);
                break;
            case AUTO:
            case HTML:
            case HTML_AUTO:
            case XML_NO_EXTERNAL_ENTITIES:
            default:
                break;
        }
        if (!filteredNamespaces.isEmpty()) {
            stats.incrementField(Statistics.Field.XMLNS_FILTER);
        }
        if (laxType) {
            stats.incrementField(Statistics.Field.LAX_TYPE);
        }
        if (imageCollector != null) {
            stats.incrementField(Statistics.Field.IMAGE_REPORT);
        }
        if (showSource) {
            stats.incrementField(Statistics.Field.SHOW_SOURCE);
        }
        if (showOutline) {
            stats.incrementField(Statistics.Field.SHOW_OUTLINE);
        }
        if (methodIsGet) {
            stats.incrementField(Statistics.Field.INPUT_GET);
        } else { // POST
            stats.incrementField(Statistics.Field.INPUT_POST);
            Object
inputType = request.getAttribute("nu.validator.servlet.MultipartFormDataFilter.type");
            // Distinguish textarea paste vs. file upload vs. raw entity body.
            if ("textarea".equals(inputType)) {
                stats.incrementField(Statistics.Field.INPUT_TEXT_FIELD);
            } else if ("file".equals(inputType)) {
                stats.incrementField(Statistics.Field.INPUT_FILE_UPLOAD);
            } else {
                stats.incrementField(Statistics.Field.INPUT_ENTITY_BODY);
            }
        }
        if (htmlParser != null) {
            stats.incrementField(Statistics.Field.INPUT_HTML);
        }
        if (xmlParser != null) {
            stats.incrementField(Statistics.Field.INPUT_XML);
        }
        switch (outputFormat) {
            case GNU:
                stats.incrementField(Statistics.Field.OUTPUT_GNU);
                break;
            case HTML:
                stats.incrementField(Statistics.Field.OUTPUT_HTML);
                break;
            case JSON:
                stats.incrementField(Statistics.Field.OUTPUT_JSON);
                break;
            case TEXT:
                stats.incrementField(Statistics.Field.OUTPUT_TEXT);
                break;
            case XHTML:
                stats.incrementField(Statistics.Field.OUTPUT_XHTML);
                break;
            case XML:
                stats.incrementField(Statistics.Field.OUTPUT_XML);
                break;
            case RELAXED:
            case SOAP:
            case UNICORN:
            default:
                break;
        }
        if (schemaListForStats == null) {
            stats.incrementField(Statistics.Field.LOGIC_ERROR);
        } else {
            boolean preset = false;
            for (int i = 0; i < presetUrls.length; i++) {
                if (presetUrls[i].equals(schemaListForStats)) {
                    preset = true;
                    if (externalSchema || externalSchematron) {
                        stats.incrementField(Statistics.Field.LOGIC_ERROR);
                    } else {
                        stats.incrementField(Statistics.Field.PRESET_SCHEMA);
                        /*
                         * XXX WARNING WARNING: These mappings correspond to
                         * values in the presets.txt file in the validator
                         * source repo. They might be bogus if a custom
                         * presets file is used instead.
                         */
                        switch (i) {
                            case 0:
                            case 5:
                                stats.incrementField(Statistics.Field.HTML5_SCHEMA);
                                break;
                            case 1:
                            case 6:
                                stats.incrementField(Statistics.Field.HTML5_RDFA_LITE_SCHEMA);
                                break;
                            case 2:
                                stats.incrementField(Statistics.Field.HTML4_STRICT_SCHEMA);
                                break;
                            case 3:
                                stats.incrementField(Statistics.Field.HTML4_TRANSITIONAL_SCHEMA);
                                break;
                            case 4:
                                stats.incrementField(Statistics.Field.HTML4_FRAMESET_SCHEMA);
                                break;
                            case 7:
                                stats.incrementField(Statistics.Field.XHTML1_COMPOUND_SCHEMA);
                                break;
                            case 8:
                                stats.incrementField(Statistics.Field.SVG_SCHEMA);
                                break;
                            default:
                                stats.incrementField(Statistics.Field.LOGIC_ERROR);
                                break;
                        }
                    }
                    break;
                }
            }
            if (!preset && !externalSchema) {
                stats.incrementField(Statistics.Field.BUILT_IN_NON_PRESET);
            }
        }
        if ("".equals(schemaUrls)) {
            stats.incrementField(Statistics.Field.AUTO_SCHEMA);
            if (externalSchema) {
                stats.incrementField(Statistics.Field.LOGIC_ERROR);
            }
        } else if (externalSchema) {
            if (externalSchematron) {
                stats.incrementField(Statistics.Field.EXTERNAL_SCHEMA_SCHEMATRON);
            } else {
                stats.incrementField(Statistics.Field.EXTERNAL_SCHEMA_NON_SCHEMATRON);
            }
        } else if (externalSchematron) {
            stats.incrementField(Statistics.Field.LOGIC_ERROR);
        }
    }
}

/**
 * Message emitted when the document validated without errors.
 *
 * @return the success message
 * @throws SAXException
 */
protected String successMessage() throws SAXException {
    return "The document validates according to the specified schema(s).";
}

/** Message emitted when validation produced errors. */
protected String failureMessage() throws SAXException {
    return "There were errors.";
}

// Emits the "details" box (schema used, parser used, externally specified
// encoding) on HTML result pages.
void emitDetails() throws SAXException {
    Object inputType = request.getAttribute("nu.validator.servlet.MultipartFormDataFilter.type");
    String type = documentInput != null ?
documentInput.getType() : "";
    if ("text/html".equals(type) || "text/html-sandboxed".equals(type)) {
        attrs.clear();
        emitter.startElementWithClass("div", "details");
        if (schemaIsDefault) {
            emitter.startElementWithClass("p", "msgschema");
            emitter.characters(String.format("Used the schema for %s.",
                    getPresetLabel(HTML5_SCHEMA)));
            emitter.endElement("p");
        }
        emitter.startElementWithClass("p", "msgmediatype");
        if (!isHtmlUnsafePreset()) {
            emitter.characters("Used the HTML parser.");
        }
        if (methodIsGet && !"textarea".equals(inputType)
                && !"file".equals(inputType)) {
            String charset = documentInput.getEncoding();
            if (charset != null) {
                emitter.characters(String.format(
                        " Externally specified character encoding was %s.",
                        charset));
            }
        }
        // NOTE(review): the "msgmediatype" paragraph opened above is never
        // explicitly closed before this endElement("div") — confirm whether
        // an endElement("p") was intended here.
        emitter.endElement("div");
    }
}

/**
 * Builds the validator chain from the user-supplied schema URL list.
 *
 * @throws SAXException
 * @throws IOException
 * @throws IncorrectSchemaException
 */
protected void tryToSetupValidator() throws SAXException, IOException,
        IncorrectSchemaException {
    validator = validatorByUrls(schemaUrls);
}

// Applies the requested error profile (pedagogical/polyglot) to the HTML
// parser; any other value is treated as permissive and changes nothing.
protected void setErrorProfile() {
    profile = request.getParameter("profile");
    HashMap<String, String> profileMap = new HashMap<>();
    if ("pedagogical".equals(profile)) {
        profileMap.put("xhtml1", "warn");
    } else if ("polyglot".equals(profile)) {
        profileMap.put("xhtml1", "warn");
        profileMap.put("xhtml2", "warn");
    } else {
        return; // presumed to be permissive
    }
    htmlParser.setErrorProfile(profileMap);
}

/**
 * Fetches the document and sets up the HTML or XML parser according to
 * the requested parser mode and (in AUTO mode) the Content-Type.
 *
 * @throws SAXException
 * @throws IOException
 * @throws IncorrectSchemaException
 * @throws SAXNotRecognizedException
 * @throws SAXNotSupportedException
 */
protected void loadDocAndSetupParser() throws SAXException, IOException,
        IncorrectSchemaException, SAXNotRecognizedException,
        SAXNotSupportedException {
    switch (parser) {
        case HTML_AUTO:
        case HTML:
        case HTML401_STRICT:
        case HTML401_TRANSITIONAL:
            if (isHtmlUnsafePreset()) {
                String message = "The chosen preset schema is not appropriate for HTML.";
                SAXException se = new SAXException(message);
                errorHandler.schemaError(se);
                throw se;
            }
            setAllowGenericXml(false);
            setAllowHtml(true);
            setAcceptAllKnownXmlTypes(false);
            setAllowXhtml(false);
            loadDocumentInput();
            newHtmlParser();
            DoctypeExpectation doctypeExpectation;
            int schemaId;
            switch (parser) {
                case HTML:
                    doctypeExpectation = DoctypeExpectation.HTML;
                    schemaId = HTML5_SCHEMA;
                    break;
                case HTML401_STRICT:
                    doctypeExpectation = DoctypeExpectation.HTML401_STRICT;
                    schemaId = XHTML1STRICT_SCHEMA;
                    break;
                case HTML401_TRANSITIONAL:
                    doctypeExpectation = DoctypeExpectation.HTML401_TRANSITIONAL;
                    schemaId = XHTML1TRANSITIONAL_SCHEMA;
                    break;
                default:
                    doctypeExpectation = DoctypeExpectation.AUTO;
                    schemaId = 0;
                    break;
            }
            htmlParser.setDoctypeExpectation(doctypeExpectation);
            htmlParser.setDocumentModeHandler(this);
            reader = htmlParser;
            if (validator == null) {
                validator = validatorByDoctype(schemaId);
            }
            if (validator != null) {
                reader.setContentHandler(validator.getContentHandler());
            }
            break;
        case XML_NO_EXTERNAL_ENTITIES:
        case XML_EXTERNAL_ENTITIES_NO_VALIDATION:
            setAllowGenericXml(true);
            setAllowHtml(false);
            setAcceptAllKnownXmlTypes(true);
            setAllowXhtml(true);
            loadDocumentInput();
            setupXmlParser();
            break;
        default:
            // AUTO: decide from the document's Content-Type.
            setAllowGenericXml(true);
            setAllowHtml(true);
            setAcceptAllKnownXmlTypes(true);
            setAllowXhtml(true);
            loadDocumentInput();
            String type = documentInput.getType();
            if ("text/html".equals(type)
                    || "text/html-sandboxed".equals(type)) {
                if (isHtmlUnsafePreset()) {
                    String message = "The Content-Type was \u201C" + type
                            + "\u201D, but the chosen preset schema is not appropriate for HTML.";
                    SAXException se = new SAXException(message);
                    errorHandler.schemaError(se);
                    throw se;
                }
                newHtmlParser();
                if (useHtml5Schema()) {
                    htmlParser.setDoctypeExpectation(DoctypeExpectation.HTML);
                } else {
                    htmlParser.setDoctypeExpectation(DoctypeExpectation.AUTO);
                }
                htmlParser.setDocumentModeHandler(this);
                reader = htmlParser;
                if (validator != null) {
                    reader.setContentHandler(validator.getContentHandler());
                }
            } else {
                errorHandler.info("The Content-Type was \u201C" + type
                        + "\u201D. Using the XML parser (not resolving external entities).");
                setupXmlParser();
            }
            break;
    }
}

// Creates and configures the HTML5 parser with the policies this service
// relies on (permissive comments/names, fatal streamability violations).
protected void newHtmlParser() {
    htmlParser = new HtmlParser();
    htmlParser.setCommentPolicy(XmlViolationPolicy.ALLOW);
    htmlParser.setContentNonXmlCharPolicy(XmlViolationPolicy.ALLOW);
    htmlParser.setContentSpacePolicy(XmlViolationPolicy.ALTER_INFOSET);
    htmlParser.setNamePolicy(XmlViolationPolicy.ALLOW);
    htmlParser.setStreamabilityViolationPolicy(XmlViolationPolicy.FATAL);
    htmlParser.setXmlnsPolicy(XmlViolationPolicy.ALTER_INFOSET);
    htmlParser.setMappingLangToXmlLang(true);
    htmlParser.setHtml4ModeCompatibleWithXhtml1Schemata(true);
    htmlParser.setHeuristics(Heuristics.ALL);
}

// Maps a doctype-derived schema id to the validator for the matching
// preset; schemaId 0 means "no doctype-based schema".
protected Validator validatorByDoctype(int schemaId) throws SAXException,
        IOException, IncorrectSchemaException {
    if (schemaId == 0) {
        return null;
    }
    for (int i = 0; i < presetDoctypes.length; i++) {
        if (presetDoctypes[i] == schemaId) {
            return validatorByUrls(presetUrls[i]);
        }
    }
    throw new RuntimeException("Doctype mappings not initialized properly.");
}

/**
 * Sets up the XML parser chain (SAX driver + id filter), enabling
 * external entities only in the explicit "load external entities" mode.
 *
 * @throws SAXNotRecognizedException
 * @throws SAXNotSupportedException
 */
protected void setupXmlParser() throws SAXNotRecognizedException,
        SAXNotSupportedException {
    xmlParser = new SAXDriver();
    xmlParser.setCharacterHandler(sourceCode);
    if (lexicalHandler != null) {
        xmlParser.setProperty("http://xml.org/sax/properties/lexical-handler",
                lexicalHandler);
    }
    reader = new IdFilter(xmlParser);
    reader.setFeature("http://xml.org/sax/features/string-interning", true);
    reader.setFeature(
            "http://xml.org/sax/features/external-general-entities",
            parser == ParserMode.XML_EXTERNAL_ENTITIES_NO_VALIDATION);
    reader.setFeature(
            "http://xml.org/sax/features/external-parameter-entities",
            parser == ParserMode.XML_EXTERNAL_ENTITIES_NO_VALIDATION);
    if (parser == ParserMode.XML_EXTERNAL_ENTITIES_NO_VALIDATION) {
        reader.setEntityResolver(entityResolver);
    } else {
        reader.setEntityResolver(new NullEntityResolver());
    }
    if (validator == null) {
        bufferingRootNamespaceSniffer = new
BufferingRootNamespaceSniffer( this); reader.setContentHandler(bufferingRootNamespaceSniffer); } else { reader.setContentHandler(new RootNamespaceSniffer(this, validator.getContentHandler())); reader.setDTDHandler(validator.getDTDHandler()); } } /** * @param validator * @return * @throws SAXException * @throws IOException * @throws IncorrectSchemaException */ private Validator validatorByUrls(String schemaList) throws SAXException, IOException, IncorrectSchemaException { System.setProperty("nu.validator.schema.rdfa-full", "0"); schemaListForStats = schemaList; Validator v = null; String[] schemas = SPACE.split(schemaList); for (int i = schemas.length - 1; i > -1; i String url = schemas[i]; if ("http://s.validator.nu/html5-all.rnc".equals(url)) { System.setProperty("nu.validator.schema.rdfa-full", "1"); } if ("http://c.validator.nu/all/".equals(url) || "http://hsivonen.iki.fi/checkers/all/".equals(url)) { for (String checker : ALL_CHECKERS) { v = combineValidatorByUrl(v, checker); } } else if ("http://c.validator.nu/all-html4/".equals(url) || "http://hsivonen.iki.fi/checkers/all-html4/".equals(url)) { for (String checker : ALL_CHECKERS_HTML4) { v = combineValidatorByUrl(v, checker); } } else { v = combineValidatorByUrl(v, url); } } if (imageCollector != null && v != null) { v = new CombineValidator(imageCollector, v); } return v; } /** * @param val * @param url * @return * @throws SAXException * @throws IOException * @throws IncorrectSchemaException */ private Validator combineValidatorByUrl(Validator val, String url) throws SAXException, IOException, IncorrectSchemaException { if (!"".equals(url)) { Validator v = validatorByUrl(url); if (val == null) { val = v; } else { val = new CombineValidator(v, val); } } return val; } /** * @param url * @return * @throws SAXException * @throws IOException * @throws IncorrectSchemaException */ private Validator validatorByUrl(String url) throws SAXException, IOException, IncorrectSchemaException { if 
(loadedValidatorUrls.contains(url)) { return null; } loadedValidatorUrls.add(url); if ("http://s.validator.nu/xhtml5.rnc".equals(url) || "http://s.validator.nu/html5.rnc".equals(url) || "http://s.validator.nu/html5-all.rnc".equals(url) || "http://s.validator.nu/xhtml5-all.rnc".equals(url) || "http://s.validator.nu/html5-its.rnc".equals(url) || "http://s.validator.nu/xhtml5-rdfalite.rnc".equals(url) || "http://s.validator.nu/html5-rdfalite.rnc".equals(url)) { errorHandler.setSpec(html5spec); } Schema sch = resolveSchema(url, jingPropertyMap); Validator validator = sch.createValidator(jingPropertyMap); if (validator.getContentHandler() instanceof XmlPiChecker) { lexicalHandler = (LexicalHandler) validator.getContentHandler(); } return validator; } @Override public Schema resolveSchema(String url, PropertyMap options) throws SAXException, IOException, IncorrectSchemaException { int i = Arrays.binarySearch(preloadedSchemaUrls, url); if (i > -1) { Schema rv = preloadedSchemas[i]; if (options.contains(WrapProperty.ATTRIBUTE_OWNER)) { if (rv instanceof CheckerSchema) { errorHandler.error(new SAXParseException( "A non-schema checker cannot be used as an attribute schema.", null, url, -1, -1)); throw new IncorrectSchemaException(); } else { // ugly fall through } } else { return rv; } } externalSchema = true; TypedInputSource schemaInput = (TypedInputSource) entityResolver.resolveEntity( null, url); SchemaReader sr = null; if ("application/relax-ng-compact-syntax".equals(schemaInput.getType())) { sr = CompactSchemaReader.getInstance(); } else { sr = new AutoSchemaReader(); } Schema sch = sr.createSchema(schemaInput, options); if (Statistics.STATISTICS != null && "com.thaiopensource.validate.schematron.SchemaImpl".equals(sch.getClass().getName())) { externalSchematron = true; } return sch; } /** * @param url * @return * @throws SAXException * @throws IOException * @throws IncorrectSchemaException */ private static Schema schemaByUrl(String url, EntityResolver resolver, 
PropertyMap pMap) throws SAXException, IOException, IncorrectSchemaException { log4j.debug("Will load schema: " + url); TypedInputSource schemaInput; try { schemaInput = (TypedInputSource) resolver.resolveEntity( null, url); } catch (ClassCastException e) { log4j.fatal(url, e); throw e; } SchemaReader sr = null; if ("application/relax-ng-compact-syntax".equals(schemaInput.getType())) { sr = CompactSchemaReader.getInstance(); } else { sr = new AutoSchemaReader(); } Schema sch = sr.createSchema(schemaInput, pMap); return sch; } /** * @throws SAXException */ void emitTitle(boolean markupAllowed) throws SAXException { if (willValidate()) { emitter.characters(RESULTS_TITLE); emitter.characters(FOR); if (document != null && document.length() > 0) { emitter.characters(scrub(shortenDataUri(document))); } else if (request.getAttribute("nu.validator.servlet.MultipartFormDataFilter.filename") != null) { emitter.characters("uploaded file " + scrub(request.getAttribute( "nu.validator.servlet.MultipartFormDataFilter.filename").toString())); } else { emitter.characters("contents of text-input area"); } } else { emitter.characters(SERVICE_TITLE); if (markupAllowed && System.getProperty("nu.validator.servlet.service-name", "").equals("Validator.nu")) { emitter.startElement("span"); emitter.characters(LIVING_VERSION); emitter.endElement("span"); } } } protected String shortenDataUri(String uri) { if (DataUri.startsWithData(uri)) { return "data:\u2026"; } else { return uri; } } void emitForm() throws SAXException { attrs.clear(); attrs.addAttribute("method", "get"); // attrs.addAttribute("action", request.getRequestURL().toString()); if (isSimple()) { attrs.addAttribute("class", "simple"); } // attrs.addAttribute("onsubmit", "formSubmission()"); emitter.startElement("form", attrs); emitFormContent(); emitter.endElement("form"); } protected boolean isSimple() { return false; } /** * @throws SAXException */ protected void emitFormContent() throws SAXException { 
FormEmitter.emit(contentHandler, this);
}

// Emits the schema-list text input (space-separated schema IRIs).
void emitSchemaField() throws SAXException {
    attrs.clear();
    attrs.addAttribute("name", "schema");
    attrs.addAttribute("id", "schema");
    // attrs.addAttribute("onchange", "schemaChanged();");
    attrs.addAttribute(
            "pattern",
            "(?:(?:(?:https?://\\S+)|(?:data:\\S+))(?:\\s+(?:(?:https?://\\S+)|(?:data:\\S+)))*)?");
    attrs.addAttribute("title",
            "Space-separated list of schema IRIs. (Leave blank to let the service guess.)");
    if (schemaUrls != null) {
        attrs.addAttribute("value", scrub(schemaUrls));
    }
    emitter.startElement("input", attrs);
    emitter.endElement("input");
}

// Emits the document-URL input field (http, https or data IRIs only).
void emitDocField() throws SAXException {
    attrs.clear();
    attrs.addAttribute("type", "url");
    attrs.addAttribute("name", "doc");
    attrs.addAttribute("id", "doc");
    attrs.addAttribute("pattern", "(?:(?:https?://.+)|(?:data:.+))?");
    attrs.addAttribute("title",
            "Absolute IRI (http, https or data only) of the document to be checked.");
    attrs.addAttribute("tabindex", "0");
    attrs.addAttribute("autofocus", "autofocus");
    if (document != null) {
        attrs.addAttribute("value", scrub(document));
    }
    Object att = request.getAttribute("nu.validator.servlet.MultipartFormDataFilter.type");
    if (att != null) {
        attrs.addAttribute("class", att.toString());
    }
    emitter.startElement("input", attrs);
    emitter.endElement("input");
}

/**
 * Hook for emitting the schema-loading duration; no-op in this base class.
 *
 * @throws SAXException
 */
void emitSchemaDuration() throws SAXException {
}

/**
 * Hook for emitting the document-loading duration; no-op in this base class.
 *
 * @throws SAXException
 */
void emitDocDuration() throws SAXException {
}

/**
 * Emits total elapsed milliseconds since this transaction started.
 *
 * @throws SAXException
 */
void emitTotalDuration() throws SAXException {
    emitter.characters("" + (System.currentTimeMillis() - start));
}

/**
 * Emits one dropdown option per preset schema (none preselected).
 *
 * @throws SAXException
 */
void emitPresetOptions() throws SAXException {
    for (int i = 0; i < presetUrls.length; i++) {
        emitter.option(presetLabels[i], presetUrls[i], false);
    }
}

/**
 * Emits the parser-mode dropdown options, preselecting the current mode.
 *
 * @throws SAXException
 */
void emitParserOptions() throws SAXException {
    emitter.option("Automatically from Content-Type", "",
            (parser == ParserMode.AUTO));
    emitter.option("XML; don\u2019t load external entities", "xml",
            (parser == ParserMode.XML_NO_EXTERNAL_ENTITIES));
    emitter.option("XML; load external entities", "xmldtd",
            (parser == ParserMode.XML_EXTERNAL_ENTITIES_NO_VALIDATION));
    emitter.option("HTML; flavor from doctype", "html",
            (parser == ParserMode.HTML_AUTO));
    emitter.option("HTML5", "html5", (parser == ParserMode.HTML));
    emitter.option("HTML 4.01 Strict", "html4",
            (parser == ParserMode.HTML401_STRICT));
    emitter.option("HTML 4.01 Transitional", "html4tr",
            (parser == ParserMode.HTML401_TRANSITIONAL));
}

/**
 * Emits the error-profile dropdown options, preselecting the current one.
 *
 * @throws SAXException
 */
void emitProfileOptions() throws SAXException {
    profile = request.getParameter("profile");
    emitter.option("Permissive: only what the spec requires", "",
            ("".equals(profile)));
    emitter.option("Pedagogical: suitable for teaching purposes",
            "pedagogical", ("pedagogical".equals(profile)));
    emitter.option("Polyglot: works both as HTML and as XML", "polyglot",
            ("polyglot".equals(profile)));
}

/**
 * Emits the "be lax about content type" checkbox.
 *
 * @throws SAXException
 */
void emitLaxTypeField() throws SAXException {
    emitter.checkbox("laxtype", "yes", laxType);
}

/**
 * Emits the "show source" checkbox.
 *
 * @throws SAXException
 */
void emitShowSourceField() throws SAXException {
    emitter.checkbox("showsource", "yes", showSource);
}

/**
 * Emits the "show outline" checkbox.
 *
 * @throws SAXException
 */
void emitShowOutlineField() throws SAXException {
    emitter.checkbox("showoutline", "yes", showOutline);
}

/**
 * Emits the "show image report" checkbox.
 *
 * @throws SAXException
 */
void emitShowImageReportField() throws SAXException {
    emitter.checkbox("showimagereport", "yes", imageCollector != null);
}

// Called when the root namespace becomes known: picks a preset validator
// for that namespace (if none was chosen yet) and warns when the root
// namespace does not match the document's Content-Type.
void rootNamespace(String namespace, Locator locator) throws SAXException {
    if (validator == null) {
        int index = -1;
        for (int i = 0; i < presetNamespaces.length; i++) {
            if (namespace.equals(presetNamespaces[i])) {
                index = i;
                break;
            }
        }
        if (index == -1) {
            String message = "Cannot find preset schema for namespace: \u201C"
                    + namespace + "\u201D.";
            SAXException se = new SAXException(message);
            errorHandler.schemaError(se);
            return;
        }
        String label = presetLabels[index];
        String urls =
: ""));
                    validator = validatorByDoctype(XHTML1TRANSITIONAL_SCHEMA);
                } else if ("-//W3C//DTD XHTML 1.0 Strict//EN".equals(publicIdentifier)) {
                    errorHandler.info("XHTML 1.0 Strict doctype seen. Appendix C is not supported. Proceeding anyway for your convenience. The parser is still an HTML parser, so namespace processing is not performed and \u201Cxml:*\u201D attributes are not supported. Using the schema for "
                            + getPresetLabel(XHTML1STRICT_SCHEMA)
                            + "."
                            + (html4SpecificAdditionalErrorChecks ? " HTML4-specific tokenization errors are enabled."
                                    : ""));
                    validator = validatorByDoctype(XHTML1STRICT_SCHEMA);
                } else if ("-//W3C//DTD HTML 4.01 Transitional//EN".equals(publicIdentifier)) {
                    errorHandler.info("HTML 4.01 Transitional doctype seen. Using the schema for "
                            + getPresetLabel(XHTML1TRANSITIONAL_SCHEMA)
                            + "."
                            + (html4SpecificAdditionalErrorChecks ? ""
                                    : " HTML4-specific tokenization errors are not enabled."));
                    validator = validatorByDoctype(XHTML1TRANSITIONAL_SCHEMA);
                } else if ("-//W3C//DTD HTML 4.01//EN".equals(publicIdentifier)) {
                    errorHandler.info("HTML 4.01 Strict doctype seen. Using the schema for "
                            + getPresetLabel(XHTML1STRICT_SCHEMA)
                            + "."
                            + (html4SpecificAdditionalErrorChecks ? ""
                                    : " HTML4-specific tokenization errors are not enabled."));
                    validator = validatorByDoctype(XHTML1STRICT_SCHEMA);
                } else if ("-//W3C//DTD HTML 4.0 Transitional//EN".equals(publicIdentifier)) {
                    errorHandler.info("Legacy HTML 4.0 Transitional doctype seen. Please consider using HTML 4.01 Transitional instead. Proceeding anyway for your convenience with the schema for "
                            + getPresetLabel(XHTML1TRANSITIONAL_SCHEMA)
                            + "."
                            + (html4SpecificAdditionalErrorChecks ? ""
                                    : " HTML4-specific tokenization errors are not enabled."));
                    validator = validatorByDoctype(XHTML1TRANSITIONAL_SCHEMA);
                } else if ("-//W3C//DTD HTML 4.0//EN".equals(publicIdentifier)) {
                    errorHandler.info("Legacy HTML 4.0 Strict doctype seen. Please consider using HTML 4.01 instead. Proceeding anyway for your convenience with the schema for "
                            + getPresetLabel(XHTML1STRICT_SCHEMA)
                            + "."
                            + (html4SpecificAdditionalErrorChecks ? ""
                                    : " HTML4-specific tokenization errors are not enabled."));
                    validator = validatorByDoctype(XHTML1STRICT_SCHEMA);
                }
            } else {
                // Doctype sniffing disabled: fall back to the HTML5 schema.
                schemaIsDefault = true;
                if (html4SpecificAdditionalErrorChecks) {
                    errorHandler.info("HTML4-specific tokenization errors are enabled.");
                }
                validator = validatorByDoctype(HTML5_SCHEMA);
            }
        } catch (IncorrectSchemaException | IOException e) {
            // At this point the schema comes from memory.
            throw new RuntimeException(e);
        }
        // Start the late-selected validator and route further events to it.
        ContentHandler ch = validator.getContentHandler();
        ch.setDocumentLocator(htmlParser.getDocumentLocator());
        ch.startDocument();
        reader.setContentHandler(ch);
    } else {
        if (html4SpecificAdditionalErrorChecks) {
            errorHandler.info("HTML4-specific tokenization errors are enabled.");
        }
    }
}

// Returns the display label for a preset schema id, or "unknown".
private String getPresetLabel(int schemaId) {
    for (int i = 0; i < presetDoctypes.length; i++) {
        if (presetDoctypes[i] == schemaId) {
            return presetLabels[i];
        }
    }
    return "unknown";
}

/**
 * @param acceptAllKnownXmlTypes
 * @see nu.validator.xml.ContentTypeParser#setAcceptAllKnownXmlTypes(boolean)
 */
protected void setAcceptAllKnownXmlTypes(boolean acceptAllKnownXmlTypes) {
    // Keep the content-type parser and both resolvers in agreement.
    contentTypeParser.setAcceptAllKnownXmlTypes(acceptAllKnownXmlTypes);
    dataRes.setAcceptAllKnownXmlTypes(acceptAllKnownXmlTypes);
    httpRes.setAcceptAllKnownXmlTypes(acceptAllKnownXmlTypes);
}

/**
 * @param allowGenericXml
 * @see nu.validator.xml.ContentTypeParser#setAllowGenericXml(boolean)
 */
protected void setAllowGenericXml(boolean allowGenericXml) {
    contentTypeParser.setAllowGenericXml(allowGenericXml);
    httpRes.setAllowGenericXml(allowGenericXml);
    dataRes.setAllowGenericXml(allowGenericXml);
}

/**
 * @param allowHtml
 * @see nu.validator.xml.ContentTypeParser#setAllowHtml(boolean)
 */
protected void setAllowHtml(boolean allowHtml) {
    contentTypeParser.setAllowHtml(allowHtml);
    httpRes.setAllowHtml(allowHtml);
    dataRes.setAllowHtml(allowHtml);
}

/**
 * @param allowRnc
 * @see nu.validator.xml.ContentTypeParser#setAllowRnc(boolean)
 */
protected void setAllowRnc(boolean allowRnc) {
    contentTypeParser.setAllowRnc(allowRnc);
    httpRes.setAllowRnc(allowRnc);
    dataRes.setAllowRnc(allowRnc);
    entityResolver.setAllowRnc(allowRnc);
}

/**
 * @param allowXhtml
 * @see nu.validator.xml.ContentTypeParser#setAllowXhtml(boolean)
 */
protected void setAllowXhtml(boolean allowXhtml) {
    contentTypeParser.setAllowXhtml(allowXhtml);
    httpRes.setAllowXhtml(allowXhtml);
    dataRes.setAllowXhtml(allowXhtml);
}

/**
 * Resolves the document input: via the entity resolver for GET, or from
 * the request body (size-limited) for POST.
 *
 * @throws SAXException
 * @throws IOException
 */
protected void loadDocumentInput() throws SAXException, IOException {
    if (methodIsGet) {
        documentInput = (TypedInputSource) entityResolver.resolveEntity(
                null, document);
        errorHandler.setLoggingOk(true);
    } else { // POST
        long len = request.getContentLength();
        if (len > SIZE_LIMIT) {
            throw new StreamBoundException("Resource size exceeds limit.");
        }
        documentInput = contentTypeParser.buildTypedInputSource(document,
                null, postContentType);
        // Unknown content length: wrap the stream so the limit still holds.
        documentInput.setByteStream(len < 0 ?
new BoundedInputStream(
                request.getInputStream(), SIZE_LIMIT, document)
                : request.getInputStream());
        documentInput.setSystemId(request.getHeader("Content-Location"));
    }
    if (imageCollector != null) {
        // Image collection needs base-URI and language tracking.
        baseUriTracker = new BaseUriTracker(documentInput.getSystemId(),
                documentInput.getLanguage());
        imageCollector.initializeContext(baseUriTracker);
    }
}

// Emits the stylesheet <link>.
void emitStyle() throws SAXException {
    attrs.clear();
    attrs.addAttribute("href", STYLE_SHEET);
    attrs.addAttribute("rel", "stylesheet");
    emitter.startElement("link", attrs);
    emitter.endElement("link");
}

// Emits the favicon <link>.
void emitIcon() throws SAXException {
    attrs.clear();
    attrs.addAttribute("href", ICON);
    attrs.addAttribute("rel", "icon");
    emitter.startElement("link", attrs);
    emitter.endElement("link");
}

// Emits the client-side <script> element.
void emitScript() throws SAXException {
    attrs.clear();
    attrs.addAttribute("src", SCRIPT);
    emitter.startElement("script", attrs);
    emitter.endElement("script");
}

// Emits the "about this service" link.
void emitAbout() throws SAXException {
    attrs.clear();
    attrs.addAttribute("href", ABOUT_PAGE);
    emitter.startElement("a", attrs);
    emitter.characters(ABOUT_THIS_SERVICE);
    emitter.endElement("a");
}

// Emits the service version string.
void emitVersion() throws SAXException {
    emitter.characters(VERSION);
}

// Emits the user-agent override input (with datalist suggestions).
void emitUserAgentInput() throws SAXException {
    attrs.clear();
    attrs.addAttribute("name", "useragent");
    attrs.addAttribute("list", "useragents");
    attrs.addAttribute("value", userAgent);
    emitter.startElement("input", attrs);
    emitter.endElement("input");
}

// Emits the link to the simple-UI facet.
void emitOtherFacetLink() throws SAXException {
    attrs.clear();
    attrs.addAttribute("href", HTML5_FACET);
    emitter.startElement("a", attrs);
    emitter.characters(SIMPLE_UI);
    emitter.endElement("a");
}

// Emits the namespace-filter input, prefilled with the current filters.
void emitNsfilterField() throws SAXException {
    attrs.clear();
    attrs.addAttribute("name", "nsfilter");
    attrs.addAttribute("id", "nsfilter");
    attrs.addAttribute("pattern", "(?:.+:.+(?:\\s+.+:.+)*)?");
    attrs.addAttribute("title",
            "Space-separated namespace URIs for vocabularies to be filtered out.");
    if (!filteredNamespaces.isEmpty()) {
        // Join the filtered namespaces with single spaces.
        StringBuilder sb = new StringBuilder();
        boolean first = true;
        for (String ns : filteredNamespaces) {
            if (!first) {
                sb.append(' ');
            }
            sb.append(ns);
            first = false;
        }
        attrs.addAttribute("value", scrub(sb));
    }
    emitter.startElement("input", attrs);
    emitter.endElement("input");
}

// Delegates to the facet-specific namespace-filter emitter.
void maybeEmitNsfilterField() throws SAXException {
    NsFilterEmitter.emit(contentHandler, this);
}

// Emits the charset-override dropdown; appends the current override as an
// extra option when it is not one of the known charsets.
void emitCharsetOptions() throws SAXException {
    boolean found = false;
    for (int i = 0; i < CHARSETS.length; i++) {
        String charset = CHARSETS[i];
        boolean selected = charset.equalsIgnoreCase(charsetOverride); // XXX
                                                                      // use
                                                                      // ASCII-caseinsensitivity
        emitter.option(CHARSET_DESCRIPTIONS[i], charset, selected);
        if (selected) {
            found = true;
        }
    }
    if (!found && charsetOverride != null) {
        emitter.option(charsetOverride, charsetOverride, true);
    }
}

// Delegates to the facet-specific charset-field emitter.
void maybeEmitCharsetField() throws SAXException {
    CharsetEmitter.emit(contentHandler, this);
}
}
package org.aikodi.jlo.translate;

import java.util.function.Predicate;

import org.aikodi.chameleon.core.declaration.Declaration;
import org.aikodi.chameleon.core.document.Document;
import org.aikodi.chameleon.core.element.Element;
import org.aikodi.chameleon.core.lookup.LookupException;
import org.aikodi.chameleon.core.modifier.ElementWithModifiers;
import org.aikodi.chameleon.core.modifier.Modifier;
import org.aikodi.chameleon.core.property.ChameleonProperty;
import org.aikodi.chameleon.core.reference.CrossReferenceWithName;
import org.aikodi.chameleon.exception.ChameleonProgrammerException;
import org.aikodi.chameleon.oo.expression.ExpressionFactory;
import org.aikodi.chameleon.oo.language.ObjectOrientedLanguage;
import org.aikodi.chameleon.oo.method.Method;
import org.aikodi.chameleon.oo.plugin.ObjectOrientedFactory;
import org.aikodi.chameleon.oo.type.Type;
import org.aikodi.chameleon.oo.type.TypeReference;
import org.aikodi.chameleon.oo.type.generics.FormalTypeParameter;
import org.aikodi.chameleon.oo.type.generics.TypeParameter;
import org.aikodi.chameleon.oo.variable.FormalParameter;
import org.aikodi.chameleon.oo.variable.VariableDeclaration;
import org.aikodi.chameleon.support.member.simplename.variable.MemberVariableDeclarator;
import org.aikodi.jlo.model.component.Subobject;
import org.aikodi.jlo.model.language.JLo;
import org.aikodi.jlo.model.type.TypeMemberDeclarator;

import be.kuleuven.cs.distrinet.jnome.core.language.Java7;

/**
 * Shared helpers for translating JLo models to Java: mangled-name
 * generation for generated fields/getters/setters, and small DSLs for
 * adding and stripping modifiers on translated elements.
 */
public abstract class AbstractJava8Generator {

    // Suffix appended to a type name to form its implementation class name.
    protected final String IMPLEMENTATION_SUFFIX = "Impl";

    /** Name of the generated getter for a subobject (the subobject's own name). */
    protected String subobjectGetterName(Subobject subobject) {
        return subobject.name();
    }

    /** Generated field name for a subobject: subobject$&lt;EnclosingType&gt;$&lt;name&gt;. */
    protected String subobjectFieldName(Subobject subobject) {
        return "subobject$"+subobject.origin().nearestAncestor(Type.class).name()+"$"+subobject.name();
    }

    /** Generated field name for a member variable: field$&lt;EnclosingType&gt;$&lt;name&gt;. */
    protected String fieldName(VariableDeclaration variableDeclaration) {
        return "field$"+variableDeclaration.origin().nearestAncestor(Type.class).name()+"$"+variableDeclaration.name();
    }
protected String getterName(VariableDeclaration variableDeclaration) { return "get$"+variableDeclaration.origin().nearestAncestor(Type.class).name()+"$"+variableDeclaration.name(); } protected String setterName(VariableDeclaration variableDeclaration) { return "set$"+variableDeclaration.origin().nearestAncestor(Type.class).name()+"$"+variableDeclaration.name(); } protected String implementationName(Type t) { return t.name() + IMPLEMENTATION_SUFFIX; } protected JLo jlo(Element element) { return element.language(JLo.class); } protected Java7 java(Element element) { return element.language(Java7.class); } protected void renameConstructorCalls(Document target) { target.apply(CrossReferenceWithName.class, c -> { CrossReferenceWithName origin = (CrossReferenceWithName) c.origin(); if (origin != c.origin()) { Declaration element; try { element = origin.getElement(); if (element.isTrue(java(element).CONSTRUCTOR)) { c.setName(implementationName((Type) element)); } } catch (Exception e) { throw new IllegalStateException(e); } } }); } protected ModifierAdder add(Modifier modifier) { return new ModifierAdder(modifier); } public static class ModifierAdder { private Modifier modifier; public ModifierAdder(Modifier modifier) { this.modifier = modifier; } public <T extends ElementWithModifiers> ModifierConfiguration<T> to(Class<T> type) { return new ModifierConfiguration<>(modifier, type); } } public static class ModifierConfiguration<T extends ElementWithModifiers> { private Predicate<T> predicate; private Element element; private Class<T> type; private Modifier modifier; public ModifierConfiguration(Modifier modifier, Class<T> type) { this.type = type; this.modifier = modifier; } public ModifierConfiguration<T> in(Element element) { this.element = element; return this; } public void whenOrigin(Predicate<T> predicate) { this.predicate = predicate; element.apply(type, t -> { if (predicate.test((T) t.origin())) { t.addModifier(modifier.clone(modifier)); t.flushCache(); } ; }); } 
public void whenTranslated(Predicate<T> predicate) { this.predicate = predicate; element.apply(type, t -> { if (predicate.test((T) t)) { t.addModifier(modifier.clone(modifier)); t.flushCache(); } ; }); } } public ModifierStripper strip(ChameleonProperty property) { return new ModifierStripper(m -> m.impliesTrue(property)); } public ModifierStripper strip(Class<? extends Modifier> type) { return new ModifierStripper(m -> type.isInstance(m)); } public static class ModifierStripper { private Predicate<Modifier> predicate; public ModifierStripper(Predicate<Modifier> predicate) { this.predicate = predicate; } public <T extends ElementWithModifiers> ModifierStripperConfiguration<T> from(Class<T> elementType) { return new ModifierStripperConfiguration<T>(predicate, elementType); } } public static class ModifierStripperConfiguration<T extends ElementWithModifiers> { private Predicate<Modifier> modifierPredicate; private Class<T> elementType; private Predicate<T> predicate; public ModifierStripperConfiguration(Predicate<Modifier> modifierPredicate, Class<T> elementType) { super(); this.modifierPredicate = modifierPredicate; this.elementType = elementType; } public ModifierStripperConfiguration<T> when(Predicate<T> predicate) { this.predicate = predicate; return this; } public void in(Element element) { element.apply(elementType, e -> { if (predicate == null || predicate.test(e)) { try { e.modifiers().stream().filter(modifierPredicate).forEach(x -> x.disconnect()); } catch (Exception e1) { // TODO Auto-generated catch block e1.printStackTrace(); } } } ); } } protected Method createGetterTemplate(MemberVariableDeclarator d) { VariableDeclaration variableDeclaration = d.variableDeclarations().get(0); return ooFactory(d).createNormalMethod(getterName(variableDeclaration), d.clone(d.typeReference())); } /** * @param subobject The subobject for which the getter template must be created. * @return a template for the getter of the given subobject. 
The resulting method * has a header, but not a body. It must be finished by the caller depending on whether * an interface or class is being created. * @throws LookupException */ protected Method createSubobjectGetterTemplate(Subobject subobject, ObjectOrientedLanguage targetLanguage) throws LookupException { //TypeReference subobjectTypeReference = subobject.clone(subobject.superClassReference()); TypeReference subobjectTypeReference = expandedTypeReference(subobject.superClassReference(),targetLanguage); return ooFactory(subobject).createNormalMethod(subobjectGetterName(subobject), subobjectTypeReference); } protected ObjectOrientedFactory ooFactory(Element element) { return java(element).plugin(ObjectOrientedFactory.class); } protected ExpressionFactory expressionFactory(Element element) { return java(element).plugin(ExpressionFactory.class); } protected Method createSetterTemplate(MemberVariableDeclarator d) { VariableDeclaration variableDeclaration = d.variableDeclarations().get(0); ObjectOrientedFactory factory = java(d).plugin(ObjectOrientedFactory.class); TypeReference fieldType = d.clone(d.typeReference()); Method result = factory.createNormalMethod(setterName(variableDeclaration), java(d).createTypeReference("void")); result.header().addFormalParameter(new FormalParameter("value", fieldType)); return result; } protected boolean isGenerated(Element element) { return element.origin() == element; } protected TypeReference expandedTypeReference(TypeReference element, ObjectOrientedLanguage targetLanguage) throws LookupException { TypeReference result = targetLanguage.reference(element.getElement()); //disconnect the type reference. 
Note that this might give problems inside anonymous inner classes/ result.setUniParent(null); return result; } protected <E extends Element> E clone(E element) { return element.clone(element); } protected void convertTypeMembers(Document javaDocument) { javaDocument.apply(TypeMemberDeclarator.class, d -> { // Type type = d.nearestAncestor(Type.class); // type.addParameter(TypeParameter.class, new FormalTypeParameter(d.name())); d.disconnect(); }); javaDocument.apply(Type.class, t-> { if(! isGenerated(t)) { Type jloType = (Type) t.origin(); try { jloType.members(TypeMemberDeclarator.class).forEach(m -> { t.addParameter(TypeParameter.class, new FormalTypeParameter(m.name())); }); } catch (LookupException e) { throw new ChameleonProgrammerException(e); } } }); } }
package org.bouncycastle.crypto.signers;

import org.bouncycastle.asn1.DERNull;
import org.bouncycastle.asn1.DERObjectIdentifier;
import org.bouncycastle.asn1.nist.NISTObjectIdentifiers;
import org.bouncycastle.asn1.pkcs.PKCSObjectIdentifiers;
import org.bouncycastle.asn1.teletrust.TeleTrusTObjectIdentifiers;
import org.bouncycastle.asn1.x509.AlgorithmIdentifier;
import org.bouncycastle.asn1.x509.DigestInfo;
import org.bouncycastle.asn1.x509.X509ObjectIdentifiers;
import org.bouncycastle.crypto.AsymmetricBlockCipher;
import org.bouncycastle.crypto.CipherParameters;
import org.bouncycastle.crypto.CryptoException;
import org.bouncycastle.crypto.DataLengthException;
import org.bouncycastle.crypto.Digest;
import org.bouncycastle.crypto.Signer;
import org.bouncycastle.crypto.encodings.PKCS1Encoding;
import org.bouncycastle.crypto.engines.RSABlindedEngine;
import org.bouncycastle.crypto.params.AsymmetricKeyParameter;
import org.bouncycastle.crypto.params.ParametersWithRandom;

import java.util.Hashtable;

/**
 * PKCS#1 v1.5 signature scheme (EMSA-PKCS1-v1_5): hashes the message with the
 * configured digest, wraps the hash in a DER-encoded DigestInfo structure, and
 * signs/verifies it with an RSA engine using PKCS#1 padding.
 */
public class RSADigestSigner
    implements Signer
{
    // RSA with blinding, wrapped in PKCS#1 v1.5 padding.
    private final AsymmetricBlockCipher rsaEngine = new PKCS1Encoding(new RSABlindedEngine());
    // Algorithm identifier for the digest, embedded in the DigestInfo we sign.
    private final AlgorithmIdentifier algId;
    private final Digest digest;
    // True when initialised for signing, false for verification.
    private boolean forSigning;

    // Maps digest algorithm names to their ASN.1 object identifiers.
    private static final Hashtable oidMap = new Hashtable();

    /*
     * Load OID table.
     */
    static
    {
        oidMap.put("RIPEMD128", TeleTrusTObjectIdentifiers.ripemd128);
        oidMap.put("RIPEMD160", TeleTrusTObjectIdentifiers.ripemd160);
        oidMap.put("RIPEMD256", TeleTrusTObjectIdentifiers.ripemd256);

        oidMap.put("SHA-1", X509ObjectIdentifiers.id_SHA1);
        oidMap.put("SHA-224", NISTObjectIdentifiers.id_sha224);
        oidMap.put("SHA-256", NISTObjectIdentifiers.id_sha256);
        oidMap.put("SHA-384", NISTObjectIdentifiers.id_sha384);
        oidMap.put("SHA-512", NISTObjectIdentifiers.id_sha512);

        oidMap.put("MD2", PKCSObjectIdentifiers.md2);
        oidMap.put("MD4", PKCSObjectIdentifiers.md4);
        oidMap.put("MD5", PKCSObjectIdentifiers.md5);
    }

    /**
     * Creates a signer for the given digest. The digest's algorithm name must
     * be one of the entries in the OID table above; otherwise the looked-up
     * OID is null and DER encoding will fail later.
     */
    public RSADigestSigner(
        Digest digest)
    {
        this.digest = digest;

        algId = new AlgorithmIdentifier((DERObjectIdentifier)oidMap.get(digest.getAlgorithmName()), DERNull.INSTANCE);
    }

    /**
     * @deprecated
     */
    public String getAlgorithmName()
    {
        return digest.getAlgorithmName() + "withRSA";
    }

    /**
     * initialise the signer for signing or verification.
     *
     * @param forSigning
     *            true if for signing, false otherwise
     * @param parameters
     *            necessary parameters.
     */
    public void init(
        boolean          forSigning,
        CipherParameters parameters)
    {
        this.forSigning = forSigning;
        AsymmetricKeyParameter k;

        // Unwrap an optional randomness wrapper to inspect the key itself.
        if (parameters instanceof ParametersWithRandom)
        {
            k = (AsymmetricKeyParameter)((ParametersWithRandom)parameters).getParameters();
        }
        else
        {
            k = (AsymmetricKeyParameter)parameters;
        }

        // Sanity-check key direction against the requested mode.
        if (forSigning && !k.isPrivate())
        {
            throw new IllegalArgumentException("signing requires private key");
        }

        if (!forSigning && k.isPrivate())
        {
            throw new IllegalArgumentException("verification requires public key");
        }

        reset();

        rsaEngine.init(forSigning, parameters);
    }

    /**
     * update the internal digest with the byte b
     */
    public void update(
        byte input)
    {
        digest.update(input);
    }

    /**
     * update the internal digest with the byte array in
     */
    public void update(
        byte[]  input,
        int     inOff,
        int     length)
    {
        digest.update(input, inOff, length);
    }

    /**
     * Generate a signature for the message we've been loaded with using the key
     * we were initialised with.
     */
    public byte[] generateSignature()
        throws CryptoException, DataLengthException
    {
        if (!forSigning)
        {
            throw new IllegalStateException("RSADigestSigner not initialised for signature generation.");
        }

        byte[] hash = new byte[digest.getDigestSize()];
        digest.doFinal(hash, 0);

        // Sign the DER-encoded DigestInfo(algId, hash).
        byte[] data = derEncode(hash);

        return rsaEngine.processBlock(data, 0, data.length);
    }

    /**
     * return true if the internal state represents the signature described in
     * the passed in array.
     */
    public boolean verifySignature(
        byte[] signature)
    {
        if (forSigning)
        {
            throw new IllegalStateException("RSADigestSigner not initialised for verification");
        }

        byte[] hash = new byte[digest.getDigestSize()];
        digest.doFinal(hash, 0);

        byte[] sig;
        byte[] expected;

        try
        {
            // Decrypt the signature and rebuild the DigestInfo we would have signed.
            sig = rsaEngine.processBlock(signature, 0, signature.length);

            expected = derEncode(hash);
        }
        catch (Exception e)
        {
            // Any decoding/decryption failure means the signature does not verify.
            return false;
        }

        if (sig.length == expected.length)
        {
            // Exact DigestInfo match, compared byte by byte.
            for (int i = 0; i < sig.length; i++)
            {
                if (sig[i] != expected[i])
                {
                    return false;
                }
            }
        }
        else if (sig.length == expected.length - 2)  // NULL left out
        {
            // Some encoders omit the DER NULL parameters in the AlgorithmIdentifier,
            // making the signed DigestInfo 2 bytes shorter. Patch our expected
            // encoding's outer and inner DER length octets down by 2 and compare
            // the hash and header portions separately.
            int sigOffset = sig.length - hash.length - 2;
            int expectedOffset = expected.length - hash.length - 2;

            expected[1] -= 2;      // adjust lengths
            expected[3] -= 2;

            for (int i = 0; i < hash.length; i++)
            {
                if (sig[sigOffset + i] != expected[expectedOffset + i])  // check hash
                {
                    return false;
                }
            }

            for (int i = 0; i < sigOffset; i++)
            {
                if (sig[i] != expected[i])  // check header less NULL
                {
                    return false;
                }
            }
        }
        else
        {
            return false;
        }

        return true;
    }

    public void reset()
    {
        digest.reset();
    }

    // DER-encodes DigestInfo(algId, hash) as signed/compared above.
    private byte[] derEncode(
        byte[] hash)
    {
        DigestInfo dInfo = new DigestInfo(algId, hash);

        return dInfo.getDEREncoded();
    }
}
package org.cocolab.inpro.features; import java.util.Iterator; import java.util.LinkedList; import edu.cmu.sphinx.util.props.Resetable; /** * class to perform linear regression and mean calculation; * ideal for incrementally added data points. old data points * can be discarded automatically using setMaxLag() * @author timo * */ public class TimeShiftingAnalysis implements Resetable { // linear regression of y(t) = a*t + b // using data points (t1, x1), (t2, x2), ... // container for the data points, newest data is always stored in front protected LinkedList<DataPoint> dataPoints; // maximum lag of the oldest datapoint compared to the current time protected int maxLag; protected int currentTime = 0; protected boolean dirty = false; protected double slope; protected double intercept; protected double mean; public TimeShiftingAnalysis(int maxLag) { dataPoints = new LinkedList<DataPoint>(); this.maxLag = maxLag; } public TimeShiftingAnalysis() { this(0); } public void add(int t, double x) { // add data point and remove data points, that are more than maxLag ago dataPoints.addFirst(new DataPoint(t, x)); shiftTime(t); dirty = true; } public void shiftTime(int t) { currentTime = t; removeOldPoints(); } private void removeOldPoints() { if (maxLag > 0) { while (!dataPoints.isEmpty() && (dataPoints.getLast().t < currentTime - maxLag)) { dataPoints.removeLast(); dirty = true; } } } /** * set the maximum lag of the time shifting analysis * @param maxLag maximum lag of the oldest considered data point * 0: consider all data points */ public void setMaxLag(int maxLag) { this.maxLag = maxLag; removeOldPoints(); } protected void doRegression() { if (dirty) { // sums of t and x double st = 0.0; double sx = 0.0; double stt = 0.0; double stx = 0.0; int n = 0; Iterator<DataPoint> dataIt = dataPoints.listIterator(); while (dataIt.hasNext()) { DataPoint dp = dataIt.next(); st += dp.t; sx += dp.x; stt += dp.t * dp.t; stx += dp.t * dp.x; n++; } assert n > 0; if (n > 1) { slope = (n * stx 
- st * sx) / (n * stt - st * st); intercept = (sx - slope * st) / n; // while we're there, also compute the mean, // even though it doesn't have anything to do with regression } mean = sx / n; dirty = false; } } public double getSlope() { doRegression(); return slope; } public double getIntercept() { doRegression(); return intercept; } public double getMean() { doRegression(); return mean; } public double predictValueAt(int t) { doRegression(); return intercept + t * slope; } public void reset() { dataPoints.clear(); currentTime = 0; } public String toString() { StringBuffer sb = new StringBuffer(); sb.append("y(t) = "); sb.append(getSlope()); sb.append(" * t + "); sb.append(getIntercept()); sb.append("\nmean: "); sb.append(getMean()); sb.append("; value at 0: "); sb.append(predictValueAt(0)); return sb.toString(); } public static void main(String[] args) { TimeShiftingAnalysis tsa = new TimeShiftingAnalysis(); System.out.println("\tadding point (1, 1.0)"); tsa.add(1, 1.0); System.out.println("\tadding point (2, 2.0)"); tsa.add(2, 2.0); System.out.println("\tadding point (3, 3.0)"); tsa.add(3, 3.0); System.out.println(tsa); System.out.println("\tadding point (4, 2.0)"); tsa.add(4, 2.0); System.out.println(tsa); System.out.println("\tsetting lag to 2"); tsa.setMaxLag(2); System.out.println(tsa); System.out.println("\tadding point (5, 1.0)"); tsa.add(5, 1.0); System.out.println(tsa); } /* utility class for data points */ private class DataPoint { int t; double x; DataPoint(int t, double x) { this.t = t; this.x = x; } } }
/* * @author <a href="mailto:novotny@aei.mpg.de">Jason Novotny</a> * @author <a href="mailto:wehrens@aei.mpg.de">Oliver Wehrens</a> * @version $Id$ */ package org.gridlab.gridsphere.layout; import org.gridlab.gridsphere.core.persistence.PersistenceManagerException; import org.gridlab.gridsphere.layout.event.PortletComponentEvent; import org.gridlab.gridsphere.layout.event.PortletTabEvent; import org.gridlab.gridsphere.layout.event.PortletTabListener; import org.gridlab.gridsphere.layout.view.TabbedPaneView; import org.gridlab.gridsphere.portlet.PortletRequest; import org.gridlab.gridsphere.portlet.impl.SportletProperties; import org.gridlab.gridsphere.portletcontainer.GridSphereConfig; import org.gridlab.gridsphere.portletcontainer.GridSphereEvent; import java.io.IOException; import java.io.Serializable; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; /** * The <code>PortletTabbedPane</code> represents the visual portlet tabbed pane interface * and is a container for a {@link PortletTab}. */ public class PortletTabbedPane extends BasePortletComponent implements Serializable, PortletTabListener, Cloneable { private List tabs = new ArrayList(); private int startIndex = 0; private String style = "menu"; private String layoutDescriptor = null; private transient TabbedPaneView tabbedPaneView = null; /** * Constructs an instance of PortletTabbedPane */ public PortletTabbedPane() { } public void setLayoutDescriptor(String layoutDescriptor) { this.layoutDescriptor = layoutDescriptor; } public String getLayoutDescriptor() { return layoutDescriptor; } /** * Sets the tabbed pane style. Currently supported styles are "menu" * and "sub-menu" * * @param style the tabbed pane style */ public void setStyle(String style) { this.style = style; } /** * Returns the tabbed pane style. 
Currently supported styles are "menu" * and "sub-menu" * * @return the tabbed pane style */ public String getStyle() { return style; } /** * Returns the selected tab if none exists, return null * * @return the selected portlet tab */ public PortletTab getSelectedTab() { for (int i = 0; i < tabs.size(); i++) { PortletTab tab = (PortletTab) tabs.get(i); if (tab.isSelected()) { return tab; } } return null; } /** * Sets the selected portlet tab in this tabbed pane * * @param tab the selected portlet tab */ public void setSelectedPortletTab(PortletTab tab) { PortletTab portletTab; List stabs = Collections.synchronizedList(tabs); synchronized (stabs) { for (int i = 0; i < stabs.size(); i++) { portletTab = (PortletTab) stabs.get(i); if (portletTab.getComponentID() == tab.getComponentID()) { portletTab.setSelected(true); } else { portletTab.setSelected(false); } } } } /** * Returns the tab with the supplied title * * @param label the tab label * @return the tab associated with this title */ public PortletTab getPortletTab(String label) { Iterator it = tabs.iterator(); while (it.hasNext()) { PortletTab tab = (PortletTab) it.next(); if (tab.getLabel().equals(label)) return tab; } return null; } /** * Return the tab contained by this tabbed pane by index * * @param index the tab index * @return the portlet tab */ public PortletTab getPortletTabAt(int index) { if (index >= tabs.size()) return null; return (PortletTab) tabs.get(index); } public int getTabCount() { return tabs.size(); } public void insertTab(PortletTab tab, int index) { tabs.add(index, tab); } /** * Adds a new portlet tab to the tabbed pane * * @param tab a portlet tab to add */ public void addTab(PortletTab tab) { tabs.add(tab); } /** * Removes a portlet tab from the tabbed pane * * @param tab the portlet tab to remove */ public void removeTab(PortletTab tab) { Iterator it = tabs.iterator(); while (it.hasNext()) { PortletTab atab = (PortletTab) it.next(); if (tab.getLabel().equals(atab.getLabel())) it.remove(); 
} } /** * Removes a portlet tab from the tabbed pane at the specified index * * @param index the index of the tab to remove */ public synchronized void removeTabAt(int index) { tabs.remove(index); } /** * Removes all portlet tabs from the tabbed pane */ public synchronized void removeAll() { for (int i = 0; i < tabs.size(); i++) { tabs.remove(i); } } /** * Sets the portlet tabs in the tabbed pane * * @param tabs an ArrayList containing the portlet tabs to add */ public void setPortletTabs(List tabs) { this.tabs = tabs; } /** * Returns a list containing the portlet tabs * * @return a list containing the portlet tabs */ public List getPortletTabs() { return tabs; } public PortletTab getLastPortletTab() { return (PortletTab) tabs.get(tabs.size() - 1); } /** * Initializes the portlet tabbed pane component. Since the components are isolated * after Castor unmarshalls from XML, the ordering is determined by a * passed in List containing the previous portlet components in the tree. * * @param list a list of component identifiers * @return a list of updated component identifiers * @see ComponentIdentifier */ public List init(PortletRequest req, List list) { ComponentIdentifier compId = new ComponentIdentifier(); compId.setPortletComponent(this); compId.setComponentID(list.size()); compId.setComponentLabel(label); compId.setClassName(this.getClass().getName()); list.add(compId); list = super.init(req, list); PortletTab tab; tabbedPaneView = (TabbedPaneView)getRenderClass("TabbedPane"); Iterator it = tabs.iterator(); while (it.hasNext()) { tab = (PortletTab) it.next(); tab.setTheme(theme); tab.setRenderKit(renderKit); list = tab.init(req, list); tab.addComponentListener(this); tab.setParentComponent(this); } tab = this.getSelectedTab(); if (tab == null) { tab = this.getPortletTabAt(0); if (tab != null) this.setSelectedPortletTab(tab); } return list; } /** * Gives notification that a portlet tab event has occured * * @param event the portlet tab event */ public void 
handlePortletTabEvent(PortletTabEvent event) { if (event.getAction() == PortletTabEvent.TabAction.TAB_SELECTED) { PortletTab selectedTab = (PortletTab) event.getPortletComponent(); this.setSelectedPortletTab(selectedTab); } } /** * Gives notification that a portlet tab event has occured * * @param event the portlet tab event */ public void actionPerformed(GridSphereEvent event) { super.actionPerformed(event); PortletComponentEvent compEvt = event.getLastRenderEvent(); if ((compEvt != null) && (compEvt instanceof PortletTabEvent)) { PortletTabEvent tabEvent = (PortletTabEvent) compEvt; handlePortletTabEvent(tabEvent); } List l = Collections.synchronizedList(listeners); synchronized (l) { Iterator it = l.iterator(); PortletComponent comp; while (it.hasNext()) { comp = (PortletComponent) it.next(); event.addNewRenderEvent(compEvt); comp.actionPerformed(event); } } } public void setTheme(String theme) { this.theme = theme; } /** * Renders the portlet frame component * * @param event a gridsphere event */ public void doRender(GridSphereEvent event) { super.doRender(event); StringBuffer pane = new StringBuffer(); PortletRequest req = event.getPortletRequest(); List userRoles = req.getRoles(); pane.append(tabbedPaneView.doStart(event, this)); PortletTab tab; List tabs = getPortletTabs(); for (int i = 0; i < tabs.size(); i++) { tab = (PortletTab) tabs.get(i); String tabRole = tab.getRequiredRole(); if (tabRole.equals("") || (userRoles.contains(tabRole))) { pane.append(tabbedPaneView.doRenderTab(event, this, tab)); } else { // if role is < required role we try selecting the next possible tab //System.err.println("in PortletTabbedPane menu: role is < required role we try selecting the next possible tab"); if (tab.isSelected()) { int index = (i + 1); PortletTab newtab = (PortletTab) tabs.get(index); if (index < tabs.size()) { setSelectedPortletTab(newtab); } } } } pane.append(tabbedPaneView.doEndBorder(event, this)); if (!tabs.isEmpty()) { PortletTab selectedTab = 
getSelectedTab(); if (selectedTab != null) { selectedTab.doRender(event); pane.append(selectedTab.getBufferedOutput(req)); } } req.setAttribute(SportletProperties.RENDER_OUTPUT + componentIDStr, pane); } public void remove(PortletComponent pc, PortletRequest req) { tabs.remove(pc); if (tabs.isEmpty()) parent.remove(this, req); } public void save() throws IOException { try { String layoutMappingFile = GridSphereConfig.getServletContext().getRealPath("/WEB-INF/mapping/layout-mapping.xml"); PortletLayoutDescriptor.savePortletTabbedPane(this, layoutDescriptor, layoutMappingFile); } catch (PersistenceManagerException e) { throw new IOException("Unable to save user's tabbed pane: " + e.getMessage()); } } public Object clone() throws CloneNotSupportedException { PortletTabbedPane t = (PortletTabbedPane) super.clone(); t.style = this.style; t.startIndex = this.startIndex; List stabs = Collections.synchronizedList(tabs); synchronized (stabs) { t.tabs = new ArrayList(stabs.size()); for (int i = 0; i < stabs.size(); i++) { PortletTab tab = (PortletTab) stabs.get(i); t.tabs.add(tab.clone()); } } return t; } }
/*
 * @author <a href="mailto:novotny@aei.mpg.de">Jason Novotny</a>
 * @version $Id$
 */
package org.gridlab.gridsphere.portlet;

import javax.servlet.ServletException;

/**
 * The PortletException class defines a general exception that a portlet can throw when it encounters difficulty.
 */
public class PortletException extends ServletException {

    /**
     * Constructs a new portlet exception.
     */
    public PortletException() {
        super();
    }

    /**
     * Constructs a new portlet exception with the given text.
     * The portlet container may use the text write it to a log.
     *
     * @param text the exception text
     */
    public PortletException(String text) {
        super(text);
    }

    /**
     * Constructs a new portlet exception with the given text and root cause.
     *
     * @param text the exception text
     * @param t    the root cause of this exception
     */
    public PortletException(String text, Throwable t) {
        super(text, t);
    }
}
package org.ice4j.attribute; import org.ice4j.*; /** * The REQUESTED-TRANSPORT attribute is used to allocate a * TURN address of certain transport protocol. * * In the original TURN specification, only UDP is supported. * Support of TCP is detailed in draft-ietf-behave-turn-tcp-07. * * @author Sebastien Vincent * @author Aakash Garg */ public class RequestedTransportAttribute extends Attribute { /** * Attribute name. */ public static final String NAME = "REQUESTED-TRANSPORT"; /** * The length of the data contained by this attribute. */ public static final char DATA_LENGTH = 4; public static final byte UDP = 17; public static final byte TCP = 6; /** * Transport protocol. * * 17 = UDP; * 6 = TCP. */ byte transportProtocol = UDP; /** * Constructor. */ RequestedTransportAttribute() { super(REQUESTED_TRANSPORT); } /** * Compares two STUN Attributes. Attributes are considered equal when their * type, length, and all data are the same. * @param obj the object to compare this attribute with. * @return true if the attributes are equal and false otherwise. */ @Override public boolean equals(Object obj) { if (! (obj instanceof RequestedTransportAttribute) || obj == null) return false; if (obj == this) return true; RequestedTransportAttribute att = (RequestedTransportAttribute) obj; if (att.getAttributeType() != getAttributeType() || att.getDataLength() != getDataLength() /* compare data */ || att.transportProtocol != transportProtocol ) return false; return true; } /** * Returns the human readable name of this attribute. Attribute names do * not really matter from the protocol point of view. They are only used * for debugging and readability. * @return this attribute's name. */ @Override public String getName() { return NAME; } /** * Returns the length of this attribute's body. * @return the length of this attribute's value (8 bytes). */ @Override public char getDataLength() { return DATA_LENGTH; } /** * Returns a binary representation of this attribute. 
* @return a binary representation of this attribute. */ @Override public byte[] encode() { byte binValue[] = new byte[HEADER_LENGTH + DATA_LENGTH]; //Type binValue[0] = (byte)(getAttributeType() >> 8); binValue[1] = (byte)(getAttributeType() & 0x00FF); //Length binValue[2] = (byte)(getDataLength() >> 8); binValue[3] = (byte)(getDataLength() & 0x00FF); //Data binValue[4] = transportProtocol; binValue[5] = 0x00; binValue[6] = 0x00; binValue[7] = 0x00; return binValue; } /** * Sets this attribute's fields according to attributeValue array. * @param attributeValue a binary array containing this attribute's field * values and NOT containing the attribute header. * @param offset the position where attribute values begin (most often * offset is equal to the index of the first byte after * length) * @param length the length of the binary array. * @throws StunException if attrubteValue contains invalid data. */ @Override void decodeAttributeBody(byte[] attributeValue, char offset, char length) throws StunException { if(length != 4) { throw new StunException("length invalid"); } transportProtocol = attributeValue[offset]; } /** * Set the transport protocol. * @param transportProtocol transport protocol */ public void setRequestedTransport(byte transportProtocol) { this.transportProtocol = transportProtocol; } /** * Get the transport protocol. * @return transport protocol */ public int getRequestedTransport() { return transportProtocol; } }
package org.jaudiotagger.tag.id3.valuepair; import java.util.HashMap; import java.util.Map; /** * Represents common image formats support by ID3 and provides a mapping between the format field supported in ID3v22 and the * mimetype field supported by ID3v23/ID3v24. coverImage.getImageData() * * * Note only JPG and PNG are mentioned specifically in the ID3 v22 Spec but it only says 'Image Format is preferably * PNG or JPG' , not mandatory. In the jaudiotagger library we also consider GIF as a portable format, and we recognise * BMP,PDF and TIFF but do not consider these formats as portable. * */ //TODO identifying PICT, bit more difficult because in certain formats has an empty 512byte header public class ImageFormats { public static final String V22_JPG_FORMAT = "JPG"; public static final String V22_PNG_FORMAT = "PNG"; public static final String V22_GIF_FORMAT = "GIF"; public static final String V22_BMP_FORMAT = "BMP"; public static final String V22_TIF_FORMAT = "TIF"; public static final String V22_PDF_FORMAT = "PDF"; public static final String V22_PIC_FORMAT = "PIC"; public static final String MIME_TYPE_JPEG = "image/jpeg"; public static final String MIME_TYPE_PNG = "image/png"; public static final String MIME_TYPE_GIF = "image/gif"; public static final String MIME_TYPE_BMP = "image/bmp"; public static final String MIME_TYPE_TIFF = "image/tiff"; public static final String MIME_TYPE_PDF = "image/pdf"; public static final String MIME_TYPE_PICT = "image/x-pict"; /** * Sometimes this is used for jpg instead :or have I made this up */ public static final String MIME_TYPE_JPG = "image/jpg"; private static Map<String, String> imageFormatsToMimeType = new HashMap<String, String>(); private static Map<String, String> imageMimeTypeToFormat = new HashMap <String, String>(); static { imageFormatsToMimeType.put(V22_JPG_FORMAT, MIME_TYPE_JPEG); imageFormatsToMimeType.put(V22_PNG_FORMAT, MIME_TYPE_PNG); imageFormatsToMimeType.put(V22_GIF_FORMAT, MIME_TYPE_GIF); 
imageFormatsToMimeType.put(V22_BMP_FORMAT, MIME_TYPE_BMP); imageFormatsToMimeType.put(V22_TIF_FORMAT, MIME_TYPE_TIFF); imageFormatsToMimeType.put(V22_PDF_FORMAT, MIME_TYPE_PDF); imageFormatsToMimeType.put(V22_PIC_FORMAT, MIME_TYPE_PICT); String value; for (String key : imageFormatsToMimeType.keySet()) { value = imageFormatsToMimeType.get(key); imageMimeTypeToFormat.put(value, key); } //The mapping isn't one-one lets add other mimetypes imageMimeTypeToFormat.put(MIME_TYPE_JPG, V22_JPG_FORMAT); } /** * Get v2.3 mimetype from v2.2 format * @param format * @return */ public static String getMimeTypeForFormat(String format) { return imageFormatsToMimeType.get(format); } /** * Get v2.2 format from v2.3 mimetype * @param mimeType * @return */ public static String getFormatForMimeType(String mimeType) { return imageMimeTypeToFormat.get(mimeType); } /** * Is this binary data a png image * * @param data * @return true if binary data matches expected header for a png */ public static boolean binaryDataIsPngFormat(byte[] data) { //Read signature if(data.length<4) { return false; } return (0x89 == (data[0] & 0xff)) && (0x50 == (data[1] & 0xff)) && (0x4E == (data[2] & 0xff)) && (0x47 == (data[3] & 0xff)); } public static boolean binaryDataIsJpgFormat(byte[] data) { if(data.length<4) { return false; } //Read signature //Can be Can be FF D8 FF DB (samsung) , FF D8 FF E0 (standard) or FF D8 FF E1 or some other formats //FF D8 is SOI Marker, FFE0 or FFE1 is JFIF Marker return (0xff == (data[0] & 0xff)) && (0xd8 == (data[1] & 0xff)) && (0xff == (data[2] & 0xff)) && (0xdb <= (data[3] & 0xff)); } /** * Is this binary data a gif image * * @param data * @return true if binary data matches expected header for a gif */ public static boolean binaryDataIsGifFormat(byte[] data) { if(data.length<3) { return false; } //Read signature return (0x47 == (data[0] & 0xff)) && (0x49 == (data[1] & 0xff)) && (0x46 == (data[2] & 0xff)); } /** * * Is this binary data a bmp image * * @param data * @return 
true if binary data matches expected header for a bmp */ public static boolean binaryDataIsBmpFormat(byte[] data) { if(data.length<2) { return false; } //Read signature return (0x42 == (data[0] & 0xff)) && (0x4d == (data[1] & 0xff)); } public static boolean binaryDataIsPdfFormat(byte[] data) { if(data.length<4) { return false; } //Read signature return (0x25 == (data[0] & 0xff)) && (0x50 == (data[1] & 0xff)) && (0x44 == (data[2] & 0xff)) && (0x46 == (data[3] & 0xff)); } public static boolean binaryDataIsTiffFormat(byte[] data) { if(data.length<4) { return false; } //Read signature Intel return ( ((0x49 == (data[0] & 0xff)) && (0x49 == (data[1] & 0xff)) && (0x2a == (data[2] & 0xff)) && (0x00 == (data[3] & 0xff))) || ((0x4d == (data[0] & 0xff)) && (0x4d == (data[1] & 0xff)) && (0x00 == (data[2] & 0xff)) && (0x2a == (data[3] & 0xff))) ); } /** * * @param data * @return true if the image format is a portable format recognised across operating systems */ public static boolean isPortableFormat(byte[] data) { return binaryDataIsPngFormat(data) || binaryDataIsJpgFormat(data) || binaryDataIsGifFormat(data); } /** * * @param data * @return correct mimetype for the image data represented by this byte data */ public static String getMimeTypeForBinarySignature(byte[] data) { if(binaryDataIsPngFormat(data)) { return MIME_TYPE_PNG; } else if(binaryDataIsJpgFormat(data)) { return MIME_TYPE_JPEG; } else if(binaryDataIsGifFormat(data)) { return MIME_TYPE_GIF; } else if(binaryDataIsBmpFormat(data)) { return MIME_TYPE_BMP; } else if(binaryDataIsPdfFormat(data)) { return MIME_TYPE_PDF; } else if(binaryDataIsTiffFormat(data)) { return MIME_TYPE_TIFF; } else { return null; } } }
package org.opencraft.server.game.impl;

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.logging.Logger;

import org.opencraft.server.Configuration;
import org.opencraft.server.game.GameModeAdapter;
import org.opencraft.server.model.Level;
import org.opencraft.server.model.Player;
import org.python.core.Py;
import org.python.core.PyObject;
import org.python.util.PythonInterpreter;

/**
 * A game mode whose event hooks are delegated to a Python script. Each hook
 * first tries the script; if the script does not define a matching function,
 * the default {@link GameModeAdapter} behaviour is used instead.
 * @author Graham Edgecombe
 */
public class ScriptedGameMode extends GameModeAdapter {

    /**
     * Logger instance.
     */
    private static final Logger logger = Logger.getLogger(ScriptedGameMode.class.getName());

    /**
     * The python interpreter. We were originally using javax.script but that
     * had some issues with commands.
     */
    private PythonInterpreter interpreter = new PythonInterpreter();

    /**
     * Creates the scripted game mode and evaluates the configured script.
     * @throws IOException if an I/O error occurs.
     */
    public ScriptedGameMode() throws IOException {
        init();
    }

    /**
     * Evaluates the configured script file and invokes its optional
     * {@code init} function.
     * @throws IOException if an I/O error occurs.
     */
    private void init() throws IOException {
        String name = Configuration.getConfiguration().getScriptName();
        logger.info("Evaluating script...");
        InputStream in = new FileInputStream("./data/scripts/" + name);
        try {
            interpreter.execfile(in);
        } finally {
            // Always release the file handle, even if evaluation fails.
            in.close();
        }
        logger.info("Initializing script...");
        delegate("init", this);
    }

    /**
     * Delegates a call to the script.
     * @param method The name of the script function to invoke.
     * @param args The arguments to pass through.
     * @return {@code true} if the script defines the function (even if the
     * invocation itself failed and was logged), {@code false} otherwise.
     */
    private boolean delegate(String method, Object... args) {
        PyObject callable = interpreter.get(method);
        if (callable == null) {
            // Not defined by the script; caller falls back to the adapter.
            return false;
        }
        try {
            callable.__call__(Py.javas2pys(args));
        } catch (Exception ex) {
            logger.log(java.util.logging.Level.SEVERE, "Error invoking method.", ex);
        }
        return true;
    }

    @Override
    public void playerConnected(Player player) {
        if (!delegate("playerConnected", player)) {
            super.playerConnected(player);
        }
    }

    @Override
    public void setBlock(Player player, Level level, int x, int y, int z, int mode, int type) {
        if (!delegate("setBlock", player, level, x, y, z, mode, type)) {
            super.setBlock(player, level, x, y, z, mode, type);
        }
    }

    @Override
    public void playerDisconnected(Player player) {
        if (!delegate("playerDisconnected", player)) {
            super.playerDisconnected(player);
        }
    }

    @Override
    public void broadcastChatMessage(Player player, String message) {
        if (!delegate("broadcastChatMessage", player, message)) {
            super.broadcastChatMessage(player, message);
        }
    }

    @Override
    public void tick() {
        if (!delegate("tick")) {
            super.tick();
        }
    }
}
package dr.inference.model;

import dr.app.beagle.evomodel.branchmodel.lineagespecific.BeagleBranchLikelihood;
import dr.util.NumberFormatter;
import dr.xml.Reportable;

import java.util.*;
import java.util.concurrent.*;

/**
 * A likelihood function which is simply the product of a set of likelihood functions.
 *
 * Likelihoods flagged as {@code evaluateEarly()} are always computed serially first
 * (they act as cheap short-circuits); the remaining "late" likelihoods are computed
 * either serially or via a thread pool, depending on the constructor used.
 *
 * @author Alexei Drummond
 * @author Andrew Rambaut
 * @version $Id: CompoundLikelihood.java,v 1.19 2005/05/25 09:14:36 rambaut Exp $
 */
public class CompoundLikelihood implements Likelihood, Reportable {

    public final static boolean UNROLL_COMPOUND = true;

    public final static boolean EVALUATION_TIMERS = true;
    // Per-likelihood accumulated evaluation time (ns) and call counts; only
    // allocated when EVALUATION_TIMERS is true.
    public final long[] evaluationTimes;
    public final int[] evaluationCounts;

    /**
     * Creates a compound likelihood with optional multi-threaded evaluation.
     *
     * @param threads thread pool size; negative requests an automatic size
     *                (one thread per likelihood), zero disables threading
     * @param likelihoods the component likelihoods (compound members are unrolled)
     */
    public CompoundLikelihood(int threads, Collection<Likelihood> likelihoods) {

        int i = 0;
        for (Likelihood l : likelihoods) {
            addLikelihood(l, i, true);
            i++;
        }

        if (threads < 0 && this.likelihoods.size() > 1) {
            // asking for an automatic threadpool size and there is more than one likelihood to compute
            threadCount = this.likelihoods.size();  // create a threadpool the size of the number of likelihoods
            // threadCount = -1; // use cached thread pool
        } else if (threads > 0) {
            threadCount = threads;  // use a thread pool of a specified size
        } else {
            // no thread pool requested or only one likelihood
            threadCount = 0;
        }

        if (threadCount > 0) {
            pool = Executors.newFixedThreadPool(threadCount);
        } else if (threadCount < 0) {
            // create a cached thread pool which should create one thread per likelihood...
            pool = Executors.newCachedThreadPool();
        } else {
            // don't use a threadpool (i.e., compute serially)
            pool = null;
        }

        if (EVALUATION_TIMERS) {
            evaluationTimes = new long[this.likelihoods.size()];
            evaluationCounts = new int[this.likelihoods.size()];
        } else {
            evaluationTimes = null;
            evaluationCounts = null;
        }
    }

    /**
     * Creates a serially-evaluated compound likelihood (no thread pool).
     *
     * @param likelihoods the component likelihoods (not unrolled)
     */
    public CompoundLikelihood(Collection<Likelihood> likelihoods) {

        pool = null;
        threadCount = 0;

        int i = 0;
        for (Likelihood l : likelihoods) {
            addLikelihood(l, i, false);
            i++;
        }

        if (EVALUATION_TIMERS) {
            evaluationTimes = new long[this.likelihoods.size()];
            evaluationCounts = new int[this.likelihoods.size()];
        } else {
            evaluationTimes = null;
            evaluationCounts = null;
        }
    }

    /**
     * Registers a component likelihood, recursively unrolling nested compound
     * likelihoods when {@code UNROLL_COMPOUND} and {@code addToPool} are set.
     * Late-evaluating likelihoods added to the pool get a {@link LikelihoodCaller}
    * so they can be submitted to the executor.
     *
     * @throws IllegalArgumentException if the same likelihood is added twice
     */
    protected void addLikelihood(Likelihood likelihood, int index, boolean addToPool) {

        // unroll any compound likelihoods
        if (UNROLL_COMPOUND && addToPool && likelihood instanceof CompoundLikelihood) {
            for (Likelihood l : ((CompoundLikelihood) likelihood).getLikelihoods()) {
                addLikelihood(l, index, addToPool);
            }
        } else {
            if (!likelihoods.contains(likelihood)) {

                likelihoods.add(likelihood);
                if (likelihood.getModel() != null) {
                    compoundModel.addModel(likelihood.getModel());
                }

                if (likelihood.evaluateEarly()) {
                    earlyLikelihoods.add(likelihood);
                } else {
                    // late likelihood list is used to evaluate them if the thread pool is not being used...
                    lateLikelihoods.add(likelihood);

                    if (addToPool) {
                        likelihoodCallers.add(new LikelihoodCaller(likelihood, index));
                    }
                }
            } else {
                throw new IllegalArgumentException("Attempted to add the same likelihood multiple times to CompoundLikelihood.");
            }
        }
    }

    public Set<Likelihood> getLikelihoodSet() {
        Set<Likelihood> set = new HashSet<Likelihood>();
        for (Likelihood l : likelihoods) {
            set.add(l);
            set.addAll(l.getLikelihoodSet());
        }
        return set;
    }

    public int getLikelihoodCount() {
        return likelihoods.size();
    }

    public final Likelihood getLikelihood(int i) {
        return likelihoods.get(i);
    }

    public List<Likelihood> getLikelihoods() {
        return likelihoods;
    }

    public List<Callable<Double>> getLikelihoodCallers() {
        return likelihoodCallers;
    }

    // **************************************************************
    // Likelihood IMPLEMENTATION
    // **************************************************************

    public Model getModel() {
        return compoundModel;
    }

    /**
     * Returns the sum of the component log likelihoods. Early likelihoods are
     * evaluated first and short-circuit the computation when any of them is
     * -Inf; late likelihoods are evaluated serially or via the thread pool.
     */
    public double getLogLikelihood() {

        double logLikelihood = evaluateLikelihoods(earlyLikelihoods);

        if (logLikelihood == Double.NEGATIVE_INFINITY) {
            return Double.NEGATIVE_INFINITY;
        }

        if (pool == null) {
            // Single threaded
            logLikelihood += evaluateLikelihoods(lateLikelihoods);
        } else {
            try {
                List<Future<Double>> results = pool.invokeAll(likelihoodCallers);

                for (Future<Double> result : results) {
                    double logL = result.get();
                    logLikelihood += logL;
                }
            } catch (InterruptedException e) {
                // Restore the interrupt status so callers can observe the interruption.
                Thread.currentThread().interrupt();
                e.printStackTrace();
            } catch (ExecutionException e) {
                e.printStackTrace();
            }
        }

        if (DEBUG_PARALLEL_EVALUATION) {
            System.err.println("");
        }
        return logLikelihood;
    }

    /**
     * Serially evaluates the given likelihoods, short-circuiting on the first
     * -Inf. Timing statistics are recorded when {@code EVALUATION_TIMERS} is on.
     */
    private double evaluateLikelihoods(ArrayList<Likelihood> likelihoods) {
        double logLikelihood = 0.0;
        int i = 0;
        for (Likelihood likelihood : likelihoods) {
            if (EVALUATION_TIMERS) {
                // this code is only executed if EVALUATION_TIMERS is true
                long time = System.nanoTime();
                double l = likelihood.getLogLikelihood();
                evaluationTimes[i] += System.nanoTime() - time;
                evaluationCounts[i]++;

                if (l == Double.NEGATIVE_INFINITY)
                    return Double.NEGATIVE_INFINITY;

                logLikelihood += l;

                i++;
            } else {
                final double l = likelihood.getLogLikelihood();
                // if the likelihood is zero then short cut the rest of the likelihoods
                // This means that expensive likelihoods such as TreeLikelihoods should
                // be put after cheap ones such as BooleanLikelihoods
                if (l == Double.NEGATIVE_INFINITY)
                    return Double.NEGATIVE_INFINITY;
                logLikelihood += l;
            }
        }
        return logLikelihood;
    }

    public void makeDirty() {
        for (Likelihood likelihood : likelihoods) {
            likelihood.makeDirty();
        }
    }

    public boolean evaluateEarly() {
        return false;
    }

    public String getDiagnosis() {
        return getDiagnosis(0);
    }

    /**
     * Returns a human-readable breakdown of each component's log likelihood,
     * recursing into nested compound likelihoods.
     *
     * @param indent current indentation depth; negative disables line breaks
     */
    public String getDiagnosis(int indent) {
        String message = "";
        boolean first = true;

        final NumberFormatter nf = new NumberFormatter(6);

        for (Likelihood lik : likelihoods) {

            if (!first) {
                message += ", ";
            } else {
                first = false;
            }

            if (indent >= 0) {
                message += "\n";
                for (int i = 0; i < indent; i++) {
                    message += " ";
                }
            }
            message += lik.prettyName() + "=";

            if (lik instanceof CompoundLikelihood) {
                final String d = ((CompoundLikelihood) lik).getDiagnosis(indent < 0 ? -1 : indent + 2);
                if (d != null && d.length() > 0) {
                    message += "(" + d;

                    if (indent >= 0) {
                        message += "\n";
                        for (int i = 0; i < indent; i++) {
                            message += " ";
                        }
                    }
                    message += ")";
                }
            } else {
                final double logLikelihood = lik.getLogLikelihood();
                if (logLikelihood == Double.NEGATIVE_INFINITY) {
                    message += "-Inf";
                } else if (Double.isNaN(logLikelihood)) {
                    message += "NaN";
                } else if (logLikelihood == Double.POSITIVE_INFINITY) {
                    message += "+Inf";
                } else {
                    message += nf.formatDecimal(logLikelihood, 4);
                }
            }
        }
        message += "\n";
        for (int i = 0; i < indent; i++) {
            message += " ";
        }
        message += "Total = " + this.getLogLikelihood();

        return message;
    }

    public String toString() {
        return getId();
        // really bad for debugging
        //return Double.toString(getLogLikelihood());
    }

    public String prettyName() {
        return Abstract.getPrettyName(this);
    }

    public boolean isUsed() {
        return used;
    }

    public void setUsed() {
        used = true;
        for (Likelihood l : likelihoods) {
            l.setUsed();
        }
    }

    public int getThreadCount() {
        return threadCount;
    }

    public long[] getEvaluationTimes() {
        return evaluationTimes;
    }

    public int[] getEvaluationCounts() {
        return evaluationCounts;
    }

    public void resetEvaluationTimes() {
        for (int i = 0; i < evaluationTimes.length; i++) {
            evaluationTimes[i] = 0;
            evaluationCounts[i] = 0;
        }
    }

    // **************************************************************
    // Loggable IMPLEMENTATION
    // **************************************************************

    /**
     * @return the log columns.
     */
    public dr.inference.loggers.LogColumn[] getColumns() {
        return new dr.inference.loggers.LogColumn[]{
                new LikelihoodColumn(getId() == null ? "likelihood" : getId())
        };
    }

    private class LikelihoodColumn extends dr.inference.loggers.NumberColumn {
        public LikelihoodColumn(String label) {
            super(label);
        }

        public double getDoubleValue() {
            return getLogLikelihood();
        }
    }

    // **************************************************************
    // Reportable IMPLEMENTATION
    // **************************************************************

    public String getReport() {
        return getReport(0);
    }

    /**
     * Returns per-component evaluation timing statistics, recursing into
     * nested compound likelihoods, or a placeholder when timers are disabled.
     */
    public String getReport(int indent) {
        if (EVALUATION_TIMERS) {
            String message = "\n";
            boolean first = true;

            final NumberFormatter nf = new NumberFormatter(6);

            int index = 0;
            for (Likelihood lik : likelihoods) {

                if (!first) {
                    message += ", ";
                } else {
                    first = false;
                }

                if (indent >= 0) {
                    message += "\n";
                    for (int i = 0; i < indent; i++) {
                        message += " ";
                    }
                }
                message += lik.prettyName() + "=";

                if (lik instanceof CompoundLikelihood) {
                    final String d = ((CompoundLikelihood) lik).getReport(indent < 0 ? -1 : indent + 2);
                    if (d != null && d.length() > 0) {
                        message += "(" + d;

                        if (indent >= 0) {
                            message += "\n";
                            for (int i = 0; i < indent; i++) {
                                message += " ";
                            }
                        }
                        message += ")";
                    }
                } else {
                    double secs = (double) evaluationTimes[index] / 1.0E9;
                    message += evaluationCounts[index] + " evaluations in " +
                            nf.format(secs) + " secs (" +
                            nf.format(secs / evaluationCounts[index]) + " secs/eval)";
                }
                index++;
            }

            return message;
        } else {
            return "No evaluation timer report available";
        }
    }

    // **************************************************************
    // Identifiable IMPLEMENTATION
    // **************************************************************

    private String id = null;

    public void setId(String id) {
        this.id = id;
    }

    public String getId() {
        return id;
    }

    private boolean used = false;

    private final int threadCount;

    private final ExecutorService pool;

    private final ArrayList<Likelihood> likelihoods = new ArrayList<Likelihood>();
    private final CompoundModel compoundModel = new CompoundModel("compoundModel");

    private final ArrayList<Likelihood> earlyLikelihoods = new ArrayList<Likelihood>();
    private final ArrayList<Likelihood> lateLikelihoods = new ArrayList<Likelihood>();

    private final List<Callable<Double>> likelihoodCallers = new ArrayList<Callable<Double>>();

    /**
     * Wraps a single likelihood for submission to the thread pool, recording
     * timing statistics when enabled.
     */
    class LikelihoodCaller implements Callable<Double> {

        public LikelihoodCaller(Likelihood likelihood, int index) {
            this.likelihood = likelihood;
            this.index = index;
        }

        public Double call() throws Exception {
            if (DEBUG_PARALLEL_EVALUATION) {
                System.err.print("Invoking thread #" + index + " for " + likelihood.getId() + ": ");
            }
            if (EVALUATION_TIMERS) {
                long time = System.nanoTime();
                double logL = likelihood.getLogLikelihood();
                evaluationTimes[index] += System.nanoTime() - time;
                evaluationCounts[index]++;
                return logL;
            }
            return likelihood.getLogLikelihood();
        }

        private final Likelihood likelihood;
        private final int index;
    }

    public static final boolean DEBUG_PARALLEL_EVALUATION = false;

}
package edu.jhu.hltcoe.util.tuple;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;

import edu.jhu.hltcoe.util.Prng;

/**
 * Utilities for sampling random pairs of integers from rectangular ranges.
 *
 * Two strategies are used: for small ranges (or a large sampling proportion)
 * every candidate pair is enumerated and kept independently with probability
 * {@code prop} (Bernoulli sampling); for very large sparse ranges, distinct
 * pairs are drawn uniformly at random until the target count is reached.
 */
public class PairSampler {

    private PairSampler() {
        // private constructor.
    }

    /**
     * Sample ordered pairs of integers. The returned collection contains
     * distinct pairs; approximately {@code prop} of all possible pairs are
     * returned.
     *
     * @param minI The minimum value for i (inclusive).
     * @param maxI The maximum value for i (exclusive).
     * @param minJ The minimum value for j (inclusive).
     * @param maxJ The maximum value for j (exclusive).
     * @param prop The proportion of possible pairs to return.
     * @return A collection of ordered pairs.
     */
    public static Collection<OrderedPair> sampleOrderedPairs(int minI, int maxI, int minJ, int maxJ, double prop) {
        int numI = maxI - minI;
        int numJ = maxJ - minJ;
        // Count the max number possible. Widen BEFORE multiplying: numI * numJ
        // evaluated in int arithmetic can overflow and silently pick the wrong
        // sampling strategy below.
        long maxPairs = (long) numI * numJ;
        Collection<OrderedPair> samples;
        if (maxPairs < 400000000 || prop > 0.1) {
            // Dense case: enumerate all pairs, keeping each independently.
            samples = new ArrayList<OrderedPair>();
            for (int i = minI; i < maxI; i++) {
                for (int j = minJ; j < maxJ; j++) {
                    if (prop >= 1.0 || Prng.nextDouble() < prop) {
                        samples.add(new OrderedPair(i, j));
                    }
                }
            }
        } else {
            // Sparse case: draw uniformly until we have enough distinct pairs.
            samples = new HashSet<OrderedPair>();
            double numSamples = maxPairs * prop;
            while (samples.size() < numSamples) {
                int i = Prng.nextInt(numI) + minI;
                int j = Prng.nextInt(numJ) + minJ;
                samples.add(new OrderedPair(i, j));
            }
        }
        return samples;
    }

    /**
     * Sample unordered pairs of integers. The returned collection contains
     * distinct pairs; approximately {@code prop} of all possible pairs are
     * returned.
     *
     * @param minI The minimum value for i (inclusive).
     * @param maxI The maximum value for i (exclusive).
     * @param minJ The minimum value for j (inclusive).
     * @param maxJ The maximum value for j (exclusive).
     * @param prop The proportion of possible pairs to return.
     * @return A collection of unordered pairs represented as ordered pairs s.t. i <= j.
     */
    public static Collection<UnorderedPair> sampleUnorderedPairs(int minI, int maxI, int minJ, int maxJ, double prop) {
        int numI = maxI - minI;
        int numJ = maxJ - minJ;
        // Count the max number possible (exact count honoring both range constraints):
        long maxPairs = PairSampler.countUnorderedPairs(minI, maxI, minJ, maxJ);
        Collection<UnorderedPair> samples;
        if (maxPairs < 400000000 || prop > 0.1) {
            // Dense case: enumerate candidate (i, j) with i <= j and keep those
            // satisfying either orientation of the range constraints.
            samples = new ArrayList<UnorderedPair>();
            int min = Math.min(minI, minJ);
            int max = Math.max(maxI, maxJ);
            for (int i = min; i < max; i++) {
                for (int j = i; j < max; j++) {
                    if ((minI <= i && i < maxI && minJ <= j && j < maxJ)
                            || (minJ <= i && i < maxJ && minI <= j && j < maxI)) {
                        if (prop >= 1.0 || Prng.nextDouble() < prop) {
                            samples.add(new UnorderedPair(i, j));
                        }
                    }
                }
            }
        } else {
            // Sparse case: draw uniformly until we have enough distinct pairs.
            samples = new HashSet<UnorderedPair>();
            double numSamples = maxPairs * prop;
            while (samples.size() < numSamples) {
                int i = Prng.nextInt(numI) + minI;
                int j = Prng.nextInt(numJ) + minJ;
                if (i <= j) {
                    // We must reject samples for which j < i, or else we would
                    // be making pairs with i==j half as likely.
                    samples.add(new UnorderedPair(i, j));
                }
            }
        }
        return samples;
    }

    /**
     * Count the number of unordered pairs that satisfy the constraint the
     * constraints: minI <= i < maxI and minJ <= j < maxJ.
     *
     * Note that since these are unordered pairs. We can think of this as being
     * the count of ordered pairs (i,j) s.t. i<=j and either of the following
     * two conditions holds:
     *
     * minI <= i < maxI and minJ <= j < maxJ.
     *
     * minI <= j < maxI and minJ <= i < maxJ.
     *
     * @param minI The minimum value for i (inclusive).
     * @param maxI The maximum value for i (exclusive).
     * @param minJ The minimum value for j (inclusive).
     * @param maxJ The maximum value for j (exclusive).
     * @return The number of unordered pairs.
     */
    public static long countUnorderedPairs(int minI, int maxI, int minJ, int maxJ) {
        long maxPairs = 0;
        int min = Math.min(minI, minJ);
        int max = Math.max(maxI, maxJ);
        for (int i = min; i < max; i++) {
            for (int j = i; j < max; j++) {
                if ((minI <= i && i < maxI && minJ <= j && j < maxJ)
                        || (minJ <= i && i < maxJ && minI <= j && j < maxI)) {
                    maxPairs++;
                }
            }
        }
        return maxPairs;
    }
}
package bisq.core.dao.node.full;

import bisq.core.dao.node.full.rpc.BitcoindClient;
import bisq.core.dao.node.full.rpc.BitcoindDaemon;
import bisq.core.dao.node.full.rpc.dto.DtoPubKeyScript;
import bisq.core.dao.node.full.rpc.dto.RawDtoBlock;
import bisq.core.dao.node.full.rpc.dto.RawDtoInput;
import bisq.core.dao.node.full.rpc.dto.RawDtoTransaction;
import bisq.core.dao.state.model.blockchain.PubKeyScript;
import bisq.core.dao.state.model.blockchain.ScriptType;
import bisq.core.dao.state.model.blockchain.TxInput;
import bisq.core.user.Preferences;

import bisq.common.UserThread;
import bisq.common.config.Config;
import bisq.common.handlers.ResultHandler;
import bisq.common.util.Utilities;

import org.bitcoinj.core.Utils;

import com.google.inject.Inject;

import javax.inject.Named;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Range;
import com.google.common.primitives.Chars;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;

import java.io.IOException;

import java.math.BigDecimal;

import java.util.List;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import java.util.stream.Collectors;

import lombok.extern.slf4j.Slf4j;

import org.jetbrains.annotations.NotNull;

/**
 * Request blockchain data via RPC from Bitcoin Core for a FullNode.
 * Runs in a custom thread.
 * See the rpc.md file in the doc directory for more info about the setup.
 */
@Slf4j
public class RpcService {
    private static final int ACTIVATE_HARD_FORK_2_HEIGHT_MAINNET = 680300;
    private static final int ACTIVATE_HARD_FORK_2_HEIGHT_TESTNET = 1943000;
    private static final int ACTIVATE_HARD_FORK_2_HEIGHT_REGTEST = 1;
    // Bitcoin Core versions (encoded) that this client is known to work with.
    private static final Range<Integer> SUPPORTED_NODE_VERSION_RANGE = Range.closedOpen(180000, 210100);

    private final String rpcUser;
    private final String rpcPassword;
    private final String rpcHost;
    private final int rpcPort;
    private final int rpcBlockPort;
    private final String rpcBlockHost;

    private BitcoindClient client;
    private BitcoindDaemon daemon;

    // We could use multiple threads, but then we need to support ordering of results in a queue
    // Keep that for optimization after measuring performance differences
    private final ListeningExecutorService executor = Utilities.getSingleThreadListeningExecutor("RpcService");
    private volatile boolean isShutDown;


    ///////////////////////////////////////////////////////////////////////////////////////////
    // Constructor
    ///////////////////////////////////////////////////////////////////////////////////////////

    @Inject
    private RpcService(Preferences preferences,
                       @Named(Config.RPC_HOST) String rpcHost,
                       @Named(Config.RPC_PORT) int rpcPort,
                       @Named(Config.RPC_BLOCK_NOTIFICATION_PORT) int rpcBlockPort,
                       @Named(Config.RPC_BLOCK_NOTIFICATION_HOST) String rpcBlockHost) {
        this.rpcUser = preferences.getRpcUser();
        this.rpcPassword = preferences.getRpcPw();

        // mainnet is 8332, testnet 18332, regtest 18443
        boolean isHostSet = !rpcHost.isEmpty();
        boolean isPortSet = rpcPort != Config.UNSPECIFIED_PORT;
        boolean isMainnet = Config.baseCurrencyNetwork().isMainnet();
        boolean isTestnet = Config.baseCurrencyNetwork().isTestnet();
        boolean isDaoBetaNet = Config.baseCurrencyNetwork().isDaoBetaNet();
        this.rpcHost = isHostSet ? rpcHost : "127.0.0.1";
        this.rpcPort = isPortSet ? rpcPort :
                isMainnet || isDaoBetaNet ? 8332 :
                        isTestnet ? 18332 :
                                18443; // regtest
        boolean isBlockPortSet = rpcBlockPort != Config.UNSPECIFIED_PORT;
        boolean isBlockHostSet = !rpcBlockHost.isEmpty();
        this.rpcBlockPort = isBlockPortSet ? rpcBlockPort : 5125;
        this.rpcBlockHost = isBlockHostSet ? rpcBlockHost : "127.0.0.1";
    }


    ///////////////////////////////////////////////////////////////////////////////////////////
    // API
    ///////////////////////////////////////////////////////////////////////////////////////////

    public void shutDown() {
        isShutDown = true;
        if (daemon != null) {
            daemon.shutdown();
            log.info("daemon shut down");
        }
        // A hard shutdown is justified for the RPC service.
        executor.shutdown();
    }

    /**
     * Builds the RPC client, checks the node version/health and starts the
     * blocknotify daemon, all on the service's single worker thread. The
     * handlers are invoked back on the UserThread.
     */
    void setup(ResultHandler resultHandler, Consumer<Throwable> errorHandler) {
        try {
            ListenableFuture<Void> future = executor.submit(() -> {
                try {
                    log.info("Starting RpcService on {}:{} with user {}, listening for blocknotify on port {} from {}",
                            this.rpcHost, this.rpcPort, this.rpcUser, this.rpcBlockPort, this.rpcBlockHost);

                    long startTs = System.currentTimeMillis();

                    client = BitcoindClient.builder()
                            .rpcHost(rpcHost)
                            .rpcPort(rpcPort)
                            .rpcUser(rpcUser)
                            .rpcPassword(rpcPassword)
                            .build();
                    checkNodeVersionAndHealth();

                    daemon = new BitcoindDaemon(rpcBlockHost, rpcBlockPort, throwable -> {
                        // Attach the throwable to the log call so the stack trace is not lost.
                        log.error(throwable.toString(), throwable);
                        UserThread.execute(() -> errorHandler.accept(new RpcException(throwable)));
                    });

                    log.info("Setup took {} ms", System.currentTimeMillis() - startTs);
                } catch (Throwable e) {
                    log.error(e.toString(), e);
                    throw new RpcException(e.toString(), e);
                }
                return null;
            });

            Futures.addCallback(future, new FutureCallback<>() {
                @Override
                public void onSuccess(Void ignore) {
                    UserThread.execute(resultHandler::handleResult);
                }

                @Override
                public void onFailure(@NotNull Throwable throwable) {
                    UserThread.execute(() -> errorHandler.accept(throwable));
                }
            }, MoreExecutors.directExecutor());
        } catch (Exception e) {
            // A rejected submission during shutdown is expected and ignored.
            if (!isShutDown || !(e instanceof RejectedExecutionException)) {
                log.warn(e.toString(), e);
                throw e;
            }
        }
    }

    /**
     * Decodes Bitcoin Core's packed integer version (e.g. 210000) into a
     * dotted version string (e.g. "21"), stripping leading/trailing zeros.
     */
    private String decodeNodeVersion(Integer encodedVersion) {
        var paddedEncodedVersion = Strings.padStart(encodedVersion.toString(), 8, '0');

        return Lists.partition(Chars.asList(paddedEncodedVersion.toCharArray()), 2).stream()
                .map(chars -> new String(Chars.toArray(chars)).replaceAll("^0", ""))
                .collect(Collectors.joining("."))
                .replaceAll("\\.0$", "");
    }

    /**
     * Logs a warning if the node version is outside the supported range or if
     * the best block is more than six hours old (possible connectivity issue).
     */
    private void checkNodeVersionAndHealth() throws IOException {
        var networkInfo = client.getNetworkInfo();
        var nodeVersion = decodeNodeVersion(networkInfo.getVersion());

        if (SUPPORTED_NODE_VERSION_RANGE.contains(networkInfo.getVersion())) {
            log.info("Got Bitcoin Core version: {}", nodeVersion);
        } else {
            log.warn("Server version mismatch - client optimized for '[{} .. {})', node responded with '{}'",
                    decodeNodeVersion(SUPPORTED_NODE_VERSION_RANGE.lowerEndpoint()),
                    decodeNodeVersion(SUPPORTED_NODE_VERSION_RANGE.upperEndpoint()), nodeVersion);
        }

        var bestRawBlock = client.getBlock(client.getBestBlockHash(), 1);
        long currentTime = System.currentTimeMillis() / 1000;
        if ((currentTime - bestRawBlock.getTime()) > TimeUnit.HOURS.toSeconds(6)) {
            log.warn("Last available block was mined >{} hours ago; please check your network connection",
                    ((currentTime - bestRawBlock.getTime()) / 3600));
        }
    }

    /**
     * Registers a listener that converts each blocknotify'd block to a
     * {@link RawBlock} and delivers it on the UserThread.
     */
    void addNewDtoBlockHandler(Consumer<RawBlock> dtoBlockHandler,
                               Consumer<Throwable> errorHandler) {
        daemon.setBlockListener(blockHash -> {
            try {
                var rawDtoBlock = client.getBlock(blockHash, 2);
                log.info("New block received: height={}, id={}", rawDtoBlock.getHeight(), rawDtoBlock.getHash());

                var block = getBlockFromRawDtoBlock(rawDtoBlock);
                UserThread.execute(() -> dtoBlockHandler.accept(block));
            } catch (Throwable t) {
                errorHandler.accept(t);
            }
        });
    }

    /**
     * Asynchronously fetches the current chain tip height.
     */
    void requestChainHeadHeight(Consumer<Integer> resultHandler, Consumer<Throwable> errorHandler) {
        try {
            ListenableFuture<Integer> future = executor.submit(client::getBlockCount);
            Futures.addCallback(future, new FutureCallback<>() {
                @Override
                public void onSuccess(Integer chainHeight) {
                    UserThread.execute(() -> resultHandler.accept(chainHeight));
                }

                @Override
                public void onFailure(@NotNull Throwable throwable) {
                    UserThread.execute(() -> errorHandler.accept(throwable));
                }
            }, MoreExecutors.directExecutor());
        } catch (Exception e) {
            if (!isShutDown || !(e instanceof RejectedExecutionException)) {
                log.warn(e.toString(), e);
                throw e;
            }
        }
    }

    /**
     * Asynchronously fetches and converts the block at the given height.
     */
    void requestDtoBlock(int blockHeight,
                         Consumer<RawBlock> resultHandler,
                         Consumer<Throwable> errorHandler) {
        try {
            ListenableFuture<RawBlock> future = executor.submit(() -> {
                long startTs = System.currentTimeMillis();
                String blockHash = client.getBlockHash(blockHeight);
                var rawDtoBlock = client.getBlock(blockHash, 2);
                var block = getBlockFromRawDtoBlock(rawDtoBlock);
                log.info("requestDtoBlock from bitcoind at blockHeight {} with {} txs took {} ms",
                        blockHeight, block.getRawTxs().size(), System.currentTimeMillis() - startTs);
                return block;
            });
            Futures.addCallback(future, new FutureCallback<>() {
                @Override
                public void onSuccess(RawBlock block) {
                    UserThread.execute(() -> resultHandler.accept(block));
                }

                @Override
                public void onFailure(@NotNull Throwable throwable) {
                    log.error("Error at requestDtoBlock: blockHeight={}", blockHeight);
                    UserThread.execute(() -> errorHandler.accept(throwable));
                }
            }, MoreExecutors.directExecutor());
        } catch (Exception e) {
            if (!isShutDown || !(e instanceof RejectedExecutionException)) {
                log.warn(e.toString(), e);
                throw e;
            }
        }
    }


    ///////////////////////////////////////////////////////////////////////////////////////////
    // Private
    ///////////////////////////////////////////////////////////////////////////////////////////

    private static RawBlock getBlockFromRawDtoBlock(RawDtoBlock rawDtoBlock) {
        List<RawTx> txList = rawDtoBlock.getTx().stream()
                .map(e -> getTxFromRawTransaction(e, rawDtoBlock))
                .collect(Collectors.toList());
        return new RawBlock(rawDtoBlock.getHeight(),
                rawDtoBlock.getTime() * 1000, // rawDtoBlock.getTime() is in sec but we want ms
                rawDtoBlock.getHash(),
                rawDtoBlock.getPreviousBlockHash(),
                ImmutableList.copyOf(txList));
    }

    private static RawTx getTxFromRawTransaction(RawDtoTransaction rawDtoTx, RawDtoBlock rawDtoBlock) {
        String txId = rawDtoTx.getTxId();
        long blockTime = rawDtoBlock.getTime() * 1000; // We convert block time from sec to ms
        int blockHeight = rawDtoBlock.getHeight();
        String blockHash = rawDtoBlock.getHash();

        // Extracting pubKeys for segwit (P2WPKH) inputs, instead of just P2PKH inputs as
        // originally, changes the DAO state and thus represents a hard fork. We disallow
        // it until the fork activates, which is determined by block height.
        boolean allowSegwit = blockHeight >= getActivateHardFork2Height();

        final List<TxInput> txInputs = rawDtoTx.getVIn()
                .stream()
                .filter(rawInput -> rawInput != null && rawInput.getVOut() != null && rawInput.getTxId() != null)
                .map(rawInput -> {
                    String pubKeyAsHex = extractPubKeyAsHex(rawInput, allowSegwit);
                    if (pubKeyAsHex == null) {
                        log.debug("pubKeyAsHex is not set as we received a not supported sigScript. " +
                                        "txId={}, asm={}, txInWitness={}",
                                rawDtoTx.getTxId(), rawInput.getScriptSig().getAsm(), rawInput.getTxInWitness());
                    }
                    return new TxInput(rawInput.getTxId(), rawInput.getVOut(), pubKeyAsHex);
                })
                .collect(Collectors.toList());

        final List<RawTxOutput> txOutputs = rawDtoTx.getVOut()
                .stream()
                .filter(e -> e != null && e.getN() != null && e.getValue() != null && e.getScriptPubKey() != null)
                .map(rawDtoTxOutput -> {
                            byte[] opReturnData = null;
                            DtoPubKeyScript scriptPubKey = rawDtoTxOutput.getScriptPubKey();
                            if (ScriptType.NULL_DATA.equals(scriptPubKey.getType()) && scriptPubKey.getAsm() != null) {
                                String[] chunks = scriptPubKey.getAsm().split(" ");
                                // We get on testnet a lot of "OP_RETURN 0" data, so we filter those away
                                if (chunks.length == 2 && "OP_RETURN".equals(chunks[0]) && !"0".equals(chunks[1])) {
                                    try {
                                        opReturnData = Utils.HEX.decode(chunks[1]);
                                    } catch (Throwable t) {
                                        log.debug("Error at Utils.HEX.decode(chunks[1]): " + t.toString() +
                                                " / chunks[1]=" + chunks[1] +
                                                "\nWe get sometimes exceptions with opReturn data, seems BitcoinJ " +
                                                "cannot handle all " +
                                                "existing OP_RETURN data, but we ignore them anyway as the OP_RETURN " +
                                                "data used for DAO transactions are all valid in BitcoinJ");
                                    }
                                }
                            }
                            // We don't support raw MS which are the only case where scriptPubKey.getAddresses()>1
                            String address = scriptPubKey.getAddresses() != null &&
                                    scriptPubKey.getAddresses().size() == 1 ? scriptPubKey.getAddresses().get(0) : null;
                            PubKeyScript pubKeyScript = new PubKeyScript(scriptPubKey);
                            return new RawTxOutput(rawDtoTxOutput.getN(),
                                    BigDecimal.valueOf(rawDtoTxOutput.getValue()).movePointRight(8).longValueExact(),
                                    rawDtoTx.getTxId(),
                                    pubKeyScript,
                                    address,
                                    opReturnData,
                                    blockHeight);
                        }
                )
                .collect(Collectors.toList());

        return new RawTx(txId,
                blockHeight,
                blockHash,
                blockTime,
                ImmutableList.copyOf(txInputs),
                ImmutableList.copyOf(txOutputs));
    }

    private static int getActivateHardFork2Height() {
        return Config.baseCurrencyNetwork().isMainnet() ? ACTIVATE_HARD_FORK_2_HEIGHT_MAINNET :
                Config.baseCurrencyNetwork().isTestnet() ? ACTIVATE_HARD_FORK_2_HEIGHT_TESTNET :
                        ACTIVATE_HARD_FORK_2_HEIGHT_REGTEST;
    }

    @VisibleForTesting
    static String extractPubKeyAsHex(RawDtoInput rawInput, boolean allowSegwit) {
        // We only allow inputs with a single SIGHASH_ALL signature. That is, multisig or
        // signing of only some of the tx inputs/outputs is intentionally disallowed...
        if (rawInput.getScriptSig() == null) {
            // coinbase input - no pubKey to extract
            return null;
        }
        String[] split = rawInput.getScriptSig().getAsm().split(" ");
        if (split.length == 2 && split[0].endsWith("[ALL]")) {
            // P2PKH input
            return split[1];
        }
        List<String> txInWitness = rawInput.getTxInWitness() != null ? rawInput.getTxInWitness() : List.of();
        if (allowSegwit && split.length < 2 && txInWitness.size() == 2 && txInWitness.get(0).endsWith("01")) {
            // P2WPKH or P2SH-P2WPKH input
            return txInWitness.get(1);
        }
        // If we receive a pay to pubkey tx, the pubKey is not included as it is in the
        // output already.
        return null;
    }
}
package io.grpc.internal;

import com.google.common.base.Charsets;
import com.google.common.base.Preconditions;

import io.grpc.Metadata;
import io.grpc.Status;

import java.nio.charset.Charset;

import javax.annotation.Nullable;

/**
 * Base implementation for client streams using HTTP2 as the transport.
 *
 * <p>Tracks a pending {@code transportError}: once a protocol-level problem is detected the
 * stream stops delivering inbound data to the application and instead accumulates detail
 * (headers, decoded DATA frames) into the error before reporting it.
 */
public abstract class Http2ClientStream extends AbstractClientStream<Integer> {

  /**
   * Metadata marshaller for HTTP status lines.
   */
  private static final Metadata.AsciiMarshaller<Integer> HTTP_STATUS_LINE_MARSHALLER =
      new Metadata.AsciiMarshaller<Integer>() {
        @Override
        public String toAsciiString(Integer value) {
          return value.toString();
        }

        @Override
        public Integer parseAsciiString(String serialized) {
          // A status line may carry a reason phrase after the code; only the numeric code matters.
          return Integer.parseInt(serialized.split(" ", 2)[0]);
        }
      };

  private static final Metadata.Key<Integer> HTTP2_STATUS =
      Metadata.Key.of(":status", HTTP_STATUS_LINE_MARSHALLER);

  /** When non-{@code null}, {@link #transportErrorMetadata} must also be non-{@code null}. */
  private Status transportError;
  private Metadata transportErrorMetadata;
  /** Charset used to decode DATA frames appended to an error; defaults to UTF-8. */
  private Charset errorCharset = Charsets.UTF_8;
  private boolean contentTypeChecked;

  protected Http2ClientStream(WritableBufferAllocator bufferAllocator, int maxMessageSize) {
    super(bufferAllocator, maxMessageSize);
  }

  /**
   * Called by subclasses whenever {@code Headers} are received from the transport.
   *
   * @param headers the received headers
   */
  protected void transportHeadersReceived(Metadata headers) {
    Preconditions.checkNotNull(headers);
    if (transportError != null) {
      // Already received a transport error so just augment it.
      transportError = transportError.augmentDescription(headers.toString());
      return;
    }
    Status httpStatus = statusFromHttpStatus(headers);
    if (httpStatus == null) {
      transportError = Status.INTERNAL.withDescription(
          "received non-terminal headers with no :status");
    } else if (!httpStatus.isOk()) {
      transportError = httpStatus;
    } else {
      transportError = checkContentType(headers);
    }
    if (transportError != null) {
      // Note we don't immediately report the transport error, instead we wait for more data on the
      // stream so we can accumulate more detail into the error before reporting it.
      transportError = transportError.augmentDescription("\n" + headers.toString());
      transportErrorMetadata = headers;
      errorCharset = extractCharset(headers);
    } else {
      stripTransportDetails(headers);
      inboundHeadersReceived(headers);
    }
  }

  /**
   * Called by subclasses whenever a data frame is received from the transport.
   *
   * @param frame the received data frame
   * @param endOfStream {@code true} if there will be no more data received for this stream
   */
  protected void transportDataReceived(ReadableBuffer frame, boolean endOfStream) {
    if (transportError == null && inboundPhase() == Phase.HEADERS) {
      // Must receive headers prior to receiving any payload as we use headers to check for
      // protocol correctness.
      transportError = Status.INTERNAL.withDescription("no headers received prior to data");
      transportErrorMetadata = new Metadata();
    }
    if (transportError != null) {
      // We've already detected a transport error and now we're just accumulating more detail
      // for it. Decode the frame body with the error charset so it can be reported.
      transportError = transportError.augmentDescription(
          "DATA-----------------------------\n"
              + ReadableBuffers.readAsString(frame, errorCharset));
      frame.close();
      if (transportError.getDescription().length() > 1000 || endOfStream) {
        inboundTransportError(transportError, transportErrorMetadata);
        // We have enough error detail so lets cancel.
        sendCancel(Status.CANCELLED);
      }
    } else {
      inboundDataReceived(frame);
      if (endOfStream) {
        // This is a protocol violation as we expect to receive trailers.
        transportError = Status.INTERNAL.withDescription("Received EOS on DATA frame");
        transportErrorMetadata = new Metadata();
        inboundTransportError(transportError, transportErrorMetadata);
      }
    }
  }

  /**
   * Called by subclasses for the terminal trailer metadata on a stream.
   *
   * @param trailers the received terminal trailer metadata
   */
  protected void transportTrailersReceived(Metadata trailers) {
    Preconditions.checkNotNull(trailers);
    if (transportError != null) {
      // Already received a transport error so just augment it.
      transportError = transportError.augmentDescription(trailers.toString());
    } else {
      transportError = checkContentType(trailers);
      transportErrorMetadata = trailers;
    }
    if (transportError != null) {
      inboundTransportError(transportError, transportErrorMetadata);
      sendCancel(Status.CANCELLED);
    } else {
      Status status = statusFromTrailers(trailers);
      stripTransportDetails(trailers);
      inboundTrailersReceived(trailers, status);
    }
  }

  /**
   * Maps the HTTP {@code :status} pseudo-header to a gRPC {@link Status}, or returns
   * {@code null} when the header is absent.
   */
  private static Status statusFromHttpStatus(Metadata metadata) {
    Integer httpStatus = metadata.get(HTTP2_STATUS);
    if (httpStatus != null) {
      Status status = GrpcUtil.httpStatusToGrpcStatus(httpStatus);
      return status.isOk() ? status
          : status.augmentDescription("extracted status from HTTP :status " + httpStatus);
    }
    return null;
  }

  /**
   * Extract the response status from trailers.
   */
  private Status statusFromTrailers(Metadata trailers) {
    Status status = trailers.get(Status.CODE_KEY);
    if (status == null) {
      // No explicit gRPC status; fall back to inferring one from the HTTP status code.
      status = statusFromHttpStatus(trailers);
      if (status == null || status.isOk()) {
        status = Status.UNKNOWN.withDescription("missing GRPC status in response");
      } else {
        status = status.withDescription(
            "missing GRPC status, inferred error from HTTP status code");
      }
    }
    String message = trailers.get(Status.MESSAGE_KEY);
    if (message != null) {
      status = status.augmentDescription(message);
    }
    return status;
  }

  /**
   * Inspect the content type field from received headers or trailers and return an error Status if
   * content type is invalid or not present. Returns null if no error was found.
   */
  @Nullable
  private Status checkContentType(Metadata headers) {
    if (contentTypeChecked) {
      return null;
    }
    contentTypeChecked = true;
    String contentType = headers.get(GrpcUtil.CONTENT_TYPE_KEY);
    if (!GrpcUtil.isGrpcContentType(contentType)) {
      return Status.INTERNAL.withDescription("Invalid content-type: " + contentType);
    }
    return null;
  }

  /**
   * Inspect the raw metadata and figure out what charset is being used.
   * Falls back to UTF-8 when the charset is absent or unsupported.
   */
  private static Charset extractCharset(Metadata headers) {
    String contentType = headers.get(GrpcUtil.CONTENT_TYPE_KEY);
    if (contentType != null) {
      String[] split = contentType.split("charset=");
      try {
        return Charset.forName(split[split.length - 1].trim());
      } catch (Exception t) {
        // Ignore and assume UTF-8
      }
    }
    return Charsets.UTF_8;
  }

  /**
   * Strip HTTP transport implementation details so they don't leak via metadata into
   * the application layer.
   */
  private static void stripTransportDetails(Metadata metadata) {
    metadata.removeAll(HTTP2_STATUS);
    metadata.removeAll(Status.CODE_KEY);
    metadata.removeAll(Status.MESSAGE_KEY);
  }
}
package org.jasig.cas.web; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.jasig.cas.CentralAuthenticationService; import org.jasig.cas.web.support.ViewNames; import org.jasig.cas.web.support.WebConstants; import org.springframework.beans.factory.InitializingBean; import org.springframework.util.Assert; import org.springframework.web.servlet.ModelAndView; import org.springframework.web.servlet.mvc.AbstractController; import org.springframework.web.servlet.view.RedirectView; import org.springframework.web.util.WebUtils; /** * Controller to delete ticket granting ticket cookie in order to log out of * single sign on. This controller implements the idea of the ESUP Portail's * Logout patch to allow for redirecting to a url on logout. It also exposes a * log out link to the view via the WebConstants.LOGOUT constant. * * @author Scott Battaglia * @version $Revision$ $Date$ * @since 3.0 */ public final class LogoutController extends AbstractController implements InitializingBean { /** The log instance. */ private final Log log = LogFactory.getLog(getClass()); /** The CORE to which we delegate for all CAS functionality. */ private CentralAuthenticationService centralAuthenticationService; /** * Boolean to determine if we will redirect to any url provided in the * service request parameter. 
*/ private boolean followServiceRedirects = false; public void afterPropertiesSet() throws Exception { Assert.notNull(this.centralAuthenticationService, "centralAuthenticationService must be set on " + this.getClass().getName()); } protected ModelAndView handleRequestInternal( final HttpServletRequest request, final HttpServletResponse response) throws Exception { Cookie tgcCookie = WebUtils.getCookie(request, WebConstants.COOKIE_TGC_ID); String service = request.getParameter(WebConstants.SERVICE); if (tgcCookie != null) { this.centralAuthenticationService .destroyTicketGrantingTicket(tgcCookie.getValue()); destroyTicketGrantingTicketCookie(request, response); destroyPrivacyCookie(request, response); } if (this.followServiceRedirects && service != null) { return new ModelAndView(new RedirectView(service)); } return new ModelAndView(ViewNames.CONST_LOGOUT, WebConstants.LOGOUT, request.getParameter(WebConstants.LOGOUT)); } /** * Method to destroy the cookie for the TicketGrantingTicket. * * @param request The HttpServletRequest * @param response The HttpServletResponse */ private void destroyTicketGrantingTicketCookie( final HttpServletRequest request, final HttpServletResponse response) { log.debug("Destroying TicketGrantingTicket cookie."); Cookie cookie = new Cookie(WebConstants.COOKIE_TGC_ID, ""); cookie.setMaxAge(0); cookie.setPath(request.getContextPath()); cookie.setSecure(true); response.addCookie(cookie); } /** * Method to destroy the privacy (warn) cookie. * * @param request The HttpServletRequest * @param response The HttpServletResponse */ private void destroyPrivacyCookie( final HttpServletRequest request, final HttpServletResponse response) { log.debug("Destroying privacy cookie."); Cookie cookie = new Cookie(WebConstants.COOKIE_PRIVACY, ""); cookie.setMaxAge(0); cookie.setPath(request.getContextPath()); cookie.setSecure(true); response.addCookie(cookie); } /** * @param centralAuthenticationService The centralAuthenticationService to * set. 
*/ public void setCentralAuthenticationService( final CentralAuthenticationService centralAuthenticationService) { this.centralAuthenticationService = centralAuthenticationService; } public void setFollowServiceRedirects(final boolean followServiceRedirects) { this.followServiceRedirects = followServiceRedirects; } }
package fitnesse.responders.run;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import util.TimeMeasurement;
import fitnesse.responders.run.formatters.BaseFormatter;
import fitnesse.wiki.WikiPage;
import fitnesse.wiki.WikiPagePath;

/**
 * Used to run tests from a JUnit test suite.
 *
 * <p>Collects per-test summaries while the suite runs, writes each test's
 * output to a {@link ResultsRepository}, and emits an HTML summary table when
 * all testing completes. Instances are shared per test name via
 * {@link #getInstance(String)}.
 *
 * @see {@link fitnesse.junit.FitNesseSuite}
 */
public class JavaFormatter extends BaseFormatter {

  private String mainPageName;
  // Flips to false in testComplete() if the main page itself runs as a test,
  // i.e. this was a single-test run rather than a suite.
  private boolean isSuite = true;
  public static final String SUMMARY_FOOTER = "</table>";
  public static final String SUMMARY_HEADER =
      "<table><tr><td>Name</td><td>Right</td><td>Wrong</td><td>Exceptions</td></tr>";

  /** Sink for test output; opened once per test, written to, then closed. */
  public interface ResultsRepository {
    void open(String string) throws IOException;

    void close() throws IOException;

    void write(String content) throws IOException;
  }

  /**
   * {@link ResultsRepository} that writes each test's output as an HTML file
   * into a target folder, copying the supporting CSS/JS/image assets alongside.
   */
  public static class FolderResultsRepository implements ResultsRepository {
    private String outputPath;
    private Writer currentWriter;

    public FolderResultsRepository(String outputPath, String fitNesseRoot) throws IOException {
      this.outputPath = outputPath;
      initFolder(fitNesseRoot);
    }

    public void close() throws IOException {
      if (currentWriter != null) {
        currentWriter.write("</body></html>");
        currentWriter.close();
      }
    }

    public void open(String testName) throws IOException {
      File outputFile = new File(outputPath, testName + ".html");
      currentWriter = new OutputStreamWriter(new FileOutputStream(outputFile), "UTF-8");
      currentWriter.write("<html><head><title>");
      currentWriter.write(testName);
      currentWriter
          .write("</title><meta http-equiv='Content-Type' content='text/html;charset=utf-8'/>"
              + "<link rel='stylesheet' type='text/css' href='fitnesse.css'/>"
              + "<script src='fitnesse.js' type='text/javascript'></script>"
              + "</head><body><h2>");
      currentWriter.write(testName);
      currentWriter.write("</h2>");
    }

    public void write(String content) throws IOException {
      // Rewrite absolute image paths so the exported HTML is self-contained.
      currentWriter.write(content.replace("src=\"/files/images/", "src=\"images/"));
    }

    public void addFile(String r, String relativeFilePath) throws IOException {
      File dst = new File(outputPath, relativeFilePath);
      dst.getParentFile().mkdirs();
      copy(r, dst);
    }

    /**
     * Copies a classpath resource to a destination file. Streams are closed
     * even when the transfer fails.
     *
     * @throws IOException if the resource is missing or the transfer fails
     */
    private void copy(String src, File dst) throws IOException {
      InputStream in = getClass().getResourceAsStream(src);
      if (in == null) {
        // getResourceAsStream returns null for a missing resource; fail with a
        // descriptive IOException instead of a NullPointerException.
        throw new IOException("Resource not found on classpath: " + src);
      }
      try {
        OutputStream out = new FileOutputStream(dst);
        try {
          // Transfer bytes from in to out
          byte[] buf = new byte[1024];
          int len;
          while ((len = in.read(buf)) > 0) {
            out.write(buf, 0, len);
          }
        } finally {
          out.close();
        }
      } finally {
        in.close();
      }
    }

    /** Copies the static CSS/JS/image assets FitNesse pages reference. */
    private void initFolder(String fitnesseRoot) throws IOException {
      String base = "/fitnesse/resources/";
      String cssDir = base + "css/";
      addFile(cssDir + "fitnesse_wiki.css", "fitnesse.css");
      String javascriptDir = base + "javascript/";
      addFile(javascriptDir + "fitnesse.js", "fitnesse.js");
      String imagesDir = base + "images/";
      addFile(imagesDir + "collapsibleOpen.png", "images/collapsibleOpen.png");
      addFile(imagesDir + "collapsibleClosed.png", "images/collapsibleClosed.png");
    }
  }

  private TestSummary totalSummary = new TestSummary();

  public String getFullPath(final WikiPage wikiPage) {
    return new WikiPagePath(wikiPage).toString();
  }

  private List<String> visitedTestPages = new ArrayList<String>();
  private Map<String, TestSummary> testSummaries = new HashMap<String, TestSummary>();

  @Override
  public void newTestStarted(TestPage test, TimeMeasurement timeMeasurement) throws IOException {
    resultsRepository.open(getFullPath(test.getSourcePage()));
    if (listener != null)
      listener.newTestStarted(test, timeMeasurement);
  }

  @Override
  public void setExecutionLogAndTrackingId(String stopResponderId, CompositeExecutionLog log) {
  }

  public void testComplete(TestPage test, TestSummary testSummary, TimeMeasurement timeMeasurement)
      throws IOException {
    String fullPath = getFullPath(test.getSourcePage());
    visitedTestPages.add(fullPath);
    totalSummary.add(testSummary);
    // Store a copy so later mutation of the caller's summary can't corrupt ours.
    testSummaries.put(fullPath, new TestSummary(testSummary));
    resultsRepository.close();
    isSuite = isSuite && (!mainPageName.equals(fullPath));
    if (listener != null)
      listener.testComplete(test, testSummary, timeMeasurement);
  }

  TestSummary getTestSummary(String testPath) {
    return testSummaries.get(testPath);
  }

  @Override
  public void testOutputChunk(String output) throws IOException {
    resultsRepository.write(output);
  }

  @Override
  public void testSystemStarted(TestSystem testSystem, String testSystemName, String testRunner) {
  }

  private ResultsRepository resultsRepository;

  public TestSummary getTotalSummary() {
    return totalSummary;
  }

  public void setTotalSummary(TestSummary testSummary) {
    totalSummary = testSummary;
  }

  public void setResultsRepository(ResultsRepository mockResultsRepository) {
    this.resultsRepository = mockResultsRepository;
  }

  /** package-private to prevent instantiation apart from getInstance and tests */
  JavaFormatter(String suiteName) {
    this.mainPageName = suiteName;
  }

  private static Map<String, JavaFormatter> allocatedInstances =
      new HashMap<String, JavaFormatter>();
  private ResultsListener listener;

  public synchronized static JavaFormatter getInstance(String testName) {
    JavaFormatter existing = allocatedInstances.get(testName);
    if (existing != null)
      return existing;
    existing = new JavaFormatter(testName);
    allocatedInstances.put(testName, existing);
    return existing;
  }

  @Override
  public void allTestingComplete(TimeMeasurement totalTimeMeasurement) throws IOException {
    if (isSuite)
      writeSummary(mainPageName);
    if (listener != null)
      listener.allTestingComplete(totalTimeMeasurement);
  }

  /** Writes the HTML summary table (one row per visited test) to the repository. */
  public void writeSummary(String suiteName) throws IOException {
    resultsRepository.open(suiteName);
    resultsRepository.write(SUMMARY_HEADER);
    for (String s : visitedTestPages) {
      resultsRepository.write(summaryRow(s, testSummaries.get(s)));
    }
    resultsRepository.write(SUMMARY_FOOTER);
    resultsRepository.close();
  }

  /** Renders one summary table row linking to the test's HTML output file. */
  public String summaryRow(String testName, TestSummary testSummary) {
    // StringBuilder: no synchronization needed for this method-local buffer.
    StringBuilder sb = new StringBuilder();
    sb.append("<tr class=\"").append(getCssClass(testSummary)).append("\"><td>").append(
        "<a href=\"").append(testName).append(".html\">").append(testName).append("</a>").append(
        "</td><td>").append(testSummary.right).append("</td><td>").append(testSummary.wrong)
        .append("</td><td>").append(testSummary.exceptions).append("</td></tr>");
    return sb.toString();
  }

  /** Maps a summary to a CSS class; exceptions trump wrong counts, wrong trumps right. */
  private String getCssClass(TestSummary ts) {
    if (ts.exceptions > 0)
      return "error";
    if (ts.wrong > 0)
      return "fail";
    if (ts.right > 0)
      return "pass";
    return "plain";
  }

  public void setListener(ResultsListener listener) {
    this.listener = listener;
  }

  public List<String> getTestsExecuted() {
    return visitedTestPages;
  }

  public static void dropInstance(String testName) {
    allocatedInstances.remove(testName);
  }
}
package org.voltdb;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;

import com.google.common.collect.MapDifference;
import com.google.common.collect.Maps;
import com.google.common.collect.SortedMapDifference;
import org.apache.cassandra_voltpatches.MurmurHash3;
import org.voltcore.utils.Pair;

import com.google.common.base.Preconditions;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableSortedMap;
import com.google.common.collect.UnmodifiableIterator;
import org.voltdb.utils.CompressionService;

/**
 * A hashinator that uses Murmur3_x64_128 to hash values and a consistent hash ring
 * to pick what partition to route a particular value.
 */
public class ElasticHashinator extends TheHashinator {
    public static int DEFAULT_TOTAL_TOKENS =
        Integer.parseInt(System.getProperty("ELASTIC_TOTAL_TOKENS", "16384"));

    private static final sun.misc.Unsafe unsafe;

    // Obtain Unsafe directly or, failing a security check, via reflection on theUnsafe.
    private static sun.misc.Unsafe getUnsafe() {
        try {
            return sun.misc.Unsafe.getUnsafe();
        } catch (SecurityException se) {
            try {
                return java.security.AccessController.doPrivileged
                        (new java.security
                                .PrivilegedExceptionAction<sun.misc.Unsafe>() {
                            public sun.misc.Unsafe run() throws Exception {
                                java.lang.reflect.Field f = sun.misc
                                        .Unsafe.class.getDeclaredField("theUnsafe");
                                f.setAccessible(true);
                                return (sun.misc.Unsafe) f.get(null);
                            }});
            } catch (java.security.PrivilegedActionException e) {
                throw new RuntimeException("Could not initialize intrinsics", e.getCause());
            }
        }
    }

    static {
        sun.misc.Unsafe unsafeTemp = null;
        try {
            unsafeTemp = getUnsafe();
        } catch (Exception e) {
            e.printStackTrace();
        }
        unsafe = unsafeTemp;
    }

    /**
     * Tokens on the ring. A value hashes to a token if the token is the first value <=
     * the value's hash
     */
    private final Supplier<ImmutableSortedMap<Integer, Integer>> m_tokensMap;

    /*
     * Pointer to an array of integers containing the tokens and partitions. Even values are tokens and odd values
     * are partition ids.
     */
    private final long m_tokens;
    private final int m_tokenCount;

    private final Supplier<byte[]> m_configBytes;
    private final Supplier<byte[]> m_configBytesSupplier = Suppliers.memoize(new Supplier<byte[]>() {
        @Override
        public byte[] get() {
            return toBytes();
        }
    });
    private final Supplier<byte[]> m_cookedBytes;
    private final Supplier<byte[]> m_cookedBytesSupplier = Suppliers.memoize(new Supplier<byte[]>() {
        @Override
        public byte[] get() {
            return toCookedBytes();
        }
    });
    private final Supplier<Long> m_signature = Suppliers.memoize(new Supplier<Long>() {
        @Override
        public Long get() {
            return TheHashinator.computeConfigurationSignature(m_configBytes.get());
        }
    });

    @Override
    public int pHashToPartition(VoltType type, Object obj) {
        return hashinateBytes(valueToBytes(obj));
    }

    /**
     * Construct the hashinator from a binary description of the ring.
     * The serialization format is big-endian and the first value is the number of tokens
     * followed by the token values where each token value consists of the 8-byte position on the ring
     * and and the 4-byte partition id. All values are signed.
     * @param configBytes config data
     * @param cooked compressible wire serialization format if true
     */
    public ElasticHashinator(byte configBytes[], boolean cooked) {
        Pair<Long, Integer> p = (cooked ? updateCooked(configBytes)
                                        : updateRaw(configBytes));
        m_tokens = p.getFirst();
        m_tokenCount = p.getSecond();
        // Cache whichever representation we were handed; the other is derived lazily.
        m_configBytes = !cooked ? Suppliers.ofInstance(configBytes) : m_configBytesSupplier;
        m_cookedBytes = cooked ? Suppliers.ofInstance(configBytes) : m_cookedBytesSupplier;
        m_tokensMap =  Suppliers.memoize(new Supplier<ImmutableSortedMap<Integer, Integer>>() {

            @Override
            public ImmutableSortedMap<Integer, Integer> get() {
                ImmutableSortedMap.Builder<Integer, Integer> builder = ImmutableSortedMap.naturalOrder();
                for (int ii = 0; ii < m_tokenCount; ii++) {
                    final long ptr = m_tokens + (ii * 8);
                    final int token = unsafe.getInt(ptr);
                    final int partition = unsafe.getInt(ptr + 4);
                    builder.put(token, partition);
                }
                return builder.build();
            }
        });
    }

    /**
     * Private constructor to initialize a hashinator with known tokens. Used for adding/removing
     * partitions from existing hashinator.
     * @param tokens
     */
    private ElasticHashinator(SortedMap<Integer, Integer> tokens) {
        m_tokensMap = Suppliers.ofInstance(ImmutableSortedMap.copyOf(tokens));
        // The ring must cover the whole hash space, so the first token is Integer.MIN_VALUE.
        Preconditions.checkArgument(m_tokensMap.get().firstEntry().getKey().equals(Integer.MIN_VALUE));
        m_tokens = unsafe.allocateMemory(8 * tokens.size());
        int ii = 0;
        for (Map.Entry<Integer, Integer> e : tokens.entrySet()) {
            final long ptr = m_tokens + (ii * 8);
            unsafe.putInt(ptr, e.getKey());
            unsafe.putInt(ptr + 4, e.getValue());
            ii++;
        }
        m_tokenCount = tokens.size();
        m_configBytes = m_configBytesSupplier;
        m_cookedBytes = m_cookedBytesSupplier;
    }

    /**
     * Build the config bytes of a hashinator with the given number of partitions added to
     * an existing hashinator's ring. The original hashinator is not modified.
     */
    public static byte[] addPartitions(TheHashinator oldHashinator,
                                       int partitionsToAdd) {
        Preconditions.checkArgument(oldHashinator instanceof ElasticHashinator);
        ElasticHashinator oldElasticHashinator = (ElasticHashinator) oldHashinator;
        Buckets buckets = new Buckets(oldElasticHashinator.m_tokensMap.get());
        buckets.addPartitions(partitionsToAdd);
        return new ElasticHashinator(buckets.getTokens()).getConfigBytes();
    }

    /**
     * Convenience method for generating a deterministic token distribution for the ring based
     * on a given partition count and tokens per partition. Each partition will have N tokens
     * placed randomly on the ring.
     */
    public static byte[] getConfigureBytes(int partitionCount, int tokenCount) {
        Preconditions.checkArgument(partitionCount > 0);
        Preconditions.checkArgument(tokenCount > partitionCount);
        Buckets buckets = new Buckets(partitionCount, tokenCount);
        ElasticHashinator hashinator = new ElasticHashinator(buckets.getTokens());
        return hashinator.getConfigBytes();
    }

    /**
     * Serializes the configuration into bytes, also updates the currently cached m_configBytes.
     * @return The byte[] of the current configuration.
     */
    private byte[] toBytes() {
        ByteBuffer buf = ByteBuffer.allocate(4 + (m_tokenCount * 8));
        buf.putInt(m_tokenCount);

        int lastToken = Integer.MIN_VALUE;
        for (int ii = 0; ii < m_tokenCount; ii++) {
            final long ptr = m_tokens + (ii * 8);
            final int token = unsafe.getInt(ptr);
            // Tokens must be serialized in ascending order.
            Preconditions.checkArgument(token >= lastToken);
            lastToken = token;
            final int pid = unsafe.getInt(ptr + 4);
            buf.putInt(token);
            buf.putInt(pid);
        }
        return buf.array();
    }

    /**
     * For a given a value hash, find the token that corresponds to it. This will
     * be the first token <= the value hash, or if the value hash is < the first token in the ring,
     * it wraps around to the last token in the ring closest to Long.MAX_VALUE
     */
    public int partitionForToken(int hash) {
        long token = getTokenPtr(hash);
        return unsafe.getInt(token + 4);
    }

    /**
     * Get all the tokens on the ring.
     */
    public ImmutableSortedMap<Integer, Integer> getTokens() {
        return m_tokensMap.get();
    }

    /**
     * Add the given tokens to the ring and generate the new hashinator. The current hashinator is not changed.
     * @param tokensToAdd Tokens to add as a map of tokens to partitions
     * @return The new hashinator
     */
    public ElasticHashinator addTokens(Map<Integer, Integer> tokensToAdd) {
        ImmutableSortedMap.Builder<Integer, Integer> b = ImmutableSortedMap.naturalOrder();
        for (Map.Entry<Integer, Integer> e : m_tokensMap.get().entrySet()) {
            // An added token overrides any existing assignment at the same position.
            if (tokensToAdd.containsKey(e.getKey())) {
                continue;
            }
            b.put(e.getKey(), e.getValue());
        }
        b.putAll(tokensToAdd);

        return new ElasticHashinator(b.build());
    }

    @Override
    public int pHashinateLong(long value) {
        if (value == Long.MIN_VALUE) return 0;

        return partitionForToken(MurmurHash3.hash3_x64_128(value));
    }

    @Override
    public int pHashinateBytes(byte[] bytes) {
        ByteBuffer buf = ByteBuffer.wrap(bytes);
        final int token = MurmurHash3.hash3_x64_128(buf, 0, bytes.length, 0);
        return partitionForToken(token);
    }

    @Override
    protected HashinatorConfig pGetCurrentConfig() {
        return new HashinatorConfig(HashinatorType.ELASTIC, m_configBytes.get(), m_tokens, m_tokenCount) {
            //Store a reference to this hashinator in the config so it doesn't get GCed and release
            //the pointer to the config data that is off heap
            private final ElasticHashinator myHashinator = ElasticHashinator.this;
        };
    }

    /**
     * Find the predecessors of the given partition on the ring. This method runs in linear time,
     * use with caution when the set of partitions is large.
     * @param partition
     * @return The map of tokens to partitions that are the predecessors of the given partition.
     * If the given partition doesn't exist or it's the only partition on the ring, the
     * map will be empty.
     */
    @Override
    public Map<Integer, Integer> pPredecessors(int partition) {
        Map<Integer, Integer> predecessors = new TreeMap<Integer, Integer>();
        UnmodifiableIterator<Map.Entry<Integer,Integer>> iter = m_tokensMap.get().entrySet().iterator();
        Set<Integer> pTokens = new HashSet<Integer>();
        while (iter.hasNext()) {
            Map.Entry<Integer, Integer> next = iter.next();
            if (next.getValue() == partition) {
                pTokens.add(next.getKey());
            }
        }

        for (Integer token : pTokens) {
            Map.Entry<Integer, Integer> predecessor = null;
            if (token != null) {
                predecessor = m_tokensMap.get().headMap(token).lastEntry();
                // If null, it means partition is the first one on the ring, so predecessor
                // should be the last entry on the ring because it wraps around.
                if (predecessor == null) {
                    predecessor = m_tokensMap.get().lastEntry();
                }
            }

            if (predecessor != null && predecessor.getValue() != partition) {
                predecessors.put(predecessor.getKey(), predecessor.getValue());
            }
        }

        return predecessors;
    }

    /**
     * Find the predecessor of the given token on the ring.
     * @param partition The partition that maps to the given token
     * @param token The token on the ring
     * @return The predecessor of the given token.
     */
    @Override
    public Pair<Integer, Integer> pPredecessor(int partition, int token) {
        Integer partForToken = m_tokensMap.get().get(token);
        if (partForToken != null && partForToken == partition) {
            Map.Entry<Integer, Integer> predecessor = m_tokensMap.get().headMap(token).lastEntry();

            if (predecessor == null) {
                // Wrap around: the predecessor of the first token is the last token.
                predecessor = m_tokensMap.get().lastEntry();
            }

            if (predecessor.getKey() != token) {
                return Pair.of(predecessor.getKey(), predecessor.getValue());
            } else {
                // given token is the only one on the ring, umpossible
                throw new RuntimeException("There is only one token on the hash ring");
            }
        } else {
            // given token doesn't map to partition
            throw new IllegalArgumentException("The given token " + token +
                                                   " does not map to partition " + partition);
        }
    }

    /**
     * This runs in linear time with respect to the number of tokens on the ring.
     */
    @Override
    public Map<Integer, Integer> pGetRanges(int partition) {
        Map<Integer, Integer> ranges = new TreeMap<Integer, Integer>();
        Integer first = null; // start of the very first token on the ring
        Integer start = null; // start of a range
        UnmodifiableIterator<Map.Entry<Integer,Integer>> iter = m_tokensMap.get().entrySet().iterator();

        // Iterate through the token map to find the ranges assigned to
        // the given partition
        while (iter.hasNext()) {
            Map.Entry<Integer, Integer> next = iter.next();
            int token = next.getKey();
            int pid = next.getValue();

            if (first == null) {
                first = token;
            }

            // if start is not null, there's an open range, now is
            // the time to close it.
            // else there is no open range, keep on going.
            if (start != null) {
                //Range end is inclusive so do token - 1
                ranges.put(start, token - 1);
                start = null;
            }

            if (pid == partition) {
                // if start is null, there's no open range, start one.
                start = token;
            }
        }

        // if there is an open range when we get here
        // It is the last token which implicity ends at the next max value
        if (start != null) {
            assert first != null;
            ranges.put(start, Integer.MAX_VALUE);
        }

        return ranges;
    }

    /**
     * Returns the configuration signature
     */
    @Override
    public long pGetConfigurationSignature() {
        return m_signature.get();
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append(" Token ").append("   Partition\n");
        for (Map.Entry<Integer, Integer> entry : m_tokensMap.get().entrySet()) {
            sb.append(String.format("[%11d => %9d]\n", entry.getKey(), entry.getValue()));
        }
        return sb.toString();
    }

    /**
     * Returns raw config bytes.
     * @return config bytes
     */
    @Override
    public byte[] getConfigBytes() {
        return m_configBytes.get();
    }

    /**
     * Returns compressed config bytes.
     * @return config bytes
     * @throws IOException
     */
    private byte[] toCookedBytes() {
        // Allocate for a int pair per token/partition ID entry, plus a size.
        ByteBuffer buf = ByteBuffer.allocate(4 + (m_tokenCount * 8));

        buf.putInt(m_tokenCount);

        // Keep tokens and partition ids separate to aid compression.
        // Tokens are emitted one byte-plane at a time, most significant plane first.
        for (int zz = 3; zz >= 0; zz--) {
            int lastToken = Integer.MIN_VALUE;
            for (int ii = 0; ii < m_tokenCount; ii++) {
                int token = unsafe.getInt(m_tokens + (ii * 8));
                Preconditions.checkArgument(token >= lastToken);
                lastToken = token;
                token = token >>> (zz * 8);
                token = token & 0xFF;
                buf.put((byte)token);
            }
        }
        for (int ii = 0; ii < m_tokenCount; ii++) {
            buf.putInt(unsafe.getInt(m_tokens + (ii * 8) + 4));
        }

        try {
            return CompressionService.gzipBytes(buf.array());
        } catch (IOException e) {
            throw new RuntimeException("Failed to compress bytes", e);
        }
    }

    /**
     * Update from raw config bytes.
     *      token-1/partition-1
     *      token-2/partition-2
     *      ...
     *      tokens are 8 bytes
     * @param configBytes  raw config data
     * @return  token/partition map
     */
    private Pair<Long, Integer> updateRaw(byte configBytes[]) {
        ByteBuffer buf = ByteBuffer.wrap(configBytes);
        int numEntries = buf.getInt();
        if (numEntries < 0) {
            throw new RuntimeException("Bad elastic hashinator config");
        }
        long tokens = unsafe.allocateMemory(8 * numEntries);
        int lastToken = Integer.MIN_VALUE;
        for (int ii = 0; ii < numEntries; ii++) {
            long ptr = tokens + (ii * 8);
            final int token = buf.getInt();
            // Serialized tokens must be in ascending order.
            Preconditions.checkArgument(token >= lastToken);
            lastToken = token;
            unsafe.putInt(ptr, token);
            final int partitionId = buf.getInt();
            unsafe.putInt(ptr + 4, partitionId);
        }
        return Pair.of(tokens, numEntries);
    }

    // Binary search for the first token <= hash; when hash precedes the first
    // token the search falls off the left edge and (min - 1) wraps to the
    // last visited slot below it.
    private long getTokenPtr(int hash) {
        int min = 0;
        int max = m_tokenCount - 1;

        while (min <= max) {
            int mid = (min + max) >>> 1;
            final long midPtr = m_tokens + (8 * mid);
            int midval = unsafe.getInt(midPtr);

            if (midval < hash) {
                min = mid + 1;
            } else if (midval > hash) {
                max = mid - 1;
            } else {
                return midPtr;
            }
        }
        return m_tokens + (min - 1) * 8;
    }

    /**
     * Update from optimized (cooked) wire format.
     *      token-1 token-2 ...
     *      partition-1 partition-2 ...
     *      tokens are 4 bytes
     * @param compressedData  optimized and compressed config data
     * @return  token/partition map
     */
    private Pair<Long, Integer> updateCooked(byte[] compressedData) {
        // Uncompress (inflate) the bytes.
        byte[] cookedBytes;
        try {
            cookedBytes = CompressionService.gunzipBytes(compressedData);
        } catch (IOException e) {
            throw new RuntimeException("Unable to decompress elastic hashinator data.");
        }

        int numEntries = (cookedBytes.length >= 4
                                ? ByteBuffer.wrap(cookedBytes).getInt()
                                : 0);
        int tokensSize = 4 * numEntries;
        int partitionsSize = 4 * numEntries;
        if (numEntries <= 0 || cookedBytes.length != 4 + tokensSize + partitionsSize) {
            throw new RuntimeException("Bad elastic hashinator cooked config size.");
        }
        long tokens = unsafe.allocateMemory(8 * numEntries);
        ByteBuffer tokenBuf = ByteBuffer.wrap(cookedBytes, 4, tokensSize);
        ByteBuffer partitionBuf = ByteBuffer.wrap(cookedBytes, 4 + tokensSize, partitionsSize);
        int tokensArray[] = new int[numEntries];
        // Reassemble tokens from byte planes, most significant plane first
        // (mirrors the layout written by toCookedBytes()).
        for (int zz = 3; zz >= 0; zz--) {
            for (int ii = 0; ii < numEntries; ii++) {
                int value = tokenBuf.get();
                value = (value << (zz * 8)) & (0xFF << (zz * 8));
                tokensArray[ii] = (tokensArray[ii] | value);
            }
        }

        int lastToken = Integer.MIN_VALUE;
        for (int ii = 0; ii < numEntries; ii++) {
            int token = tokensArray[ii];
            Preconditions.checkArgument(token >= lastToken);
            lastToken = token;
            long ptr = tokens + (ii * 8);
            unsafe.putInt(ptr, token);
            final int partitionId = partitionBuf.getInt();
            unsafe.putInt(ptr + 4, partitionId);
        }
        return Pair.of(tokens, numEntries);
    }

    /**
     * Return (cooked) bytes optimized for serialization.
     * @return optimized config bytes
     */
    @Override
    public byte[] getCookedBytes() {
        return m_cookedBytes.get();
    }

    @Override
    public HashinatorType getConfigurationType() {
        return TheHashinator.HashinatorType.ELASTIC;
    }

    @Override
    public void finalize() {
        // Release the off-heap token array allocated in the constructor.
        unsafe.freeMemory(m_tokens);
    }

    /**
     * Checks if the current hashinator and the given one are equal to each other. One hashinator may have more
     * tokens than the other, but as long as all possible tokens hash to the same partitions,
     * they are considered equal.
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        ElasticHashinator that = (ElasticHashinator) o;

        // Fast path: identical serialized configs have identical signatures.
        if (m_signature.get().equals(that.m_signature.get())) return true;

        SortedMapDifference<Integer,Integer> diff = Maps.difference(m_tokensMap.get(), that.m_tokensMap.get());
        if (!diff.entriesDiffering().isEmpty()) {
            System.err.println("Differ: " + diff.entriesDiffering().toString());
            return false;
        }

        // Tokens only present on one side are fine as long as they hash to the
        // same partition on the other side.
        for (Map.Entry<Integer, Integer> e : diff.entriesOnlyOnLeft().entrySet()) {
            if (that.partitionForToken(e.getKey()) != e.getValue()) {
                System.err.println(e.getValue() + " not the same as " + that.partitionForToken(e.getKey()) +
                " for token " + e.getKey());
                return false;
            }
        }

        for (Map.Entry<Integer, Integer> e : diff.entriesOnlyOnRight().entrySet()) {
            if (partitionForToken(e.getKey()) != e.getValue()) {
                System.err.println(e.getValue() + " not the same as " + partitionForToken(e.getKey()) +
                        " for token " + e.getKey());
                return false;
            }
        }

        return true;
    }
}
package org.voltdb;

import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.TreeMap;

import com.google.common.collect.UnmodifiableIterator;

import org.apache.cassandra_voltpatches.MurmurHash3;
import org.voltcore.utils.Pair;

import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableSortedMap;

/**
 * A hashinator that uses Murmur3_x64_128 to hash values and a consistent hash ring
 * to pick what partition to route a particular value.
 */
public class ElasticHashinator extends TheHashinator {

    // Number of ring positions each partition gets by default; overridable via
    // the ELASTIC_TOKENS_PER_PARTITION system property.
    public static int DEFAULT_TOKENS_PER_PARTITION =
        Integer.parseInt(System.getProperty("ELASTIC_TOKENS_PER_PARTITION", "256"));

    /**
     * Tokens on the ring. A value hashes to a token if the token is the first value <=
     * the value's hash
     */
    private final ImmutableSortedMap<Long, Integer> tokens;

    // Cached serialized form of the ring (see toBytes() for the wire format).
    private final byte m_configBytes[];

    /**
     * Initialize the hashinator from a binary description of the ring.
     * The serialization format is big-endian and the first value is the number of tokens
     * followed by the token values where each token value consists of the 8-byte position on the ring
     * and and the 4-byte partition id. All values are signed.
     */
    public ElasticHashinator(byte configureBytes[]) {
        // Defensive copy: the caller's array must not alias our cached config.
        m_configBytes = Arrays.copyOf(configureBytes, configureBytes.length);
        ByteBuffer buf = ByteBuffer.wrap(configureBytes);
        int numEntries = buf.getInt();
        TreeMap<Long, Integer> buildMap = new TreeMap<Long, Integer>();
        for (int ii = 0; ii < numEntries; ii++) {
            final long token = buf.getLong();
            final int partitionId = buf.getInt();
            // Duplicate ring positions would make routing ambiguous; fail fast.
            if (buildMap.containsKey(token)) {
                throw new RuntimeException(
                        "Duplicate token " + token + " partition " + partitionId + " and " + buildMap.get(token));
            }
            buildMap.put(token, partitionId);
        }
        ImmutableSortedMap.Builder<Long, Integer> builder = ImmutableSortedMap.naturalOrder();
        for (Map.Entry<Long, Integer> e : buildMap.entrySet()) {
            builder.put(e.getKey(), e.getValue());
        }
        tokens = builder.build();
    }

    /**
     * Private constructor to initialize a hashinator with known tokens. Used for adding/removing
     * partitions from existing hashinator.
     * @param tokens map of ring position -> partition id
     */
    private ElasticHashinator(Map<Long, Integer> tokens) {
        this.tokens = ImmutableSortedMap.copyOf(tokens);
        m_configBytes = toBytes();
    }

    /**
     * Given an existing elastic hashinator, add a set of new partitions to the existing hash ring.
     * @param oldHashinator An elastic hashinator
     * @param newPartitions A set of new partitions to add
     * @param tokensPerPartition The number of times a partition appears on the ring
     * @return The config bytes of the new hash ring
     */
    public static byte[] addPartitions(TheHashinator oldHashinator,
                                       Collection<Integer> newPartitions,
                                       int tokensPerPartition) {
        Preconditions.checkArgument(oldHashinator instanceof ElasticHashinator);
        ElasticHashinator oldElasticHashinator = (ElasticHashinator) oldHashinator;
        // Fixed seed: token placement is deterministic for a given add sequence.
        Random r = new Random(0);
        Map<Long, Integer> newConfig = new HashMap<Long, Integer>(oldElasticHashinator.tokens);
        Set<Integer> existingPartitions = new HashSet<Integer>(oldElasticHashinator.tokens.values());
        Set<Long> checkSet = new HashSet<Long>(oldElasticHashinator.tokens.keySet());

        for (int pid : newPartitions) {
            if (existingPartitions.contains(pid)) {
                throw new RuntimeException("Partition " + pid + " already exists in the " +
                        "hashinator");
            }
            for (int i = 0; i < tokensPerPartition; i++) {
                // Retry until a candidate token doesn't collide with any existing one.
                while (true) {
                    long candidateToken = MurmurHash3.hash3_x64_128(r.nextLong());
                    if (!checkSet.add(candidateToken)) {
                        continue;
                    }
                    newConfig.put(candidateToken, pid);
                    break;
                }
            }
        }
        return new ElasticHashinator(newConfig).toBytes();
    }

    /**
     * Given an existing elastic hashinator, add a set of new partitions to the existing hash ring
     * with calculated ranges.
     * @param oldHashinator An elastic hashinator
     * @param tokensToPartitions New tokens and the new partitions they map to
     * @return The config bytes of the new hash ring
     */
    public static byte[] addPartitions(TheHashinator oldHashinator,
                                       Map<Long, Integer> tokensToPartitions) {
        Preconditions.checkArgument(oldHashinator instanceof ElasticHashinator);
        ElasticHashinator oldElasticHashinator = (ElasticHashinator) oldHashinator;
        Map<Long, Integer> newConfig = new HashMap<Long, Integer>(oldElasticHashinator.tokens);
        Set<Integer> existingPartitions = new HashSet<Integer>(oldElasticHashinator.tokens.values());

        for (Map.Entry<Long, Integer> entry : tokensToPartitions.entrySet()) {
            long token = entry.getKey();
            int pid = entry.getValue();

            if (existingPartitions.contains(pid)) {
                throw new RuntimeException("Partition " + pid + " already exists in the " +
                        "hashinator");
            }
            // A caller-supplied token must not displace an existing ring position.
            Integer oldPartition = newConfig.put(token, pid);
            if (oldPartition != null) {
                throw new RuntimeException("Token " + token + " used to map to partition " +
                        oldPartition + " but now maps to " + pid);
            }
        }
        return new ElasticHashinator(newConfig).toBytes();
    }

    /**
     * Convenience method for generating a deterministic token distribution for the ring based
     * on a given partition count and tokens per partition. Each partition will have N tokens
     * placed randomly on the ring.
     */
    public static byte[] getConfigureBytes(int partitionCount, int tokensPerPartition) {
        Preconditions.checkArgument(partitionCount > 0);
        Preconditions.checkArgument(tokensPerPartition > 0);
        ElasticHashinator emptyHashinator = new ElasticHashinator(new HashMap<Long, Integer>());
        Set<Integer> partitions = new HashSet<Integer>();

        for (int ii = 0; ii < partitionCount; ii++) {
            partitions.add(ii);
        }
        return addPartitions(emptyHashinator, partitions, tokensPerPartition);
    }

    /**
     * Serializes the configuration into bytes, also updates the currently cached m_configBytes.
     * @return The byte[] of the current configuration.
     */
    private byte[] toBytes() {
        ByteBuffer buf = ByteBuffer.allocate(4 + (tokens.size() * 12));//long and an int per
        buf.putInt(tokens.size());

        for (Map.Entry<Long, Integer> e : tokens.entrySet()) {
            long token = e.getKey();
            int pid = e.getValue();
            buf.putLong(token);
            buf.putInt(pid);
        }

        return buf.array();
    }

    /**
     * For a given a value hash, find the token that corresponds to it. This will
     * be the first token <= the value hash, or if the value hash is < the first token in the ring,
     * it wraps around to the last token in the ring closest to Long.MAX_VALUE
     */
    int partitionForToken(long hash) {
        Map.Entry<Long, Integer> entry = tokens.floorEntry(hash);
        //System.out.println("Finding partition for token " + token);
        /*
         * Because the tokens are randomly distributed it is likely there is a range
         * near Long.MIN_VALUE that isn't covered by a token. Conceptually this is a ring
         * so the correct token is the one near Long.MAX_VALUE.
         */
        if (entry != null) {
            //System.out.println("Floor token was " + entry.getKey());
            return entry.getValue();
        } else {
            //System.out.println("Last entry token " + tokens.lastEntry().getKey());
            return tokens.lastEntry().getValue();
        }
    }

    @Override
    protected int pHashinateLong(long value) {
        // Long.MIN_VALUE is routed to partition 0 by convention (it represents NULL
        // elsewhere in the system — NOTE(review): inferred, confirm against TheHashinator).
        if (value == Long.MIN_VALUE) return 0;

        return partitionForToken(MurmurHash3.hash3_x64_128(value));
    }

    @Override
    protected int pHashinateBytes(byte[] bytes) {
        ByteBuffer buf = ByteBuffer.wrap(bytes);
        final long token = MurmurHash3.hash3_x64_128(buf, 0, bytes.length, 0);
        return partitionForToken(token);
    }

    @Override
    protected Pair<HashinatorType, byte[]> pGetCurrentConfig() {
        return Pair.of(HashinatorType.ELASTIC, m_configBytes);
    }

    /**
     * Find the predecessors of the given partition on the ring. This method runs in linear time,
     * use with caution when the set of partitions is large.
     * @param partition
     * @return The map of tokens to partitions that are the predecessors of the given partition.
     * If the given partition doesn't exist or it's the only partition on the ring, the
     * map will be empty.
     */
    @Override
    protected Map<Long, Integer> pPredecessors(int partition) {
        Map<Long, Integer> predecessors = new TreeMap<Long, Integer>();
        UnmodifiableIterator<Map.Entry<Long,Integer>> iter = tokens.entrySet().iterator();
        // First collect every token owned by the requested partition.
        Set<Long> pTokens = new HashSet<Long>();
        while (iter.hasNext()) {
            Map.Entry<Long, Integer> next = iter.next();
            if (next.getValue() == partition) {
                pTokens.add(next.getKey());
            }
        }

        for (Long token : pTokens) {
            Map.Entry<Long, Integer> predecessor = null;
            if (token != null) {
                predecessor = tokens.headMap(token).lastEntry();
                // If null, it means partition is the first one on the ring, so predecessor
                // should be the last entry on the ring because it wraps around.
                if (predecessor == null) {
                    predecessor = tokens.lastEntry();
                }
            }

            // Self-owned neighbors are not predecessors; only record other partitions.
            if (predecessor != null && predecessor.getValue() != partition) {
                predecessors.put(predecessor.getKey(), predecessor.getValue());
            }
        }

        return predecessors;
    }

    /**
     * Find the predecessor of the given token on the ring.
     * @param partition The partition that maps to the given token
     * @param token The token on the ring
     * @return The predecessor of the given token.
     */
    @Override
    protected Pair<Long, Integer> pPredecessor(int partition, long token) {
        Integer partForToken = tokens.get(token);
        if (partForToken != null && partForToken == partition) {
            Map.Entry<Long, Integer> predecessor = tokens.headMap(token).lastEntry();

            // Wrap around to the ring's last entry when token is the lowest position.
            if (predecessor == null) {
                predecessor = tokens.lastEntry();
            }

            if (predecessor.getKey() != token) {
                return Pair.of(predecessor.getKey(), predecessor.getValue());
            } else {
                // given token is the only one on the ring, impossible in practice
                throw new RuntimeException("There is only one token on the hash ring");
            }
        } else {
            // given token doesn't map to partition
            throw new IllegalArgumentException("The given token " + token +
                    " does not map to partition " + partition);
        }
    }

    /**
     * This runs in linear time with respect to the number of tokens on the ring.
     */
    @Override
    protected Map<Long, Long> pGetRanges(int partition) {
        Map<Long, Long> ranges = new TreeMap<Long, Long>();
        Long first = null; // start of the very first token on the ring
        Long start = null; // start of a range
        UnmodifiableIterator<Map.Entry<Long,Integer>> iter = tokens.entrySet().iterator();

        // Iterate through the token map to find the ranges assigned to
        // the given partition
        while (iter.hasNext()) {
            Map.Entry<Long, Integer> next = iter.next();
            long token = next.getKey();
            int pid = next.getValue();

            if (first == null) {
                first = token;
            }

            // if start is not null, there's an open range, now is
            // the time to close it.
            // else there is no open range, keep on going.
            if (start != null) {
                ranges.put(start, token);
                start = null;
            }

            if (pid == partition) {
                // if start is null, there's no open range, start one.
                start = token;
            }
        }

        // if there is an open range when we get here, it means that
        // the last token on the ring belongs to the partition, and
        // it wraps around the origin of the ring, so close the range
        // with the very first token on the ring.
        if (start != null) {
            assert first != null;
            ranges.put(start, first);
        }

        return ranges;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append(" Token ").append(" Partition\n");
        for (Map.Entry<Long, Integer> entry : tokens.entrySet()) {
            sb.append(String.format("[%20d => %8d]\n", entry.getKey(), entry.getValue()));
        }
        return sb.toString();
    }
}
package info.ata4.unity.cli.utils;

import info.ata4.unity.asset.AssetFile;
import info.ata4.unity.asset.struct.AssetFieldType;
import info.ata4.unity.asset.struct.AssetObjectPath;
import info.ata4.unity.asset.struct.AssetTypeTree;
import info.ata4.unity.cli.classfilter.ClassFilter;
import info.ata4.unity.serdes.Deserializer;
import info.ata4.unity.serdes.UnityBuffer;
import info.ata4.unity.serdes.UnityField;
import info.ata4.unity.serdes.UnityList;
import info.ata4.unity.serdes.UnityObject;
import info.ata4.unity.util.ClassID;
import java.io.PrintStream;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.xml.bind.DatatypeConverter;

/**
 * Dumps deserialized asset objects and their type structures as indented text.
 *
 * @author Nico Bergemann <barracuda415 at yahoo.de>
 */
public class AssetDumper {

    private static final Logger L = Logger.getLogger(AssetDumper.class.getName());
    private static final String INDENT_STRING = " ";

    private PrintStream ps;
    private ClassFilter cf;
    // Current nesting depth; each level adds one INDENT_STRING prefix.
    private int indentLevel;

    public AssetDumper(PrintStream ps) {
        this.ps = ps;
    }

    public PrintStream getPrintStream() {
        return ps;
    }

    public void setPrintStream(PrintStream ps) {
        this.ps = ps;
    }

    public ClassFilter getClassFilter() {
        return cf;
    }

    public void setClassFilter(ClassFilter cf) {
        this.cf = cf;
    }

    /**
     * Deserializes every object in the asset and prints it.
     * Objects with a negative classID1 or rejected by the class filter are skipped.
     */
    public void printData(AssetFile asset) {
        Deserializer deser = new Deserializer(asset);

        for (AssetObjectPath path : asset.getObjectPaths()) {
            try {
                if (path.classID1 < 0) {
                    continue;
                }

                // skip filtered classes
                if (cf != null && !cf.accept(path)) {
                    continue;
                }

                printObject(deser.deserialize(path));
            } catch (Exception ex) {
                L.log(Level.SEVERE, "Deserialization failed for " + path.pathID
                        + " (" + ClassID.getNameForID(path.classID2) + ")", ex);
                // NOTE(review): one failure aborts the remaining objects — confirm
                // this is intentional rather than a per-object best-effort dump.
                break;
            }
        }
    }

    /**
     * Prints the type tree of every (unfiltered) class in the asset.
     */
    public void printStruct(AssetFile asset) {
        AssetTypeTree typeTree = asset.getTypeTree();

        if (typeTree.isStandalone()) {
            L.info("No type tree available");
            return;
        }

        Set<Integer> classIDs = asset.getClassIDs();

        for (Integer classID : classIDs) {
            AssetFieldType classField = typeTree.get(classID);

            // skip filtered classes
            if (cf != null && !cf.accept(classID)) {
                continue;
            }

            if (classField == null) {
                continue;
            }

            printType(classField);
        }
    }

    /**
     * Recursively prints an object: its type, its name (unless "Base"), then
     * every field one indent level deeper.
     */
    public void printObject(UnityObject obj) {
        ps.print(obj.getType());

        if (!obj.getName().equals("Base")) {
            ps.print(" ");
            ps.println(obj.getName());
        } else {
            ps.println();
        }

        indentLevel++;

        for (UnityField field : obj.getFields()) {
            printIndent();

            Object value = field.getValue();
            if (value instanceof UnityObject) {
                printObject((UnityObject) value);
            } else {
                printField(field);
            }
        }

        // BUG FIX: the decrement was truncated to a bare "indentLevel" in the
        // original, so the nesting depth grew without bound.
        indentLevel--;
    }

    /**
     * Prints a single field as "type name = value"; names containing spaces are
     * quoted. (The printf formats pass value as an extra argument, which Java's
     * Formatter ignores — the value is actually rendered by printValue.)
     */
    public void printField(UnityField field) {
        String name = field.getName();
        String type = field.getType();
        Object value = field.getValue();

        if (name.contains(" ")) {
            ps.printf("%s \"%s\" = ", type, name, value);
        } else {
            ps.printf("%s %s = ", type, name, value);
        }

        indentLevel++;
        printValue(value);
        // BUG FIX: restored truncated "indentLevel--;".
        indentLevel--;
    }

    // Renders a value according to its runtime type: nested object, list,
    // raw byte buffer, quoted string, or plain toString().
    private void printValue(Object value) {
        if (value instanceof UnityObject) {
            printObject((UnityObject) value);
        } else if (value instanceof UnityList) {
            UnityList array = (UnityList) value;
            List<Object> list = array.getList();
            ps.printf("%s[%d]\n", array.getType(), list.size());
            for (Object value2 : list) {
                printIndent();
                printValue(value2);
            }
        } else if (value instanceof UnityBuffer) {
            ByteBuffer bb = ((UnityBuffer) value).getBuffer();
            ps.printf("byte[%d]\n", bb.capacity());
            printBytes(bb);
        } else if (value instanceof String) {
            ps.printf("\"%s\"\n", value);
        } else {
            ps.println(value);
        }
    }

    // Recursively prints a type-tree node and its children.
    private void printType(AssetFieldType field) {
        String name = field.name;
        String type = field.type;

        printIndent();

        ps.print(type);

        if (!name.equals("Base")) {
            ps.print(" ");
            ps.println(name);
        } else {
            ps.println();
        }

        indentLevel++;

        for (AssetFieldType subField : field) {
            printType(subField);
        }

        // BUG FIX: restored truncated "indentLevel--;".
        indentLevel--;
    }

    // Prints the buffer as hex, up to 256 bytes per line.
    private void printBytes(ByteBuffer bb) {
        ByteBuffer bb2 = bb.duplicate();
        bb2.rewind();

        while (bb2.hasRemaining()) {
            // BUG FIX: the original reused a fixed 256-byte scratch array and
            // always hex-dumped the whole array, so the final (short) chunk was
            // padded with stale bytes from the previous iteration. Size the
            // chunk exactly instead.
            int len = Math.min(bb2.remaining(), 256);
            byte[] block = new byte[len];
            bb2.get(block, 0, len);
            printIndent();
            ps.println(DatatypeConverter.printHexBinary(block));
        }
    }

    private void printIndent() {
        for (int i = 0; i < indentLevel; i++) {
            ps.print(INDENT_STRING);
        }
    }
}
package io.miti.beetle.exporters;

import io.miti.beetle.util.FakeNode;
import io.miti.beetle.util.FakeSpecParser;
import io.miti.beetle.util.FakeType;
import io.miti.beetle.util.Logger;
import io.miti.beetle.util.NodeInfo;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.sql.Types;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;

/**
 * Base class for exporters that stream database rows (or fake-data specs) to a
 * text file. Subclasses supply the header/footer/row formatting; this class
 * owns the output buffer, column metadata, and flush-to-disk logic.
 */
public abstract class DBFileWriter {

  /** Accumulates output until writeString() flushes it to disk. */
  protected StringBuilder sb = null;
  protected String filename = null;
  protected String fileData = null;
  protected File file = null;

  // Shared formatter for toGMTDate(). SimpleDateFormat is NOT thread-safe, so
  // all access is synchronized on this instance (see toGMTDate).
  private static final SimpleDateFormat sdf =
      new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ");

  // Save info about the result set - column names and types
  protected List<NodeInfo> nodes = null;

  protected static final String EOL = "\r\n";

  @SuppressWarnings("unused")
  private DBFileWriter() {
    super();
  }

  /**
   * Constructor taking the output filename and result set.
   *
   * @param sFilename the output filename
   * @param sFileData optional file-level data passed through to subclasses
   * @param pRSMD the database result set metadata (column names, etc.)
   */
  public DBFileWriter(final String sFilename, final String sFileData,
      final ResultSetMetaData pRSMD) {
    // Initialize this class
    init(sFilename);
    fileData = sFileData;

    // Save just the info we need - column names and types - into an array
    initializeNodeList(pRSMD);
  }

  /**
   * Constructor taking the output filename and result set.
   *
   * @param sFilename the output filename
   * @param sFileData optional file-level data passed through to subclasses
   * @param specData the parsed fake-data spec
   */
  public DBFileWriter(final String sFilename, final String sFileData,
      final FakeSpecParser specData) {
    // Initialize this class
    init(sFilename);
    fileData = sFileData;

    // Save just the info we need - column names and types - into an array
    initializeNodeList(specData);
  }

  /**
   * Simple constructor to handle just having a filename.
   *
   * @param sFilename the name of the output file
   */
  public DBFileWriter(final String sFilename) {
    init(sFilename);
    fileData = null;
    nodes = null;
  }

  /**
   * Shared constructor logic: set up the buffer and delete any existing
   * output file so writes start from an empty file.
   *
   * @param sFilename the name of the output file
   */
  public void init(final String sFilename) {
    sb = new StringBuilder(100);
    file = new File(sFilename);
    filename = sFilename;

    // Check the output file - if it exists as a file, empty it
    if (file.exists() && file.isFile()) {
      try {
        // BUG FIX: the delete() result was silently ignored; a failed delete
        // means stale data would be appended to, so report it.
        if (!file.delete()) {
          Logger.error("Unable to delete existing file: " + sFilename);
        }
      } catch (Exception ex) {
        Logger.error("Exception deleting file: " + ex.getMessage());
      }
    }
  }

  /**
   * Builds the column metadata list from a parsed fake-data spec.
   *
   * @param specData the parsed fake-data spec
   */
  public void initializeNodeList(final FakeSpecParser specData) {
    resetNodes();

    // Save the column names and classes into NodeList
    final List<FakeNode> fakeNodes = specData.getNodes();
    nodes = new ArrayList<NodeInfo>();
    for (FakeNode fake : fakeNodes) {
      nodes.add(new NodeInfo(fake.getName(), fake.getClazz()));
    }
  }

  /** Appends a string to the output buffer (no flush). */
  public void addString(final String str) {
    sb.append(str);
  }

  /** Appends the platform-independent line terminator (CRLF). */
  public void addEOL() {
    sb.append(EOL);
  }

  /**
   * Builds the column metadata list (name + Java class) from result set
   * metadata. Leaves {@code nodes} null when no usable metadata is present.
   *
   * @param pRSMD the result set metadata, may be null
   */
  private void initializeNodeList(final ResultSetMetaData pRSMD) {
    // Check if the metadata has results
    if (pRSMD == null) {
      return;
    }

    resetNodes();

    // Get the number of columns
    int count = 0;
    try {
      count = pRSMD.getColumnCount();
    } catch (SQLException e) {
      Logger.error("Error getting metadata column count: " + e.getMessage());
    }
    if (count <= 0) {
      return;
    }

    nodes = new ArrayList<NodeInfo>(count);
    for (int i = 1; i <= count; ++i) {
      try {
        // Save the column name
        final String name = pRSMD.getColumnName(i);

        // Derive the correct class
        final int nClassType = pRSMD.getColumnType(i);
        final Class<?> clazz = getJavaClassFromSqlType(nClassType);

        // Save the node data
        final NodeInfo node = new NodeInfo(name, clazz);
        nodes.add(node);

        // Log the info
        Logger.debug("Node #" + i + ": " + node.toString());
      } catch (SQLException e) {
        Logger.error("Error getting metadata column info: " + e.getMessage());
      }
    }
  }

  // Clears and nulls the column metadata list.
  private void resetNodes() {
    if (nodes != null) {
      nodes.clear();
      nodes = null;
    }
  }

  /** Generates a value for the given fake-data node. */
  public Object getValueFromSpec(final FakeNode fake) {
    return FakeType.getValue(fake);
  }

  /**
   * Reads column {@code index} from the current row, converting SQL NULL to
   * Java null and unsupported/unknown classes to the empty string.
   *
   * @param rs the result set positioned on a row
   * @param clazz the target Java class (from getJavaClassFromSqlType)
   * @param index the 1-based column index
   * @return the boxed value, null for SQL NULL, or "" when unsupported
   */
  public Object getValueFromRow(final ResultSet rs, Class<?> clazz,
      final int index) {
    // Check the output class
    if (clazz == null) {
      // Unsupported data type
      return "";
    }

    try {
      if (clazz.equals(Boolean.class)) {
        boolean value = rs.getBoolean(index);
        return (rs.wasNull() ? null : value);
      } else if (clazz.equals(String.class)) {
        return rs.getString(index);
      } else if (clazz.equals(Long.class)) {
        long value = rs.getLong(index);
        return (rs.wasNull() ? null : value);
      } else if (clazz.equals(Double.class)) {
        double value = rs.getDouble(index);
        return (rs.wasNull() ? null : value);
      } else if (clazz.equals(java.util.Date.class)) {
        final Timestamp ts = rs.getTimestamp(index);
        final java.util.Date date = (ts == null) ? null
            : new java.util.Date(ts.getTime());
        return date;
      } else {
        // Unknown data type
        return "";
      }
    } catch (SQLException se) {
      Logger.error(se);
      Logger.error("* Retrieving column #" + index);
    }

    return "";
  }

  /**
   * Maps a java.sql.Types constant to the Java class used when reading values.
   *
   * @param nClassType a java.sql.Types constant
   * @return the target class, or null for unsupported SQL types
   */
  private Class<?> getJavaClassFromSqlType(final int nClassType) {
    switch (nClassType) {
      case Types.BOOLEAN:
      case Types.BIT:
        return Boolean.class;

      case Types.TIME_WITH_TIMEZONE:
      case Types.TIMESTAMP_WITH_TIMEZONE:
      case Types.DATE:
      case Types.TIME:
      case Types.TIMESTAMP:
        return java.util.Date.class;

      case Types.TINYINT:
      case Types.SMALLINT:
      case Types.INTEGER:
      case Types.BIGINT:
        return Long.class;

      case Types.FLOAT:
      case Types.REAL:
      case Types.DOUBLE:
      case Types.NUMERIC:
      case Types.DECIMAL:
        return Double.class;

      case Types.CHAR:
      case Types.VARCHAR:
      case Types.LONGVARCHAR:
      case Types.BINARY:
      case Types.VARBINARY:
      case Types.LONGVARBINARY:
      case Types.BLOB:
      case Types.CLOB:
      case Types.NCHAR:
      case Types.NVARCHAR:
      case Types.LONGNVARCHAR:
      case Types.NCLOB:
        return String.class;

      default:
        return null;
    }
  }

  /**
   * Abstract method for writing the file's header.
   */
  public abstract void writeHeader();

  /**
   * Abstract method for writing the file's footer.
   */
  public abstract void writeFooter();

  /**
   * Abstract method for writing a database result set row.
   *
   * @param rsj the result set
   */
  public abstract void writeObject(final ResultSet rsj);

  /** Abstract method for writing a fake-data row. */
  public abstract void writeObject(final FakeSpecParser str);

  /**
   * Write a string to the file. Default to not forcing a write.
   */
  public final void writeString() {
    writeString(false);
  }

  /**
   * Formats a date with the shared pattern. Synchronized because the shared
   * SimpleDateFormat is not thread-safe.
   */
  public static final String toGMTDate(final java.util.Date date) {
    synchronized (sdf) {
      return sdf.format(date);
    }
  }

  /**
   * If the string buffer is big enough (or forceWrite is set), flush it to
   * disk by appending to the output file, then clear the buffer.
   *
   * @param forceWrite flush regardless of buffer size
   */
  public final void writeString(final boolean forceWrite) {
    // If nothing to write, nothing to do
    if (sb.length() == 0) {
      return;
    }

    // Check the length of the string
    if (forceWrite || sb.length() > 10000) {
      // The input buffer is long enough that we need to
      // write it to file and then clear out the buffer
      PrintWriter filePw = null;
      try {
        // Open the file for appending
        filePw = new PrintWriter(new OutputStreamWriter(new FileOutputStream(
            filename, true), StandardCharsets.UTF_8));
        filePw.print(sb.toString());
        filePw.close();
        filePw = null;
      } catch (FileNotFoundException e) {
        // BUG FIX: this was silently swallowed, losing output with no trace;
        // log it so the caller can see why the file is incomplete.
        Logger.error("Unable to open " + filename + " for writing: "
            + e.getMessage());
      } finally {
        if (filePw != null) {
          // Close the writer
          filePw.close();
        }
      }

      sb.setLength(0);
    }
  }
}
package it.cnr.droidpark;

import java.util.Calendar;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import android.app.Application;
import android.util.Log;

/**
 * Application-level store for queue, rating and opinion messages, plus the set
 * of messages ("jobs") still to be disseminated.
 */
public class ApplicationDroidPark extends Application {
    // TODO: persistence of the data missing

    private static final String TAG = "ApplicationDroidPark";

    public final Integer NUMBER_OF_GAMES = 4;
    public final Integer NUMBER_OF_COPIES = 50;

    // Messages still to be sent out.
    private Set<ApplicationMsg> jobs;
    private Map<Integer, QueueMsg> queueList; // < IDgioco, QueueMsg >
    private Map<Integer, Map<Integer, RatingMsg>> ratingList; // < IDgioco, <IDutente,RatingMsg> >
    private Map<Integer, Map<Integer, Opinion>> opinionList; // < IDgioco, <IDutente,Opinion> >

    @Override
    public void onCreate() {
        super.onCreate();
        jobs = new HashSet<ApplicationMsg>();
        queueList = new Hashtable<Integer, QueueMsg>(NUMBER_OF_GAMES);
        ratingList = new Hashtable<Integer, Map<Integer, RatingMsg>>(NUMBER_OF_GAMES);
        opinionList = new Hashtable<Integer, Map<Integer, Opinion>>(NUMBER_OF_GAMES);

        // Demo data for game 1.
        Map<Integer, Opinion> demoMap = new Hashtable<Integer, Opinion>();
        demoMap.put(466546, new Opinion(1, 466546, Calendar.getInstance().getTime(), "Questa è un'opinione"));
        demoMap.put(87678, new Opinion(1, 87678, Calendar.getInstance().getTime(), "Questa è un'altra opinione"));
        opinionList.put(1, demoMap);
    }

    /**
     * Average rating for a game.
     *
     * @return the mean of all stored ratings, or 0 when the game has no
     *         ratings (BUG FIX: previously threw NPE for an unknown gameID and
     *         returned NaN, from 0/0, for an empty rating map).
     */
    public float getRatingAverage(Integer gameID) {
        Map<Integer, RatingMsg> currentGameRatings = ratingList.get(gameID);
        if (currentGameRatings == null || currentGameRatings.isEmpty()) {
            return 0f;
        }

        float average = 0f;
        for (Entry<Integer, RatingMsg> entry : currentGameRatings.entrySet()) {
            average += entry.getValue().getEval();
        }
        return average / currentGameRatings.size();
    }

    public Map<Integer, RatingMsg> getRatingMsg(Integer gameID) {
        return ratingList.get(gameID);
    }

    public QueueMsg getQueueMsg(Integer gameID) {
        return queueList.get(gameID);
    }

    /** Returns the given user's opinion for a game, or null if absent. */
    public Opinion getGameOpinion(Integer gameID, Integer userID) {
        Map<Integer, Opinion> gameOpinions = opinionList.get(gameID);
        if(gameOpinions != null)
            return gameOpinions.get(userID);
        else
            return null;
    }

    public Map<Integer, Opinion> getAllGameOpinions(Integer gameID) {
        return opinionList.get(gameID);
    }

    /** Returns a defensive copy of the pending-message set. */
    public Set<ApplicationMsg> getJobs(){
        return new HashSet<ApplicationMsg>(jobs);
    }

    /**
     * Change the numCopies of an ApplicationMsg. Use this function to update
     * correctly the job list, don't do it by yourself! If you want to insert a
     * new msg that you have produced or received, use <code>insertRating</code>
     * or <code>insertQueue</code>. This function is used only to manage
     * previous messages, got from <code>getJobs</code>
     *
     * @param msg
     * @param newNumCopies
     */
    public void updateNumCopies(ApplicationMsg msg, int newNumCopies) {
        msg.setNumCopies(newNumCopies);
        if(newNumCopies <= 0) {
            // FIXME: remove debug check
            boolean debug = jobs.remove(msg);
            Log.d(TAG, "Removed msg");
            if(!debug) Log.d(TAG, "Tried to remove a msg not in the job list! It's probably an error");
        }
    }

    /**
     * Insert the opinion. If there is an opinion of the same user about the
     * same game, keep the most recent one only
     *
     * @param gameID
     * @param userID
     * @param opinion
     * @return true if the opinion was inserted/updated (because more recent or new),
     * false otherwise
     */
    public boolean insertUpdateOpinion(Integer gameID, Integer userID, Opinion opinion) {
        Map<Integer, Opinion> gameOpinions = opinionList.get(gameID);
        if(gameOpinions != null) {
            Opinion currentUserOpinion = gameOpinions.get(userID);
            if(currentUserOpinion != null) {
                if(currentUserOpinion.getTimestamp().compareTo(opinion.getTimestamp()) > 0)
                    // The local opinion is newer than the "new" one. Don't do anything
                    return false;
            }
        }

        Log.d(TAG, "inserted/updated opinion");

        // Insert new opinion.
        // BUG FIX: previously a brand-new single-entry map always replaced the
        // game's opinion map, discarding every other user's opinion for this
        // game. Reuse the existing map (as insertRating does) and only create
        // one when the game had no opinions yet.
        Map<Integer, Opinion> userOpinion =
                (gameOpinions != null) ? gameOpinions : new Hashtable<Integer, Opinion>();
        userOpinion.put(userID, opinion);
        opinionList.put(gameID, userOpinion);
        return true;
    }

    /**
     * Insert the rating. If there is a rating of the same user about the same
     * game, keep the most recent one only. If the user, game and timestamp are
     * the same, sums the copies
     *
     * @param gameID
     * @param userID
     * @param rating
     * @return true if the rating was inserted/updated (because number of copies are not
     * 0, or because it is more recent or new), false otherwise
     */
    public boolean insertRating(Integer gameID, Integer userID, RatingMsg rating) {
        Map<Integer, RatingMsg> gameRatings = ratingList.get(gameID);
        RatingMsg currentUserRating = null;
        if(gameRatings != null) {
            currentUserRating = gameRatings.get(userID);
            if(currentUserRating != null) {
                int compare = currentUserRating.getTimestamp().compareTo(rating.getTimestamp());
                if(compare > 0) {
                    // The local rating is newer than the "new" one. Don't do anything
                    Log.d(TAG, "\"new\" rating is older than present");
                    return false;
                }
                if(compare == 0) {
                    // The local rating is the same of the "new" one. Sum the copies.
                    int newNumCopies = currentUserRating.getNumCopies() + rating.getNumCopies();
                    Log.d(TAG, "added copies in rating");
                    if(newNumCopies > 0)
                        // if already present in the job list, it doesn't matter
                        jobs.add(currentUserRating);
                    updateNumCopies(currentUserRating, newNumCopies);
                    return true;
                }
            }
        }

        Log.d(TAG, "inserted/updated rating");

        // Decrement numCopies by 1
        int ratingNumCopies = rating.getNumCopies();
        if(ratingNumCopies > 0) rating.setNumCopies(ratingNumCopies-1);

        // Insert new rating
        Map<Integer, RatingMsg> userRating = ratingList.get(gameID);
        if(userRating == null) userRating = new Hashtable<Integer, RatingMsg>();
        userRating.put(userID, rating);
        ratingList.put(gameID, userRating);

        // New/more recent msg -> remove from the job list the older one (if present) and insert this new one.
        // "older" means same gameID and userID but older timestamp
        if((ratingNumCopies = rating.getNumCopies()) > 0) {
            // If there is an older rating, remove it from the job list
            if(currentUserRating != null) jobs.remove(currentUserRating);
            jobs.add(rating);
        }

        return true;
    }

    /**
     * Insert the queue. If there is a queue of the same game, keep the most
     * recent one only. If the game and timestamp are the same, sums the
     * copies
     *
     * @param gameID
     * @param queue
     * @return true if the queue was inserted/updated (because number of copies are not
     * 0, or because it is more recent or new), false otherwise
     */
    public boolean insertQueue(Integer gameID, QueueMsg queue) {
        QueueMsg currentQueue = queueList.get(gameID);
        if(currentQueue != null) {
            int compare = currentQueue.getTimestamp().compareTo(queue.getTimestamp());
            if(compare > 0) {
                // The local queue is newer than the "new" one. Don't do anything
                Log.d(TAG, "\"new\" queue is older than present");
                return false;
            }
            if(compare == 0) {
                // The local queue is the same of the "new" one. Sum the copies.
                int newNumCopies = currentQueue.getNumCopies() + queue.getNumCopies();
                Log.d(TAG, "added copies in queue");
                if(newNumCopies > 0)
                    // if already present in the job list, it doesn't matter
                    jobs.add(currentQueue);
                updateNumCopies(currentQueue, newNumCopies);
                return true;
            }
        }

        Log.d(TAG, "inserted/updated queue");

        // Decrement numCopies by 1
        int queueNumCopies = queue.getNumCopies();
        if(queueNumCopies > 0) queue.setNumCopies(queueNumCopies-1);

        // Insert new queue
        queueList.put(gameID, queue);

        // New/more recent msg -> remove from the job list the older one (if present) and insert this new one.
        // "older" means same gameID but older timestamp
        if((queueNumCopies = queue.getNumCopies()) > 0) {
            // If there is an older queue, remove it from the job list
            if(currentQueue != null) jobs.remove(currentQueue);
            jobs.add(queue);
        }

        return true;
    }
}
package com.dstevens.user;

import java.math.BigDecimal;
import java.time.Instant;
import java.time.Year;
import java.util.Date;
import java.util.function.Supplier;

import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.stereotype.Service;

import com.dstevens.mail.MailMessage;
import com.dstevens.mail.MailMessageFactory;
import com.dstevens.suppliers.ClockSupplier;
import com.dstevens.user.patronage.Patronage;
import com.dstevens.user.patronage.PatronagePaymentReceipt;
import com.dstevens.user.patronage.PaymentType;

import static com.dstevens.collections.Sets.set;

/**
 * Creates new user accounts: persists the {@link User} (optionally carrying
 * patronage information migrated from the old database), then sends a
 * confirmation mail to the new user and a notification mail to the admins.
 * Mail failures are logged and never propagate to the caller.
 */
@Service
public class AccountCreator {

    private static final Logger logger = Logger.getLogger(AccountCreator.class);

    private final UserDao userDao;
    private final MailMessageFactory messageFactory;
    private final ClockSupplier clockSupplier;
    private final Supplier<PasswordEncoder> passwordEncoderSupplier;

    @Autowired
    public AccountCreator(UserDao userDao,
                          Supplier<PasswordEncoder> passwordEncoderSupplier,
                          MailMessageFactory messageFactory,
                          ClockSupplier clockSupplier) {
        this.userDao = userDao;
        this.passwordEncoderSupplier = passwordEncoderSupplier;
        this.messageFactory = messageFactory;
        this.clockSupplier = clockSupplier;
    }

    /**
     * Creates, persists and announces a new user.
     *
     * @param email                    the new user's email address (also their login)
     * @param password                 the plaintext password; encoded before storage
     * @param firstName                the user's first name
     * @param lastName                 the user's last name
     * @param originalUsername         username in the old database; blank means no patronage record
     * @param paymentReceiptIdentifier PayPal receipt id for the patronage payment; blank means no payment
     * @return the persisted user
     */
    public User createUser(String email, String password, String firstName, String lastName,
                           String originalUsername, String paymentReceiptIdentifier) {
        String encodedPassword = passwordEncoderSupplier.get().encode(password);
        User user = new User(email, encodedPassword, set(Role.USER))
                        .withFirstName(firstName)
                        .withLastName(lastName);
        Instant creationTime = clockSupplier.get().instant();
        if (!StringUtils.isBlank(originalUsername)) {
            user = user.withPatronage(patronageFor(originalUsername, paymentReceiptIdentifier, creationTime));
        }
        User savedUser = userDao.save(user);
        sendConfirmatoryEmailTo(savedUser);
        sendAdminEmailFor(savedUser);
        return savedUser;
    }

    /**
     * Builds the patronage record for a migrated user: current year, dated now,
     * plus a $20.00 PayPal payment receipt when an identifier was supplied.
     */
    private Patronage patronageFor(String originalUsername, String paymentReceiptIdentifier, Instant creationTime) {
        Patronage patronage = new Patronage(Year.now(clockSupplier.get()).getValue(), Date.from(creationTime), null)
                                  .withOriginalUsername(originalUsername.trim());
        if (!StringUtils.isBlank(paymentReceiptIdentifier)) {
            PatronagePaymentReceipt receipt = new PatronagePaymentReceipt(
                    PaymentType.PAYPAL, new BigDecimal("20.00"), paymentReceiptIdentifier, Date.from(creationTime));
            patronage = patronage.withPayment(receipt);
        }
        return patronage;
    }

    /** Notifies the services mailbox that an account was created, including migration details if any. */
    private void sendAdminEmailFor(User user) {
        StringBuilder body = new StringBuilder();
        body.append(user.getEmail());
        body.append(" has just created an account on the database.");
        Patronage patronage = user.getPatronage();
        if (patronage != null) {
            body.append("\nTheir original username in the old database is ");
            body.append(patronage.getOriginalUsername());
            if (!patronage.getPayments().isEmpty()) {
                body.append("\nTheir paypal receipt id for paying for patronage is ");
                body.append(patronage.getPayments().get(0).getPaymentReceiptIdentifier());
            }
        }
        MailMessage message = messageFactory.message().
            from("services@undergroundtheater.org", "UT Database").
            to("services@undergroundtheater.org").
            subject("A new Underground Theater User Account has been created: " + user.getEmail()).
            body(body.toString());
        send(message);
    }

    /** Sends the welcome mail to the freshly-created user. */
    private void sendConfirmatoryEmailTo(User user) {
        MailMessage message = messageFactory.message().
            from("services@undergroundtheater.org", "UT Database").
            to(user.getEmail()).
            subject("Your Underground Theater User Account has been created").
            body("Thank you for creating an account with Underground Theater's character database, The Green Room!");
        send(message);
    }

    /** Best-effort send: any failure is logged, never rethrown. */
    private void send(MailMessage mailMessage) {
        try {
            mailMessage.send();
        } catch(Exception e) {
            logger.error("Failed to send " + mailMessage, e);
        }
    }
}
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// This library is distributed in the hope that it will be useful,
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// You should have received a copy of the GNU Lesser General Public
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
// NOTE(review): the LGPL header above is missing several lines (extraction damage);
// restore the full boilerplate from the upstream OpenNLP sources.

package opennlp.tools.postag;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.StringTokenizer;

import opennlp.maxent.DataStream;
import opennlp.maxent.Evalable;
import opennlp.maxent.EventCollector;
import opennlp.maxent.EventStream;
import opennlp.maxent.GISModel;
import opennlp.maxent.MaxentModel;
import opennlp.maxent.PlainTextByLineDataStream;
import opennlp.maxent.TwoPassDataIndexer;
import opennlp.maxent.io.SuffixSensitiveGISModelWriter;
import opennlp.tools.dictionary.Dictionary;
import opennlp.tools.ngram.NGramModel;
import opennlp.tools.ngram.Token;
import opennlp.tools.ngram.TokenList;
import opennlp.tools.util.BeamSearch;
import opennlp.tools.util.InvalidFormatException;
import opennlp.tools.util.Pair;
import opennlp.tools.util.Sequence;

/**
 * A part-of-speech tagger that uses a maximum-entropy model to pick the most
 * likely tag sequence for a sentence, searching over alternatives with a beam.
 * Also hosts static helpers to train and persist new models, plus a
 * command-line entry point ({@link #main}).
 */
public class POSTaggerME implements Evalable, POSTagger {

  /**
   * The maximum entropy model to use to evaluate contexts.
   */
  protected MaxentModel posModel;

  /**
   * The feature context generator.
   */
  protected POSContextGenerator contextGen;

  /**
   * Tag dictionary used for restricting words to a fixed set of tags.
   */
  protected TagDictionary tagDictionary;

  // N-gram dictionary used for feature generation (may be null).
  protected Dictionary ngramDictionary;

  /**
   * Says whether a filter should be used to check whether a tag assignment
   * is to a word outside of a closed class.
   */
  protected boolean useClosedClassTagsFilter = false;

  private static final int DEFAULT_BEAM_SIZE = 3;

  /** The size of the beam to be used in determining the best sequence of pos tags. */
  protected int size;

  // Result of the most recent tagging call; read by probs()/probs(double[]).
  private Sequence bestSequence;

  /** The search object used for search multiple sequences of tags. */
  protected BeamSearch beam;

  /**
   * Creates a new tagger with the specified model and tag dictionary.
   * @param model The model used for tagging.
   * @param tagdict The tag dictionary used for specifing a set of valid tags.
   */
  public POSTaggerME(MaxentModel model, TagDictionary tagdict) {
    this(model, new DefaultPOSContextGenerator(null), tagdict);
  }

  /**
   * Creates a new tagger with the specified model and n-gram dictionary.
   * @param model The model used for tagging.
   * @param dict The n-gram dictionary used for feature generation.
   */
  public POSTaggerME(MaxentModel model, Dictionary dict) {
    this(model, new DefaultPOSContextGenerator(dict));
  }

  /**
   * Creates a new tagger with the specified model, n-gram dictionary, and tag dictionary.
   * @param model The model used for tagging.
   * @param dict The n-gram dictionary used for feature generation.
   * @param tagdict The dictionary which specifies the valid set of tags for some words.
   */
  public POSTaggerME(MaxentModel model, Dictionary dict, TagDictionary tagdict) {
    this(DEFAULT_BEAM_SIZE, model, new DefaultPOSContextGenerator(dict), tagdict);
  }

  /**
   * Creates a new tagger with the specified model and context generator.
   * @param model The model used for tagging.
   * @param cg The context generator used for feature creation.
   */
  public POSTaggerME(MaxentModel model, POSContextGenerator cg) {
    this(DEFAULT_BEAM_SIZE, model, cg, null);
  }

  /**
   * Creates a new tagger with the specified model, context generator, and tag dictionary.
   * @param model The model used for tagging.
   * @param cg The context generator used for feature creation.
   * @param tagdict The dictionary which specifies the valid set of tags for some words.
   */
  public POSTaggerME(MaxentModel model, POSContextGenerator cg, TagDictionary tagdict) {
    this(DEFAULT_BEAM_SIZE, model, cg, tagdict);
  }

  /**
   * Creates a new tagger with the specified beam size, model, context generator, and tag dictionary.
   * @param beamSize The number of alturnate tagging considered when tagging.
   * @param model The model used for tagging.
   * @param cg The context generator used for feature creation.
   * @param tagdict The dictionary which specifies the valid set of tags for some words.
   */
  public POSTaggerME(int beamSize, MaxentModel model, POSContextGenerator cg, TagDictionary tagdict) {
    size = beamSize;
    posModel = model;
    contextGen = cg;
    beam = new PosBeamSearch(size, cg, model);
    tagDictionary = tagdict;
  }

  // Evalable implementation: there is no distinguished negative outcome for POS tagging.
  public String getNegativeOutcome() {
    return "";
  }

  /**
   * Returns the number of different tags predicted by this model.
   * @return the number of different tags predicted by this model.
   */
  public int getNumTags() {
    return posModel.getNumOutcomes();
  }

  // Evalable implementation: collects POS training events from annotated text.
  public EventCollector getEventCollector(Reader r) {
    return new POSEventCollector(r, contextGen);
  }

  /**
   * Tags a tokenized sentence, returning one tag per token.
   * Side effect: caches the best sequence for later probs() queries.
   */
  public List<String> tag(List<String> sentence) {
    bestSequence = beam.bestSequence(sentence, null);
    return bestSequence.getOutcomes();
  }

  /** Array-based variant of {@link #tag(List)}. */
  public String[] tag(String[] sentence) {
    bestSequence = beam.bestSequence(sentence, null);
    List<String> t = bestSequence.getOutcomes();
    return t.toArray(new String[t.size()]);
  }

  /**
   * Returns at most the specified number of taggings for the specified sentence.
   * @param numTaggings The number of tagging to be returned.
   * @param sentence An array of tokens which make up a sentence.
   * @return At most the specified number of taggings for the specified sentence.
   */
  public String[][] tag(int numTaggings, String[] sentence) {
    Sequence[] bestSequences = beam.bestSequences(numTaggings, sentence, null);
    String[][] tags = new String[bestSequences.length][];
    for (int si = 0; si < tags.length; si++) {
      List<String> t = bestSequences[si].getOutcomes();
      tags[si] = t.toArray(new String[t.size()]);
    }
    return tags;
  }

  /**
   * Populates the specified array with the probabilities for each tag of the last tagged sentence.
   * @param probs An array to put the probabilities into.
   */
  public void probs(double[] probs) {
    bestSequence.getProbs(probs);
  }

  /**
   * Returns an array with the probabilities for each tag of the last tagged sentence.
   * @return an array with the probabilities for each tag of the last tagged sentence.
   */
  public double[] probs() {
    return bestSequence.getProbs();
  }

  /**
   * Tags a whitespace-tokenized sentence and renders the result as
   * "token/TAG token/TAG ...".
   */
  public String tag(String sentence) {
    List<String> toks = new ArrayList<String>();
    StringTokenizer st = new StringTokenizer(sentence);
    while (st.hasMoreTokens())
      toks.add(st.nextToken());
    List<String> tags = tag(toks);
    StringBuffer sb = new StringBuffer();
    for (int i = 0; i < tags.size(); i++)
      sb.append(toks.get(i) + "/" + tags.get(i) + " ");
    return sb.toString().trim();
  }

  /**
   * Evalable implementation: tags each annotated line from the reader with the
   * given model and prints per-token and per-sentence accuracy to stdout.
   * Note: replaces this tagger's model with the one passed in.
   */
  public void localEval(MaxentModel posModel, Reader r, Evalable e, boolean verbose) {
    this.posModel = posModel;
    float total = 0, correct = 0, sentences = 0, sentsCorrect = 0;
    BufferedReader br = new BufferedReader(r);
    String line;
    try {
      while ((line = br.readLine()) != null) {
        sentences++;
        Pair p = POSEventCollector.convertAnnotatedString(line);
        // Unchecked casts: Pair is an untyped container of (words, gold tags).
        List<String> words = (List<String>) p.a;
        List<String> outcomes = (List<String>) p.b;
        List<String> tags = beam.bestSequence(words, null).getOutcomes();
        int c = 0;
        boolean sentOk = true;
        for (Iterator<String> t = tags.iterator(); t.hasNext(); c++) {
          total++;
          String tag = (String) t.next();
          if (tag.equals(outcomes.get(c)))
            correct++;
          else
            sentOk = false;
        }
        if (sentOk)
          sentsCorrect++;
      }
    }
    catch (IOException E) {
      E.printStackTrace();
    }
    System.out.println("Accuracy         : " + correct / total);
    System.out.println("Sentence Accuracy: " + sentsCorrect / sentences);
  }

  /**
   * Beam search specialized for POS tagging: when a tag dictionary is present,
   * only tag assignments the dictionary lists for a word are considered valid.
   */
  private class PosBeamSearch extends BeamSearch {

    PosBeamSearch(int size, POSContextGenerator cg, MaxentModel model) {
      super(size, cg, model);
    }

    PosBeamSearch(int size, POSContextGenerator cg, MaxentModel model, int cacheSize) {
      super(size, cg, model, cacheSize);
    }

    protected boolean validSequence(int i, Object[] inputSequence, String[] outcomesSequence, String outcome) {
      if (tagDictionary == null) {
        return true;
      }
      else {
        String[] tags = tagDictionary.getTags(inputSequence[i].toString());
        if (tags == null) {
          // Word unknown to the dictionary: any tag is allowed.
          return true;
        }
        else {
          return Arrays.asList(tags).contains(outcome);
        }
      }
    }
  }

  /** See {@link #getOrderedTags(List, List, int, double[])}; discards probabilities. */
  public String[] getOrderedTags(List<String> words, List<String> tags, int index) {
    return getOrderedTags(words, tags, index, null);
  }

  /**
   * Returns all tags for the word at {@code index}, ordered by descending model
   * probability (selection by repeatedly extracting the max and zeroing it out).
   * @param tprobs if non-null, filled with the probability of each returned tag.
   */
  public String[] getOrderedTags(List<String> words, List<String> tags, int index, double[] tprobs) {
    double[] probs = posModel.eval(contextGen.getContext(index, words.toArray(), tags.toArray(new String[tags.size()]), null));
    String[] orderedTags = new String[probs.length];
    for (int i = 0; i < probs.length; i++) {
      int max = 0;
      for (int ti = 1; ti < probs.length; ti++) {
        if (probs[ti] > probs[max]) {
          max = ti;
        }
      }
      orderedTags[i] = posModel.getOutcome(max);
      if (tprobs != null) {
        tprobs[i] = probs[max];
      }
      probs[max] = 0;
    }
    return orderedTags;
  }

  /**
   * Trains a new model.
   *
   * @param evc event stream of training events
   * @param modelFile destination file for the persisted model
   * @throws IOException
   */
  public static void train(EventStream evc, File modelFile) throws IOException {
    // Defaults: 100 GIS iterations, feature cutoff of 5.
    GISModel model = train(evc, 100, 5);
    new SuffixSensitiveGISModelWriter(model, modelFile).persist();
  }

  /**
   * Trains a new model
   *
   * @param es event stream of training events
   * @param iterations number of GIS training iterations
   * @param cut feature-frequency cutoff
   * @return the new model
   * @throws IOException
   */
  public static GISModel train(EventStream es, int iterations, int cut) throws IOException {
    return opennlp.maxent.GIS.trainModel(iterations, new TwoPassDataIndexer(es, cut));
  }

  // Prints CLI usage and exits with a non-zero status.
  private static void usage() {
    System.err.println("Usage: POSTaggerME [-encoding encoding] [-dict dict_file] training model [cutoff] [iterations]");
    System.err.println("This trains a new model on the specified training file and writes the trained model to the model file.");
    System.err.println("-encoding Specifies the encoding of the training file");
    System.err.println("-dict Specifies that a dictionary file should be created for use in distinguising between rare and non-rare words");
    System.exit(1);
  }

  /**
   * <p>Trains a new pos model.</p>
   *
   * <p>Usage: java opennlp.postag.POStaggerME [-encoding charset] [-d dict_file] data_file new_model_name (iterations cutoff)?</p>
   * @param args
   * @throws IOException
   * @throws InvalidFormatException
   */
  public static void main(String[] args) throws IOException, InvalidFormatException {
    if (args.length == 0) {
      usage();
    }
    int ai = 0;
    try {
      // --- Parse options ---
      String encoding = null;
      String dict = null;
      while (args[ai].startsWith("-")) {
        if (args[ai].equals("-encoding")) {
          ai++;
          if (ai < args.length) {
            encoding = args[ai++];
          }
          else {
            usage();
          }
        }
        else if (args[ai].equals("-dict")) {
          ai++;
          if (ai < args.length) {
            dict = args[ai++];
          }
          else {
            usage();
          }
        }
        else {
          System.err.println("Unknown option " + args[ai]);
          usage();
        }
      }
      File inFile = new File(args[ai++]);
      File outFile = new File(args[ai++]);
      int cutoff = 5;
      int iterations = 100;
      if (args.length > ai) {
        cutoff = Integer.parseInt(args[ai++]);
        iterations = Integer.parseInt(args[ai++]);
      }
      GISModel mod;
      // --- Optionally build an n-gram dictionary from the training data ---
      if (dict != null) {
        System.err.println("Building dictionary");
        NGramModel ngramModel = new NGramModel();
        DataStream data = new opennlp.maxent.PlainTextByLineDataStream(new java.io.FileReader(inFile));
        while (data.hasNext()) {
          String tagStr = (String) data.nextToken();
          String[] tt = tagStr.split(" ");
          Token[] words = new Token[tt.length];
          for (int wi = 0; wi < words.length; wi++) {
            // Training tokens look like "word_TAG"; strip the tag suffix.
            words[wi] = Token.create(tt[wi].substring(0, tt[wi].lastIndexOf('_')));
          }
          ngramModel.add(new TokenList(words), 1, 1);
        }
        System.out.println("Saving the dictionary");
        ngramModel.cutoff(cutoff, Integer.MAX_VALUE);
        Dictionary dictionary = ngramModel.toDictionary(true);
        dictionary.serialize(new FileOutputStream(dict));
      }
      // --- Build the event stream (4 combinations: encoding x dictionary) ---
      EventStream es;
      if (encoding == null) {
        if (dict == null) {
          es = new POSEventStream(new PlainTextByLineDataStream(
              new InputStreamReader(new FileInputStream(inFile))));
        }
        else {
          es = new POSEventStream(new PlainTextByLineDataStream(
              new InputStreamReader(new FileInputStream(inFile))), new Dictionary(new FileInputStream(dict)));
        }
      }
      else {
        if (dict == null) {
          es = new POSEventStream(new PlainTextByLineDataStream(
              new InputStreamReader(new FileInputStream(inFile), encoding)));
        }
        else {
          es = new POSEventStream(new PlainTextByLineDataStream(
              new InputStreamReader(new FileInputStream(inFile), encoding)), new Dictionary(new FileInputStream(dict)));
        }
      }
      // --- Train and persist ---
      mod = train(es, iterations, cutoff);
      System.out.println("Saving the model as: " + outFile);
      new SuffixSensitiveGISModelWriter(mod, outFile).persist();
    }
    catch (Exception e) {
      e.printStackTrace();
    }
  }
}
package org.apache.james.util;

import java.net.InetAddress;
import java.util.Collection;
import java.util.ArrayList;
import java.util.Iterator;

/**
 * Matches IP addresses (given as strings or {@link InetAddress}es) against a
 * configured set of networks. Network specs accept "address/mask",
 * "address/prefix-length" (CIDR), trailing-asterisk wildcards ("172.16.*"),
 * or a bare address (implying a /32 host mask).
 */
public class NetMatcher {

    // Deduplicated list of parsed networks; rebuilt by each initInetNetworks call.
    private ArrayList<InetNetwork> networks;

    /**
     * (Re)initializes the network list from a collection of spec strings.
     * Unresolvable entries are logged and skipped.
     */
    public void initInetNetworks(final Collection nets) {
        networks = new ArrayList<InetNetwork>();
        for (Iterator iter = nets.iterator(); iter.hasNext(); ) {
            try {
                InetNetwork net = InetNetwork.getFromString((String) iter.next());
                if (!networks.contains(net)) networks.add(net);
            } catch (java.net.UnknownHostException uhe) {
                log("Cannot resolve address: " + uhe.getMessage());
            }
        }
        networks.trimToSize();
    }

    /**
     * (Re)initializes the network list from an array of spec strings.
     * Unresolvable entries are logged and skipped.
     */
    public void initInetNetworks(final String[] nets) {
        networks = new ArrayList<InetNetwork>();
        for (int i = 0; i < nets.length; i++) {
            try {
                InetNetwork net = InetNetwork.getFromString(nets[i]);
                if (!networks.contains(net)) networks.add(net);
            } catch (java.net.UnknownHostException uhe) {
                log("Cannot resolve address: " + uhe.getMessage());
            }
        }
        networks.trimToSize();
    }

    /**
     * Returns true if the given host/IP string resolves to an address inside
     * any configured network. Returns false (after logging) when the name
     * cannot be resolved.
     */
    public boolean matchInetNetwork(final String hostIP) {
        InetAddress ip = null;
        try {
            ip = InetAddress.getByName(hostIP);
        } catch (java.net.UnknownHostException uhe) {
            log("Cannot resolve address: " + uhe.getMessage());
        }
        // FIX: previously a failed resolution left ip == null and fell through
        // into network.contains(ip), raising a NullPointerException.
        if (ip == null) {
            return false;
        }
        return matchInetNetwork(ip);
    }

    /** Returns true if the given address falls inside any configured network. */
    public boolean matchInetNetwork(final InetAddress ip) {
        boolean sameNet = false;
        for (Iterator iter = networks.iterator(); (!sameNet) && iter.hasNext(); ) {
            InetNetwork network = (InetNetwork) iter.next();
            sameNet = network.contains(ip);
        }
        return sameNet;
    }

    public NetMatcher() {
    }

    public NetMatcher(final String[] nets) {
        initInetNetworks(nets);
    }

    public NetMatcher(final Collection nets) {
        initInetNetworks(nets);
    }

    public String toString() {
        return networks.toString();
    }

    // Hook for subclasses; no-op by default.
    protected void log(String s) {
    }
}

/**
 * A network (base address + mask) supporting membership tests.
 * Implements network masking compatible with RFC 1518 and RFC 1519,
 * which describe CIDR: Classless Inter-Domain Routing.
 */
class InetNetwork {

    private InetAddress network; // base address, already masked
    private InetAddress netmask;

    public InetNetwork(InetAddress ip, InetAddress netmask) {
        network = maskIP(ip, netmask);
        this.netmask = netmask;
    }

    /** True if the resolved name, masked, equals this network's base address. */
    public boolean contains(final String name) throws java.net.UnknownHostException {
        return network.equals(maskIP(InetAddress.getByName(name), netmask));
    }

    /** True if the address, masked, equals this network's base address. */
    public boolean contains(final InetAddress ip) {
        return network.equals(maskIP(ip, netmask));
    }

    public String toString() {
        return network.getHostAddress() + "/" + netmask.getHostAddress();
    }

    public int hashCode() {
        return maskIP(network, netmask).hashCode();
    }

    public boolean equals(Object obj) {
        return (obj != null) && (obj instanceof InetNetwork) &&
            ((((InetNetwork)obj).network.equals(network)) && (((InetNetwork)obj).netmask.equals(netmask)));
    }

    /**
     * Parses a network spec. Accepted forms: trailing-asterisk wildcard,
     * "addr/prefix-length" (CIDR), "addr/mask", or bare address (/32).
     * @throws java.net.UnknownHostException if the address part cannot be resolved
     */
    public static InetNetwork getFromString(String netspec) throws java.net.UnknownHostException {
        if (netspec.endsWith("*")) {
            netspec = normalizeFromAsterisk(netspec);
        } else {
            int iSlash = netspec.indexOf('/');
            if (iSlash == -1) {
                netspec += "/255.255.255.255";
            } else if (netspec.indexOf('.', iSlash) == -1) {
                netspec = normalizeFromCIDR(netspec);
            }
        }
        return new InetNetwork(InetAddress.getByName(netspec.substring(0, netspec.indexOf('/'))),
                               InetAddress.getByName(netspec.substring(netspec.indexOf('/') + 1)));
    }

    /**
     * Applies a 4-byte mask to a 4-byte address.
     * Returns null if the masked bytes cannot be turned into an InetAddress.
     * FIX: replaced "catch(Exception _) {}" followed by a stray block with a
     * conventional catch ("_" is not a legal identifier on modern compilers,
     * and the original structure obscured the null fallback).
     */
    public static InetAddress maskIP(final byte[] ip, final byte[] mask) {
        try {
            return getByAddress(new byte[] {
                (byte) (mask[0] & ip[0]),
                (byte) (mask[1] & ip[1]),
                (byte) (mask[2] & ip[2]),
                (byte) (mask[3] & ip[3])
            });
        } catch (Exception e) {
            return null;
        }
    }

    public static InetAddress maskIP(final InetAddress ip, final InetAddress mask) {
        return maskIP(ip.getAddress(), mask.getAddress());
    }

    /*
     * This converts from an uncommon "wildcard" CIDR format
     * to "address + mask" format:
     *
     *   *             =>  000.000.000.0/000.000.000.0
     *   xxx.*         =>  xxx.000.000.0/255.000.000.0
     *   xxx.xxx.*     =>  xxx.xxx.000.0/255.255.000.0
     *   xxx.xxx.xxx.* =>  xxx.xxx.xxx.0/255.255.255.0
     */
    static private String normalizeFromAsterisk(final String netspec) {
        String[] masks = { "0.0.0.0/0.0.0.0", "0.0.0/255.0.0.0", "0.0/255.255.0.0", "0/255.255.255.0" };
        char[] srcb = netspec.toCharArray();
        int octets = 0;
        for (int i = 1; i < netspec.length(); i++) {
            if (srcb[i] == '.') octets++;
        }
        return (octets == 0) ? masks[0] : netspec.substring(0, netspec.length() - 1).concat(masks[octets]);
    }

    /*
     * RFC 1518, 1519 - Classless Inter-Domain Routing (CIDR)
     * This converts from "prefix + prefix-length" format to
     * "address + mask" format, e.g. from xxx.xxx.xxx.xxx/yy
     * to xxx.xxx.xxx.xxx/yyy.yyy.yyy.yyy.
     */
    static private String normalizeFromCIDR(final String netspec) {
        final int bits = 32 - Integer.parseInt(netspec.substring(netspec.indexOf('/') + 1));
        final int mask = (bits == 32) ? 0 : 0xFFFFFFFF - ((1 << bits) - 1);
        return netspec.substring(0, netspec.indexOf('/') + 1) +
               Integer.toString(mask >> 24 & 0xFF, 10) + "." +
               Integer.toString(mask >> 16 & 0xFF, 10) + "." +
               Integer.toString(mask >> 8 & 0xFF, 10) + "." +
               Integer.toString(mask >> 0 & 0xFF, 10);
    }

    // Reflective handle to InetAddress.getByAddress(byte[]) so this class still
    // loads on pre-1.4 JVMs where that method does not exist.
    private static java.lang.reflect.Method getByAddress = null;

    static {
        try {
            Class inetAddressClass = Class.forName("java.net.InetAddress");
            Class[] parameterTypes = { byte[].class };
            getByAddress = inetAddressClass.getMethod("getByAddress", parameterTypes);
        } catch (Exception e) {
            getByAddress = null;
        }
    }

    /**
     * Builds an InetAddress from raw bytes via reflection when available,
     * falling back to a dotted-quad getByName lookup (no DNS for numeric forms).
     */
    private static InetAddress getByAddress(byte[] ip) throws java.net.UnknownHostException {
        InetAddress addr = null;
        if (getByAddress != null) {
            try {
                addr = (InetAddress) getByAddress.invoke(null, new Object[] { ip });
            } catch (IllegalAccessException e) {
            } catch (java.lang.reflect.InvocationTargetException e) {
            }
        }
        if (addr == null) {
            addr = InetAddress.getByName(
                       Integer.toString(ip[0] & 0xFF, 10) + "." +
                       Integer.toString(ip[1] & 0xFF, 10) + "." +
                       Integer.toString(ip[2] & 0xFF, 10) + "." +
                       Integer.toString(ip[3] & 0xFF, 10)
                   );
        }
        return addr;
    }
}
package org.jdesktop.swingx;

import java.awt.Component;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import java.util.ArrayList;
import java.util.List;
import javax.swing.AbstractButton;
import javax.swing.Box;
import javax.swing.BoxLayout;
import javax.swing.ButtonGroup;
import javax.swing.ButtonModel;
import javax.swing.JPanel;
import javax.swing.JRadioButton;

/**
 * A panel that presents a mutually-exclusive group of radio buttons, mapping
 * each button to a value object and exposing the selection by value.
 *
 * @author Amy Fowler
 * @version 1.0
 */
public class JXRadioGroup extends JPanel {

    private static final long serialVersionUID = 3257285842266567986L;

    private ButtonGroup buttonGroup;
    // values.get(i) corresponds to the i-th AbstractButton child.
    private List<Object> values = new ArrayList<Object>();
    // Lazily-created shared listener that forwards button events to our listeners.
    private ActionSelectionListener actionHandler;
    private List<ActionListener> actionListeners;
    private int gapWidth;

    /** Creates an empty group with no gap between buttons. */
    public JXRadioGroup() {
        this(0);
    }

    /** Creates an empty group with the given horizontal gap (pixels). */
    public JXRadioGroup(int gapWidth) {
        setLayout(new BoxLayout(this, BoxLayout.X_AXIS));
        buttonGroup = new ButtonGroup();
        this.gapWidth = gapWidth;
    }

    /** Creates a group pre-populated with one radio button per value. */
    public JXRadioGroup(Object radioValues[]) {
        this();
        for(int i = 0; i < radioValues.length; i++) {
            add(radioValues[i]);
        }
    }

    /** Replaces all buttons with one radio button per given value. */
    public void setValues(Object[] radioValues) {
        clearAll();
        for(int i = 0; i < radioValues.length; i++) {
            add(radioValues[i]);
        }
    }

    // Drops all values, child components, and the current selection state.
    private void clearAll() {
        values.clear();
        removeAll();
        buttonGroup = new ButtonGroup();
    }

    /** Adds a radio button labeled with the value's string form. */
    public void add(Object radioValue) {
        values.add(radioValue);
        addButton(new JRadioButton(radioValue.toString()));
    }

    /** Adds a caller-supplied button; its text doubles as its value. */
    public void add(AbstractButton button) {
        values.add(button.getText());
        // PENDING: mapping needs cleanup...
        addButton(button);
    }

    // Registers the button with the group, the panel, and the shared listener.
    private void addButton(AbstractButton button) {
        buttonGroup.add(button);
        super.add(button);
        if (actionHandler == null) {
            actionHandler = new ActionSelectionListener();
        }
        button.addActionListener(actionHandler);
        button.addItemListener(actionHandler);
    }

    // Forwards both action and item events from any child button to our listeners.
    // NOTE(review): a selection change can therefore fire listeners twice (once per
    // event type) — kept as-is since callers may depend on it.
    private class ActionSelectionListener implements ActionListener, ItemListener {

        public void actionPerformed(ActionEvent e) {
            fireActionEvent(e);
        }

        public void itemStateChanged(ItemEvent e) {
            fireActionEvent(null);
        }
    }

    // NOTE(review): never invoked anywhere in this class; presumably intended to
    // insert gap struts between buttons. Kept for source compatibility.
    private void checkGap() {
        if ((getGapWidth() > 0) && (getComponentCount() > 0)) {
            add(Box.createHorizontalStrut(getGapWidth()));
        }
    }

    private int getGapWidth() {
        return gapWidth;
    }

    /** Returns the currently selected button, or null if none is selected. */
    public AbstractButton getSelectedButton() {
        ButtonModel selectedModel = buttonGroup.getSelection();
        AbstractButton children[] = getButtonComponents();
        for(int i = 0; i < children.length; i++) {
            AbstractButton button = children[i];
            if (button.getModel() == selectedModel) {
                return button;
            }
        }
        return null;
    }

    // Collects the AbstractButton children, skipping any non-button components.
    private AbstractButton[] getButtonComponents() {
        Component[] children = getComponents();
        // FIX: was a raw List with an unchecked array cast; now properly typed.
        List<AbstractButton> buttons = new ArrayList<AbstractButton>();
        for (int i = 0; i < children.length; i++) {
            if (children[i] instanceof AbstractButton) {
                buttons.add((AbstractButton) children[i]);
            }
        }
        return buttons.toArray(new AbstractButton[buttons.size()]);
    }

    // Index of the selected button among the button children, or -1.
    private int getSelectedIndex() {
        ButtonModel selectedModel = buttonGroup.getSelection();
        Component children[] = getButtonComponents();
        for (int i = 0; i < children.length; i++) {
            AbstractButton button = (AbstractButton) children[i];
            if (button.getModel() == selectedModel) {
                return i;
            }
        }
        return -1;
    }

    /** Returns the value of the selected button, or null if none is selected. */
    public Object getSelectedValue() {
        int index = getSelectedIndex();
        return (index < 0 || index >= values.size()) ? null : values.get(index);
    }

    /**
     * Selects the button whose value equals the given one.
     * Unknown values are ignored.
     */
    public void setSelectedValue(Object value) {
        int index = values.indexOf(value);
        if (index < 0) {
            // FIX: an unknown value used to raise ArrayIndexOutOfBoundsException.
            return;
        }
        AbstractButton button = getButtonComponents()[index];
        button.setSelected(true);
    }

    /** Registers a listener notified when the selection changes. */
    public void addActionListener(ActionListener l) {
        if (actionListeners == null) {
            actionListeners = new ArrayList<ActionListener>();
        }
        actionListeners.add(l);
    }

    /** Removes a previously registered listener. */
    public void removeActionListener(ActionListener l) {
        if (actionListeners != null) {
            actionListeners.remove(l);
        }
    }

    /** Returns all registered listeners (empty array if none). */
    public ActionListener[] getActionListeners() {
        if (actionListeners != null) {
            return actionListeners.toArray(new ActionListener[0]);
        }
        return new ActionListener[0];
    }

    /**
     * Notifies all registered listeners.
     * @param e the triggering event; may be null (item-state changes).
     */
    protected void fireActionEvent(ActionEvent e) {
        if (actionListeners != null) {
            for (ActionListener l : actionListeners) {
                l.actionPerformed(e);
            }
        }
    }
}
package org.jaxen.test;

// NOTE(review): many string literals in this file were truncated by text
// extraction — every occurrence of `"http:` below lost the remainder of the
// URI, its closing quote, and in some cases subsequent method arguments.
// The code is NOT compilable as-is; restore the full literals from the
// upstream Jaxen DOMXPathTest sources before building.

import junit.framework.TestCase;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import org.jaxen.JaxenException;
import org.jaxen.XPath;
import org.jaxen.dom.DOMXPath;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.xml.sax.SAXException;

/**
 * Unit tests for {@link DOMXPath}, exercising construction, node selection,
 * and namespace-node axis behavior over W3C DOM documents.
 */
public class DOMXPathTest extends TestCase {

    private static final String BASIC_XML = "xml/basic.xml";

    private Document doc;
    private DocumentBuilderFactory factory;

    public DOMXPathTest(String name) {
        super( name );
    }

    // Fresh namespace-aware factory and empty document for each test.
    public void setUp() throws ParserConfigurationException {
        factory = DocumentBuilderFactory.newInstance();
        factory.setNamespaceAware(true);
        doc = factory.newDocumentBuilder().newDocument();
    }

    public void testConstruction() throws JaxenException {
        DOMXPath xpath = new DOMXPath( "/foo/bar/baz" );
        assertEquals("/foo/bar/baz", xpath.toString());
    }

    public void testConstructionWithNamespacePrefix() throws JaxenException {
        DOMXPath xpath = new DOMXPath( "/p:foo/p:bar/a:baz" );
        assertEquals("/p:foo/p:bar/a:baz", xpath.toString());
    }

    // xmlns declarations must not be counted by the attribute axis.
    public void testNamespaceDeclarationsAreNotAttributes() throws JaxenException {
        Element root = doc.createElementNS("http:
        // NOTE(review): truncated literal above; originally a namespace URI + element name.
        doc.appendChild(root);
        root.setAttributeNS("http:
        // NOTE(review): truncated; originally declared an xmlns attribute in the xmlns namespace.
        DOMXPath xpath = new DOMXPath( "count(/*/@*)" );
        Number value = xpath.numberValueOf(doc);
        assertEquals(0, value.intValue());
    }

    // see JAXEN-105
    public void testConsistentNamespaceDeclarations() throws JaxenException {
        Element root = doc.createElement("root");
        doc.appendChild(root);
        Element child = doc.createElementNS("http:
        // NOTE(review): truncated literal.
        root.appendChild(child);
        // different prefix
        child.setAttributeNS("http:
        // NOTE(review): truncated literal.
        XPath xpath = new DOMXPath("//namespace::node()");
        List result = xpath.selectNodes(doc);
        assertEquals(4, result.size());
    }

    // see JAXEN-105
    public void testInconsistentNamespaceDeclarations() throws JaxenException {
        Element root = doc.createElement("root");
        doc.appendChild(root);
        Element child = doc.createElementNS("http:
        // NOTE(review): truncated literal.
        root.appendChild(child);
        // same prefix
        child.setAttributeNS("http:
        // NOTE(review): truncated literal.
        XPath xpath = new DOMXPath("//namespace::node()");
        List result = xpath.selectNodes(doc);
        assertEquals(3, result.size());
    }

    // see JAXEN-105
    public void testIntrinsicNamespaceDeclarationOfElementBeatsContradictoryXmlnsPreAttr() throws JaxenException {
        Element root = doc.createElement("root");
        doc.appendChild(root);
        Element child = doc.createElementNS("http:
        // NOTE(review): truncated literal.
        root.appendChild(child);
        // same prefix
        child.setAttributeNS("http:
        // NOTE(review): truncated literal.
        XPath xpath = new DOMXPath("//namespace::node()[name(.)='foo']");
        List result = xpath.selectNodes(doc);
        assertEquals(1, result.size());
        Node node = (Node) result.get(0);
        assertEquals("http:
        // NOTE(review): truncated; originally asserted the winning namespace URI.
    }

    // see JAXEN-105
    public void testIntrinsicNamespaceDeclarationOfAttrBeatsContradictoryXmlnsPreAttr() throws JaxenException {
        Element root = doc.createElement("root");
        doc.appendChild(root);
        root.setAttributeNS("http:
        // NOTE(review): truncated literal.
        // same prefix, different namespace
        root.setAttributeNS("http:
        // NOTE(review): truncated literal.
        XPath xpath = new DOMXPath("//namespace::node()[name(.)='foo']");
        List result = xpath.selectNodes(doc);
        assertEquals(1, result.size());
        Node node = (Node) result.get(0);
        assertEquals("http:
        // NOTE(review): truncated; originally asserted the winning namespace URI.
    }

    // see JAXEN-105
    public void testIntrinsicNamespaceDeclarationOfElementBeatsContradictoryIntrinsicNamespaceOfAttr() throws JaxenException {
        Element root = doc.createElementNS("http:
        // NOTE(review): truncated literal.
        doc.appendChild(root);
        // same prefix
        root.setAttributeNS("http:
        // NOTE(review): truncated literal.
        XPath xpath = new DOMXPath("//namespace::node()[name(.)='pre']");
        List result = xpath.selectNodes(doc);
        assertEquals(1, result.size());
        Node node = (Node) result.get(0);
        assertEquals("http:
        // NOTE(review): truncated; originally asserted the winning namespace URI.
    }

    // Jaxen-54: nodes returned by selectNodes must be live DOM nodes.
    public void testUpdateDOMNodesReturnedBySelectNodes() throws JaxenException {
        Element root = doc.createElementNS("http:
        // NOTE(review): truncated literal.
        doc.appendChild(root);
        root.appendChild(doc.createComment("data"));
        DOMXPath xpath = new DOMXPath( "//comment()" );
        List results = xpath.selectNodes(doc);
        Node backroot = (Node) results.get(0);
        backroot.setNodeValue("test");
        assertEquals("test", backroot.getNodeValue());
    }

    public void testSelection() throws JaxenException, ParserConfigurationException, SAXException, IOException {
        XPath xpath = new DOMXPath( "/foo/bar/baz" );
        DocumentBuilder builder = factory.newDocumentBuilder();
        Document document = builder.parse( BASIC_XML );
        List results = xpath.selectNodes( document );
        assertEquals( 3, results.size() );
        Iterator iter = results.iterator();
        assertEquals( "baz", ((Element)iter.next()).getLocalName() );
        assertEquals( "baz", ((Element)iter.next()).getLocalName() );
        assertEquals( "baz", ((Element)iter.next()).getLocalName() );
        assertTrue( ! iter.hasNext() );
    }

    // Jaxen-22
    public void testPrecedingAxisWithPositionalPredicate() throws JaxenException, ParserConfigurationException, SAXException, IOException {
        XPath xpath = new DOMXPath( "//c/preceding::*[1][name()='d']" );
        DocumentBuilder builder = factory.newDocumentBuilder();
        Document document = builder.parse( "xml/web2.xml" );
        List result = xpath.selectNodes(document);
        assertEquals(1, result.size());
    }

    // Jaxen-22
    public void testJaxen22() throws JaxenException, ParserConfigurationException, SAXException, IOException {
        XPath xpath = new DOMXPath( "name(//c/preceding::*[1])" );
        DocumentBuilder builder = factory.newDocumentBuilder();
        doc = builder.parse("xml/web2.xml");
        Object result = xpath.evaluate(doc);
        assertEquals("d", result);
    }

    // The preceding axis must yield nodes in document order.
    public void testPrecedingAxisInDocumentOrder() throws JaxenException {
        XPath xpath = new DOMXPath( "preceding::*" );
        Element root = doc.createElement("root");
        doc.appendChild(root);
        Element a = doc.createElement("a");
        root.appendChild(a);
        Element b = doc.createElement("b");
        root.appendChild(b);
        Element c = doc.createElement("c");
        a.appendChild(c);
        List result = xpath.selectNodes(b);
        assertEquals(2, result.size());
        assertEquals(a, result.get(0));
        assertEquals(c, result.get(1));
    }
}
package nz.mega.sdk; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.net.URLConnection; import android.content.ContentResolver; import android.content.Context; import android.database.Cursor; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.graphics.Canvas; import android.graphics.Matrix; import android.graphics.Rect; import android.media.ExifInterface; import android.media.MediaMetadataRetriever; import android.media.ThumbnailUtils; import android.net.Uri; import android.provider.BaseColumns; import android.provider.MediaStore; public class AndroidGfxProcessor extends MegaGfxProcessor { Rect size; int orientation; String srcPath; Bitmap bitmap; static boolean isVideo; byte[] bitmapData; static Context context = null; protected AndroidGfxProcessor() { if (context == null) { try { context = (Context) Class.forName("android.app.AppGlobals") .getMethod("getInitialApplication") .invoke(null, (Object[]) null); } catch (Exception e) { } } } public static boolean isVideoFile(String path) { try { String mimeType = URLConnection.guessContentTypeFromName(path); return mimeType != null && mimeType.startsWith("video"); } catch(Exception e){ return false; } } public static Rect getImageDimensions(String path, int orientation) { Rect rect = new Rect(); if(isVideoFile(path)){ try { MediaMetadataRetriever retriever = new MediaMetadataRetriever(); retriever.setDataSource(path); int width = Integer.valueOf(retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH)); int height = Integer.valueOf(retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT)); retriever.release(); rect.right = width; rect.bottom = height; } catch (Exception e) { } } else{ try { BitmapFactory.Options options = new BitmapFactory.Options(); options.inJustDecodeBounds = true; BitmapFactory.decodeStream(new 
FileInputStream(path), null, options); if ((options.outWidth > 0) && (options.outHeight > 0)) { if ((orientation < 5) || (orientation > 8)) { rect.right = options.outWidth; rect.bottom = options.outHeight; } else { rect.bottom = options.outWidth; rect.right = options.outHeight; } } } catch (Exception e) { } } return rect; } public boolean readBitmap(String path) { if(isVideoFile(path)){ isVideo = true; srcPath = path; size = getImageDimensions(srcPath, orientation); return (size.right != 0) && (size.bottom != 0); } else{ isVideo = false; srcPath = path; orientation = getExifOrientation(path); size = getImageDimensions(srcPath, orientation); return (size.right != 0) && (size.bottom != 0); } } public int getWidth() { return size.right; } public int getHeight() { return size.bottom; } static public Bitmap getBitmap(String path, Rect rect, int orientation, int w, int h) { int width; int height; if(AndroidGfxProcessor.isVideo){ try { Bitmap bmThumbnail; bmThumbnail = ThumbnailUtils.createVideoThumbnail(path, MediaStore.Video.Thumbnails.FULL_SCREEN_KIND); if(context != null && bmThumbnail == null) { String SELECTION = MediaStore.MediaColumns.DATA + "=?"; String[] PROJECTION = {BaseColumns._ID}; Uri uri = MediaStore.Video.Media.EXTERNAL_CONTENT_URI; String[] selectionArgs = {path}; ContentResolver cr = context.getContentResolver(); Cursor cursor = cr.query(uri, PROJECTION, SELECTION, selectionArgs, null); if (cursor.moveToFirst()) { long videoId = cursor.getLong(0); bmThumbnail = MediaStore.Video.Thumbnails.getThumbnail(cr, videoId, MediaStore.Video.Thumbnails.FULL_SCREEN_KIND, null); } cursor.close(); } if (bmThumbnail != null) { return Bitmap.createScaledBitmap(bmThumbnail, w, h, true); } }catch (Exception e){ } } else{ if ((orientation < 5) || (orientation > 8)) { width = rect.right; height = rect.bottom; } else { width = rect.bottom; height = rect.right; } try { int scale = 1; while (width / scale / 2 >= w && height / scale / 2 >= h) scale *= 2; BitmapFactory.Options 
options = new BitmapFactory.Options(); options.inJustDecodeBounds = false; options.inSampleSize = scale; Bitmap tmp = BitmapFactory.decodeStream(new FileInputStream(path), null, options); tmp = fixExifOrientation(tmp, orientation); return Bitmap.createScaledBitmap(tmp, w, h, true); } catch (Exception e) { } } return null; } public static int getExifOrientation(String srcPath) { int orientation = ExifInterface.ORIENTATION_UNDEFINED; int i = 0; while ((i < 5) && (orientation == ExifInterface.ORIENTATION_UNDEFINED)) { try { ExifInterface exif = new ExifInterface(srcPath); orientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, orientation); } catch (IOException e) { try { Thread.sleep(100); } catch (InterruptedException e1) {} } i++; } return orientation; } /* * Change image orientation based on EXIF image data */ public static Bitmap fixExifOrientation(Bitmap bitmap, int orientation) { if (bitmap == null) return null; if ((orientation < 2) || (orientation > 8)) { // No changes required or invalid orientation return bitmap; } Matrix matrix = new Matrix(); switch (orientation) { case ExifInterface.ORIENTATION_TRANSPOSE: case ExifInterface.ORIENTATION_ROTATE_90: matrix.postRotate(90); break; case ExifInterface.ORIENTATION_ROTATE_180: case ExifInterface.ORIENTATION_FLIP_VERTICAL: matrix.postRotate(180); break; case ExifInterface.ORIENTATION_TRANSVERSE: case ExifInterface.ORIENTATION_ROTATE_270: matrix.postRotate(270); break; default: break; } if ((orientation == ExifInterface.ORIENTATION_FLIP_HORIZONTAL) || (orientation == ExifInterface.ORIENTATION_FLIP_VERTICAL)) matrix.preScale(-1, 1); else if ((orientation == ExifInterface.ORIENTATION_TRANSPOSE) || (orientation == ExifInterface.ORIENTATION_TRANSVERSE)) matrix.preScale(1, -1); return Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true); } public static Bitmap extractRect(Bitmap bitmap, int px, int py, int rw, int rh) { if (bitmap == null) return null; int w = 
bitmap.getWidth(); int h = bitmap.getHeight(); if ((px != 0) || (py != 0) || (rw != w) || (rh != h)) { Bitmap.Config conf = Bitmap.Config.ARGB_8888; Bitmap scaled = Bitmap.createBitmap(rw, rh, conf); Canvas canvas = new Canvas(scaled); canvas.drawBitmap(bitmap, new Rect(px, py, px + rw, py + rh), new Rect(0, 0, rw, rh), null); bitmap = scaled; } return bitmap; } public static boolean saveBitmap(Bitmap bitmap, File file) { if (bitmap == null) return false; FileOutputStream stream; try { stream = new FileOutputStream(file); if (!bitmap.compress(Bitmap.CompressFormat.JPEG, 85, stream)) return false; stream.close(); return true; } catch (Exception e) { } return false; } public int getBitmapDataSize(int w, int h, int px, int py, int rw, int rh) { if (bitmap == null) bitmap = getBitmap(srcPath, size, orientation, w, h); else bitmap = Bitmap.createScaledBitmap(bitmap, w, h, true); bitmap = extractRect(bitmap, px, py, rw, rh); if (bitmap == null) return 0; try { ByteArrayOutputStream stream = new ByteArrayOutputStream(); if (!bitmap.compress(Bitmap.CompressFormat.JPEG, 85, stream)) return 0; bitmapData = stream.toByteArray(); return bitmapData.length; } catch (Exception e) { } return 0; } public boolean getBitmapData(byte[] buffer) { try { System.arraycopy(bitmapData, 0, buffer, 0, bitmapData.length); return true; } catch (Exception e) { } return false; } public void freeBitmap() { bitmap = null; bitmapData = null; size = null; srcPath = null; orientation = 0; } }
package com.cv4j.core.datamodel;

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.util.Log;

import com.cv4j.exception.CV4JException;
import com.cv4j.image.util.IOUtils;

import java.io.InputStream;

/**
 * In-memory image backed by a flat int[] of pixels in row-major order.
 *
 * Pixel encoding depends on {@code type}:
 * - RGB: standard packed ARGB ints as produced by Bitmap.getPixels().
 * - GRAY/BINARY: after convert2Gray(), each int holds a raw 0-255 gray
 *   value (NOT packed into the 0xRRGGBB channels).
 */
public class ColorImage implements ImageData {

    // Packed pixels (or raw gray values after convert2Gray), row-major.
    private int[] pdata;
    private int width;
    private int height;
    // One of the CV4J_IMAGE_TYPE_* constants; never set by the constructors,
    // so a fresh instance keeps the field's default value.
    private int type;

    /**
     * Copies all pixels out of the given bitmap.
     *
     * @throws CV4JException when bitmap is null
     */
    public ColorImage(Bitmap bitmap) {
        if (bitmap == null) {
            throw new CV4JException("bitmap is null");
        }
        width = bitmap.getWidth();
        height = bitmap.getHeight();
        pdata = new int[width*height];
        bitmap.getPixels(pdata, 0, width, 0, 0, width, height);
    }

    /**
     * Decodes the stream into a bitmap and copies its pixels; the stream is
     * closed quietly afterwards.
     *
     * NOTE(review): BitmapFactory.decodeStream() can return null for
     * undecodable data, which would make the getWidth() call below throw an
     * NPE rather than a CV4JException — confirm whether callers rely on that.
     *
     * @throws CV4JException when inputStream is null
     */
    public ColorImage(InputStream inputStream) {
        if (inputStream == null) {
            throw new CV4JException("inputStream is null");
        }
        Bitmap bitmap = BitmapFactory.decodeStream(inputStream);
        width = bitmap.getWidth();
        height = bitmap.getHeight();
        pdata = new int[width*height];
        bitmap.getPixels(pdata, 0, width, 0, 0, width, height);
        IOUtils.closeQuietly(inputStream);
    }

    /** Returns the live backing array (not a copy); mutations are visible. */
    @Override
    public int[] getPixels() {
        return pdata;
    }

    @Override
    public int getWidth() {
        return width;
    }

    @Override
    public int getHeight() {
        return height;
    }

    /** Current pixel-encoding type (CV4J_IMAGE_TYPE_*). */
    @Override
    public int getType() {
        return this.type;
    }

    /**
     * Extracts one 8-bit channel as a byte array.
     *
     * For RGB: index 0 = red, 1 = green, 2 = blue; any other index yields
     * zeros. For GRAY/BINARY the value is taken from bits 16-23 of each
     * pixel.
     *
     * NOTE(review): convert2Gray() stores raw 0-255 values (bits 0-7), so
     * for images grayed by that method this extraction of bits 16-23 reads
     * zero — verify against how gray pdata is populated elsewhere.
     */
    @Override
    public byte[] getChannel(int index) {
        byte[] data = new byte[width*height];
        int len = width*height;
        if(type == CV4J_IMAGE_TYPE_RGB) {
            for(int i=0; i<len; i++) {
                int c = pdata[i];
                int b = 0;
                if(index == 0) {
                    b = (c&0xff0000)>>16;
                } else if(index == 1) {
                    b = (c&0xff00)>>8;
                } else if(index == 2) {
                    b = c&0xff;
                }
                data[i] = (byte)b;
            }
        } else if(CV4J_IMAGE_TYPE_GRAY == type || CV4J_IMAGE_TYPE_BINARY == type) {
            for(int i=0; i<len; i++) {
                int c = pdata[i];
                int b = 0;
                b = (c&0xff0000)>>16;
                data[i] = (byte)b;
            }
        }
        return data;
    }

    /** Overwrites the pixel buffer with the given array (length must match). */
    @Override
    public void putPixels(int[] pixels) {
        System.arraycopy(pixels, 0, pdata, 0, width*height);
    }

    /** Returns the raw pixel value at (row, col); no bounds checking. */
    @Override
    public int getPixel(int row, int col) {
        int index = row*width + col;
        // check OutOfBoundary
        return pdata[index];
    }

    /** Sets the raw pixel value at (row, col); no bounds checking. */
    @Override
    public void setPixel(int row, int col, int rgb) {
        int index = row*width + col;
        pdata[index] = rgb;
    }

    /** Returns a copy of one full row of pixels. */
    @Override
    public int[] getPixelByRowNumber(int rowIndex) {
        int[] pixels = new int[width];
        System.arraycopy(pdata, rowIndex*width, pixels, 0, width);
        return pixels;
    }

    /**
     * Converts the image to grayscale in place using the ITU-R BT.601
     * luma weights (0.299 R + 0.587 G + 0.114 B), then marks the image as
     * GRAY. Each pixel afterwards holds a raw 0-255 value, not packed ARGB.
     */
    public void convert2Gray() {
        int[] gray = new int[pdata.length];
        int offset = 0;
        int g=0;
        for(int row=0; row < height; row++) {
            offset = row*width;
            int ta=0, tr=0, tg=0, tb=0;
            for(int col=0; col<width; col++) {
                ta = (pdata[offset] >> 24) & 0xff;
                tr = (pdata[offset] >> 16) & 0xff;
                tg = (pdata[offset] >> 8) & 0xff;
                tb = pdata[offset] & 0xff;
                g= (int)(0.299 *tr + 0.587*tg + 0.114*tb);
                gray[offset] = g;
                offset++;
            }
        }
        type = CV4J_IMAGE_TYPE_GRAY;
        System.arraycopy(gray, 0, pdata, 0, pdata.length);
        gray = null;
    }

    /** Renders the image into a new RGB_565 bitmap. */
    public Bitmap toBitmap() {
        return toBitmap(Bitmap.Config.RGB_565);
    }

    /**
     * Renders the image into a new bitmap of the given config. RGB pixels
     * are written directly; GRAY/BINARY pixels (raw 0-255 values) are
     * re-packed into opaque ARGB by replicating the value into R, G and B.
     */
    public Bitmap toBitmap(Bitmap.Config bitmapConfig) {
        Bitmap bitmap = Bitmap.createBitmap(width, height, bitmapConfig);
        if(type == CV4J_IMAGE_TYPE_RGB) {
            bitmap.setPixels(pdata, 0, width, 0, 0, width, height);
        } else if(CV4J_IMAGE_TYPE_GRAY == type || CV4J_IMAGE_TYPE_BINARY == type) {
            int[] rgb = new int[pdata.length];
            int offset = 0;
            for(int row=0; row < height; row++) {
                offset = row*width;
                int ta=255, tr=0, tg=0, tb=0;
                for(int col=0; col<width; col++) {
                    // Replicate the gray value into all three color channels.
                    rgb[offset] = (ta << 24) | (pdata[offset] << 16) | (pdata[offset] << 8) | pdata[offset];
                    offset++;
                }
            }
            bitmap.setPixels(rgb, 0, width, 0, 0, width, height);
        } else {
            // Exception
            Log.e("ColorImage","can not convert to bitmap!");
        }
        return bitmap;
    }
}
import java.util.LinkedHashSet;
import java.util.NoSuchElementException;
import java.util.Queue;

/**
 * A {@link LinkedHashSet} that also behaves as a {@link Queue}, where the
 * head of the queue is the element inserted most recently. Re-offering an
 * element that is already present moves it back to the head.
 *
 * @param <E> the type of elements held in this collection
 */
public class QueuedLinkedHashSet<E> extends LinkedHashSet<E>
        implements Queue<E> {

    /**
     * A version number for this class. It should be changed whenever the
     * class structure is changed (or anything else that would prevent
     * serialized objects being unserialized with the new class).
     */
    private static final long serialVersionUID = 1;

    /** {@inheritDoc} */
    @Override
    public boolean offer(final E e) {
        return add(e);
    }

    /** {@inheritDoc} */
    @Override
    public E remove() {
        if (!isEmpty()) {
            return poll();
        }
        throw new NoSuchElementException("Queue is empty.");
    }

    /** {@inheritDoc} */
    @Override
    public E poll() {
        final E head = peek();
        if (head != null) {
            remove(head);
        }
        return head;
    }

    /** {@inheritDoc} */
    @Override
    public E element() {
        if (!isEmpty()) {
            return peek();
        }
        throw new NoSuchElementException("Queue is empty.");
    }

    /** {@inheritDoc} */
    @Override
    public E peek() {
        // The head is the most recently inserted element, i.e. the last one
        // the iterator yields; an empty set leaves this null.
        E newest = null;
        for (final E item : this) {
            newest = item;
        }
        return newest;
    }

    /**
     * Offers an item to this queue; if the item already exists it is removed
     * first, so it ends up at the head of the queue either way.
     *
     * @param e Object to add
     *
     * @return true iff the item was added
     */
    public boolean offerAndMove(final E e) {
        remove(e);
        return offer(e);
    }
}
package com.fsck.k9.view;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;

import android.app.Activity;
import android.app.Fragment;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.content.IntentSender.SendIntentException;
import android.util.AttributeSet;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import android.widget.TextView;
import android.widget.Toast;

import com.fsck.k9.Account;
import com.fsck.k9.Identity;
import com.fsck.k9.K9;
import com.fsck.k9.R;
import com.fsck.k9.crypto.CryptoHelper;
import com.fsck.k9.crypto.OpenPgpApiHelper;
import com.fsck.k9.fragment.MessageViewFragment;
import com.fsck.k9.helper.IdentityHelper;
import com.fsck.k9.mail.Message;
import com.fsck.k9.mail.MessagingException;
import com.fsck.k9.mail.Part;
import com.fsck.k9.mail.internet.MimeUtility;

import org.openintents.openpgp.OpenPgpError;
import org.openintents.openpgp.OpenPgpSignatureResult;
import org.openintents.openpgp.util.OpenPgpApi;
import org.openintents.openpgp.util.OpenPgpServiceConnection;

/**
 * Banner view shown above a message that decrypts and/or verifies it through
 * an external OpenPGP provider app (via the OpenPGP API service), then
 * reflects the decryption/signature state with color, text, and an optional
 * "get key" action.
 */
public class MessageOpenPgpView extends LinearLayout {

    private Context mContext;
    private MessageViewFragment mFragment;
    private RelativeLayout mSignatureLayout = null;
    private ImageView mSignatureStatusImage = null;
    private TextView mSignatureUserId = null;
    private TextView mText = null;
    private ProgressBar mProgress;
    private Button mGetKeyButton;

    private OpenPgpServiceConnection mOpenPgpServiceConnection;
    private OpenPgpApi mOpenPgpApi;

    private String mOpenPgpProvider;
    private Message mMessage;

    // PendingIntent supplied by the provider to fetch a missing public key.
    private PendingIntent mMissingKeyPI;

    private static final int REQUEST_CODE_DECRYPT_VERIFY = 12;

    // Plain-text payload handed to the OpenPGP provider for decrypt/verify.
    String mData;
    Account mAccount;

    public MessageOpenPgpView(Context context, AttributeSet attrs) {
        super(context, attrs);
        mContext = context;
    }

    /**
     * Looks up the child widgets after inflation and wires the "get key"
     * button. Must be called before updateLayout().
     */
    public void setupChildViews() {
        mSignatureLayout = (RelativeLayout) findViewById(R.id.openpgp_signature_layout);
        mSignatureStatusImage = (ImageView) findViewById(R.id.openpgp_signature_status);
        mSignatureUserId = (TextView) findViewById(R.id.openpgp_user_id);
        mText = (TextView) findViewById(R.id.openpgp_text);
        mProgress = (ProgressBar) findViewById(R.id.openpgp_progress);
        mGetKeyButton = (Button) findViewById(R.id.openpgp_get_key);

        mGetKeyButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                getMissingKey();
            }
        });
    }

    public void setFragment(Fragment fragment) {
        mFragment = (MessageViewFragment) fragment;
    }

    /**
     * Fill the decrypt layout with signature data, if known, make controls
     * visible, if they should be visible.
     */
    public void updateLayout(Account account, String decryptedData,
            final OpenPgpSignatureResult signatureResult,
            final Message message) {

        // set class variables
        mAccount = account;
        mOpenPgpProvider = mAccount.getOpenPgpProvider();
        mMessage = message;

        // only use this view if a OpenPGP Provider is set
        if (mOpenPgpProvider == null) {
            return;
        }

        Activity activity = mFragment.getActivity();
        if (activity == null) {
            return;
        }
        // bind to service
        mOpenPgpServiceConnection = new OpenPgpServiceConnection(activity,
                mOpenPgpProvider);
        mOpenPgpServiceConnection.bindToService();

        if ((message == null) && (decryptedData == null)) {
            this.setVisibility(View.GONE);

            // don't process further
            return;
        }
        if (decryptedData != null && signatureResult == null) {
            // encrypted-only

            MessageOpenPgpView.this.setBackgroundColor(mFragment.getResources().getColor(
                    R.color.openpgp_blue));
            mText.setText(R.string.openpgp_successful_decryption);

            // don't process further
            return;
        } else if (decryptedData != null && signatureResult != null) {
            // signed-only and signed-and-encrypted

            // Map each signature status onto a banner color, message text,
            // and visibility of the signer row / "get key" button.
            switch (signatureResult.getStatus()) {
                case OpenPgpSignatureResult.SIGNATURE_ERROR:
                    // TODO: signature error but decryption works?
                    mText.setText(R.string.openpgp_signature_invalid);
                    MessageOpenPgpView.this.setBackgroundColor(mFragment.getResources().getColor(
                            R.color.openpgp_red));

                    mGetKeyButton.setVisibility(View.GONE);
                    mSignatureStatusImage.setImageResource(R.drawable.overlay_error);
                    mSignatureLayout.setVisibility(View.GONE);
                    break;

                case OpenPgpSignatureResult.SIGNATURE_SUCCESS_CERTIFIED:
                    if (signatureResult.isSignatureOnly()) {
                        mText.setText(R.string.openpgp_signature_valid_certified);
                    }
                    else {
                        mText.setText(R.string.openpgp_successful_decryption_valid_signature_certified);
                    }
                    MessageOpenPgpView.this.setBackgroundColor(mFragment.getResources().getColor(
                            R.color.openpgp_green));

                    mGetKeyButton.setVisibility(View.GONE);
                    mSignatureUserId.setText(signatureResult.getUserId());
                    mSignatureStatusImage.setImageResource(R.drawable.overlay_ok);
                    mSignatureLayout.setVisibility(View.VISIBLE);

                    break;

                case OpenPgpSignatureResult.SIGNATURE_UNKNOWN_PUB_KEY:
                    if (signatureResult.isSignatureOnly()) {
                        mText.setText(R.string.openpgp_signature_unknown_text);
                    }
                    else {
                        mText.setText(R.string.openpgp_successful_decryption_unknown_signature);
                    }
                    MessageOpenPgpView.this.setBackgroundColor(mFragment.getResources().getColor(
                            R.color.openpgp_orange));

                    // Offer the user a way to download the signer's key.
                    mGetKeyButton.setVisibility(View.VISIBLE);
                    mSignatureUserId.setText(R.string.openpgp_signature_unknown);
                    mSignatureStatusImage.setImageResource(R.drawable.overlay_error);
                    mSignatureLayout.setVisibility(View.VISIBLE);

                    break;

                case OpenPgpSignatureResult.SIGNATURE_SUCCESS_UNCERTIFIED:
                    if (signatureResult.isSignatureOnly()) {
                        mText.setText(R.string.openpgp_signature_valid_uncertified);
                    }
                    else {
                        mText.setText(R.string.openpgp_successful_decryption_valid_signature_uncertified);
                    }
                    MessageOpenPgpView.this.setBackgroundColor(mFragment.getResources().getColor(
                            R.color.openpgp_orange));

                    mGetKeyButton.setVisibility(View.GONE);
                    mSignatureUserId.setText(signatureResult.getUserId());
                    mSignatureStatusImage.setImageResource(R.drawable.overlay_ok);
                    mSignatureLayout.setVisibility(View.VISIBLE);

                    break;

                default:
                    break;
            }

            // don't process further
            return;
        }

        // Start new decryption/verification
        CryptoHelper helper = new CryptoHelper();
        if (helper.isEncrypted(message) || helper.isSigned(message)) {
            // inline PGP -> start automatic decrypt
            decryptAndVerify(message);
        } else {
            try {
                // check for PGP/MIME encryption
                Part pgp = MimeUtility
                        .findFirstPartByMimeType(message, "application/pgp-encrypted");
                if (pgp != null) {
                    Toast.makeText(mContext, R.string.pgp_mime_unsupported, Toast.LENGTH_LONG)
                            .show();
                }
            } catch (MessagingException e) {
                // nothing to do...
            }
        }
    }

    /**
     * Extracts the message text and hands it to the OpenPGP provider on a
     * background thread; shows the progress UI meanwhile.
     */
    private void decryptAndVerify(final Message message) {
        this.setVisibility(View.VISIBLE);
        mProgress.setVisibility(View.VISIBLE);
        MessageOpenPgpView.this.setBackgroundColor(mFragment.getResources().getColor(
                R.color.openpgp_orange));
        mText.setText(R.string.openpgp_decrypting_verifying);

        // waiting in a new thread
        Runnable r = new Runnable() {

            @Override
            public void run() {
                try {
                    // get data String
                    Part part = MimeUtility.findFirstPartByMimeType(message, "text/plain");
                    if (part == null) {
                        part = MimeUtility.findFirstPartByMimeType(message, "text/html");
                    }
                    if (part != null) {
                        mData = MimeUtility.getTextFromPart(part);
                    }

                    // wait for service to be bound
                    // NOTE(review): busy-wait with no timeout; never completes
                    // if the provider cannot be bound.
                    while (!mOpenPgpServiceConnection.isBound()) {
                        try {
                            Thread.sleep(100);
                        } catch (InterruptedException e) {
                        }
                    }

                    mOpenPgpApi = new OpenPgpApi(getContext(),
                            mOpenPgpServiceConnection.getService());

                    decryptVerify(new Intent());
                } catch (MessagingException me) {
                    Log.e(K9.LOG_TAG, "Unable to decrypt email.", me);
                }

            }
        };

        new Thread(r).start();

    }

    /**
     * Issues the asynchronous ACTION_DECRYPT_VERIFY call to the provider.
     * The same method is re-invoked with the provider's returned Intent
     * after any required user interaction.
     */
    private void decryptVerify(Intent intent) {
        intent.setAction(OpenPgpApi.ACTION_DECRYPT_VERIFY);
        intent.putExtra(OpenPgpApi.EXTRA_REQUEST_ASCII_ARMOR, true);

        // Tell the provider which of the user's identities received this
        // message so it can pick the matching secret key.
        Identity identity = IdentityHelper.getRecipientIdentityFromMessage(mAccount, mMessage);
        String accName = OpenPgpApiHelper.buildAccountName(identity);
        intent.putExtra(OpenPgpApi.EXTRA_ACCOUNT_NAME, accName);

        InputStream is = null;
        try {
            is = new ByteArrayInputStream(mData.getBytes("UTF-8"));
        } catch (UnsupportedEncodingException e) {
            Log.e(K9.LOG_TAG, "UnsupportedEncodingException.", e);
        }

        final ByteArrayOutputStream os = new ByteArrayOutputStream();

        DecryptVerifyCallback callback = new DecryptVerifyCallback(os, REQUEST_CODE_DECRYPT_VERIFY);

        mOpenPgpApi.executeApiAsync(intent, is, os, callback);
    }

    /**
     * Launches the provider's PendingIntent that lets the user download the
     * missing signer key.
     */
    private void getMissingKey() {
        try {
            mFragment.getActivity().startIntentSenderForResult(
                    mMissingKeyPI.getIntentSender(),
                    REQUEST_CODE_DECRYPT_VERIFY, null, 0, 0, 0);
        } catch (SendIntentException e) {
            Log.e(K9.LOG_TAG, "SendIntentException", e);
        }
    }

    /**
     * Called on successful decrypt/verification
     */
    private class DecryptVerifyCallback implements OpenPgpApi.IOpenPgpCallback {
        ByteArrayOutputStream os;
        int requestCode;

        private DecryptVerifyCallback(ByteArrayOutputStream os, int requestCode) {
            this.os = os;
            this.requestCode = requestCode;
        }

        @Override
        public void onReturn(Intent result) {
            switch (result.getIntExtra(OpenPgpApi.RESULT_CODE, OpenPgpApi.RESULT_CODE_ERROR)) {
                case OpenPgpApi.RESULT_CODE_SUCCESS: {
                    try {
                        final String output = os.toString("UTF-8");

                        OpenPgpSignatureResult sigResult = null;
                        if (result.hasExtra(OpenPgpApi.RESULT_SIGNATURE)) {
                            sigResult = result.getParcelableExtra(OpenPgpApi.RESULT_SIGNATURE);
                        }

                        if (K9.DEBUG)
                            Log.d(K9.LOG_TAG, "result: " + os.toByteArray().length
                                    + " str=" + output);

                        // missing key -> PendingIntent to get keys
                        mMissingKeyPI = result.getParcelableExtra(OpenPgpApi.RESULT_INTENT);

                        mProgress.setVisibility(View.GONE);
                        mFragment.setMessageWithOpenPgp(output, sigResult);
                    } catch (UnsupportedEncodingException e) {
                        Log.e(K9.LOG_TAG, "UnsupportedEncodingException", e);
                    }

                    break;
                }
                case OpenPgpApi.RESULT_CODE_USER_INTERACTION_REQUIRED: {
                    // Provider needs UI (e.g. passphrase); result continues
                    // in handleOnActivityResult().
                    PendingIntent pi = result.getParcelableExtra(OpenPgpApi.RESULT_INTENT);
                    try {
                        mFragment.getActivity().startIntentSenderForResult(
                                pi.getIntentSender(),
                                requestCode, null, 0, 0, 0);
                    } catch (SendIntentException e) {
                        Log.e(K9.LOG_TAG, "SendIntentException", e);
                    }
                    break;
                }
                case OpenPgpApi.RESULT_CODE_ERROR: {
                    OpenPgpError error = result.getParcelableExtra(OpenPgpApi.RESULT_ERROR);
                    handleError(error);
                    break;
                }
            }
        }
    }

    /**
     * Continues a decrypt/verify round-trip after provider-side user
     * interaction. Returns true when the result was consumed by this view.
     */
    public boolean handleOnActivityResult(int requestCode, int resultCode, Intent data) {
        if (K9.DEBUG)
            Log.d(K9.LOG_TAG, "onActivityResult resultCode: " + resultCode);

        // try again after user interaction
        if (resultCode == Activity.RESULT_OK && requestCode == REQUEST_CODE_DECRYPT_VERIFY) {
            /*
             * The data originally given to the decryptVerify() method, is again
             * returned here to be used when calling decryptVerify() after user
             * interaction. The Intent now also contains results from the user
             * interaction, for example selected key ids.
             */
            decryptVerify(data);

            return true;
        }

        return false;
    }

    /**
     * Shows the provider-reported error in the banner; always dispatched to
     * the UI thread because the API callback may arrive on a worker thread.
     */
    private void handleError(final OpenPgpError error) {
        Activity activity = mFragment.getActivity();
        if (activity == null) {
            return;
        }
        activity.runOnUiThread(new Runnable() {

            @Override
            public void run() {
                mProgress.setVisibility(View.GONE);

                if (K9.DEBUG) {
                    Log.d(K9.LOG_TAG, "OpenPGP Error ID:" + error.getErrorId());
                    Log.d(K9.LOG_TAG, "OpenPGP Error Message:" + error.getMessage());
                }

                mText.setText(mFragment.getString(R.string.openpgp_error) + " "
                        + error.getMessage());
                MessageOpenPgpView.this.setBackgroundColor(mFragment.getResources().getColor(
                        R.color.openpgp_red));
            }
        });
    }

    @Override
    protected void onAttachedToWindow() {
        super.onAttachedToWindow();
        // bind to service if a OpenPGP Provider is available
        if (mOpenPgpProvider != null) {
            mOpenPgpServiceConnection = new OpenPgpServiceConnection(mFragment.getActivity(),
                    mOpenPgpProvider);
            mOpenPgpServiceConnection.bindToService();
        }
    }

    @Override
    protected void onDetachedFromWindow() {
        super.onDetachedFromWindow();
        if (mOpenPgpServiceConnection != null) {
            mOpenPgpServiceConnection.unbindFromService();
        }
    }
}
package com.psddev.dari.db;

import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;

import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.psddev.dari.util.ObjectUtils;
import com.psddev.dari.util.RepeatingTask;
import com.psddev.dari.util.Stats;
import com.psddev.dari.util.StringUtils;

/**
 * Periodically updates indexes annotated with {@code \@Recalculate}.
 */
public class RecalculationTask extends RepeatingTask {

    // How often the running marker (currentRunningDate) is refreshed while
    // a recalculation is in progress.
    private static final int UPDATE_LATEST_EVERY_SECONDS = 60;
    // If an update is this many hours overdue, fall back to reindexing all
    // records instead of just the ones with fresh metric data.
    private static final int OVERDUE_REINDEX_HOURS = 6;
    private static final int QUERY_ITERABLE_SIZE = 200;

    private static final Logger LOGGER = LoggerFactory.getLogger(RecalculationTask.class);
    private static final Stats STATS = new Stats("Recalculation Task");

    // Progress counters reset on every run; reported by getProgress().
    private long recordsProcessed = 0L;
    private long recordsTotal = 0L;

    @Override
    protected DateTime calculateRunTime(DateTime currentTime) {
        return everyMinute(currentTime);
    }

    /**
     * One repeating pass: for every recalculable method group, run the
     * recalculation if it is due and record timing stats when work was done.
     */
    @Override
    protected void doRepeatingTask(DateTime runTime) throws Exception {
        recordsProcessed = 0L;
        recordsTotal = 0L;
        for (RecalculationContext context : getIndexableMethods()) {
            Stats.Timer timer = STATS.startTimer();
            long recalculated = recalculateIfNecessary(context);
            if (recalculated > 0L) {
                timer.stop("Recalculate " + context.getKey(), recalculated);
            }
        }
    }

    /**
     * Checks the LastRecalculation and DistributedLock and executes recalculate if it should.
     */
    private long recalculateIfNecessary(RecalculationContext context) {
        String updateKey = context.getKey();
        long recalculated = 0;
        boolean shouldExecute = false;
        boolean canExecute = false;
        LastRecalculation last = Query.from(LastRecalculation.class).master().noCache().where("key = ?", updateKey).first();

        if (last == null) {
            // First time this key has ever been seen: run and reindex all.
            last = new LastRecalculation();
            last.setKey(updateKey);
            shouldExecute = true;
            context.setReindexAll(true);
        } else {
            if (last.getLastExecutedDate() == null && last.getCurrentRunningDate() == null) {
                // this has never been executed, and it's not currently executing.
                shouldExecute = true;
                context.setReindexAll(true);
            } else if (last.getLastExecutedDate() != null && context.delay.isUpdateDue(new DateTime(), last.getLastExecutedDate())) {
                // this has been executed before and an update is due.
                shouldExecute = true;
                if (context.delay.isUpdateDue(new DateTime().minusHours(OVERDUE_REINDEX_HOURS), last.getLastExecutedDate())) {
                    // this has been executed before, but it is over
                    // OVERDUE_REINDEX_HOURS hours overdue, so reindex all.
                    context.setReindexAll(true);
                }
            }
            if (last.getCurrentRunningDate() != null) {
                // this is currently executing
                shouldExecute = false;
                if (context.delay.isUpdateDue(new DateTime(), last.getCurrentRunningDate())) {
                    // the task is running, but another update is already due.
                    // It probably died. Clear it out and pick it up next
                    // time if it hasn't updated, but don't run this time.
                    last.setCurrentRunningDate(null);
                    last.saveImmediately();
                }
            }
        }

        if (shouldExecute) {
            // Check to see if any other processes are currently running on other hosts.
            if (Query.from(LastRecalculation.class).where("currentRunningDate > ?", new DateTime().minusSeconds(UPDATE_LATEST_EVERY_SECONDS * 5)).hasMoreThan(0)) {
                shouldExecute = false;
            }
        }

        if (shouldExecute) {
            boolean locked = false;
            DistributedLock lock = new DistributedLock(Database.Static.getDefault(), updateKey);
            try {
                // The lock only guards the *claim* (writing currentRunningDate);
                // it is released before the actual recalculation runs.
                if (lock.tryLock()) {
                    locked = true;
                    last.setCurrentRunningDate(new DateTime());
                    last.saveImmediately();
                    canExecute = true;
                }
            } finally {
                if (locked) {
                    lock.unlock();
                }
            }

            if (canExecute) {
                try {
                    recalculated = recalculate(context, last);
                } finally {
                    // Always record completion and clear the running marker,
                    // even when recalculate() throws.
                    last.setLastExecutedDate(new DateTime());
                    last.setCurrentRunningDate(null);
                    last.saveImmediately();
                }
            }
        }
        return recalculated;
    }

    /**
     * Actually does the work of iterating through the records and updating indexes.
     */
    private long recalculate(RecalculationContext context, LastRecalculation last) {
        long recalculated = 0L;
        try {
            Query<?> query = Query.fromAll().noCache().resolveToReferenceOnly();
            query.getOptions().put(SqlDatabase.USE_JDBC_FETCH_SIZE_QUERY_OPTION, false);
            ObjectField metricField = context.getMetric();
            DateTime processedLastRunDate = last.getLastExecutedDate();
            if (metricField != null) {
                if (last.getLastExecutedDate() != null) {
                    // Align the last-run date to the metric's event-date
                    // interval so the freshness comparison below matches how
                    // metric timestamps are bucketed.
                    MetricInterval interval = metricField.as(MetricAccess.FieldData.class).getEventDateProcessor();
                    if (interval != null) {
                        processedLastRunDate = new DateTime(interval.process(processedLastRunDate));
                    }
                }
                if (!context.isReindexAll()) {
                    // Only visit records that actually have a value for one
                    // of the recalculable methods.
                    for (ObjectMethod method : context.methods) {
                        query.or(method.getUniqueName() + " != missing");
                    }
                }
            }
            boolean isGlobal = context.groups.contains(Object.class.getName());
            for (Object obj : query.iterable(QUERY_ITERABLE_SIZE)) {
                try {
                    if (!shouldContinue()) {
                        break;
                    }
                    setProgressIndex(++ recordsTotal);
                    State objState = State.getInstance(obj);
                    if (objState == null || objState.getType() == null) {
                        continue;
                    }
                    if (!isGlobal) {
                        // Skip records whose type shares no group with this
                        // context.
                        Set<String> objGroups = new HashSet<String>(objState.getType().getGroups());
                        objGroups.retainAll(context.groups);
                        if (objGroups.isEmpty()) {
                            continue;
                        }
                    }
                    if (!context.isReindexAll() && metricField != null) {
                        Metric metric = new Metric(objState, metricField);
                        DateTime lastMetricUpdate = metric.getLastUpdate();
                        if (lastMetricUpdate == null) {
                            // there's no metric data, so just pass.
                            continue;
                        } else if (last.getLastExecutedDate() != null && lastMetricUpdate.isBefore(processedLastRunDate.minusSeconds(1))) {
                            // metric data is older than the last run date, so skip it.
                            continue;
                        }
                    }
                    objState.setResolveToReferenceOnly(false);
                    for (ObjectMethod method : context.methods) {
                        LOGGER.debug("Updating Index: " + method.getInternalName() + " for " + objState.getId());
                        method.recalculate(objState);
                        recalculated ++;
                    }
                    recordsProcessed ++;
                } finally {
                    // Periodically refresh the running marker so other hosts
                    // can tell this task is still alive.
                    if (last.getCurrentRunningDate().plusSeconds(UPDATE_LATEST_EVERY_SECONDS).isBeforeNow()) {
                        last.setCurrentRunningDate(new DateTime());
                        last.saveImmediately();
                    }
                }
            }
        } finally {
            last.setCurrentRunningDate(new DateTime());
            last.saveImmediately();
        }
        return recalculated;
    }

    @Override
    public String getProgress() {
        return new StringBuilder("Recalculated ").append(recordsProcessed).append(", checked ").append(recordsTotal).toString();
    }

    /**
     * Saves the last time the index update was executed for each context.
     */
    public static class LastRecalculation extends Record {

        // null if it's not running
        @Indexed
        private Long currentRunningDate;
        private Long lastExecutedDate;

        @Indexed(unique = true)
        private String key;

        public DateTime getCurrentRunningDate() {
            return (currentRunningDate == null ? null : new DateTime(currentRunningDate));
        }

        public void setCurrentRunningDate(DateTime currentRunningDate) {
            this.currentRunningDate = (currentRunningDate == null ? null : currentRunningDate.getMillis());
        }

        public DateTime getLastExecutedDate() {
            return (lastExecutedDate == null ? null : new DateTime(lastExecutedDate));
        }

        public void setLastExecutedDate(DateTime lastExecutedDate) {
            this.lastExecutedDate = (lastExecutedDate == null ? null : lastExecutedDate.getMillis());
        }

        public String getKey() {
            return key;
        }

        public void setKey(String key) {
            this.key = key;
        }
    }

    /**
     * Scans the environment for recalculable (delayed, non-immediate)
     * methods and groups them into contexts keyed by (type, groups, delay,
     * metric) so methods sharing a schedule are processed in one pass.
     */
    private static Collection<RecalculationContext> getIndexableMethods() {
        Map<String, RecalculationContext> contextsByGroupsAndDelayAndMetric = new HashMap<String, RecalculationContext>();
        for (ObjectType type : Database.Static.getDefault().getEnvironment().getTypes()) {
            for (ObjectMethod method : type.getMethods()) {
                if (method.getJavaDeclaringClassName().equals(type.getObjectClassName())
                        && !method.as(RecalculationFieldData.class).isImmediate()
                        && method.as(RecalculationFieldData.class).getRecalculationDelay() != null) {
                    TreeSet<String> groups = new TreeSet<String>();
                    Set<Class<?>> objectClasses = new HashSet<Class<?>>();
                    for (String group : method.as(RecalculationFieldData.class).getGroups()) {
                        objectClasses.add(ObjectUtils.getClassByName(group));
                    }
                    if (objectClasses.isEmpty()) {
                        objectClasses.add(type.getObjectClass());
                    }
                    for (Class<?> objectClass : objectClasses) {
                        if (Modification.class.isAssignableFrom(objectClass)) {
                            // A Modification applies to all the classes it
                            // modifies, so expand to those instead.
                            @SuppressWarnings("unchecked")
                            Class<? extends Modification<?>> modClass = ((Class<? extends Modification<?>>) objectClass);
                            for (Class<?> modifiedClass : Modification.Static.getModifiedClasses(modClass)) {
                                ObjectType modifiedType = ObjectType.getInstance(modifiedClass);
                                if (modifiedType != null) {
                                    groups.add(modifiedType.getInternalName());
                                } else {
                                    groups.add(modifiedClass.getName());
                                }
                            }
                        } else {
                            groups.add(objectClass.getName());
                        }
                    }
                    RecalculationContext context = new RecalculationContext(type, groups, method.as(RecalculationFieldData.class).getRecalculationDelay());
                    context.methods.add(method);
                    String key = context.getKey();
                    if (!contextsByGroupsAndDelayAndMetric.containsKey(key)) {
                        contextsByGroupsAndDelayAndMetric.put(key, context);
                    }
                    // methods is a Set, so re-adding for a fresh context is
                    // harmless.
                    contextsByGroupsAndDelayAndMetric.get(key).methods.add(method);
                }
            }
        }
        return contextsByGroupsAndDelayAndMetric.values();
    }

    /**
     * One unit of recalculation work: a set of methods on a type that share
     * the same groups, delay, and (optional) metric field.
     */
    private static final class RecalculationContext {
        public final ObjectType type;
        public final RecalculationDelay delay;
        public final TreeSet<String> groups;
        public final Set<ObjectMethod> methods = new HashSet<ObjectMethod>();
        public boolean reindexAll = false;

        public RecalculationContext(ObjectType type, TreeSet<String> groups, RecalculationDelay delay) {
            this.type = type;
            this.groups = groups;
            this.delay = delay;
        }

        /**
         * Returns the metric field shared by every method in this context,
         * or null when the methods disagree (or none declares one).
         */
        public ObjectField getMetric() {
            boolean first = true;
            ObjectField metricField = null;
            ObjectField useMetricField = null;
            for (ObjectMethod method : methods) {
                String methodMetricFieldName = method.as(MetricAccess.FieldData.class).getRecalculableFieldName();
                ObjectField methodMetricField = null;
                if (methodMetricFieldName != null) {
                    // Prefer the environment-level field; fall back to the
                    // type-local field.
                    methodMetricField = type.getState().getDatabase().getEnvironment().getField(methodMetricFieldName);
                    if (methodMetricField == null) {
                        methodMetricField = type.getField(methodMetricFieldName);
                    }
                    if (methodMetricField == null) {
                        LOGGER.warn("Invalid metric field: " + methodMetricFieldName);
                    }
                }
                if (first) {
                    metricField = methodMetricField;
                    useMetricField = metricField;
                } else {
                    if (!ObjectUtils.equals(metricField, methodMetricField)) {
                        useMetricField = null;
                    }
                }
                first = false;
            }
            return useMetricField;
        }

        /**
         * Unique key identifying this context; requires at least one method
         * (the metric field participates in the key).
         */
        public String getKey() {
            if (methods.isEmpty()) {
                throw new IllegalStateException("Add a method before you get the key!");
            }
            ObjectField metricField = getMetric();
            return type.getInternalName() + " " + StringUtils.join(groups.toArray(new String[0]), ",") + " " + delay.getClass().getName() + (metricField != null ? " " + metricField.getUniqueName() : "");
        }

        public boolean isReindexAll() {
            return reindexAll;
        }

        public void setReindexAll(boolean reindexAll) {
            this.reindexAll = reindexAll;
        }
    }
}
package com.icafe4j.image.meta.jpeg;

import java.io.IOException;
import java.io.OutputStream;

import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

import com.icafe4j.image.jpeg.Marker;
import com.icafe4j.image.meta.xmp.XMP;
import com.icafe4j.io.IOUtils;
import com.icafe4j.string.StringUtils;
import com.icafe4j.string.XMLUtils;
import com.icafe4j.util.ArrayUtils;

import static com.icafe4j.image.jpeg.JPEGTweaker.*;

/**
 * JPEG-specific XMP metadata writer.
 * <p>
 * Serializes an XMP document into one APP1 segment and, when the packet is
 * too large for a single JPEG segment, splits the overflow into ExtendedXMP
 * APP1 chunks linked to the main packet via an {@code xmpNote:HasExtendedXMP}
 * GUID, as described in the Adobe XMP Specification Part 3.
 */
public class JpegXMP extends XMP {
	// Largest payload per ExtendedXMP APP1 chunk (segment limit minus chunk header overhead)
	private static final int MAX_EXTENDED_XMP_CHUNK_SIZE = 65458;
	// Largest standard XMP packet that still fits in a single APP1 segment
	private static final int MAX_XMP_CHUNK_SIZE = 65504;
	// Length of the MD5 GUID (32 hex characters) that links standard and extended XMP
	private static final int GUID_LEN = 32;

	public JpegXMP(byte[] data) {
		super(data);
	}

	public JpegXMP(String xmp) {
		super(xmp);
	}

	/**
	 * @param xmp XML string for the XMP - Assuming in UTF-8 format.
	 * @param extendedXmp XML string for the extended XMP - Assuming in UTF-8 format
	 */
	public JpegXMP(String xmp, String extendedXmp) {
		super(xmp, extendedXmp);
	}

	/**
	 * Writes the XMP (and, if needed, ExtendedXMP) APP1 segment(s) to the stream.
	 *
	 * @param os output stream positioned where the APP1 segment(s) should go
	 * @throws IOException if writing to the stream fails
	 */
	public void write(OutputStream os) throws IOException {
		Document xmpDoc = getXmpDocument();
		// Check whether the document already carries an xpacket processing instruction
		NodeList list = xmpDoc.getChildNodes();
		boolean foundPI = false;
		for (int j = 0; j < list.getLength(); j++) {
			Node currentNode = list.item(j);
			if (currentNode.getNodeType() == Node.PROCESSING_INSTRUCTION_NODE
					&& currentNode.getNodeName().equalsIgnoreCase("xpacket")) {
				foundPI = true;
				break;
			}
		}
		if (!foundPI) {
			// Add packet wrapper to the XMP document.
			// Add PI at the beginning and end of the document; we support only UTF-8, no BOM.
			XMLUtils.insertLeadingPI(xmpDoc, "xpacket", "begin='?' id='W5M0MpCehiHzreSzNTczkc9d'");
			XMLUtils.insertTrailingPI(xmpDoc, "xpacket", "end='r'");
		}
		// Serialize XMP to byte array
		byte[] xmp = XMLUtils.serializeToByteArray(xmpDoc);
		if (xmp.length > MAX_XMP_CHUNK_SIZE) {
			// Packet too large for one APP1 segment: move the rdf:RDF children
			// into a separate ExtendedXMP document.
			Document extendedXMPDoc = XMLUtils.createDocumentNode();
			// Copy all the children of the rdf:RDF element
			Node xmpRDF = xmpDoc.getElementsByTagName("rdf:RDF").item(0);
			NodeList nodes = xmpRDF.getChildNodes();
			Element extendedRDF = extendedXMPDoc.createElement("rdf:RDF");
			// FIX: the namespace value was a garbled, unterminated string literal that
			// swallowed the following appendChild() call; restored to the standard
			// RDF/XML syntax namespace URI.
			extendedRDF.setAttribute("xmlns:rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#");
			extendedXMPDoc.appendChild(extendedRDF);
			for (int i = 0; i < nodes.getLength(); i++) {
				Node curr = extendedXMPDoc.importNode(nodes.item(i), true);
				extendedRDF.appendChild(curr);
			}
			// Remove the moved children from the main document (iterate backwards
			// because the NodeList is live).
			int numOfItems = nodes.getLength();
			for (int i = 1; i <= numOfItems; i++) {
				xmpRDF.removeChild(nodes.item(numOfItems - i));
			}
			xmp = XMLUtils.serializeToByteArray(xmpDoc);
			setExtendedXMPData(XMLUtils.serializeToByteArray(extendedXMPDoc));
		}
		String guid = null;
		byte[] extendedXmp = getExtendedXmpData();
		if (extendedXmp != null) { // We have ExtendedXMP
			// Link the main packet to the extended data through an MD5 GUID,
			// unless a HasExtendedXMP attribute is already present.
			if (XMLUtils.getAttribute(xmpDoc, "rdf:Description", "xmpNote:HasExtendedXMP").length() == 0) {
				guid = StringUtils.generateMD5(extendedXmp);
				Element node = XMLUtils.createElement(xmpDoc, "rdf:Description");
				node.setAttribute("xmlns:xmpNote", "http://ns.adobe.com/xmp/extension/");
				node.setAttribute("xmpNote:HasExtendedXMP", guid);
				xmpDoc.getElementsByTagName("rdf:RDF").item(0).appendChild(node);
				xmp = XMLUtils.serializeToByteArray(xmpDoc);
			} else {
				guid = XMLUtils.getAttribute(xmpDoc, "rdf:Description", "xmpNote:HasExtendedXMP");
			}
		}
		// Write XMP segment
		IOUtils.writeShortMM(os, Marker.APP1.getValue());
		// Write segment length (2 length bytes + identifier + payload)
		IOUtils.writeShortMM(os, XMP_ID.length() + 2 + xmp.length);
		// Write segment data
		os.write(XMP_ID.getBytes());
		os.write(xmp);
		// Write ExtendedXMP if we have it
		if (extendedXmp != null) { // We have ExtendedXMP
			int numOfChunks = extendedXmp.length / MAX_EXTENDED_XMP_CHUNK_SIZE;
			int extendedXmpLen = extendedXmp.length;
			int offset = 0;
			// Full-size chunks; each chunk header carries the GUID, the total
			// ExtendedXMP length and this chunk's offset.
			for (int i = 0; i < numOfChunks; i++) {
				IOUtils.writeShortMM(os, Marker.APP1.getValue());
				// Write segment length
				IOUtils.writeShortMM(os, 2 + XMP_EXT_ID.length() + GUID_LEN + 4 + 4 + MAX_EXTENDED_XMP_CHUNK_SIZE);
				// Write segment data
				os.write(XMP_EXT_ID.getBytes());
				os.write(guid.getBytes());
				IOUtils.writeIntMM(os, extendedXmpLen);
				IOUtils.writeIntMM(os, offset);
				os.write(ArrayUtils.subArray(extendedXmp, offset, MAX_EXTENDED_XMP_CHUNK_SIZE));
				offset += MAX_EXTENDED_XMP_CHUNK_SIZE;
			}
			// Final partial chunk, if any
			int leftOver = extendedXmp.length % MAX_EXTENDED_XMP_CHUNK_SIZE;
			if (leftOver != 0) {
				IOUtils.writeShortMM(os, Marker.APP1.getValue());
				// Write segment length
				IOUtils.writeShortMM(os, 2 + XMP_EXT_ID.length() + GUID_LEN + 4 + 4 + leftOver);
				// Write segment data
				os.write(XMP_EXT_ID.getBytes());
				os.write(guid.getBytes());
				IOUtils.writeIntMM(os, extendedXmpLen);
				IOUtils.writeIntMM(os, offset);
				os.write(ArrayUtils.subArray(extendedXmp, offset, leftOver));
			}
		}
	}
}
package io.asfjava.ui.demo.screen;

import java.io.Serializable;

import io.asfjava.ui.core.form.CheckBox;
import io.asfjava.ui.core.form.ComboBox;
import io.asfjava.ui.core.form.Number;
import io.asfjava.ui.core.form.Password;
import io.asfjava.ui.core.form.RadioBox;
import io.asfjava.ui.core.form.Tab;
import io.asfjava.ui.core.form.TextArea;
import io.asfjava.ui.core.form.TextField;

/**
 * Demo form-backing bean showing the asfjava UI form annotations
 * (text fields with add-ons, password, tabs, combo/radio/check boxes).
 * <p>
 * Fixes in this revision: the "Pesonal Website" title typo, and the missing
 * setters for {@code gender}, {@code currency} and {@code color} — the bean
 * previously exposed getters only for those fields, so form values could not
 * be written back through standard JavaBean introspection.
 */
public class DemoForm implements Serializable {

	private static final long serialVersionUID = -5073515619469444978L;

	@TextField(title = "Personal Website", fieldAddonLeft = "http://", description = "This is TextField with fieldAddonLeft")
	private String webSite;

	@TextField(title = "Your Github Mail", fieldAddonRight = "@github.com", description = "This is TextField with fieldAddonRight")
	private String gitHub;

	// @Tab(title = "Contact", index = 2)
	@Password(title = "Password", placeHolder = "Please set you password", minLenght = 6, description = "This is password", validationMessage = "The password must contain a minimum of 6 characters ")
	private String password;

	@Tab(title = "Info", index = 1)
	@TextField(title = "First Name", placeHolder = "Your first name", minLenght = 3, maxLenght = 10, validationMessage = "The First Name must contain a minimum of 3 and a max of 10 characters ", description = "This is a description for your first name field with minLenght and maxLenght")
	private String firstName;

	// @Tab(title = "Info", index = 1)
	@TextField(title = "Last Name", placeHolder = "Your last name")
	private String lastName;

	@Tab(title = "Contact", index = 2)
	@TextField(title = "eMail", placeHolder = "Your email", pattern = "^\\S+@\\S+$", validationMessage = "Your mail must be in this format jhondoe@example.com", description = "This is Text Field with pattern and validation message")
	private String email;

	@Tab(title = "Additional Info", index = 3)
	@Number(title = "Number of children", placeHolder = "Number of children", description = "This is a number")
	private Integer number;

	@Tab(title = "Info", index = 1)
	@ComboBox(title = "Gender", titleMap = GenderTitleMap.class)
	private String gender;

	// @Tab(title = "Additional Info", index = 3)
	@ComboBox(title = "Currency", values = { "euro", "dollar" })
	private String currency;

	@Tab(title = "Additional Info", index = 3)
	@RadioBox(title = "Civil State", titleMap = CivilStateTitelsMap.class)
	private String civilState;

	// @Tab(title = "Contact", index = 2)
	@TextArea(title = "Address", placeHolder = "Fill your address please", maxLenght = 30, description = "This is textarea", validationMessage = "Max 30 charactres")
	private String address;

	@Tab(title = "Additional Info", index = 3)
	@CheckBox(title = "Color", values = { "red", "bleu", "green" }, defaultvalue = "red")
	private String color;

	public String getFirstName() {
		return firstName;
	}

	public void setFirstName(String firstName) {
		this.firstName = firstName;
	}

	public String getLastName() {
		return lastName;
	}

	public void setLastName(String lastName) {
		this.lastName = lastName;
	}

	public String getEmail() {
		return email;
	}

	public void setEmail(String eMail) {
		this.email = eMail;
	}

	public String getGitHub() {
		return gitHub;
	}

	public void setGitHub(String github) {
		this.gitHub = github;
	}

	public String getWebSite() {
		return webSite;
	}

	public void setWebSite(String website) {
		this.webSite = website;
	}

	public Integer getNumber() {
		return number;
	}

	public void setNumber(Integer number) {
		this.number = number;
	}

	public String getPassword() {
		return password;
	}

	public void setPassword(String password) {
		this.password = password;
	}

	public String getGender() {
		return gender;
	}

	// Added: setter was missing, making the bean write-only through forms.
	public void setGender(String gender) {
		this.gender = gender;
	}

	public String getAddress() {
		return address;
	}

	public void setAddress(String address) {
		this.address = address;
	}

	public String getCivilState() {
		return civilState;
	}

	public void setCivilState(String civilState) {
		this.civilState = civilState;
	}

	public String getCurrency() {
		return currency;
	}

	// Added: setter was missing for the currency combo box value.
	public void setCurrency(String currency) {
		this.currency = currency;
	}

	public String getColor() {
		return color;
	}

	// Added: setter was missing for the color check box value.
	public void setColor(String color) {
		this.color = color;
	}
}
package bndtools.builder;

import java.io.File;
import java.io.FileFilter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;

import org.bndtools.core.utils.workspace.WorkspaceUtils;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IFolder;
import org.eclipse.core.resources.IMarker;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.resources.IResourceDelta;
import org.eclipse.core.resources.IResourceDeltaVisitor;
import org.eclipse.core.resources.IWorkspaceRoot;
import org.eclipse.core.resources.IncrementalProjectBuilder;
import org.eclipse.core.resources.ProjectScope;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IConfigurationElement;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.MultiStatus;
import org.eclipse.core.runtime.Path;
import org.eclipse.core.runtime.Platform;
import org.eclipse.core.runtime.Status;
import org.eclipse.jdt.core.IClasspathContainer;
import org.eclipse.jdt.core.IClasspathEntry;
import org.eclipse.jdt.core.IJavaModelMarker;
import org.eclipse.jdt.core.IJavaProject;
import org.eclipse.jdt.core.JavaCore;
import org.eclipse.jdt.core.JavaModelException;
import org.eclipse.ui.preferences.ScopedPreferenceStore;

import aQute.bnd.build.Project;
import aQute.bnd.build.Workspace;
import aQute.lib.io.IO;
import aQute.lib.osgi.Builder;
import bndtools.Central;
import bndtools.Plugin;
import bndtools.api.IValidator;
import bndtools.classpath.BndContainerInitializer;
import bndtools.preferences.BndPreferences;
import bndtools.preferences.CompileErrorAction;
import bndtools.preferences.EclipseClasspathPreference;
import bndtools.utils.Predicate;

/**
 * Eclipse incremental project builder for bnd projects. On each build it
 * classifies what changed (the cnf configuration project, local .bnd files,
 * dependency projects' target JARs, or ordinary local files) and either
 * resets classpaths, forces a bnd rebuild, or does nothing. Build problems
 * are surfaced as {@link #MARKER_BND_PROBLEM} markers on the project's bnd
 * file.
 */
public class NewBuilder extends IncrementalProjectBuilder {

    public static final String BUILDER_ID = Plugin.PLUGIN_ID + ".bndbuilder";
    public static final String MARKER_BND_PROBLEM = Plugin.PLUGIN_ID + ".bndproblem";

    // Build-log verbosity levels, compared against the workspace preference.
    private static final int LOG_FULL = 2;
    private static final int LOG_BASIC = 1;
    private static final int LOG_NONE = 0;

    // Per-invocation state, (re)initialized at the top of build().
    private Project model;
    private BuildListeners listeners;
    private List<String> classpathErrors;
    private MultiStatus validationResults;
    private List<String> buildLog;
    private int logLevel = LOG_NONE;
    private ScopedPreferenceStore projectPrefs;

    /**
     * Entry point called by Eclipse. Returns the projects this build depends
     * on (so Eclipse schedules them first), or null when no bnd model could
     * be loaded for the project.
     */
    @Override
    protected IProject[] build(int kind, @SuppressWarnings("rawtypes") Map args, IProgressMonitor monitor) throws CoreException {
        BndPreferences prefs = new BndPreferences();
        logLevel = prefs.getBuildLogging();
        projectPrefs = new ScopedPreferenceStore(new ProjectScope(getProject()), Plugin.PLUGIN_ID);

        // Prepare build listeners
        listeners = new BuildListeners();

        // Prepare validations
        classpathErrors = new LinkedList<String>();
        validationResults = new MultiStatus(Plugin.PLUGIN_ID, 0, "Validation errors in bnd project", null);
        buildLog = new ArrayList<String>(5);

        // Initialise workspace OBR index (should only happen once)
        boolean builtAny = false;

        // Get the initial project
        IProject myProject = getProject();
        listeners.fireBuildStarting(myProject);
        Project model = null;
        try {
            model = Workspace.getProject(myProject.getLocation().toFile());
        } catch (Exception e) {
            clearBuildMarkers();
            createBuildMarkers(Collections.singletonList(e.getMessage()), Collections.<String> emptyList());
        }
        if (model == null)
            return null;
        this.model = model;
        model.setDelayRunDependencies(true);

        // Main build section
        try {
            IProject[] dependsOn = calculateDependsOn(model);

            // Clear errors and warnings
            model.clear();

            // CASE 1: CNF changed — refresh everything and reset classpaths.
            if (isCnfChanged()) {
                log(LOG_BASIC, "cnf project changed");
                model.refresh();
                model.getWorkspace().refresh();
                if (BndContainerInitializer.resetClasspaths(model, myProject, classpathErrors)) {
                    log(LOG_BASIC, "classpaths were changed");
                } else {
                    log(LOG_FULL, "classpaths did not need to change");
                }
                return dependsOn;
            }

            // CASE 2: local Bnd file changed, or Eclipse asks for full build
            boolean localChange = false;
            if (kind == FULL_BUILD) {
                localChange = true;
                log(LOG_BASIC, "Eclipse requested full build");
            } else if (isLocalBndFileChange()) {
                localChange = true;
                log(LOG_BASIC, "local bnd files changed");
            }
            if (localChange) {
                model.refresh();
                if (BndContainerInitializer.resetClasspaths(model, myProject, classpathErrors)) {
                    log(LOG_BASIC, "classpaths were changed");
                    return dependsOn;
                } else {
                    log(LOG_FULL, "classpaths were not changed");
                    rebuildIfLocalChanges(dependsOn);
                    return dependsOn;
                }
            }
            // (NB: from now on the delta cannot be null, due to the check in
            // isLocalBndFileChange)

            // CASE 3: JAR file in dependency project changed
            Project changedDependency = getDependencyTargetChange();
            if (changedDependency != null) {
                log(LOG_BASIC, "target files in dependency project %s changed", changedDependency.getName());
                model.propertiesChanged();
                if (BndContainerInitializer.resetClasspaths(model, myProject, classpathErrors)) {
                    log(LOG_BASIC, "classpaths were changed");
                    return dependsOn;
                } else {
                    log(LOG_FULL, "classpaths were not changed");
                }
            }

            // CASE 4: local file changes
            builtAny = rebuildIfLocalChanges(dependsOn);
            return dependsOn;
        } catch (Exception e) {
            throw new CoreException(new Status(IStatus.ERROR, Plugin.PLUGIN_ID, 0, "Build Error!", e));
        } finally {
            if (!builtAny) {
                try {
                    Central.getWorkspaceObrProvider().reset();
                } catch (Exception e) {
                    Plugin.logError("Error initialising workspace OBR provider", e);
                }
            }
            if (!buildLog.isEmpty() && logLevel > 0) {
                StringBuilder builder = new StringBuilder();
                builder.append(String.format("BUILD LOG for project %s (%d entries):", getProject(), buildLog.size()));
                for (String message : buildLog) {
                    builder.append("\n -> ").append(message);
                }
                Plugin.log(new Status(IStatus.INFO, Plugin.PLUGIN_ID, 0, builder.toString(), null));
            }
            listeners.release();
            // NOTE(review): this assigns the method-local 'model' (which shadows
            // the field), so the field keeps its reference — confirm intent.
            model = null;
        }
    }

    /**
     * Deletes everything in the bnd target directory and refreshes the
     * corresponding workspace folder so Eclipse notices.
     */
    @Override
    protected void clean(IProgressMonitor monitor) throws CoreException {
        try {
            IProject myProject = getProject();
            Project model = Workspace.getProject(myProject.getLocation().toFile());
            if (model == null)
                return;

            // Delete everything in the target directory
            File target = model.getTarget();
            if (target.isDirectory() && target.getParentFile() != null) {
                IO.delete(target);
                target.mkdirs();
            }

            // Tell Eclipse what we did...
            IFolder targetFolder = myProject.getFolder(calculateTargetDirPath(model));
            targetFolder.refreshLocal(IResource.DEPTH_INFINITE, monitor);
        } catch (Exception e) {
            throw new CoreException(new Status(IStatus.ERROR, Plugin.PLUGIN_ID, 0, "Build Error!", e));
        }
    }

    /**
     * Returns true when the cnf (workspace configuration) project changed in
     * a way that affects builds: a change in its "ext" folder or to the
     * Workspace.BUILDFILE. Returns false when cnf is missing or has no delta.
     */
    boolean isCnfChanged() throws Exception {
        IProject cnfProject = WorkspaceUtils.findCnfProject();
        if (cnfProject == null) {
            Plugin.log(new Status(IStatus.ERROR, Plugin.PLUGIN_ID, 0, "Bnd configuration project (cnf) is not available in the Eclipse workspace.", null));
            return false;
        }

        IResourceDelta cnfDelta = getDelta(cnfProject);
        if (cnfDelta == null) {
            log(LOG_FULL, "no delta available for cnf project, ignoring");
            return false;
        }

        final AtomicBoolean result = new AtomicBoolean(false);
        cnfDelta.accept(new IResourceDeltaVisitor() {
            public boolean visit(IResourceDelta delta) throws CoreException {
                if (!isChangeDelta(delta))
                    return false;
                // Marker-only deltas are not real content changes.
                if (IResourceDelta.MARKERS == delta.getFlags())
                    return false;

                IResource resource = delta.getResource();
                if (resource.getType() == IResource.ROOT || resource.getType() == IResource.PROJECT)
                    return true;

                if (resource.getType() == IResource.FOLDER && resource.getName().equals("ext")) {
                    log(LOG_FULL, "detected change in cnf due to resource %s, kind=0x%x, flags=0x%x", resource.getFullPath(), delta.getKind(), delta.getFlags());
                    result.set(true);
                }

                if (resource.getType() == IResource.FILE) {
                    if (Workspace.BUILDFILE.equals(resource.getName())) {
                        result.set(true);
                        log(LOG_FULL, "detected change in cnf due to resource %s, kind=0x%x, flags=0x%x", resource.getFullPath(), delta.getKind(), delta.getFlags());
                    } else {
                        // TODO: check other file names included from build.bnd
                    }
                }

                return false;
            }
        });

        return result.get();
    }

    /**
     * Returns true when any .bnd file in this project changed. A null delta
     * is treated conservatively as "changes exist".
     */
    private boolean isLocalBndFileChange() throws CoreException {
        IResourceDelta myDelta = getDelta(getProject());
        if (myDelta == null) {
            log(LOG_BASIC, "local project delta is null, assuming changes exist", model.getName());
            return true;
        }

        final AtomicBoolean result = new AtomicBoolean(false);
        myDelta.accept(new IResourceDeltaVisitor() {
            public boolean visit(IResourceDelta delta) throws CoreException {
                if (!isChangeDelta(delta))
                    return false;

                IResource resource = delta.getResource();

                if (resource.getType() == IResource.ROOT || resource.getType() == IResource.PROJECT)
                    return true;

                if (resource.getType() == IResource.FOLDER)
                    return true;

                // Only FILE resources reach this point, so the cast is safe.
                String extension = ((IFile) resource).getFileExtension();
                if (resource.getType() == IResource.FILE && "bnd".equalsIgnoreCase(extension)) {
                    log(LOG_FULL, "detected change due to resource %s, kind=0x%x, flags=0x%x", resource.getFullPath(), delta.getKind(), delta.getFlags());
                    result.set(true);
                    return false;
                }

                return false;
            }
        });

        return result.get();
    }

    /**
     * Returns the first dependency project whose generated target files
     * (buildfiles) appear changed or missing, or null when none changed.
     * A dependency not open in the workspace also yields null (logged as a
     * warning).
     */
    private Project getDependencyTargetChange() throws Exception {
        IWorkspaceRoot wsroot = ResourcesPlugin.getWorkspace().getRoot();
        Collection<Project> dependson = model.getDependson();
        log(LOG_FULL, "project depends on: %s", dependson);

        for (Project dep : dependson) {
            File targetDir = dep.getTarget();
            // Does not exist... deleted?
            if (targetDir != null && !(targetDir.isDirectory()))
                return dep;

            IProject project = WorkspaceUtils.findOpenProject(wsroot, dep);
            if (project == null) {
                Plugin.log(new Status(IStatus.WARNING, Plugin.PLUGIN_ID, 0, String.format("Dependency project '%s' from project '%s' is not in the Eclipse workspace.", dep.getName(), model.getName()), null));
                return null;
            }

            IFile buildFile = project.getFolder(targetDir.getName()).getFile(Workspace.BUILDFILES);
            IPath buildFilePath = buildFile.getProjectRelativePath();
            IResourceDelta delta = getDelta(project);

            if (delta == null) {
                // May have changed
                log(LOG_FULL, "null delta in dependency project %s", dep.getName());
                return dep;
            } else if (!isChangeDelta(delta)) {
                continue;
            } else {
                IResourceDelta buildFileDelta = delta.findMember(buildFilePath);
                if (buildFileDelta != null && isChangeDelta(buildFileDelta)) {
                    log(LOG_FULL, "detected change due to file %s, kind=0x%x, flags=0x%x", buildFile, delta.getKind(), delta.getFlags());
                    return dep;
                }
            }
            // this dependency project did not change, move on to next
        }

        // no dependencies changed
        return null;
    }

    /**
     * Collects changed files from the local project (excluding the target
     * dir) and from dependency projects, decides per sub-builder whether a
     * (forced) rebuild is needed, deletes stray JARs from the target dir,
     * and triggers the rebuild.
     *
     * @return Whether any files were built
     */
    private boolean rebuildIfLocalChanges(IProject[] dependsOn) throws Exception {
        log(LOG_FULL, "calculating local changes...");

        final Set<File> changedFiles = new HashSet<File>();

        final IPath projectPath = getProject().getFullPath();
        final IPath targetDirFullPath = projectPath.append(calculateTargetDirPath(model));
        final Set<File> targetJars = findJarsInTarget();

        boolean force = false;

        IResourceDelta delta;

        // Shared visitor: records every changed FILE outside the target dir.
        IResourceDeltaVisitor deltaVisitor = new IResourceDeltaVisitor() {
            public boolean visit(IResourceDelta delta) throws CoreException {
                if (!isChangeDelta(delta))
                    return false;

                IResource resource = delta.getResource();
                if (resource.getType() == IResource.ROOT || resource.getType() == IResource.PROJECT)
                    return true;

                if (resource.getType() == IResource.FOLDER) {
                    IPath folderPath = resource.getFullPath();
                    // ignore ALL files in target dir
                    return !folderPath.equals(targetDirFullPath);
                }

                if (resource.getType() == IResource.FILE) {
                    File file = resource.getLocation().toFile();
                    changedFiles.add(file);
                }

                return false;
            }
        };

        // Get delta on local project
        delta = getDelta(getProject());
        if (delta != null) {
            // NOTE(review): this log runs before accept(), so the reported
            // count/set is from before the visit — confirm intended.
            log(LOG_FULL, "%d files in local project (outside target) changed or removed: %s", changedFiles.size(), changedFiles);
            delta.accept(deltaVisitor);
        } else {
            log(LOG_BASIC, "no info on local changes available");
        }

        // Get deltas on dependency projects
        for (IProject depProject : dependsOn) {
            delta = getDelta(depProject);
            if (delta != null) {
                delta.accept(deltaVisitor);
                log(LOG_FULL, "%d files in dependency project '%s' changed or removed: %s", changedFiles.size(), depProject.getName(), changedFiles);
            } else {
                log(LOG_BASIC, "no info available on changes from project '%s'", depProject.getName());
            }
        }

        // Process the sub-builders to determine whether a rebuild, force
        // rebuild, or nothing is required.
        if (!model.isNoBundles())
            for (Builder builder : model.getSubBuilders()) {
                // If the builder's output JAR has been removed, this could be
                // because the user deleted it, so we should force build in
                // order to regenerate it.
                File targetFile = new File(model.getTarget(), builder.getBsn() + ".jar");
                if (!targetFile.isFile()) {
                    log(LOG_FULL, "output file %s of builder %s was removed, will force a rebuild", targetFile, builder.getBsn());
                    force = true;
                    break;
                }

                // Account for this builder's target JAR
                targetJars.remove(targetFile);

                // Finally if any removed or changed files are in scope for the
                // bundle, we simply force rebuild
                if (!changedFiles.isEmpty()) {
                    if (changedFiles.contains(builder.getPropertiesFile())) {
                        log(LOG_FULL, "the properties file for builder %s was changes, will force a rebuild", builder.getBsn());
                        force = true;
                        break;
                    } else if (builder.isInScope(changedFiles)) {
                        log(LOG_FULL, "some removed files were in scope for builder %s, will force a rebuild", builder.getBsn());
                        force = true;
                        break;
                    }
                }
            }

        // Delete any unaccounted-for Jars from target dir
        for (File jar : targetJars) {
            try {
                jar.delete();
            } catch (Exception e) {
                Plugin.logError("Error deleting target JAR: " + jar, e);
            }
        }

        // Do it
        boolean builtAny = false;
        if (force) {
            builtAny = rebuild(true);
        } else if (!changedFiles.isEmpty()) {
            builtAny = rebuild(false);
        }
        return builtAny;
    }

    /** Returns the set of *.jar files currently in the bnd target directory. */
    private Set<File> findJarsInTarget() throws Exception {
        File targetDir = model.getTarget();
        File[] targetJars = targetDir.listFiles(new FileFilter() {
            public boolean accept(File pathname) {
                return pathname.getName().toLowerCase().endsWith(".jar");
            }
        });
        Set<File> result = new HashSet<File>();
        if (targetJars != null)
            for (File jar : targetJars) {
                result.add(jar);
            }
        return result;
    }

    /** Returns the target directory path relative to the project base. */
    private static IPath calculateTargetDirPath(Project model) throws Exception {
        IPath basePath = Path.fromOSString(model.getBase().getAbsolutePath());
        final IPath targetDirPath = Path.fromOSString(model.getTarget().getAbsolutePath()).makeRelativeTo(basePath);
        return targetDirPath;
    }

    // What to do with bundle outputs when compile/classpath errors exist.
    private enum Action {
        build, delete
    };

    /**
     * Runs the actual bnd build (or deletes stale outputs), honoring the
     * per-project CompileErrorAction preference when Java or classpath
     * problems exist, then refreshes the target folder and reports errors
     * and warnings as markers.
     *
     * @param force
     *            Whether to force bnd to build
     * @return Whether any files were built
     */
    @SuppressWarnings("unchecked")
    private boolean rebuild(boolean force) throws Exception {
        clearBuildMarkers();

        // Check if compilation errors exist, and if so check the project
        // settings for what to do about that...
        Action buildAction = Action.build;
        if (hasBlockingErrors()) {
            ScopedPreferenceStore store = new ScopedPreferenceStore(new ProjectScope(getProject()), Plugin.PLUGIN_ID);
            switch (CompileErrorAction.parse(store.getString(CompileErrorAction.PREFERENCE_KEY))) {
            case skip :
                addBuildMarker(String.format("Will not build OSGi bundle(s) for project %s until compilation problems are fixed.", model.getName()), IMarker.SEVERITY_ERROR);
                log(LOG_BASIC, "SKIPPING due to Java problem markers");
                return false;
            case build :
                buildAction = Action.build;
                break;
            case delete :
                buildAction = Action.delete;
                break;
            }
        } else if (!classpathErrors.isEmpty()) {
            ScopedPreferenceStore store = new ScopedPreferenceStore(new ProjectScope(getProject()), Plugin.PLUGIN_ID);
            switch (CompileErrorAction.parse(store.getString(CompileErrorAction.PREFERENCE_KEY))) {
            case skip :
                addBuildMarker(String.format("Will not build OSGi bundle(s) for project %s until classpath resolution problems are fixed.", model.getName()), IMarker.SEVERITY_ERROR);
                log(LOG_BASIC, "SKIPPING due to classpath resolution problem markers");
                return false;
            case build :
                buildAction = Action.build;
                break;
            case delete :
                buildAction = Action.delete;
                break;
            }
        }

        File[] built;

        // Validate sub-builders against registered validator extensions.
        List<IValidator> validators = loadValidators();
        if (validators != null) {
            Collection< ? extends Builder> builders = model.getSubBuilders();
            for (Builder builder : builders) {
                validate(builder, validators);
            }
        }

        // Clear errors & warnings before build
        model.clear();

        // Load Eclipse classpath containers
        model.clearClasspath();
        EclipseClasspathPreference classpathPref = EclipseClasspathPreference.parse(projectPrefs.getString(EclipseClasspathPreference.PREFERENCE_KEY));
        if (classpathPref == EclipseClasspathPreference.expose) {
            List<File> classpathFiles = new ArrayList<File>(20);
            accumulateClasspath(classpathFiles, JavaCore.create(getProject()), false, new ClasspathContainerFilter());
            for (File file : classpathFiles) {
                log(LOG_FULL, "Adding Eclipse classpath entry %s", file.getAbsolutePath());
                model.addClasspath(file);
            }
        }

        if (buildAction == Action.build) {
            // Build!
            model.setTrace(true);
            boolean stale = model.isStale();
            if (force || stale) {
                log(LOG_BASIC, "REBUILDING: force=%b; stale=%b", force, stale);
                built = model.buildLocal(false);
                if (built == null)
                    built = new File[0]; // shouldn't happen but just in case
            } else {
                log(LOG_BASIC, "NOT REBUILDING: force=%b;stale=%b", force, stale);
                built = new File[0];
            }

            // Notify the build listeners
            if (listeners != null && built.length > 0) {
                IPath[] paths = new IPath[built.length];
                for (int i = 0; i < built.length; i++)
                    paths[i] = Central.toPath(built[i]);
                listeners.fireBuiltBundles(getProject(), paths);
            }

            // Log rebuilt files
            log(LOG_BASIC, "%d files were rebuilt", built.length);
            if (logLevel >= LOG_FULL) {
                for (File builtFile : built) {
                    log(LOG_FULL, "target file %s has an age of %d ms", builtFile, System.currentTimeMillis() - builtFile.lastModified());
                }
            }
        } else {
            // Delete target files since the project has compile errors and the
            // delete action was selected.
            for (Builder builder : model.getSubBuilders()) {
                File targetFile = new File(model.getTarget(), builder.getBsn() + ".jar");
                boolean deleted = targetFile.delete();
                log(LOG_FULL, "deleted target file %s (%b)", targetFile, deleted);
            }
            built = new File[0];
        }

        // Notify central that there are new bundles
        if (built.length > 0)
            Central.invalidateIndex();

        // Make sure Eclipse knows about the changed files (should already have
        // been done?)
        IFolder targetFolder = getProject().getFolder(calculateTargetDirPath(model));
        targetFolder.refreshLocal(IResource.DEPTH_INFINITE, null);

        // Report errors
        List<String> errors = new ArrayList<String>(model.getErrors());
        List<String> warnings = new ArrayList<String>(model.getWarnings());
        createBuildMarkers(errors, warnings);

        return built.length > 0;
    }

    /**
     * Instantiates all validators contributed to the "validators" extension
     * point; returns null when none are registered.
     */
    static List<IValidator> loadValidators() {
        List<IValidator> validators = null;
        IConfigurationElement[] validatorElems = Platform.getExtensionRegistry().getConfigurationElementsFor(Plugin.PLUGIN_ID, "validators");
        if (validatorElems != null && validatorElems.length > 0) {
            validators = new ArrayList<IValidator>(validatorElems.length);
            for (IConfigurationElement elem : validatorElems) {
                try {
                    validators.add((IValidator) elem.createExecutableExtension("class"));
                } catch (Exception e) {
                    Plugin.logError("Unable to instantiate validator: " + elem.getAttribute("name"), e);
                }
            }
        }
        return validators;
    }

    /** Runs each validator against the builder, collecting non-OK statuses. */
    void validate(Builder builder, List<IValidator> validators) {
        for (IValidator validator : validators) {
            IStatus status = validator.validate(builder);
            if (!status.isOK())
                validationResults.add(status);
        }
    }

    /**
     * Maps the bnd -dependson list (plus the cnf project) to open workspace
     * projects; missing projects are logged as warnings and skipped.
     */
    private IProject[] calculateDependsOn(Project model) throws Exception {
        Collection<Project> dependsOn = model.getDependson();
        List<IProject> result = new ArrayList<IProject>(dependsOn.size() + 1);

        IProject cnfProject = WorkspaceUtils.findCnfProject();
        if (cnfProject != null)
            result.add(cnfProject);

        IWorkspaceRoot wsroot = ResourcesPlugin.getWorkspace().getRoot();
        for (Project project : dependsOn) {
            IProject targetProj = WorkspaceUtils.findOpenProject(wsroot, project);
            if (targetProj == null)
                Plugin.log(new Status(IStatus.WARNING, Plugin.PLUGIN_ID, 0, "No open project in workspace for Bnd '-dependson' dependency: " + project.getName(), null));
            else
                result.add(targetProj);
        }

        log(LOG_FULL, "Calculated depends-on list: %s", result);
        return result.toArray(new IProject[result.size()]);
    }

    /**
     * Recursively flattens a JDT project's classpath into a list of files.
     * When {@code exports} is true only exported entries (and output
     * locations) are collected — used for CPE_PROJECT recursion. Containers
     * are filtered through the supplied predicates.
     */
    private void accumulateClasspath(List<File> files, IJavaProject project, boolean exports, Predicate<IClasspathContainer>... containerFilters) throws JavaModelException {
        if (exports) {
            IPath outputPath = project.getOutputLocation();
            files.add(getFileForPath(outputPath));
        }

        IClasspathEntry[] entries = project.getRawClasspath();
        List<IClasspathEntry> queue = new ArrayList<IClasspathEntry>(entries.length);
        queue.addAll(Arrays.asList(entries));

        while (!queue.isEmpty()) {
            IClasspathEntry entry = queue.remove(0);

            if (exports && !entry.isExported())
                continue;

            IPath path = entry.getPath();

            switch (entry.getEntryKind()) {
            case IClasspathEntry.CPE_LIBRARY :
                files.add(getFileForPath(path));
                break;
            case IClasspathEntry.CPE_VARIABLE :
                IPath resolvedPath = JavaCore.getResolvedVariablePath(path);
                files.add(getFileForPath(resolvedPath));
                break;
            case IClasspathEntry.CPE_SOURCE :
                IPath outputLocation = entry.getOutputLocation();
                if (exports && outputLocation != null)
                    files.add(getFileForPath(outputLocation));
                break;
            case IClasspathEntry.CPE_CONTAINER :
                IClasspathContainer container = JavaCore.getClasspathContainer(path, project);
                boolean allow = true;
                for (Predicate<IClasspathContainer> filter : containerFilters)
                    if (!filter.select(container))
                        allow = false;
                if (allow)
                    queue.addAll(Arrays.asList(container.getClasspathEntries()));
                break;
            case IClasspathEntry.CPE_PROJECT :
                IProject targetProject = ResourcesPlugin.getWorkspace().getRoot().getProject(path.lastSegment());
                IJavaProject targetJavaProject = JavaCore.create(targetProject);
                accumulateClasspath(files, targetJavaProject, true, containerFilters);
                break;
            }
        }
    }

    /**
     * Resolves a workspace path to a filesystem File, falling back to the
     * raw path when no workspace resource exists for it.
     */
    private static File getFileForPath(IPath path) {
        File file;
        IResource resource = ResourcesPlugin.getWorkspace().getRoot().findMember(path);
        if (resource != null && resource.exists())
            file = resource.getLocation().toFile();
        else
            file = path.toFile();
        return file;
    }

    /** Returns true when the project has Java problem markers at ERROR severity. */
    private boolean hasBlockingErrors() {
        try {
            if (containsError(getProject().findMarkers(IJavaModelMarker.JAVA_MODEL_PROBLEM_MARKER, true, IResource.DEPTH_INFINITE)))
                return true;
            return false;
        } catch (CoreException e) {
            Plugin.logError("Error looking for project problem markers", e);
            return false;
        }
    }

    /** Returns true if any marker in the array carries ERROR severity. */
    private static boolean containsError(IMarker[] markers) {
        if (markers != null)
            for (IMarker marker : markers) {
                int severity = marker.getAttribute(IMarker.SEVERITY, IMarker.SEVERITY_INFO);
                if (severity == IMarker.SEVERITY_ERROR)
                    return true;
            }
        return false;
    }

    /**
     * Returns true for content deltas (ADDED/CHANGED/REMOVED), excluding
     * marker-only deltas.
     */
    private static boolean isChangeDelta(IResourceDelta delta) {
        if (IResourceDelta.MARKERS == delta.getFlags())
            return false;
        if ((delta.getKind() & (IResourceDelta.ADDED | IResourceDelta.CHANGED | IResourceDelta.REMOVED)) == 0)
            return false;
        return true;
    }

    /**
     * Converts collected error/warning strings, classpath errors and
     * validation statuses into workspace markers.
     */
    private void createBuildMarkers(Collection< ? extends String> errors, Collection< ? extends String> warnings) throws CoreException {
        for (String error : errors) {
            addBuildMarker(error, IMarker.SEVERITY_ERROR);
        }
        for (String warning : warnings) {
            addBuildMarker(warning, IMarker.SEVERITY_WARNING);
        }
        for (String error : classpathErrors) {
            addClasspathMarker(error, IMarker.SEVERITY_ERROR);
        }
        if (!validationResults.isOK()) {
            for (IStatus status : validationResults.getChildren()) {
                addClasspathMarker(status);
            }
        }
    }

    /** Removes all bnd problem markers from the project's bnd file. */
    private void clearBuildMarkers() throws CoreException {
        IFile bndFile = getProject().getFile(Project.BNDFILE);

        if (bndFile.exists()) {
            bndFile.deleteMarkers(MARKER_BND_PROBLEM, true, IResource.DEPTH_INFINITE);
        }
    }

    /** Markers go on the bnd file when present, otherwise on the project. */
    private IResource getBuildMarkerTargetResource() {
        IProject project = getProject();
        IResource bndFile = project.getFile(Project.BNDFILE);
        if (bndFile == null || !bndFile.exists())
            return project;
        return bndFile;
    }

    /** Attaches a bnd-problem marker with the given message and severity. */
    private void addBuildMarker(String message, int severity) throws CoreException {
        IResource resource = getBuildMarkerTargetResource();

        IMarker marker = resource.createMarker(MARKER_BND_PROBLEM);
        marker.setAttribute(IMarker.SEVERITY, severity);
        marker.setAttribute(IMarker.MESSAGE, message);
        // marker.setAttribute(IMarker.LINE_NUMBER, 1);
    }

    /** Attaches a classpath-problem marker with the given message and severity. */
    private void addClasspathMarker(String message, int severity) throws CoreException {
        IResource resource = getBuildMarkerTargetResource();

        IMarker marker = resource.createMarker(BndContainerInitializer.MARKER_BND_CLASSPATH_PROBLEM);
        marker.setAttribute(IMarker.SEVERITY, severity);
        marker.setAttribute(IMarker.MESSAGE, message);
        // marker.setAttribute(IMarker.LINE_NUMBER, 1);
    }

    /** Maps an IStatus severity onto a marker severity and records it. */
    private void addClasspathMarker(IStatus status) throws CoreException {
        int severity;
        switch (status.getSeverity()) {
        case IStatus.CANCEL :
        case IStatus.ERROR :
            severity = IMarker.SEVERITY_ERROR;
            break;
        case IStatus.WARNING :
            severity = IMarker.SEVERITY_WARNING;
            break;
        default :
            severity = IMarker.SEVERITY_INFO;
        }
        addClasspathMarker(status.getMessage(), severity);
    }

    /** Appends a formatted entry to the build log when the level is enabled. */
    private void log(int level, String message, Object... args) {
        if (logLevel >= level)
            buildLog.add(String.format(message, args));
    }
}
package org.team3042.sweep.commands; /** * * @author NewUser */ public class DriveTrainTankDrive extends CommandBase { public DriveTrainTankDrive() { // Use requires() here to declare subsystem dependencies // eg. requires(chassis); requires(driveTrain); } // Called just before this Command runs the first time protected void initialize() { } // Called repeatedly when this Command is scheduled to run protected void execute() { //driveTrain.drive(-oi.stickLeft.getY(), -oi.stickRight.getY()); } // Make this return true when this Command no longer needs to run execute() protected boolean isFinished() { return false; } // Called once after isFinished returns true protected void end() { } // Called when another command which requires one or more of the same // subsystems is scheduled to run protected void interrupted() { } }
package thredds.catalog2; import thredds.catalog.ServiceType; import java.util.List; /** * _more_ * * @author edavis * @since 4.0 */ public interface Dataset extends MetadataContainer { public String getName(); public String getId(); public String getAlias(); public boolean isAccessible(); public List<Access> getAccesses(); public Access getAccess( ServiceType type ); public boolean isCollection(); public List<Dataset> getDatasets(); public Catalog getParentCatalog(); public Dataset getParent(); }