repo_name
stringlengths
6
101
path
stringlengths
4
300
text
stringlengths
7
1.31M
johnmwaller/spring-webflow
spring-binding/src/main/java/org/springframework/binding/message/DefaultMessageResolver.java
/*
 * Copyright 2004-2008 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.binding.message;

import java.util.Locale;

import org.springframework.context.MessageSource;
import org.springframework.context.MessageSourceResolvable;
import org.springframework.core.style.ToStringCreator;

/**
 * Default {@link MessageResolver} implementation. Resolves the message text by delegating to a
 * Spring {@link MessageSource} (trying the configured codes in order, interpolating the
 * configured arguments, and falling back to the default text), then wraps the resolved text in a
 * {@link Message} carrying the configured source and severity.
 * <p>
 * Instances are immutable and therefore safe to share across threads.
 */
public class DefaultMessageResolver implements MessageResolver, MessageSourceResolvable {

	// All resolution inputs are fixed at construction time; fields are final so the
	// resolver can be cached and reused safely.
	private final Object source;

	private final String[] codes;

	private final Severity severity;

	private final Object[] args;

	private final String defaultText;

	/**
	 * Creates a new message resolver.
	 * @param source the object this message is about (may be null for a global message)
	 * @param codes the message codes to try, in order, against the message source
	 * @param severity the severity of the resolved message
	 * @param args the arguments to interpolate into the resolved message text
	 * @param defaultText the text to fall back on when no code resolves
	 */
	public DefaultMessageResolver(Object source, String[] codes, Severity severity, Object[] args, String defaultText) {
		this.source = source;
		this.codes = codes;
		this.severity = severity;
		this.args = args;
		this.defaultText = defaultText;
	}

	public Message resolveMessage(MessageSource messageSource, Locale locale) {
		// This object is itself the MessageSourceResolvable handed to the message source.
		return new Message(source, postProcessMessageText(messageSource.getMessage(this, locale)), severity);
	}

	/**
	 * Subclasses may override to perform special post-processing of the returned message text; for example, running it
	 * through an Expression evaluator. The default implementation returns the text unchanged.
	 * @param text the resolved message text
	 * @return the post-processed message text
	 */
	protected String postProcessMessageText(String text) {
		return text;
	}

	// implementing MessageSourceResolvable

	public String[] getCodes() {
		return codes;
	}

	public Object[] getArguments() {
		return args;
	}

	public String getDefaultMessage() {
		return defaultText;
	}

	public String toString() {
		return new ToStringCreator(this).append("source", source).append("severity", severity).append("codes", codes)
				.append("args", args).append("defaultText", defaultText).toString();
	}
}
OndrejKotek/Resteasy
testsuite/integration-tests/src/test/java/org/jboss/resteasy/test/microprofile/restclient/resource/HealthService.java
package org.jboss.resteasy.test.microprofile.restclient.resource; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.MediaType; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; import java.io.Closeable; public interface HealthService extends Closeable { @GET @Produces({ MediaType.APPLICATION_JSON}) @Path("/health") HealthCheckData getHealthData() throws WebApplicationException; }
gems-uff/tipmerge
src/br/uff/ic/gems/tipmerge/gui/JFrameFilesAnalysis.java
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package br.uff.ic.gems.tipmerge.gui;

import br.uff.ic.gems.tipmerge.dao.CommitterDao;
import br.uff.ic.gems.tipmerge.dao.EditedFilesDao;
import br.uff.ic.gems.tipmerge.dao.MergeFilesDao;
import br.uff.ic.gems.tipmerge.dao.RepositoryDao;
import br.uff.ic.gems.tipmerge.model.Committer;
import br.uff.ic.gems.tipmerge.model.EditedFile;
import br.uff.ic.gems.tipmerge.model.MergeFiles;
import br.uff.ic.gems.tipmerge.model.RepoFiles;
import br.uff.ic.gems.tipmerge.model.Repository;
import br.uff.ic.gems.tipmerge.util.Export;
import br.uff.ic.gems.tipmerge.util.Statistics;
import java.text.DecimalFormat;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.swing.BorderFactory;
import javax.swing.JComboBox;
import javax.swing.JOptionPane;
import javax.swing.JTable;
import javax.swing.table.DefaultTableModel;
import javax.swing.table.TableModel;
import org.jfree.chart.ChartFactory;
import org.jfree.chart.ChartPanel;
import org.jfree.chart.JFreeChart;
import org.jfree.chart.labels.StandardCategoryItemLabelGenerator;
import org.jfree.chart.plot.CategoryPlot;
import org.jfree.chart.plot.PlotOrientation;
import org.jfree.chart.renderer.category.CategoryItemRenderer;
import org.jfree.data.category.CategoryDataset;
import org.jfree.data.category.DefaultCategoryDataset;
import org.jfree.ui.RefineryUtilities;

/**
 * This class is in charge of showing all results about the files analysis.
 *
 * Presents, per selected merge, which files were edited on each branch (and in the
 * previous history), by which committers and with how many commits, in four tabbed
 * tables; also exports the tables to Excel and renders two JFreeChart bar charts.
 *
 * @author j2cf, Catarina
 */
public class JFrameFilesAnalysis extends javax.swing.JFrame {

    // Repository-level file data; loaded once in the constructor.
    private final RepoFiles repoFiles;
    // The merge currently being analyzed; set when the user presses "Run".
    private MergeFiles mergeFiles;

    /**
     * Creates new form JFrameCommitsAnalysis.
     *
     * @param repository the repository whose merges will be analyzed
     */
    public JFrameFilesAnalysis(Repository repository) {
        this.repoFiles = new RepoFiles(repository);
        // Lazily populate repository details the first time (no cached merge list yet).
        if(repoFiles.getMergeFiles() == null || repoFiles.getMergeFiles().isEmpty()){
            RepositoryDao rdao = new RepositoryDao(repository.getProject());
            rdao.setDetails(repository);
        }
        initComponents();
        setParameters();
    }

    //1- Shows the project branches that the user can select to merge - 2- shows all the existing merges - 3 - and put the name of the project
    private void setParameters() {
        jcMerge1.setModel( new JComboBox( repoFiles.getRepository().getListOfMerges().toArray()) .getModel() );
        jPanel1.setBorder( BorderFactory.createTitledBorder(BorderFactory.createTitledBorder(""), "Project " + repoFiles.getRepository().getName()) );
        txRepositoryName.setText(repoFiles.getRepository().getName());
    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {

        hash1 = new javax.swing.JLabel();
        buttonGroup1 = new javax.swing.ButtonGroup();
        jFrame1 = new javax.swing.JFrame();
        jPanel1 = new javax.swing.JPanel();
        btRun = new javax.swing.JButton();
        jLSelecByExt = new javax.swing.JLabel();
        comboFileExtension = new javax.swing.JComboBox();
        jcMerge1 = new javax.swing.JComboBox();
        jLabel4 = new javax.swing.JLabel();
        jLabel1 = new javax.swing.JLabel();
        labelRepository = new javax.swing.JLabel();
        txRepositoryName = new javax.swing.JTextField();
        btExport = new javax.swing.JButton();
        jTabbedPane1 = new javax.swing.JTabbedPane();
        jScrollPane5 = new javax.swing.JScrollPane();
        jTable1 = new javax.swing.JTable();
        jScrollPane6 = new javax.swing.JScrollPane();
        jTable2 = new javax.swing.JTable();
        jScrollPane7 = new javax.swing.JScrollPane();
        jTable3 = new javax.swing.JTable();
        jScrollPane8 = new javax.swing.JScrollPane();
        jTable4 = new javax.swing.JTable();
        jButtonDependencies = new javax.swing.JButton();
        btnChart2 = new javax.swing.JButton();
        btnChart1 = new javax.swing.JButton();

        hash1.setText("<hash>");

        // jFrame1 is the secondary window used later to display the charts.
        jFrame1.setBounds(500, 150, 500, 500);

        javax.swing.GroupLayout jFrame1Layout = new javax.swing.GroupLayout(jFrame1.getContentPane());
        jFrame1.getContentPane().setLayout(jFrame1Layout);
        jFrame1Layout.setHorizontalGroup(
            jFrame1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGap(0, 400, Short.MAX_VALUE)
        );
        jFrame1Layout.setVerticalGroup(
            jFrame1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGap(0, 300, Short.MAX_VALUE)
        );

        setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
        setTitle("Files Analysis");

        btRun.setText("Run");
        btRun.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                btRunActionPerformed(evt);
            }
        });

        jLSelecByExt.setText("File Extensions");

        comboFileExtension.setModel(new javax.swing.DefaultComboBoxModel(new String[] { "All Files", ".java", ".c", ".html", ".js", ".py", ".php", ".rb", ".xml" }));

        jcMerge1.setToolTipText("Select a previous merge");
        jcMerge1.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jcMerge1ActionPerformed(evt);
            }
        });

        jLabel4.setText("Select a Merge");

        jLabel1.setIcon(new javax.swing.ImageIcon(getClass().getResource("/br/uff/ic/gems/tipmerge/icons/loading1.gif"))); // NOI18N
        jLabel1.setText("Loading ...");
        jLabel1.setVisible(false);

        labelRepository.setText("Repository Name");

        txRepositoryName.setEnabled(false);

        javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1);
        jPanel1.setLayout(jPanel1Layout);
        jPanel1Layout.setHorizontalGroup(
            jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel1Layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(jPanel1Layout.createSequentialGroup()
                        .addGap(10, 10, 10)
                        .addComponent(jLabel1)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(jLSelecByExt)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addComponent(comboFileExtension, javax.swing.GroupLayout.PREFERRED_SIZE, 129, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addComponent(btRun, javax.swing.GroupLayout.PREFERRED_SIZE, 88, javax.swing.GroupLayout.PREFERRED_SIZE))
                    .addGroup(jPanel1Layout.createSequentialGroup()
                        .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                            .addComponent(labelRepository)
                            .addComponent(jLabel4))
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                            .addComponent(jcMerge1, 0, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                            .addComponent(txRepositoryName))))
                .addContainerGap())
        );
        jPanel1Layout.setVerticalGroup(
            jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel1Layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(labelRepository)
                    .addComponent(txRepositoryName, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(jcMerge1, javax.swing.GroupLayout.PREFERRED_SIZE, 22, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addComponent(jLabel4))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(comboFileExtension, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addComponent(btRun)
                    .addComponent(jLabel1)
                    .addComponent(jLSelecByExt))
                .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
        );

        btExport.setText("Export");
        btExport.setEnabled(false);
        btExport.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                btExportActionPerformed(evt);
            }
        });

        jTable1.setModel(new javax.swing.table.DefaultTableModel(
            new Object [][] {
                {},
                {},
                {},
                {}
            },
            new String [] {

            }
        ));
        jScrollPane5.setViewportView(jTable1);

        jTabbedPane1.addTab("Branch1", jScrollPane5);

        jTable2.setModel(new javax.swing.table.DefaultTableModel(
            new Object [][] {
                {},
                {},
                {},
                {}
            },
            new String [] {

            }
        ));
        jScrollPane6.setViewportView(jTable2);

        jTabbedPane1.addTab("Branch2", jScrollPane6);

        jTable3.setModel(new javax.swing.table.DefaultTableModel(
            new Object [][] {
                {},
                {},
                {},
                {}
            },
            new String [] {

            }
        ));
        jScrollPane7.setViewportView(jTable3);

        jTabbedPane1.addTab("Both Branches", jScrollPane7);

        jTable4.setModel(new javax.swing.table.DefaultTableModel(
            new Object [][] {
                {},
                {},
                {},
                {}
            },
            new String [] {

            }
        ));
        jTable4.setAutoResizeMode(javax.swing.JTable.AUTO_RESIZE_OFF);
        jScrollPane8.setViewportView(jTable4);

        jTabbedPane1.addTab("Previous History", jScrollPane8);

        jButtonDependencies.setText("Get Dependencies");
        jButtonDependencies.setEnabled(false);
        jButtonDependencies.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jButtonDependenciesActionPerformed(evt);
            }
        });

        btnChart2.setText("Chart2");
        btnChart2.setEnabled(false);
        btnChart2.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                btnChart2ActionPerformed(evt);
            }
        });

        btnChart1.setText("Chart1");
        btnChart1.setEnabled(false);
        btnChart1.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                btnChart1ActionPerformed(evt);
            }
        });

        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
        getContentPane().setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addComponent(jTabbedPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 625, Short.MAX_VALUE)
            .addComponent(jPanel1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
            .addGroup(layout.createSequentialGroup()
                .addContainerGap()
                .addComponent(btnChart1)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(btnChart2)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                .addComponent(jButtonDependencies)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(btExport)
                .addContainerGap())
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addComponent(jPanel1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addGap(18, 18, 18)
                .addComponent(jTabbedPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 312, Short.MAX_VALUE)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(btExport)
                    .addComponent(jButtonDependencies)
                    .addComponent(btnChart2)
                    .addComponent(btnChart1))
                .addContainerGap())
        );

        pack();
    }// </editor-fold>//GEN-END:initComponents

    /**
     * "Run" handler: analyzes the selected merge on a background thread.
     *
     * NOTE(review): Swing components (jLabel1, btRun, the tables via
     * showResultsTable) are mutated from this worker thread, not the EDT;
     * consider SwingUtilities.invokeLater / SwingWorker — TODO confirm intent.
     */
    private void btRunActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btRunActionPerformed
        Runnable r = () -> {
            jLabel1.setVisible(true);
            btRun.setEnabled(false);
            /**
             * At this time a merge will be created or - the selection of
             * branches - by selecting one merge from the history
             */
            MergeFilesDao mergeFilesDao = new MergeFilesDao();
            // codHash strips everything after the first space from the combo entry
            // (presumably "hash ..." strings — see codHash).
            String hash = codHash(jcMerge1.getSelectedItem().toString());
            MergeFiles mergeSelected = mergeFilesDao.getMerge(hash, repoFiles.getRepository().getProject());
            /**
             * From now the merge already exists with parents and merge base,
             * next steps are: Set the files of that merge and committers that
             * changed that files.
             */
            //System.out.println(mergeSelected.getHash() + "\n" + mergeSelected.getHashBase() + "\n" + Arrays.toString(mergeSelected.getParents()));
            EditedFilesDao filesDao = new EditedFilesDao();
            mergeSelected.setFilesOnBranchOne(filesDao.getFiles(mergeSelected.getHashBase(), mergeSelected.getParents()[0], mergeSelected.getPath(), comboFileExtension.getSelectedItem().toString()));
            mergeSelected.setFilesOnBranchTwo(filesDao.getFiles(mergeSelected.getHashBase(), mergeSelected.getParents()[1], mergeSelected.getPath(), comboFileExtension.getSelectedItem().toString()));
            CommitterDao cmterDao = new CommitterDao();
            // Keep only branch-one files for which at least one committer was found.
            List<EditedFile> files = new LinkedList<>();
            for (EditedFile editedFile : mergeSelected.getFilesOnBranchOne()) {
                List<Committer> whoEdited = cmterDao.getWhoEditedFile(mergeSelected.getHashBase(), mergeSelected.getParents()[0], editedFile.getFileName(), mergeSelected.getPath());
                if (whoEdited.size() > 0) {
                    editedFile.setWhoEditTheFile(whoEdited);
                    files.add(editedFile);
                }
            }
            mergeSelected.setFilesOnBranchOne(files);
            // Same filtering for branch two.
            files = new LinkedList<>();
            for (EditedFile editedFile : mergeSelected.getFilesOnBranchTwo()) {
                List<Committer> whoEdited = cmterDao.getWhoEditedFile(mergeSelected.getHashBase(), mergeSelected.getParents()[1], editedFile.getFileName(), mergeSelected.getPath());
                if (whoEdited.size() > 0) {
                    editedFile.setWhoEditTheFile(whoEdited);
                    files.add(editedFile);
                }
            }
            mergeSelected.setFilesOnBranchTwo(files);
            // And for files edited in the history before the merge base
            // (from the repository's first commit up to the merge base).
            files = new LinkedList<>();
            for (EditedFile editedFile : mergeSelected.getFilesOnPreviousHistory()) {
                List<Committer> whoEdited = cmterDao.getWhoEditedFile(repoFiles.getRepository().getFirstCommit(), mergeSelected.getHashBase(), editedFile.getFileName(), mergeSelected.getPath());
                if (whoEdited.size() > 0) {
                    editedFile.setWhoEditTheFile(whoEdited);
                    files.add(editedFile);
                }
            }
            mergeSelected.setFilesOnPreviousHistory(new HashSet<>(files));
            //prints on the command line
            //showCommitters(mergeSelected);
            repoFiles.getMergeFiles().add(mergeSelected);
            this.setMergeFiles(mergeSelected);
            showResultsTable(this.getMergeFiles());
            //showResultsTable(this.getMergeFiles(),true);
            //showResIntersection(mCommits.getCommittersBothBranches());
            // Results are ready: enable export/chart/dependency actions again.
            btExport.setEnabled(true);
            btnChart1.setEnabled(true);
            btnChart2.setEnabled(true);
            jButtonDependencies.setEnabled(true);
            jLabel1.setVisible(false);
            btRun.setEnabled(true);
        };
        Thread t = new Thread(r);
        t.start();
    }//GEN-LAST:event_btRunActionPerformed

    /** Fills all four result tabs (raw commit counts, no z-scores). */
    public void showResultsTable(MergeFiles merge) { //organizes the data in the table
        showResBranch1(merge, false);
        showResBranch2(merge, false);
        showResIntersection(merge.getFilesOnBothBranch());
        showResPreviousHistory(merge, false);
    }

    /**
     * Overload with z-score display. NOTE(review): unlike the one-arg version,
     * this overload never refreshes the "Both Branches" tab — confirm whether
     * that is intentional.
     */
    public void showResultsTable(MergeFiles merge, Boolean showScoreZ) { //organizes the data in the table
        showResBranch1(merge, showScoreZ);
        showResBranch2(merge, showScoreZ);
        showResPreviousHistory(merge, showScoreZ);
    }

    /** Exports the four table models as one Excel workbook, one sheet per tab. */
    private void btExportActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btExportActionPerformed
        Map<String, TableModel> sheet = new LinkedHashMap<>();
        sheet.put("Branch1", jTable1.getModel());
        sheet.put("Branch2", jTable2.getModel());
        sheet.put("Both Branches", jTable3.getModel());
        sheet.put("Previous History", jTable4.getModel());
        Export.toExcel(sheet);
        JOptionPane.showMessageDialog(this, "File was sucessfully saved", null, JOptionPane.INFORMATION_MESSAGE);
    }//GEN-LAST:event_btExportActionPerformed

    /** Opens the dependencies window for the currently analyzed merge. */
    private void jButtonDependenciesActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButtonDependenciesActionPerformed
        JFrameDependencies filesDependence = new JFrameDependencies(this.repoFiles.getRepository(), this.getMergeFiles());
        filesDependence.setLocationRelativeTo(this);
        filesDependence.setVisible(true);
    }//GEN-LAST:event_jButtonDependenciesActionPerformed

    private void btnChart1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnChart1ActionPerformed
        // Chart 1 button: 3D bar chart of commits for the visible tab.
        newGraphic(this.getMergeFiles(), 1);
    }//GEN-LAST:event_btnChart1ActionPerformed

    private void btnChart2ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnChart2ActionPerformed
        // Chart 2 button: stacked bar chart of files modified in both branches.
        newGraphic(this.getMergeFiles(), 2);
    }//GEN-LAST:event_btnChart2ActionPerformed

    private void jcMerge1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jcMerge1ActionPerformed
        // TODO add your handling code here:
    }//GEN-LAST:event_jcMerge1ActionPerformed

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JButton btExport;
    private javax.swing.JButton btRun;
    private javax.swing.JButton btnChart1;
    private javax.swing.JButton btnChart2;
    private javax.swing.ButtonGroup buttonGroup1;
    private javax.swing.JComboBox comboFileExtension;
    private javax.swing.JLabel hash1;
    private javax.swing.JButton jButtonDependencies;
    private javax.swing.JFrame jFrame1;
    private javax.swing.JLabel jLSelecByExt;
    private javax.swing.JLabel jLabel1;
    private javax.swing.JLabel jLabel4;
    private javax.swing.JPanel jPanel1;
    private javax.swing.JScrollPane jScrollPane5;
    private javax.swing.JScrollPane jScrollPane6;
    private javax.swing.JScrollPane jScrollPane7;
    private javax.swing.JScrollPane jScrollPane8;
    private javax.swing.JTabbedPane jTabbedPane1;
    private javax.swing.JTable jTable1;
    private javax.swing.JTable jTable2;
    private javax.swing.JTable jTable3;
    private javax.swing.JTable jTable4;
    private javax.swing.JComboBox jcMerge1;
    private javax.swing.JLabel labelRepository;
    private javax.swing.JTextField txRepositoryName;
    // End of variables declaration//GEN-END:variables

    /*
    private void showCommitters(MergeFiles mergeFiles) {
        System.out.println("Merge: " + mergeFiles.getHash());
        System.out.println("Branch One");
        mergeFiles.getFilesOnBranchOne().stream().forEach((file) -> {
            System.out.println(file.getFileName());
            file.getWhoEditTheFile().stream().forEach((cmter) -> {
                System.out.println("\t" + cmter.toString());
            });
        });
        System.out.println("Branch Two");
        mergeFiles.getFilesOnBranchTwo().stream().forEach((file) -> {
            System.out.println(file.getFileName());
            file.getWhoEditTheFile().stream().forEach((cmter) -> {
                System.out.println("\t" + cmter.toString());
            });
        });
        mergeFiles.getCommittersOnMege().stream().forEach((cmt) -> {
            System.out.println(cmt.toString());
        });
    }
    */

    //shows the number of commits by committers in each file on Branch 1
    private void showResBranch1(MergeFiles mergeSelected, Boolean showScoreZ) {
        DefaultTableModel dftModel = new DefaultTableModel(new Object[]{"File name"}, 0);
        List<Committer> committers = mergeSelected.getCommittersOnBranchOne();
        //Includes columns with the names of all developers (branches 1 and 2)
        committers.stream().forEach((committer) -> {
            dftModel.addColumn(committer.getName());
        });
        //dftModel.addRow(new Object[]{"BRANCH ONE"});
        // One row per file: file name followed by per-committer commit counts.
        mergeSelected.getFilesOnBranchOne().stream().forEach((editedfile) -> {
            dftModel.addRow(getValueToRow(editedfile, committers, showScoreZ));
            //dftModel.addRow(new Object[]{file.getFileName()});
        });
        jTable1.setModel(dftModel);
    }

    //shows the number of commits by committers in each file on Branch 2
    private void showResBranch2(MergeFiles mergeSelected, Boolean showScoreZ) {
        DefaultTableModel dftModel = new DefaultTableModel(new Object[]{"File name"}, 0);
        List<Committer> committers = mergeSelected.getCommittersOnBranchTwo();
        //Includes columns with the names of all developers (branches 1 and 2)
        committers.stream().forEach((committer) -> {
            dftModel.addColumn(committer.getName());
        });
        //dftModel.addRow(new Object[]{"BRANCH TWO"});
        mergeSelected.getFilesOnBranchTwo().stream().forEach((file) -> {
            dftModel.addRow(getValueToRow(file, committers, showScoreZ));
        });
        jTable2.setModel(dftModel);
        //tbResultsBranch1.update(tbResultsBranch1.getGraphics());
    }

    // Fills the "Both Branches" tab: file names only, no per-committer counts.
    private void showResIntersection(Set<EditedFile> filesOnBothBranch) {
        DefaultTableModel dftModel = new DefaultTableModel(new Object[]{"File name"}, 0);
        filesOnBothBranch.stream().forEach((file) -> {
            dftModel.addRow(new String[]{file.getFileName(), ""});
        });
        jTable3.setModel(dftModel);
    }

    //shows the number of commits by committers in each file (changed on any branch) that was changed in the history before the branch
    private void showResPreviousHistory(MergeFiles mergeSelected, Boolean showScoreZ) {
        DefaultTableModel dftModel = new DefaultTableModel(new Object[]{"File name"}, 0);
        List<Committer> committers = mergeSelected.getCommittersOnPreviousHistory();
        // Arrays.sort(committers);
        //Includes columns with the names of all developers (branches 1 and 2)
        committers.stream().forEach((committer) -> {
            dftModel.addColumn(committer.getName());
            // System.out.println(committer.toString());
        });
        //dftModel.addRow(new Object[]{"PREVIOUS HISTORY"});
        mergeSelected.getFilesOnPreviousHistory().stream().forEach((file) -> {
            if (file.getWhoEditTheFile().size() > 0) {
                dftModel.addRow(getValueToRow(file, committers, showScoreZ));
            }
        });
        jTable4.setModel(dftModel);
    }

    /**
     * Builds one table row for a file: commit count per committer, aligned with
     * the column order of {@code committers} (null where a committer did not
     * touch the file; converted to "0" by getArrayResult).
     */
    private String[] getValueToRow(EditedFile editedFile, List<Committer> committers, Boolean showScoreZ) {
        String fileName = editedFile.getFileName();
        Integer[] values = new Integer[committers.size()];
        editedFile.getWhoEditTheFile().stream().forEach((cmtrFile) -> {
            int index = 0;
            for (Committer cmter : committers) {
                if (cmtrFile.equals(cmter)) {
                    values[index] = cmtrFile.getCommits();
                    break;
                }
                index++;
            }
        });
        String[] result = getArrayResult(fileName, values, showScoreZ);
        return result;
    }

    /**
     * @return the mergeFiles
     */
    public MergeFiles getMergeFiles() {
        return mergeFiles;
    }

    /**
     * @param mergeFiles the mergeFiles to set
     */
    public void setMergeFiles(MergeFiles mergeFiles) {
        this.mergeFiles = mergeFiles;
    }

    // Converts the counts to strings ("0" for null) and, when requested,
    // replaces them with modified z-scores from Statistics.getMZScore.
    private String[] getArrayResult(String fileName, Integer[] values, Boolean showScoreZ) {
        String[] result = new String[values.length + 1];
        result[0] = fileName;
        for (int i = 1; i < result.length; i++) {
            // Note: the ternary also writes 0 back into values[i - 1] so the
            // z-score computation below sees no nulls.
            result[i] = values[i - 1] == null ? (values[i - 1] = 0).toString() : values[i - 1].toString();
        }
        if (showScoreZ) {
            List<Double> scores = Statistics.getMZScore(values);
            for (int i = 0; i < scores.size(); i++) {
                result[i + 1] = scores.get(i).toString();
            }
            return result;
        }
        return result;
    }

    // Returns the prefix of the combo-box entry up to (not including) the first
    // space — presumably the commit hash portion of a "hash ..." label; TODO confirm.
    private String codHash(String hash) {
        String temp = hash;
        hash = "";
        String temp2;
        boolean valid = true;
        for (int i = 0; i < temp.length(); i++) {
            temp2 = String.valueOf(temp.charAt(i));
            if (temp2.equals(" ")) {
                valid = false;
            } else {
                if (valid == true) {
                    hash = hash + temp2;
                }
            }
        }
        return hash;
    }

    // Shortens long paths (> 30 chars) to their last "/" segment for chart labels.
    private String limit(String fileName) {
        int length = fileName.length();
        if (length > 30) {
            String[] parts = fileName.split("/");
            // System.out.println(parts[parts.length - 1]);
            return parts[parts.length - 1];
        }
        return fileName;
    }

    /**
     * Builds the chart dataset from whichever result table is currently showing.
     * botao == 1: per-committer commit counts (branch tabs) or file counts
     * (previous-history tab). botao == 2: stacked view of the files modified in
     * both branches, split into B1/B2/History categories.
     */
    private CategoryDataset createBranch(int botao) {
        DefaultCategoryDataset dataBranch1 = new DefaultCategoryDataset();
        JTable table12 = jTable1;
        if (jTable1.isShowing() || jTable2.isShowing()) {
            String fileName;
            if (jTable1.isShowing()) {
                table12 = jTable1;
            } else {
                if (jTable2.isShowing()) {
                    table12 = jTable2;
                }
            }
            if (botao == 1) {
                double num;
                // Columns are committers (col 0 is the file name), rows are files.
                for (int i = 1; i < table12.getColumnCount(); i++) {
                    for (int j = 0; j < table12.getRowCount(); j++) {
                        num = Integer.parseInt((String) table12.getValueAt(j, i));
                        if (num != 0) {
                            fileName = limit((String) table12.getValueAt(j, 0));
                            dataBranch1.addValue(num, fileName, table12.getColumnName(i));
                        }
                    }
                }
            }
        }
        if (jTable4.isShowing()) {
            if (botao == 1) {
                // For the history tab: count, per committer, how many files they touched.
                double cont = 0;
                for (int i = 1; i < jTable4.getColumnCount(); i++) {
                    for (int j = 0; j < jTable4.getRowCount(); j++) {
                        if (Integer.parseInt((String) jTable4.getValueAt(j, i)) == 0) {
                        } else {
                            cont++;
                        }
                    }
                    dataBranch1.addValue(cont, jTable4.getColumnName(i), "Number of Files");
                    cont = 0;
                }
            }
        }
        if (botao == 2) {
            String fileName = " ";
            // Commits on branch one for each file listed in the "Both Branches" tab.
            for (int k = 0; k < jTable3.getRowCount(); k++) {
                for (EditedFile file : mergeFiles.getFilesOnBranchOne()) {
                    for (Committer comitter : file.getWhoEditTheFile()) {
                        if (file.getFileName().equals(jTable3.getValueAt(k, 0))) {
                            if (comitter.getCommits() != 0) {
                                fileName = limit(file.getFileName());
                                dataBranch1.addValue(comitter.getCommits(), comitter.getName(), "B1 " + fileName);
                            }
                        }
                    }
                }
            }
            // Same for branch two.
            for (int k = 0; k < jTable3.getRowCount(); k++) {
                for (EditedFile file : mergeFiles.getFilesOnBranchTwo()) {
                    for (Committer comitter : file.getWhoEditTheFile()) {
                        if (file.getFileName().equals(jTable3.getValueAt(k, 0))) {
                            if (comitter.getCommits() != 0) {
                                fileName = limit(file.getFileName());
                                dataBranch1.addValue(comitter.getCommits(), comitter.getName(), "B2 " + fileName);
                            }
                        }
                    }
                }
            }
            // And the previous-history counts for those same files.
            double num;
            for (int i = 0; i < jTable3.getRowCount(); i++) {
                for (int j = 0; j < jTable4.getRowCount(); j++) {
                    if (jTable4.getValueAt(j, 0).equals(jTable3.getValueAt(i, 0))) {
                        for (int k = 1; k < jTable4.getColumnCount(); k++) {
                            num = Integer.parseInt((String) jTable4.getValueAt(j, k));
                            if (num != 0) {
                                fileName = limit((String) jTable4.getValueAt(j, 0));
                                dataBranch1.addValue(num, jTable4.getColumnName(k), "H " + fileName);
                            }
                        }
                    }
                }
            }
        }
        return dataBranch1;
    }

    /**
     * Renders the chart for the currently visible tab in the secondary frame.
     * NOTE(review): the {@code merge} parameter is never used — the data comes
     * from the tables / mergeFiles field via createBranch; confirm before removing.
     */
    public void newGraphic(MergeFiles merge, int botao) {
        jFrame1.setVisible(true);
        CategoryDataset cdsBranch1 = createBranch(botao);
        // Title follows whichever result tab is showing.
        String title = "";
        if (jTable1.isShowing()) {
            title = "Branch 1";
        }
        if (jTable2.isShowing()) {
            title = "Branch 2";
        }
        if (jTable3.isShowing()) {
            title = "Both Branches";
        }
        if (jTable4.isShowing()) {
            title = "Previous History";
        }
        JFreeChart graphic;
        if (botao == 1) {
            graphic = ChartFactory.createBarChart3D(title, "Names", "Commit", cdsBranch1, PlotOrientation.VERTICAL, true, true, true);
        } else {
            graphic = ChartFactory.createStackedBarChart("Modified Files in Both Branches", "History/Branch2/Branch1", "Commiters Name", cdsBranch1, PlotOrientation.HORIZONTAL, true, true, true);
        }
        CategoryPlot plot = graphic.getCategoryPlot();
        CategoryItemRenderer itemRerender = plot.getRenderer();
        // Show the numeric value ("{2}") on every bar, formatted as an integer.
        itemRerender.setItemLabelGenerator(new StandardCategoryItemLabelGenerator("{2}", new DecimalFormat("0")));
        itemRerender.setItemLabelsVisible(true);
        ChartPanel chartPanel = new ChartPanel(graphic);
        chartPanel.setPreferredSize(new java.awt.Dimension(590, 350));
        jFrame1.setContentPane(chartPanel);
        jFrame1.pack();
        RefineryUtilities.centerFrameOnScreen(jFrame1);
        jFrame1.setVisible(true);
    }
}
foreverzmy/zent
packages/zent/src/utils/isFirefox.js
<filename>packages/zent/src/utils/isFirefox.js // https://stackoverflow.com/questions/9847580/how-to-detect-safari-chrome-ie-firefox-and-opera-browser const isFirefox = typeof InstallTrigger !== 'undefined'; export default isFirefox;
tugayipek1/Java_Hexagonal_Architecture_FormElite
domain/src/main/java/com/phexum/formHandler/domain/account/usecase/AccountGetByIdUseCase.java
<reponame>tugayipek1/Java_Hexagonal_Architecture_FormElite package com.phexum.formHandler.domain.account.usecase; import com.phexum.formHandler.domain.account.model.AccountModel; import com.phexum.formHandler.domain.common.usecase.UseCase; public class AccountGetByIdUseCase implements UseCase { private long id; public AccountGetByIdUseCase() { } public AccountGetByIdUseCase(long id) { this.id = id; } public long getId() { return id; } }
CoprHD/sds-controller
internalLibraries/security/src/main/java/com/emc/storageos/security/keystore/impl/CoordinatorConfigStoringHelper.java
<filename>internalLibraries/security/src/main/java/com/emc/storageos/security/keystore/impl/CoordinatorConfigStoringHelper.java<gh_stars>10-100 /* * Copyright (c) 2014 EMC Corporation * All Rights Reserved */ package com.emc.storageos.security.keystore.impl; import java.io.IOException; import java.util.HashMap; import java.util.List; import java.util.Map; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.util.CollectionUtils; import com.emc.storageos.coordinator.client.service.CoordinatorClient; import com.emc.storageos.coordinator.common.Configuration; import com.emc.storageos.coordinator.common.impl.ConfigurationImpl; import com.emc.storageos.security.SerializerUtils; import org.apache.curator.framework.recipes.locks.InterProcessLock; /** * Helper class for storing and retrieving configurations in coordinator. * This class handles the use of InterProcessLock such that all updates on * the coordinator configuration are synced accross the cluseter. Also, * InterProcessLocks are being reused according to the lock name given, * which makes it possible to acquire the same lock several times on a single * thread. */ public class CoordinatorConfigStoringHelper { private static Logger log = LoggerFactory.getLogger(CoordinatorConfigStoringHelper.class); private CoordinatorClient coordinator; private final Map<String, InterProcessLock> nameLockMap; public CoordinatorConfigStoringHelper() { nameLockMap = new HashMap<String, InterProcessLock>(); } public CoordinatorConfigStoringHelper(CoordinatorClient coordinator) { this.coordinator = coordinator; nameLockMap = new HashMap<String, InterProcessLock>(); } /** * * Creates or updates a new entry of the specified type in coordinator. 
Config is * in ZK global aread /config * * @param objToPersist * the object to store in coordinator * @param lockName * the name of the lock to use while storing this object * If passed as Null, lock is assumed to be already owned * @param configKInd * @param configId * @param ConfigKey * @throws Exception */ public void createOrUpdateConfig(Object objToPersist, String lockName, String configKind, String configId, String configKey) throws Exception { createOrUpdateConfig(objToPersist, lockName, null, configKind, configId, configKey); } /** * * Creates or updates a new entry of the specified type in coordinator. If siteId * is not null, the config is in zk site specific area. Otherwise in global area * * @param objToPersist * the object to store in coordinator * @param lockName * the name of the lock to use while storing this object * If passed as Null, lock is assumed to be already owned * @param configKInd * @param configId * @param ConfigKey * @throws Exception */ public void createOrUpdateConfig(Object objToPersist, String lockName, String siteId, String configKInd, String configId, String ConfigKey) throws Exception { InterProcessLock lock = acquireLock(lockName); try { if (lock != null) { Configuration config = coordinator.queryConfiguration(siteId, configKInd, configId); ConfigurationImpl configImpl = null; if (config == null) { configImpl = new ConfigurationImpl(); configImpl.setId(configId); configImpl.setKind(configKInd); log.debug("Creating new config"); } else { configImpl = (ConfigurationImpl) config; if (config.getKind() == null) { ((ConfigurationImpl) config).setKind(configKInd); } if (config.getId() == null) { ((ConfigurationImpl) config).setId(configId); } log.debug("Updating existing config"); } configImpl.setConfig(ConfigKey, SerializerUtils.serializeAsBase64EncodedString(objToPersist)); coordinator.persistServiceConfiguration(siteId, configImpl); log.debug("Updated config successfully"); } } finally { releaseLock(lock); } } /** * Reads object of 
the specified kind from coordinator and deserializes it. Config is * in ZK global aread /config * * @param configKind * @param configId * @param ConfigKey * @return the retrieved object or null if not found * @throws ClassNotFoundException * @throws IOException */ public <T> T readConfig(String configKind, String configId, String ConfigKey) throws IOException, ClassNotFoundException { return readConfig(null, configKind, configId, ConfigKey); } /** * Reads object of the specified kind from coordinator and deserializes it. If siteId * is not null, the config is in zk site specific area. Otherwise in global area * * @param siteId * @param configKind * @param configId * @param ConfigKey * @return the retrieved object or null if not found * @throws ClassNotFoundException * @throws IOException */ public <T> T readConfig(String siteId, String configKind, String configId, String ConfigKey) throws IOException, ClassNotFoundException { Configuration config = coordinator.queryConfiguration(siteId, configKind, configId); if (config == null || config.getConfig(ConfigKey) == null) { log.debug("Config of kind " + configKind + " and id " + configId + "not found"); return null; } String serializedConfig = config.getConfig(ConfigKey); @SuppressWarnings("unchecked") T retObj = (T) SerializerUtils.deserialize(serializedConfig); return retObj; } /** * Acquires an interprocess lock * * @param lockName * the lock to acquire * @return the acquired lock * @throws Exception * if failed to acquire the lock */ public synchronized InterProcessLock acquireLock(String lockName) throws Exception { InterProcessLock lock = nameLockMap.get(lockName); if (lock == null) { lock = coordinator.getSiteLocalLock(lockName); nameLockMap.put(lockName, lock); } lock.acquire(); log.info("Acquired the lock {}", lockName); return lock; } /** * release the specified lock * * @param lock * the lock to release */ public void releaseLock(InterProcessLock lock) { try { if (lock != null) { log.info("Releasing the lock 
{}", lock.toString()); lock.release(); log.info("Released the lock {}", lock.toString()); } } catch (Exception e) { log.error("Could not release lock"); } } /** * Removes the specified config from coordinator. Config is in global area * * @param lockName * the name of the lock to use while removing this object * @param configKInd * @param configId * @throws Exception */ public void removeConfig(String lockName, String configKind, String configId) throws Exception { removeConfig(lockName, null, configKind, configId); } /** * Removes the specified config from coordinator. If siteId is not null, config * is in global area. Otherwise it is in site specific area * * @param lockName * the name of the lock to use while removing this object * @param configKInd * @param configId * @throws Exception */ public void removeConfig(String lockName, String siteId, String configKInd, String configId) throws Exception { InterProcessLock lock = acquireLock(lockName); try { Configuration config = coordinator.queryConfiguration(siteId, configKInd, configId); if (config != null) { coordinator.removeServiceConfiguration(siteId, config); log.debug("removed config successfully"); } else { log.debug("config " + configId + " of kind " + configKInd + " was not removed since it could not be found"); } } finally { releaseLock(lock); } } /** * * Removes all configs with the specified kind from coordinator * * @param lockName * the name of the lock to use while removing this object * @param configKInd * @param configId * @param ConfigKey * @throws Exception */ public void removeAllConfigOfKInd(String lockName, String configKInd) throws Exception { InterProcessLock lock = acquireLock(lockName); try { List<Configuration> configs = coordinator.queryAllConfiguration(configKInd); if (!CollectionUtils.isEmpty(configs)) { for (Configuration configuration : configs) { coordinator.removeServiceConfiguration(configuration); } } else { log.debug("configs of kind " + configKInd + " were not removed since 
none were found"); } } finally { releaseLock(lock); } } /** * Reads all objects of the specified kind from coordinator * * @param configKind * @param configId * @return the retrieved objects list or null if not found * @throws ClassNotFoundException * @throws IOException */ public <T> Map<String, T> readAllConfigs(String configKind, String configKey) throws IOException, ClassNotFoundException { List<Configuration> configsList = coordinator.queryAllConfiguration(configKind); Map<String, T> returnedObjects = new HashMap<String, T>(); if (CollectionUtils.isEmpty(configsList)) { log.debug("No config of kind " + configKind + " found"); return returnedObjects; } for (Configuration config : configsList) { String serializedConfig = config.getConfig(configKey); if (serializedConfig != null) { @SuppressWarnings("unchecked") T deserialize = (T) SerializerUtils.deserialize(serializedConfig); returnedObjects.put(config.getId(), deserialize); } } return returnedObjects; } public String getSiteId() { return coordinator.getSiteId(); } /** * @param coordinator the coordinator to set */ public void setCoordinator(CoordinatorClient coordinator) { this.coordinator = coordinator; } }
jelitox/navigator-api
navigator/modules/session/middlewares.py
from typing import Any, Callable, Dict, List, Optional
# import ujson as json
import json  # TODO: using rapidjson
from aiohttp import web
from aiohttp.web import middleware
from navigator.middlewares import check_path

# TODO: Middleware Class to avoid repeat check_path($0)


@middleware
async def django_session(request, handler):
    """Authenticate a request against an existing Django session.

    Looks up the session id in the ``sessionid`` (or ``X-Sessionid``)
    header, decodes the Django session, and attaches ``user_id`` and
    ``session`` to the request.  Returns a 403 JSON response when the
    session id is unknown or cannot be applied.

    Bug fixed vs. the original: a ``return`` inside ``finally`` used to
    override every 403 response (and swallow exceptions), so the error
    paths were never delivered to the client.
    """
    if not check_path(request.path):
        return await handler(request)

    # `headers.get` never raises, so fall back explicitly instead of
    # relying on an exception handler (the old fallback was dead code).
    session_id = request.headers.get("sessionid", None)
    if session_id is None:
        session_id = request.headers.get("X-Sessionid", None)

    if session_id is None:
        # TODO: authorization
        return await handler(request)

    try:
        # first: clear session
        session = request.app["django_session"]
        await session.logout()  # clear existing session
        if not await session.decode(key=session_id):
            message = {
                "code": 403,
                "message": "Invalid Session",
                "reason": "Unknown Session ID",
            }
            return web.json_response({"error": message}, status=403)
    except Exception as err:
        # Decoding infrastructure failed: log and fall through to the
        # handler, preserving the original best-effort behavior.
        print("Error Decoding Session: {}, {}".format(err, err.__class__))
        return await handler(request)

    try:
        request["user_id"] = session["user_id"]
        request["session"] = session
    except Exception as err:
        # TODO: response to an auth error
        message = {
            "code": 403,
            "message": "Invalid Session or Authentication Error",
            "reason": str(err),
        }
        return web.json_response({"error": message}, status=403)

    return await handler(request)
devkral/spkbspider
spkcspider/apps/spider/serializing.py
__all__ = [
    "paginate_stream", "serialize_stream", "serialize_content",
    "serialize_component", "list_features"
]

from rdflib import RDF, XSD, Literal, URIRef

from django.core.paginator import InvalidPage, Paginator
from django.db.models import Q, prefetch_related_objects
from django.http import Http404

from spkcspider.constants import VariantType, spkcgraph
from spkcspider.utils.fields import add_property

from .conf import get_anchor_domain


# TODO replace by proper tree search (connect by/recursive query)
def references_q(ids, limit, prefix=""):
    """Build a Q filter matching the given ids plus everything that
    references them, up to ``limit`` levels of ``referenced_by`` hops.

    Each iteration adds one more ``referenced_by__`` segment to the
    lookup path, so depth i matches i-step back-references.
    """
    ids = set(ids)
    q = Q()
    for i in range(0, limit+1):
        q |= Q(**{"{}{}id__in".format(prefix, "referenced_by__"*i): ids})
    return q


def list_features(graph, entity, ref_entity, context):
    """Add the active features of a UserComponent or content to the graph.

    Emits a ``features`` property listing feature names; outside of
    export scope it also links each feature's action URLs.
    ``entity`` may be a UserComponent (use its own features) or a
    content (union of its features and its component's features).
    """
    # local import to avoid a circular import with .models
    from .models import UserComponent, ContentVariant
    if not ref_entity:
        return
    if isinstance(entity, UserComponent):
        active_features = entity.features.all()
    else:
        active_features = ContentVariant.objects.filter(
            Q(feature_for_contents=entity) |
            Q(feature_for_components=entity.usercomponent)
        )
    add_property(
        graph, "features", ref=ref_entity,
        literal=active_features.values_list("name", flat=True),
        datatype=XSD.string,
        iterate=True
    )
    for feature in active_features:
        if context["scope"] != "export":
            for name, url_feature in feature.feature_urls:
                ref_feature = URIRef("{}{}".format(
                    context["hostpart"], url_feature
                ))
                graph.add((
                    ref_entity, spkcgraph["action:feature"], ref_feature
                ))
                graph.add((
                    ref_feature, spkcgraph["feature:name"],
                    Literal(name, datatype=XSD.string)
                ))


def serialize_content(graph, content, context, embed=False):
    """Serialize one content into the RDF graph and return its URIRef.

    Anchor-type contents get URLs on the anchor domain; everything else
    uses ``context["hostpart"]``.  Idempotent: returns early if the node
    is already in the graph.  With ``embed=True`` the content's own
    serializer and its feature list are included as well.
    """
    if VariantType.anchor in content.ctype.ctype:
        url_content = "{}{}".format(
            get_anchor_domain(), content.get_absolute_url()
        )
    else:
        url_content = "{}{}".format(
            context["hostpart"], content.get_absolute_url()
        )
    ref_content = URIRef(url_content)
    # is already node in graph
    if (ref_content, spkcgraph["type"], None) in graph:
        return ref_content
    if (
        context.get("ac_namespace", None) and
        context["sourceref"] != ref_content
    ):
        graph.add((
            context["sourceref"], context["ac_namespace"], ref_content
        ))
    add_property(
        graph, "name", ref=ref_content, ob=content, datatype=XSD.string
    )
    add_property(
        graph, "description", ref=ref_content, ob=content, datatype=XSD.string
    )
    add_property(
        graph, "info", ref=ref_content, ob=content, datatype=XSD.string
    )
    add_property(
        graph, "id", ref=ref_content, literal=content.id, datatype=XSD.integer
    )
    add_property(
        graph, "priority", ref=ref_content, ob=content
    )
    if (
        VariantType.component_feature in content.ctype.ctype or
        VariantType.content_feature in content.ctype.ctype
    ):
        graph.add((
            ref_content, RDF["type"], spkcgraph["spkc:Feature"]
        ))
    else:
        graph.add((
            ref_content, RDF["type"], spkcgraph["spkc:Content"]
        ))
    # always add type info to content (which content type)
    graph.add((
        ref_content, spkcgraph["type"],
        Literal(content.ctype.name, datatype=XSD.string)
    ))
    if context["scope"] == "export":
        add_property(
            graph, "attached_to_content", ref=ref_content, ob=content
        )
        add_property(
            graph, "features", ref=ref_content,
            literal=content.features.exclude(
                Q(name="DomainMode") | Q(name="DefaultActions")
            ).values_list("name", flat=True),
            datatype=XSD.string, iterate=True
        )
    if embed:
        list_features(graph, content, ref_content, context)
        content.content.serialize(graph, ref_content, context)
    return ref_content


def serialize_component(graph, component, context, visible=True):
    """Serialize one UserComponent into the graph; return its URIRef.

    Returns None for non-public components when ``visible`` is False
    (unless the component is itself the source of the request).  Export
    scope additionally includes access-control attributes and features.
    """
    # visible: everything is visible elsewise only public
    ref_component = URIRef("{}{}".format(
        context["hostpart"], component.get_absolute_url()
    ))
    if component.public:
        visible = True
    if not visible and ref_component != context["sourceref"]:
        return None
    graph.set((
        ref_component, spkcgraph["type"],
        Literal("Component", datatype=XSD.string)
    ))
    graph.add((
        ref_component, RDF["type"], spkcgraph["spkc:Component"]
    ))
    if component.primary_anchor:
        url_content = "{}{}".format(
            context["hostpart"],
            component.primary_anchor.get_absolute_url()
        )
        add_property(
            graph, "primary_anchor", ref=ref_component,
            literal=url_content, datatype=XSD.anyURI
        )
    if component.public or context["scope"] == "export":
        add_property(
            graph, "user", ref=ref_component, literal=component.username
        )
        add_property(
            graph, "name", ref=ref_component, literal=component.__str__()
        )
        add_property(
            graph, "description", ref=ref_component, ob=component
        )
    if context["scope"] == "export":
        add_property(
            graph, "required_passes", ref=ref_component, ob=component
        )
        add_property(
            graph, "token_duration", ref=ref_component, ob=component
        )
        graph.add((
            ref_component, spkcgraph["strength"], Literal(component.strength)
        ))
        add_property(
            graph, "features", ref=ref_component,
            literal=component.features.exclude(
                name="DomainMode"
            ).values_list("name", flat=True),
            datatype=XSD.string, iterate=True
        )
    if (
        context.get("uc_namespace", None) and
        context["sourceref"] != ref_component
    ):
        graph.add((
            context["sourceref"], context["uc_namespace"], ref_component
        ))
    return ref_component


def paginate_stream(query, page_size, limit_depth):
    """Return a Paginator over the queryset, expanded and stably ordered.

    AssignedContent querysets are expanded to include everything that
    references the initial ids (up to ``limit_depth`` hops) and ordered
    by (usercomponent id, id) so contents of a component are contiguous.
    """
    # WARNING: if AssignedContent queryset is empty
    # no usercomponent can be retrieved
    # so don't use AssignedContent queryset if serializing an
    # empty usercomponent
    from .models import AssignedContent
    if query.model == AssignedContent:
        query = AssignedContent.objects.filter(
            references_q(
                query.values_list("id", flat=True),
                limit_depth
            )
        )
        query = query.distinct().order_by("usercomponent__id", "id")
    else:
        query = query.order_by("id")
    return Paginator(
        query, page_size, orphans=0, allow_empty_first_page=False
    )


def serialize_stream(
    graph, paginators, context, page=1, embed=False,
    restrict_inclusion=True, restrict_embed=False
):
    """Serialize one page from each paginator into the graph.

    Page metadata (num_pages / size_page) is only emitted on page 1.
    UserComponent pages serialize components directly; content pages
    group contents under their (possibly carried-over) component.
    Raises Http404 only when the page is invalid for *every* paginator.
    """
    # restrict_inclusion: only public components of contents are included
    # restrict_embed: only contents with no restrictions are embedded
    from .models import UserComponent
    if not isinstance(paginators, (tuple, list)):
        paginators = [paginators]
    assert isinstance(page, int)
    if page <= 1:
        num_pages = max(map(lambda p: p.num_pages, paginators))
        per_page = sum(map(lambda p: p.per_page, paginators))
        graph.add((
            context["sourceref"],
            spkcgraph["pages.num_pages"],
            Literal(num_pages, datatype=XSD.positiveInteger)
        ))
        graph.add((
            context["sourceref"],
            spkcgraph["pages.size_page"],
            Literal(per_page, datatype=XSD.positiveInteger)
        ))
    graph.add((
        context["sourceref"],
        spkcgraph["pages.current_page"],
        Literal(page, datatype=XSD.positiveInteger)
    ))
    invalid_pages = 0
    for paginator in paginators:
        try:
            page_view = paginator.get_page(page)
            # for mysql
            object_list = list(page_view.object_list)
            # error if page is out of bound
            if page > paginator.num_pages:
                raise InvalidPage()
        except InvalidPage:
            invalid_pages += 1
            continue
        if paginator.object_list.model == UserComponent:
            if embed:
                prefetch_related_objects(
                    object_list,
                    "contents", "contents__ctype", "contents__datacontent"
                )
            for component in object_list:
                ref_component = serialize_component(
                    graph, component, context
                )
                list_features(graph, component, ref_component, context)
        else:
            # either start with invalid usercomponent which will be replaced
            # or use bottom-1 usercomponent to detect split
            if page <= 1:
                usercomponent = None
                ref_component = None
            else:
                # first object of this page; its component was already
                # serialized on a previous page, so only the URI is rebuilt
                _pos = page_view.start_index() - 1
                usercomponent = paginator.object_list[_pos]
                ref_component = URIRef("{}{}".format(
                    context["hostpart"],
                    usercomponent.get_absolute_url()
                ))
            prefetch_related_objects(
                object_list, "ctype", "datacontent"
            )
            for content in object_list:
                if usercomponent != content.usercomponent:
                    # crossed into a new component: serialize it once
                    usercomponent = content.usercomponent
                    ref_component = serialize_component(
                        graph, usercomponent, context,
                        visible=not restrict_inclusion
                    )
                    list_features(
                        graph, usercomponent, ref_component, context
                    )
                _embed = embed
                if restrict_embed and content.usercomponent.strength != 0:
                    _embed = False
                ref_content = serialize_content(
                    graph, content, context, embed=_embed
                )
                if ref_component:
                    graph.add((
                        ref_component, spkcgraph["contents"], ref_content
                    ))
    if invalid_pages == len(paginators):
        raise Http404('Invalid page (%(page_number)s)' % {
            'page_number': page
        })
mfeindt0705/pynetmf
lesson5/jinja_filter_1.py
#!/usr/bin/env python3
"""Simple exercise for Jinja2 rendering.

Loads ``filter_template_2.j2`` from the current directory and renders it
with an empty variable context; StrictUndefined makes any undefined
variable in the template raise instead of rendering silently as empty.

Fixed: the shebang was ``#!/user/bin/env`` (typo), which broke direct
execution of the script.
"""
from __future__ import unicode_literals, print_function
from jinja2 import FileSystemLoader, StrictUndefined
from jinja2.environment import Environment

env = Environment(undefined=StrictUndefined)
env.loader = FileSystemLoader(".")

# No template variables needed for this exercise.
intf_vars = {}

template_file = "filter_template_2.j2"
template = env.get_template(template_file)
output = template.render(**intf_vars)
print(output)
vtHawk/streets-gl
src/js/renderer/Texture3D.js
/**
 * Wrapper around a WebGL2 3D texture (TEXTURE_3D).
 *
 * `params` supports: url, imageHeight, minFilter, magFilter, wrap
 * ('repeat' | 'clamp'), width, height, depth, format, internalFormat,
 * type, data. Filter/format names are WebGL constant names looked up on
 * the GL context at upload time.
 */
export default class Texture3D {
	constructor(renderer, params) {
		this.gl = renderer.gl;

		this.url = params.url;
		this.imageHeight = params.imageHeight;
		this.minFilter = params.minFilter || 'LINEAR_MIPMAP_LINEAR';
		this.magFilter = params.magFilter || 'LINEAR';
		this.wrap = params.wrap || 'repeat';
		this.width = params.width || 1;
		this.height = params.height || 1;
		// note: original assigned params.depth twice; only this defaulted
		// assignment had any effect, so the redundant one was removed
		this.depth = params.depth || 1;
		this.format = params.format || 'RED';
		this.internalFormat = params.internalFormat || 'R8';
		this.type = params.type || 'UNSIGNED_BYTE';
		this.data = params.data || null;

		this.WebGLTexture = this.gl.createTexture();
	}

	/**
	 * Uploads `data` into the texture, sets filtering parameters,
	 * generates mipmaps and applies the wrapping mode.
	 */
	write(data) {
		this.data = data;

		this.gl.bindTexture(this.gl.TEXTURE_3D, this.WebGLTexture);
		this.gl.texParameteri(this.gl.TEXTURE_3D, this.gl.TEXTURE_BASE_LEVEL, 0);
		this.gl.texParameteri(this.gl.TEXTURE_3D, this.gl.TEXTURE_MIN_FILTER, this.gl[this.minFilter]);
		this.gl.texParameteri(this.gl.TEXTURE_3D, this.gl.TEXTURE_MAG_FILTER, this.gl[this.magFilter]);
		this.gl.texImage3D(this.gl.TEXTURE_3D, 0, this.gl[this.internalFormat], this.width, this.height, this.depth, 0, this.gl[this.format], this.gl[this.type], this.data);
		this.gl.generateMipmap(this.gl.TEXTURE_3D);

		this.updateWrapping();
	}

	/**
	 * Applies the S/T wrapping mode. Unknown `wrap` values are ignored
	 * (GL defaults remain in effect).
	 */
	updateWrapping() {
		let value = null;

		switch(this.wrap) {
			case 'repeat':
				value = this.gl.REPEAT;
				break;
			case 'clamp':
				value = this.gl.CLAMP_TO_EDGE;
				break;
		}

		if(value) {
			this.gl.texParameteri(this.gl.TEXTURE_3D, this.gl.TEXTURE_WRAP_S, value);
			this.gl.texParameteri(this.gl.TEXTURE_3D, this.gl.TEXTURE_WRAP_T, value);
		}
	}
}
Khirion/nau
nau/src/nau/render/opengl/glProgram.cpp
<filename>nau/src/nau/render/opengl/glProgram.cpp #include "nau/render/opengl/glProgram.h" #include "nau.h" #include "nau/config.h" #include "nau/slogger.h" #include "nau/geometry/vertexData.h" #include "nau/material/uniformBlockManager.h" #include "nau/render/iRenderer.h" #include "nau/system/file.h" #include "nau/system/textutil.h" //#include <GL/glew.h> using namespace nau::render; using namespace nau::system; // STATIC METHOD GLenum GLProgram::ShaderGLId[IProgram::SHADER_COUNT] = {GL_VERTEX_SHADER, GL_GEOMETRY_SHADER, GL_TESS_CONTROL_SHADER, GL_TESS_EVALUATION_SHADER, GL_FRAGMENT_SHADER, GL_COMPUTE_SHADER, GL_TASK_SHADER_NV, GL_MESH_SHADER_NV }; // CONSTRUCTORS GLProgram::GLProgram() : m_NumUniforms (0), m_MaxLength (0), m_PLinked (false), m_ShowGlobalUniforms (false), m_Name("default") { m_P = glCreateProgram(); m_Shaders.resize(SHADER_COUNT); } GLProgram::~GLProgram() { if (m_P != 0) glDeleteProgram(m_P); for (int i = 0; i < SHADER_COUNT; ++i) { if (m_Shaders[i].id != 0) glDeleteShader(m_Shaders[i].id); } } bool GLProgram::areCompiled() { bool res = true; for (int i = 0; i < SHADER_COUNT; ++i) { if (m_Shaders[i].id != 0) res = res && m_Shaders[i].compiled; } return(res); } bool GLProgram::isCompiled(IProgram::ShaderType type) { bool res = true; if (m_Shaders[type].id != 0) res = res && m_Shaders[type].compiled; return res; } bool GLProgram::isLinked() { return(m_PLinked); } void GLProgram::getAttributeNames(std::vector<std::string>* s) { int k; GLsizei len, siz; GLenum typ; char name[256]; glGetProgramiv(m_P, GL_ACTIVE_ATTRIBUTES, &k); for (unsigned int i = 0; i < (unsigned int)k; ++i) { glGetActiveAttrib(m_P, i, 256, &len, &siz, &typ, name); s->push_back(name); } } void GLProgram::setName(const std::string &name) { m_Name = name; } const std::string & GLProgram::getName() { return(m_Name); } bool GLProgram::loadShader (IProgram::ShaderType type, const std::vector<std::string> &files) { if (!isShaderSupported(type)) return false; if (type == 
TESS_CONTROL_SHADER || type == TESS_EVALUATION_SHADER) m_HasTessShader = true; if (true == setShaderFiles(type,files)) { m_Shaders[type].compiled = compileShader(type); return m_Shaders[type].compiled; } else return false; } const std::vector<std::string> & GLProgram::getShaderFiles(ShaderType type) { return m_Shaders[type].files; } void GLProgram::files2source(IProgram::ShaderType type) { // loop on filenames to create string array std::string source; m_Shaders[type].source = (char **)malloc(sizeof (char*) * m_Shaders[type].files.size()); for (int i = 0; i < m_Shaders[type].files.size(); ++i) { if (i > 0) source = "#line 1 " + std::to_string(i) + "\n"; source += nau::system::File::TextRead(m_Shaders[type].files[i]); m_Shaders[type].source[i] = (char *)malloc(sizeof(char) * source.size() + 2); memcpy(m_Shaders[type].source[i], source.c_str(), source.size()); char *aux = m_Shaders[type].source[i]; aux[source.size()] = '\n'; aux[source.size()+1] = '\0'; } } bool GLProgram::setShaderFiles (IProgram::ShaderType type, const std::vector<std::string> &files) { if (!isShaderSupported(type)) return false; // MUST SPLIT FILENAMES - separator: comma m_Shaders[type].files = files; // check if files exist for (int i = 0; i < m_Shaders[type].files.size(); ++i) if (!nau::system::File::Exists(m_Shaders[type].files[i])) return false; // reset shader if (files.size() == 0 && m_Shaders[type].id != 0) { glDetachShader(m_P, (GLuint)ShaderGLId[type]); glDeleteShader((GLuint)ShaderGLId[type]); m_Shaders[type].id = 0; m_Shaders[type].attached = false; m_Shaders[type].compiled = false; for (int i = 0; i < m_Shaders[type].files.size(); ++i) { free(m_Shaders[type].source[i]); } free(m_Shaders[type].source); m_Shaders[type].source = NULL; m_Shaders[type].files.clear(); return true; } // if first time if (m_Shaders[type].id == 0) { m_Shaders[type].id = glCreateShader(ShaderGLId[type]); } // init shader variables m_Shaders[type].attached = false; m_Shaders[type].compiled = false; m_PLinked = 
false; files2source(type); // set shader source glShaderSource (m_Shaders[type].id, (GLsizei)m_Shaders[type].files.size(), m_Shaders[type].source, NULL); return true; } bool GLProgram::setValueOfUniform (const std::string &name, void *values) { int i; i = findUniform (name); if (-1 == i) { return false; } m_Uniforms[i].setValues (values); setValueOfUniform(i); return true; } bool GLProgram::setValueOfUniform(int loc, void *values) { int i = findUniformByLocation(loc); if (-1 == i) { return false; } m_Uniforms[i].setValues(values); setValueOfUniform(i); return true; } int GLProgram::getUniformLocation(std::string name) { int i = findUniform(name); if (-1 == i) return -1; else return m_Uniforms[i].getLoc(); } bool GLProgram::reloadShaderFile (IProgram::ShaderType type) { if (!isShaderSupported(type)) return false; m_Shaders[type].compiled = false; m_PLinked = false; for (int i = 0; i < m_Shaders[type].files.size(); ++i) { free(m_Shaders[type].source[i]); } free(m_Shaders[type].source); m_Shaders[type].source = NULL; files2source(type); glShaderSource(m_Shaders[type].id, (GLsizei)m_Shaders[type].files.size(), m_Shaders[type].source, NULL); return true; } bool GLProgram::reload (void) { for (int i = 0; i < SHADER_COUNT; ++i) { reloadShaderFile((IProgram::ShaderType)i); m_Shaders[i].compiled = compileShader((IProgram::ShaderType)i); } if (areCompiled()) { m_PLinked = linkProgram(); } return m_PLinked; } int GLProgram::programValidate() { int v; glGetProgramiv(m_P,GL_VALIDATE_STATUS,&v); return v; } bool GLProgram::compileShader (IProgram::ShaderType type) { int r; if (m_Shaders[type].id != 0) { glCompileShader (m_Shaders[type].id); glGetShaderiv (m_Shaders[type].id, GL_COMPILE_STATUS, &r); m_Shaders[type].compiled = (1 == r); m_PLinked = false; return (m_Shaders[type].compiled); } else return true; } bool GLProgram::linkProgram() { int r; if (!areCompiled()) { return false; } unsigned int index; for (index = 0; index < VertexData::MaxAttribs; index++) { 
glBindAttribLocation(m_P, index , VertexData::Syntax[index].c_str()); } for (int i = 0; i < SHADER_COUNT; ++i) { if (m_Shaders[i].id != 0 && m_Shaders[i].attached == false) { glAttachShader(m_P, m_Shaders[i].id); m_Shaders[i].attached = true; } } glLinkProgram (m_P); glUseProgram (m_P); glGetProgramiv (m_P, GL_LINK_STATUS, &r); m_PLinked = (1 == r); glGetProgramiv (m_P, GL_ACTIVE_UNIFORMS, &m_NumUniforms); glGetProgramiv (m_P, GL_ACTIVE_UNIFORM_MAX_LENGTH, &m_MaxLength); setUniforms(); setBlocks(); glUseProgram(0); return (m_PLinked); } int GLProgram::getNumberOfUniforms() { if (true == m_PLinked) { return (m_NumUniforms); } else { return (-1); } } int GLProgram::getAttributeLocation (const std::string &name) { return glGetAttribLocation (m_P, name.c_str()); } void GLProgram::useProgram (void) { if (m_Shaders[FRAGMENT_SHADER].id == 0 && m_Shaders[COMPUTE_SHADER].id == 0) glEnable(GL_RASTERIZER_DISCARD); else glDisable(GL_RASTERIZER_DISCARD); if (true == m_PLinked) { glUseProgram (m_P); } else { glUseProgram (0); } } unsigned int GLProgram::getProgramID() { return m_P; } void GLProgram::showGlobalUniforms (void) { m_ShowGlobalUniforms = !m_ShowGlobalUniforms; } bool GLProgram::prepare (void) { // glUseProgram(m_P); // return true; if (m_PLinked) { useProgram(); return true; } else return false; } void GLProgram::prepareBlocks(void) { UniformBlockManager *blockMan = UNIFORMBLOCKMANAGER; IUniformBlock *block; std::string blockName; for (auto b : m_Blocks) { blockName = b.first; block = blockMan->getBlock(blockName); block->useBlock(); } } bool GLProgram::restore (void) { glUseProgram (0); return true; } int GLProgram::findUniform (const std::string &name) { GLUniform uni; int i = 0; bool found (false); std::vector<GLUniform>::iterator it; for (it = m_Uniforms.begin(); it != m_Uniforms.end() && !found; it++) { //uni = *it; if ((*it).getName() == name) { found = true; } else { i++; } } if (true == found) { return (i); } else { return (-1); } } int 
GLProgram::findUniformByLocation(int loc) { GLUniform uni; int i = 0; bool found(false); std::vector<GLUniform>::iterator it; for (it = m_Uniforms.begin(); it != m_Uniforms.end() && !found; it++) { //uni = *it; if ((*it).getLoc() == loc) { found = true; } else { i++; } } if (true == found) { return (i); } else { return (-1); } } const GLUniform& GLProgram::getUniform(const std::string &name) { int i = findUniform (name); if (-1 == i) { i = 0; } return (m_Uniforms[i]); } void GLProgram::getUniformBlockNames(std::vector<std::string>* s) { for (auto b : m_Blocks) { s->push_back(b.first); } } const IUniform& GLProgram::getIUniform(int i) { assert((unsigned int) i < m_Uniforms.size()); return (m_Uniforms[i]); } void GLProgram::setValueOfUniform (int i) { m_Uniforms[i].setValueInProgram(); } void GLProgram::setBlocks() { int count, dataSize, actualLen, activeUnif, maxUniLength; int uniType, uniSize, uniOffset, uniMatStride, uniArrayStride, auxSize; char name[256], name2[256]; unsigned int indices[256]; IUniformBlock *block; UniformBlockManager *blockMan = UNIFORMBLOCKMANAGER; glGetProgramiv(m_P, GL_ACTIVE_UNIFORM_BLOCKS, &count); for (int i = 0; i < count; ++i) { // Get buffers name glGetActiveUniformBlockiv(m_P, i, GL_UNIFORM_BLOCK_NAME_LENGTH, &actualLen); //name = (char *)malloc(sizeof(char) * actualLen); glGetActiveUniformBlockName(m_P, i, actualLen, NULL, name); glGetActiveUniformBlockiv(m_P, i, GL_UNIFORM_BLOCK_DATA_SIZE, &dataSize); bool newBlock = true; std::string sName = name; if (blockMan->hasBlock(sName)) { newBlock = false; block = blockMan->getBlock(sName); if (block->getSize() != dataSize) NAU_THROW("Block %s is already defined with a different size", name); } // /*if (!spBlocks.count(name))*/ { // // Get buffers size // //block = spBlocks[name]; // // //printf("DataSize:%d\n", dataSize); if (newBlock) { blockMan->addBlock(sName, dataSize); block = blockMan->getBlock(sName); block->setBindingIndex(blockMan->getCurrentBindingIndex()); IBuffer *b = 
block->getBuffer(); b->bind((unsigned int)GL_UNIFORM_BUFFER); glBufferData(GL_UNIFORM_BUFFER, dataSize, NULL, GL_DYNAMIC_DRAW); glUniformBlockBinding(m_P, i, blockMan->getCurrentBindingIndex()); glBindBufferRange(GL_UNIFORM_BUFFER, blockMan->getCurrentBindingIndex(), block->getBuffer()->getPropi(IBuffer::ID), 0, dataSize); } else { block = blockMan->getBlock(sName); IBuffer *b = block->getBuffer(); b->bind((unsigned int)GL_UNIFORM_BUFFER); glUniformBlockBinding(m_P, i, block->getBindingIndex()); glBindBufferRange(GL_UNIFORM_BUFFER, block->getBindingIndex(), block->getBuffer()->getPropi(IBuffer::ID), 0, dataSize); } m_Blocks[name] = i; glGetActiveUniformBlockiv(m_P, i, GL_UNIFORM_BLOCK_ACTIVE_UNIFORMS, &activeUnif); //indices = (unsigned int *)malloc(sizeof(unsigned int) * activeUnif); glGetActiveUniformBlockiv(m_P, i, GL_UNIFORM_BLOCK_ACTIVE_UNIFORM_INDICES, (int *)indices); glGetProgramiv(m_P, GL_ACTIVE_UNIFORM_MAX_LENGTH, &maxUniLength); //name2 = (char *)malloc(sizeof(char) * maxUniLength); for (int k = 0; k < activeUnif; ++k) { glGetActiveUniformName(m_P, indices[k], maxUniLength, &actualLen, name2); glGetActiveUniformsiv(m_P, 1, &indices[k], GL_UNIFORM_TYPE, &uniType); glGetActiveUniformsiv(m_P, 1, &indices[k], GL_UNIFORM_SIZE, &uniSize); glGetActiveUniformsiv(m_P, 1, &indices[k], GL_UNIFORM_OFFSET, &uniOffset); glGetActiveUniformsiv(m_P, 1, &indices[k], GL_UNIFORM_MATRIX_STRIDE, &uniMatStride); glGetActiveUniformsiv(m_P, 1, &indices[k], GL_UNIFORM_ARRAY_STRIDE, &uniArrayStride); if (uniArrayStride > 0) auxSize = uniArrayStride * uniSize; else if (uniMatStride > 0) { switch (uniType) { case (int)GL_FLOAT_MAT2: case (int)GL_FLOAT_MAT2x3: case (int)GL_FLOAT_MAT2x4: case (int)GL_DOUBLE_MAT2: case (int)GL_DOUBLE_MAT2x3: case (int)GL_DOUBLE_MAT2x4: auxSize = 2 * uniMatStride; break; case (int)GL_FLOAT_MAT3: case (int)GL_FLOAT_MAT3x2: case (int)GL_FLOAT_MAT3x4: case (int)GL_DOUBLE_MAT3: case (int)GL_DOUBLE_MAT3x2: case (int)GL_DOUBLE_MAT3x4: auxSize = 3 * 
uniMatStride; break; case (int)GL_FLOAT_MAT4: case (int)GL_FLOAT_MAT4x2: case (int)GL_FLOAT_MAT4x3: case (int)GL_DOUBLE_MAT4: case (int)GL_DOUBLE_MAT4x2: case (int)GL_DOUBLE_MAT4x3: auxSize = 4 * uniMatStride; break; } } else auxSize = Enums::getSize(GLUniform::spSimpleType[(GLenum)uniType]) * uniSize;; std::string uniName = name2; block->addUniform(uniName, GLUniform::spSimpleType[(GLenum)uniType], uniOffset, auxSize, uniArrayStride); } } } void GLProgram::setUniforms() { int i,index,len,size; unsigned int type; char *name = new char [m_MaxLength + 1]; GLUniform uni; // set all types = NOT_USED std::vector<GLUniform>::iterator it; for(it = m_Uniforms.begin(); it != m_Uniforms.end(); it++) { it->setGLType(GLUniform::NOT_USED, 0); } // add new uniforms and reset types for previous uniforms for (i = 0; i < m_NumUniforms; i++) { glGetActiveUniform (m_P, i, m_MaxLength, &len, &size, (GLenum *)&type, name); int loc = glGetUniformLocation(m_P, name); if (loc != -1) { std::string n(name); index = findUniform (n); if (-1 != index) { m_Uniforms[index].setGLType(type,size); m_Uniforms[index].setLoc (loc); } else { uni.reset(); std::string ProgName (name); uni.setName (ProgName); uni.setGLType(type,size); uni.setLoc (loc); m_Uniforms.push_back (uni); } } } // delete all uniforms where type is NOT_USED for(it = m_Uniforms.begin(), i = 0; it != m_Uniforms.end(); i++ ) { if (it->getGLType() == GLUniform::NOT_USED) { it = m_Uniforms.erase(it); } else { ++it; } } m_NumUniforms = (int)m_Uniforms.size(); delete name; } void GLProgram::updateUniforms() { for (int i = 0; i < m_NumUniforms; i++) { glGetUniformfv (m_P, m_Uniforms[i].getLoc(), (float *)m_Uniforms[i].getValues()); } } const GLUniform& GLProgram::getUniform (int i) { if (i < m_NumUniforms) { return (m_Uniforms[i]); } else { return (m_Uniforms[0]); } } std::string GLProgram::getShaderInfoLog(ShaderType type) { // GLuint shader; int infologLength = 0; int charsWritten = 0; std::string res; char *infoLog; if 
(m_Shaders[type].id == 0) return ""; res = IProgram::ShaderNames[type] + ": OK"; glGetShaderiv (m_Shaders[type].id, GL_INFO_LOG_LENGTH, &infologLength); if (infologLength > 1) { infoLog = new char[infologLength]; glGetShaderInfoLog (m_Shaders[type].id, infologLength, &charsWritten, infoLog); res.assign(infoLog); delete infoLog; } return (res); } const std::string & GLProgram::getProgramInfoLog() { int infologLength = 0; int charsWritten = 0; glGetProgramiv (m_P, GL_INFO_LOG_LENGTH, &infologLength); if (infologLength > 1) { char *infoLog = new char[infologLength]; glGetProgramInfoLog (m_P, infologLength, &charsWritten, infoLog); m_ReturnString = infoLog; delete infoLog; } else { m_ReturnString = "Program: OK"; } return(m_ReturnString); } int GLProgram::getNumberOfUserUniforms() { int count = 0; if (true == m_PLinked) { for (int i = 0; i < m_NumUniforms; i++) { if (m_Uniforms[i].getName().substr(0,3) != "gl_" ) { count++; } } } return (count); } bool GLProgram::getPropertyb(int query) { int res; glGetProgramiv(m_P, (GLenum)query, &res); return (res != 0); } int GLProgram::getPropertyi(int query) { int res; glGetProgramiv(m_P, (GLenum)query, &res); return (res); }
diegows/wanproxy
network/uinet/sys/contrib/octeon-sdk/cvmx-usb.h
/***********************license start*************** * Copyright (c) 2003-2010 <NAME> (<EMAIL>). All rights * reserved. * * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided * with the distribution. * * Neither the name of Cavium Networks nor the names of * its contributors may be used to endorse or promote products * derived from this software without specific prior written * permission. * This Software, including technical data, may be subject to U.S. export control * laws, including the U.S. Export Administration Act and its associated * regulations, and may be subject to export or import regulations in other * countries. * TO THE MAXIMUM EXTENT PERMITTED BY LAW, THE SOFTWARE IS PROVIDED "AS IS" * AND WITH ALL FAULTS AND CAVIUM NETWORKS MAKES NO PROMISES, REPRESENTATIONS OR * WARRANTIES, EITHER EXPRESS, IMPLIED, STATUTORY, OR OTHERWISE, WITH RESPECT TO * THE SOFTWARE, INCLUDING ITS CONDITION, ITS CONFORMITY TO ANY REPRESENTATION OR * DESCRIPTION, OR THE EXISTENCE OF ANY LATENT OR PATENT DEFECTS, AND CAVIUM * SPECIFICALLY DISCLAIMS ALL IMPLIED (IF ANY) WARRANTIES OF TITLE, * MERCHANTABILITY, NONINFRINGEMENT, FITNESS FOR A PARTICULAR PURPOSE, LACK OF * VIRUSES, ACCURACY OR COMPLETENESS, QUIET ENJOYMENT, QUIET POSSESSION OR * CORRESPONDENCE TO DESCRIPTION. THE ENTIRE RISK ARISING OUT OF USE OR * PERFORMANCE OF THE SOFTWARE LIES WITH YOU. 
***********************license end**************************************/ /** * @file * * "cvmx-usb.h" defines a set of low level USB functions to help * developers create Octeon USB drivers for various operating * systems. These functions provide a generic API to the Octeon * USB blocks, hiding the internal hardware specific * operations. * * At a high level the device driver needs to: * * -# Call cvmx_usb_get_num_ports() to get the number of * supported ports. * -# Call cvmx_usb_initialize() for each Octeon USB port. * -# Enable the port using cvmx_usb_enable(). * -# Either periodically, or in an interrupt handler, call * cvmx_usb_poll() to service USB events. * -# Manage pipes using cvmx_usb_open_pipe() and * cvmx_usb_close_pipe(). * -# Manage transfers using cvmx_usb_submit_*() and * cvmx_usb_cancel*(). * -# Shutdown USB on unload using cvmx_usb_shutdown(). * * To monitor USB status changes, the device driver must use * cvmx_usb_register_callback() to register for events that it * is interested in. Below are a few hints on successfully * implementing a driver on top of this API. * * <h2>Initialization</h2> * * When a driver is first loaded, it is normally not necessary * to bring up the USB port completely. Most operating systems * expect to initialize and enable the port in two independent * steps. Normally an operating system will probe hardware, * initialize anything found, and then enable the hardware. * * In the probe phase you should: * -# Use cvmx_usb_get_num_ports() to determine the number of * USB port to be supported. * -# Allocate space for a cvmx_usb_state_t structure for each * port. * -# Tell the operating system about each port * * In the initialization phase you should: * -# Use cvmx_usb_initialize() on each port. * -# Do not call cvmx_usb_enable(). This leaves the USB port in * the disabled state until the operating system is ready. * * Finally, in the enable phase you should: * -# Call cvmx_usb_enable() on the appropriate port. 
* -# Note that some operating system use a RESET instead of an * enable call. To implement RESET, you should call * cvmx_usb_disable() followed by cvmx_usb_enable(). * * <h2>Locking</h2> * * All of the functions in the cvmx-usb API assume exclusive * access to the USB hardware and internal data structures. This * means that the driver must provide locking as necessary. * * In the single CPU state it is normally enough to disable * interrupts before every call to cvmx_usb*() and enable them * again after the call is complete. Keep in mind that it is * very common for the callback handlers to make additional * calls into cvmx-usb, so the disable/enable must be protected * against recursion. As an example, the Linux kernel * local_irq_save() and local_irq_restore() are perfect for this * in the non SMP case. * * In the SMP case, locking is more complicated. For SMP you not * only need to disable interrupts on the local core, but also * take a lock to make sure that another core cannot call * cvmx-usb. * * <h2>Port callback</h2> * * The port callback prototype needs to look as follows: * * void port_callback(cvmx_usb_state_t *usb, * cvmx_usb_callback_t reason, * cvmx_usb_complete_t status, * int pipe_handle, * int submit_handle, * int bytes_transferred, * void *user_data); * - @b usb is the cvmx_usb_state_t for the port. * - @b reason will always be * CVMX_USB_CALLBACK_PORT_CHANGED. * - @b status will always be CVMX_USB_COMPLETE_SUCCESS. * - @b pipe_handle will always be -1. * - @b submit_handle will always be -1. * - @b bytes_transferred will always be 0. * - @b user_data is the void pointer originally passed along * with the callback. Use this for any state information you * need. * * The port callback will be called whenever the user plugs / * unplugs a device from the port. It will not be called when a * device is plugged / unplugged from a hub connected to the * root port. 
Normally all the callback needs to do is tell the * operating system to poll the root hub for status. Under * Linux, this is performed by calling usb_hcd_poll_rh_status(). * In the Linux driver we use @b user_data. to pass around the * Linux "hcd" structure. Once the port callback completes, * Linux automatically calls octeon_usb_hub_status_data() which * uses cvmx_usb_get_status() to determine the root port status. * * <h2>Complete callback</h2> * * The completion callback prototype needs to look as follows: * * void complete_callback(cvmx_usb_state_t *usb, * cvmx_usb_callback_t reason, * cvmx_usb_complete_t status, * int pipe_handle, * int submit_handle, * int bytes_transferred, * void *user_data); * - @b usb is the cvmx_usb_state_t for the port. * - @b reason will always be * CVMX_USB_CALLBACK_TRANSFER_COMPLETE. * - @b status will be one of the cvmx_usb_complete_t * enumerations. * - @b pipe_handle is the handle to the pipe the transaction * was originally submitted on. * - @b submit_handle is the handle returned by the original * cvmx_usb_submit_* call. * - @b bytes_transferred is the number of bytes successfully * transferred in the transaction. This will be zero on most * error conditions. * - @b user_data is the void pointer originally passed along * with the callback. Use this for any state information you * need. For example, the Linux "urb" is stored in here in the * Linux driver. * * In general your callback handler should use @b status and @b * bytes_transferred to tell the operating system the how the * transaction completed. Normally the pipe is not changed in * this callback. * * <h2>Canceling transactions</h2> * * When a transaction is cancelled using cvmx_usb_cancel*(), the * actual length of time until the complete callback is called * can vary greatly. It may be called before cvmx_usb_cancel*() * returns, or it may be called a number of usb frames in the * future once the hardware frees the transaction. 
In either of
 * these cases, the complete handler will receive
 * CVMX_USB_COMPLETE_CANCEL.
 *
 * <h2>Handling pipes</h2>
 *
 * USB "pipes" are a software construct created by this API to
 * enable the ordering of usb transactions to a device endpoint.
 * Octeon's underlying hardware doesn't have any concept
 * equivalent to "pipes". The hardware instead has eight
 * channels that can be used simultaneously to have up to eight
 * transactions in process at the same time. In order to maintain
 * ordering in a pipe, the transactions for a pipe will only be
 * active in one hardware channel at a time. From an API user's
 * perspective, this doesn't matter but it can be helpful to
 * keep this in mind when you are probing hardware while
 * debugging.
 *
 * Also keep in mind that usb transactions contain state
 * information about the previous transaction to the same
 * endpoint. Each transaction has a PID toggle that changes 0/1
 * between each sub packet. This is maintained in the pipe data
 * structures. For this reason, you generally cannot create and
 * destroy a pipe for every transaction. A sequence of
 * transactions to the same endpoint must use the same pipe.
 *
 * <h2>Root Hub</h2>
 *
 * Some operating systems view the usb root port as a normal usb
 * hub. These systems attempt to control the root hub with
 * messages similar to the usb 2.0 spec for hub control and
 * status. For these systems it may be necessary to write a
 * function to decode standard usb control messages into
 * equivalent cvmx-usb API calls. As an example, the following
 * code is used under Linux for some of the basic hub control
 * messages.
* * @code * static int octeon_usb_hub_control(struct usb_hcd *hcd, u16 typeReq, u16 wValue, u16 wIndex, char *buf, u16 wLength) * { * cvmx_usb_state_t *usb = (cvmx_usb_state_t *)hcd->hcd_priv; * cvmx_usb_port_status_t usb_port_status; * int port_status; * struct usb_hub_descriptor *desc; * unsigned long flags; * * switch (typeReq) * { * case ClearHubFeature: * DEBUG_ROOT_HUB("OcteonUSB: ClearHubFeature\n"); * switch (wValue) * { * case C_HUB_LOCAL_POWER: * case C_HUB_OVER_CURRENT: * // Nothing required here * break; * default: * return -EINVAL; * } * break; * case ClearPortFeature: * DEBUG_ROOT_HUB("OcteonUSB: ClearPortFeature"); * if (wIndex != 1) * { * DEBUG_ROOT_HUB(" INVALID\n"); * return -EINVAL; * } * * switch (wValue) * { * case USB_PORT_FEAT_ENABLE: * DEBUG_ROOT_HUB(" ENABLE"); * local_irq_save(flags); * cvmx_usb_disable(usb); * local_irq_restore(flags); * break; * case USB_PORT_FEAT_SUSPEND: * DEBUG_ROOT_HUB(" SUSPEND"); * // Not supported on Octeon * break; * case USB_PORT_FEAT_POWER: * DEBUG_ROOT_HUB(" POWER"); * // Not supported on Octeon * break; * case USB_PORT_FEAT_INDICATOR: * DEBUG_ROOT_HUB(" INDICATOR"); * // Port inidicator not supported * break; * case USB_PORT_FEAT_C_CONNECTION: * DEBUG_ROOT_HUB(" C_CONNECTION"); * // Clears drivers internal connect status change flag * cvmx_usb_set_status(usb, cvmx_usb_get_status(usb)); * break; * case USB_PORT_FEAT_C_RESET: * DEBUG_ROOT_HUB(" C_RESET"); * // Clears the driver's internal Port Reset Change flag * cvmx_usb_set_status(usb, cvmx_usb_get_status(usb)); * break; * case USB_PORT_FEAT_C_ENABLE: * DEBUG_ROOT_HUB(" C_ENABLE"); * // Clears the driver's internal Port Enable/Disable Change flag * cvmx_usb_set_status(usb, cvmx_usb_get_status(usb)); * break; * case USB_PORT_FEAT_C_SUSPEND: * DEBUG_ROOT_HUB(" C_SUSPEND"); * // Clears the driver's internal Port Suspend Change flag, * which is set when resume signaling on the host port is * complete * break; * case USB_PORT_FEAT_C_OVER_CURRENT: * 
DEBUG_ROOT_HUB(" C_OVER_CURRENT"); * // Clears the driver's overcurrent Change flag * cvmx_usb_set_status(usb, cvmx_usb_get_status(usb)); * break; * default: * DEBUG_ROOT_HUB(" UNKNOWN\n"); * return -EINVAL; * } * DEBUG_ROOT_HUB("\n"); * break; * case GetHubDescriptor: * DEBUG_ROOT_HUB("OcteonUSB: GetHubDescriptor\n"); * desc = (struct usb_hub_descriptor *)buf; * desc->bDescLength = 9; * desc->bDescriptorType = 0x29; * desc->bNbrPorts = 1; * desc->wHubCharacteristics = 0x08; * desc->bPwrOn2PwrGood = 1; * desc->bHubContrCurrent = 0; * desc->bitmap[0] = 0; * desc->bitmap[1] = 0xff; * break; * case GetHubStatus: * DEBUG_ROOT_HUB("OcteonUSB: GetHubStatus\n"); * *(__le32 *)buf = 0; * break; * case GetPortStatus: * DEBUG_ROOT_HUB("OcteonUSB: GetPortStatus"); * if (wIndex != 1) * { * DEBUG_ROOT_HUB(" INVALID\n"); * return -EINVAL; * } * * usb_port_status = cvmx_usb_get_status(usb); * port_status = 0; * * if (usb_port_status.connect_change) * { * port_status |= (1 << USB_PORT_FEAT_C_CONNECTION); * DEBUG_ROOT_HUB(" C_CONNECTION"); * } * * if (usb_port_status.port_enabled) * { * port_status |= (1 << USB_PORT_FEAT_C_ENABLE); * DEBUG_ROOT_HUB(" C_ENABLE"); * } * * if (usb_port_status.connected) * { * port_status |= (1 << USB_PORT_FEAT_CONNECTION); * DEBUG_ROOT_HUB(" CONNECTION"); * } * * if (usb_port_status.port_enabled) * { * port_status |= (1 << USB_PORT_FEAT_ENABLE); * DEBUG_ROOT_HUB(" ENABLE"); * } * * if (usb_port_status.port_over_current) * { * port_status |= (1 << USB_PORT_FEAT_OVER_CURRENT); * DEBUG_ROOT_HUB(" OVER_CURRENT"); * } * * if (usb_port_status.port_powered) * { * port_status |= (1 << USB_PORT_FEAT_POWER); * DEBUG_ROOT_HUB(" POWER"); * } * * if (usb_port_status.port_speed == CVMX_USB_SPEED_HIGH) * { * port_status |= (1 << USB_PORT_FEAT_HIGHSPEED); * DEBUG_ROOT_HUB(" HIGHSPEED"); * } * else if (usb_port_status.port_speed == CVMX_USB_SPEED_LOW) * { * port_status |= (1 << USB_PORT_FEAT_LOWSPEED); * DEBUG_ROOT_HUB(" LOWSPEED"); * } * * *((__le32 *)buf) = 
cpu_to_le32(port_status); * DEBUG_ROOT_HUB("\n"); * break; * case SetHubFeature: * DEBUG_ROOT_HUB("OcteonUSB: SetHubFeature\n"); * // No HUB features supported * break; * case SetPortFeature: * DEBUG_ROOT_HUB("OcteonUSB: SetPortFeature"); * if (wIndex != 1) * { * DEBUG_ROOT_HUB(" INVALID\n"); * return -EINVAL; * } * * switch (wValue) * { * case USB_PORT_FEAT_SUSPEND: * DEBUG_ROOT_HUB(" SUSPEND\n"); * return -EINVAL; * case USB_PORT_FEAT_POWER: * DEBUG_ROOT_HUB(" POWER\n"); * return -EINVAL; * case USB_PORT_FEAT_RESET: * DEBUG_ROOT_HUB(" RESET\n"); * local_irq_save(flags); * cvmx_usb_disable(usb); * if (cvmx_usb_enable(usb)) * DEBUG_ERROR("Failed to enable the port\n"); * local_irq_restore(flags); * return 0; * case USB_PORT_FEAT_INDICATOR: * DEBUG_ROOT_HUB(" INDICATOR\n"); * // Not supported * break; * default: * DEBUG_ROOT_HUB(" UNKNOWN\n"); * return -EINVAL; * } * break; * default: * DEBUG_ROOT_HUB("OcteonUSB: Unknown root hub request\n"); * return -EINVAL; * } * return 0; * } * @endcode * * <h2>Interrupts</h2> * * If you plan on using usb interrupts, cvmx_usb_poll() must be * called on every usb interrupt. It will read the usb state, * call any needed callbacks, and schedule transactions as * needed. Your device driver needs only to hookup an interrupt * handler and call cvmx_usb_poll(). Octeon's usb port 0 causes * CIU bit CIU_INT*_SUM0[USB] to be set (bit 56). For port 1, * CIU bit CIU_INT_SUM1[USB1] is set (bit 17). How these bits * are turned into interrupt numbers is operating system * specific. For Linux, there are the convenient defines * OCTEON_IRQ_USB0 and OCTEON_IRQ_USB1 for the IRQ numbers. * * If you aren't using interrupts, simple call cvmx_usb_poll() * in your main processing loop. * * <hr>$Revision: 32636 $<hr> */ #ifndef __CVMX_USB_H__ #define __CVMX_USB_H__ #ifdef __cplusplus extern "C" { #endif /** * Enumerations representing the status of function calls. 
*/
typedef enum
{
    CVMX_USB_SUCCESS = 0,           /**< There were no errors */
    CVMX_USB_INVALID_PARAM = -1,    /**< A parameter to the function was invalid */
    CVMX_USB_NO_MEMORY = -2,        /**< Insufficient resources were available for the
                                         request */
    CVMX_USB_BUSY = -3,             /**< The resource is busy and cannot service the
                                         request */
    CVMX_USB_TIMEOUT = -4,          /**< Waiting for an action timed out */
    CVMX_USB_INCORRECT_MODE = -5,   /**< The function call doesn't work in the current
                                         USB mode. This happens when host only functions
                                         are called in device mode or vice versa */
} cvmx_usb_status_t;

/**
 * Enumerations representing the possible USB device speeds
 */
typedef enum
{
    CVMX_USB_SPEED_HIGH = 0,        /**< Device is operating at 480Mbps */
    CVMX_USB_SPEED_FULL = 1,        /**< Device is operating at 12Mbps */
    CVMX_USB_SPEED_LOW = 2,         /**< Device is operating at 1.5Mbps */
} cvmx_usb_speed_t;

/**
 * Enumeration representing the possible USB transfer types.
 */
typedef enum
{
    CVMX_USB_TRANSFER_CONTROL = 0,      /**< USB transfer type control for hub and
                                             status transfers */
    CVMX_USB_TRANSFER_ISOCHRONOUS = 1,  /**< USB transfer type isochronous for low
                                             priority periodic transfers */
    CVMX_USB_TRANSFER_BULK = 2,         /**< USB transfer type bulk for large low
                                             priority transfers */
    CVMX_USB_TRANSFER_INTERRUPT = 3,    /**< USB transfer type interrupt for high
                                             priority periodic transfers */
} cvmx_usb_transfer_t;

/**
 * Enumeration of the transfer directions
 */
typedef enum
{
    CVMX_USB_DIRECTION_OUT,         /**< Data is transferring from Octeon to the
                                         device/host */
    CVMX_USB_DIRECTION_IN,          /**< Data is transferring from the device/host to
                                         Octeon */
} cvmx_usb_direction_t;

/**
 * Enumeration of all possible status codes passed to callback
 * functions.
*/
typedef enum
{
    CVMX_USB_COMPLETE_SUCCESS,      /**< The transaction / operation finished without
                                         any errors */
    CVMX_USB_COMPLETE_SHORT,        /**< FIXME: This is currently not implemented */
    CVMX_USB_COMPLETE_CANCEL,       /**< The transaction was canceled while in flight
                                         by a user call to cvmx_usb_cancel* */
    CVMX_USB_COMPLETE_ERROR,        /**< The transaction aborted with an unexpected
                                         error status */
    CVMX_USB_COMPLETE_STALL,        /**< The transaction received a USB STALL response
                                         from the device */
    CVMX_USB_COMPLETE_XACTERR,      /**< The transaction failed with an error from the
                                         device even after a number of retries */
    CVMX_USB_COMPLETE_DATATGLERR,   /**< The transaction failed with a data toggle
                                         error even after a number of retries */
    CVMX_USB_COMPLETE_BABBLEERR,    /**< The transaction failed with a babble error */
    CVMX_USB_COMPLETE_FRAMEERR,     /**< The transaction failed with a frame error
                                         even after a number of retries */
} cvmx_usb_complete_t;

/**
 * Structure returned containing the USB port status information.
 */
typedef struct
{
    uint32_t reserved           : 25;
    uint32_t port_enabled       : 1; /**< 1 = Usb port is enabled, 0 = disabled */
    uint32_t port_over_current  : 1; /**< 1 = Over current detected, 0 = Over current
                                          not detected. Octeon doesn't support over
                                          current detection */
    uint32_t port_powered       : 1; /**< 1 = Port power is being supplied to the
                                          device, 0 = power is off. Octeon doesn't
                                          support turning port power off */
    cvmx_usb_speed_t port_speed : 2; /**< Current port speed */
    uint32_t connected          : 1; /**< 1 = A device is connected to the port,
                                          0 = No device is connected */
    uint32_t connect_change     : 1; /**< 1 = Device connected state changed since
                                          the last set status call */
} cvmx_usb_port_status_t;

/**
 * This is the structure of a Control packet header
 */
typedef union
{
    uint64_t u64;
    struct
    {
        uint64_t request_type   : 8;  /**< Bit 7 tells the direction: 1=IN, 0=OUT */
        uint64_t request        : 8;  /**< The standard usb request to make */
        uint64_t value          : 16; /**< Value parameter for the request in little
                                           endian format */
        uint64_t index          : 16; /**< Index for the request in little endian
                                           format */
        uint64_t length         : 16; /**< Length of the data associated with this
                                           request in little endian format */
    } s;
} cvmx_usb_control_header_t;

/**
 * Descriptor for Isochronous packets
 */
typedef struct
{
    int offset;                  /**< This is the offset in bytes into the main buffer
                                      where this data is stored */
    int length;                  /**< This is the length in bytes of the data */
    cvmx_usb_complete_t status;  /**< This is the status of this individual packet
                                      transfer */
} cvmx_usb_iso_packet_t;

/**
 * Possible callback reasons for the USB API.
 */
typedef enum
{
    CVMX_USB_CALLBACK_TRANSFER_COMPLETE,
                                    /**< A callback of this type is called when a
                                         submitted transfer completes. The completion
                                         callback will be called even if the transfer
                                         fails or is canceled. The status parameter
                                         will contain details of why the callback was
                                         called. */
    CVMX_USB_CALLBACK_PORT_CHANGED, /**< The status of the port changed. For example,
                                         someone may have plugged a device in. The
                                         status parameter contains
                                         CVMX_USB_COMPLETE_SUCCESS. Use
                                         cvmx_usb_get_status() to get the new port
                                         status. */
    __CVMX_USB_CALLBACK_END         /**< Do not use. Used internally for array bounds */
} cvmx_usb_callback_t;

/**
 * USB state internal data. The contents of this structure
 * may change in future SDKs.
No data in it should be referenced
 * by users of this API.
 */
typedef struct
{
    char data[65536];
} cvmx_usb_state_t;

/**
 * USB callback functions are always of the following type.
 * The parameters are as follows:
 * - state = USB device state populated by
 *   cvmx_usb_initialize().
 * - reason = The cvmx_usb_callback_t used to register
 *   the callback.
 * - status = The cvmx_usb_complete_t representing the
 *   status code of a transaction.
 * - pipe_handle = The Pipe that caused this callback, or
 *   -1 if this callback wasn't associated with a pipe.
 * - submit_handle = Transfer submit handle causing this
 *   callback, or -1 if this callback wasn't associated
 *   with a transfer.
 * - bytes_transferred = Actual number of bytes transferred.
 * - user_data = The user pointer supplied to the
 *   function cvmx_usb_submit() or
 *   cvmx_usb_register_callback()
 */
typedef void (*cvmx_usb_callback_func_t)(cvmx_usb_state_t *state,
                                         cvmx_usb_callback_t reason,
                                         cvmx_usb_complete_t status,
                                         int pipe_handle,
                                         int submit_handle,
                                         int bytes_transferred,
                                         void *user_data);

/**
 * Flags to pass the initialization function.
 */
typedef enum
{
    CVMX_USB_INITIALIZE_FLAGS_CLOCK_XO_XI = 1<<0,       /**< The USB port uses a 12MHz
                                                             crystal as clock source at
                                                             USB_XO and USB_XI. */
    CVMX_USB_INITIALIZE_FLAGS_CLOCK_XO_GND = 1<<1,      /**< The USB port uses
                                                             12/24/48MHz 2.5V board
                                                             clock source at USB_XO.
                                                             USB_XI should be tied to
                                                             GND.*/
    CVMX_USB_INITIALIZE_FLAGS_CLOCK_AUTO = 0,           /**< Automatically determine
                                                             clock type based on
                                                             function in
                                                             cvmx-helper-board.c. */
    CVMX_USB_INITIALIZE_FLAGS_CLOCK_MHZ_MASK = 3<<3,    /**< Mask for clock speed
                                                             field */
    CVMX_USB_INITIALIZE_FLAGS_CLOCK_12MHZ = 1<<3,       /**< Speed of reference clock
                                                             or crystal */
    CVMX_USB_INITIALIZE_FLAGS_CLOCK_24MHZ = 2<<3,       /**< Speed of reference clock */
    CVMX_USB_INITIALIZE_FLAGS_CLOCK_48MHZ = 3<<3,       /**< Speed of reference clock */
    /* Bits 3-4 used to encode the clock frequency */
    CVMX_USB_INITIALIZE_FLAGS_NO_DMA = 1<<5,            /**< Disable DMA and used
                                                             polled IO for data
                                                             transfer use for the USB */
    CVMX_USB_INITIALIZE_FLAGS_DEBUG_TRANSFERS = 1<<16,  /**< Enable extra console
                                                             output for debugging USB
                                                             transfers */
    CVMX_USB_INITIALIZE_FLAGS_DEBUG_CALLBACKS = 1<<17,  /**< Enable extra console
                                                             output for debugging USB
                                                             callbacks */
    CVMX_USB_INITIALIZE_FLAGS_DEBUG_INFO = 1<<18,       /**< Enable extra console
                                                             output for USB
                                                             informational data */
    CVMX_USB_INITIALIZE_FLAGS_DEBUG_CALLS = 1<<19,      /**< Enable extra console
                                                             output for every function
                                                             call */
    CVMX_USB_INITIALIZE_FLAGS_DEBUG_CSRS = 1<<20,       /**< Enable extra console
                                                             output for every CSR
                                                             access */
    CVMX_USB_INITIALIZE_FLAGS_DEBUG_ALL = ((CVMX_USB_INITIALIZE_FLAGS_DEBUG_CSRS<<1)-1) - (CVMX_USB_INITIALIZE_FLAGS_DEBUG_TRANSFERS-1),
} cvmx_usb_initialize_flags_t;

/**
 * Flags for passing when a pipe is created. Currently no flags
 * need to be passed.
 */
typedef enum
{
    CVMX_USB_PIPE_FLAGS_DEBUG_TRANSFERS = 1<<15,/**< Used to display
                                                     CVMX_USB_INITIALIZE_FLAGS_DEBUG_TRANSFERS
                                                     for a specific pipe only */
    __CVMX_USB_PIPE_FLAGS_OPEN = 1<<16,         /**< Used internally to determine if a
                                                     pipe is open. Do not use */
    __CVMX_USB_PIPE_FLAGS_SCHEDULED = 1<<17,    /**< Used internally to determine if a
                                                     pipe is actively using hardware.
                                                     Do not use */
    __CVMX_USB_PIPE_FLAGS_NEED_PING = 1<<18,    /**< Used internally to determine if a
                                                     high speed pipe is in the ping
                                                     state. Do not use */
} cvmx_usb_pipe_flags_t;

/**
 * Return the number of USB ports supported by this Octeon
 * chip.
If the chip doesn't support USB, or is not supported * by this API, a zero will be returned. Most Octeon chips * support one usb port, but some support two ports. * cvmx_usb_initialize() must be called on independent * cvmx_usb_state_t structures. * * @return Number of port, zero if usb isn't supported */ extern int cvmx_usb_get_num_ports(void); /** * Initialize a USB port for use. This must be called before any * other access to the Octeon USB port is made. The port starts * off in the disabled state. * * @param state Pointer to an empty cvmx_usb_state_t structure * that will be populated by the initialize call. * This structure is then passed to all other USB * functions. * @param usb_port_number * Which Octeon USB port to initialize. * @param flags Flags to control hardware initialization. See * cvmx_usb_initialize_flags_t for the flag * definitions. Some flags are mandatory. * * @return CVMX_USB_SUCCESS or a negative error code defined in * cvmx_usb_status_t. */ extern cvmx_usb_status_t cvmx_usb_initialize(cvmx_usb_state_t *state, int usb_port_number, cvmx_usb_initialize_flags_t flags); /** * Shutdown a USB port after a call to cvmx_usb_initialize(). * The port should be disabled with all pipes closed when this * function is called. * * @param state USB device state populated by * cvmx_usb_initialize(). * * @return CVMX_USB_SUCCESS or a negative error code defined in * cvmx_usb_status_t. */ extern cvmx_usb_status_t cvmx_usb_shutdown(cvmx_usb_state_t *state); /** * Enable a USB port. After this call succeeds, the USB port is * online and servicing requests. * * @param state USB device state populated by * cvmx_usb_initialize(). * * @return CVMX_USB_SUCCESS or a negative error code defined in * cvmx_usb_status_t. */ extern cvmx_usb_status_t cvmx_usb_enable(cvmx_usb_state_t *state); /** * Disable a USB port. After this call the USB port will not * generate data transfers and will not generate events. 
* Transactions in process will fail and call their * associated callbacks. * * @param state USB device state populated by * cvmx_usb_initialize(). * * @return CVMX_USB_SUCCESS or a negative error code defined in * cvmx_usb_status_t. */ extern cvmx_usb_status_t cvmx_usb_disable(cvmx_usb_state_t *state); /** * Get the current state of the USB port. Use this call to * determine if the usb port has anything connected, is enabled, * or has some sort of error condition. The return value of this * call has "changed" bits to signal of the value of some fields * have changed between calls. These "changed" fields are based * on the last call to cvmx_usb_set_status(). In order to clear * them, you must update the status through cvmx_usb_set_status(). * * @param state USB device state populated by * cvmx_usb_initialize(). * * @return Port status information */ extern cvmx_usb_port_status_t cvmx_usb_get_status(cvmx_usb_state_t *state); /** * Set the current state of the USB port. The status is used as * a reference for the "changed" bits returned by * cvmx_usb_get_status(). Other than serving as a reference, the * status passed to this function is not used. No fields can be * changed through this call. * * @param state USB device state populated by * cvmx_usb_initialize(). * @param port_status * Port status to set, most like returned by cvmx_usb_get_status() */ extern void cvmx_usb_set_status(cvmx_usb_state_t *state, cvmx_usb_port_status_t port_status); /** * Open a virtual pipe between the host and a USB device. A pipe * must be opened before data can be transferred between a device * and Octeon. * * @param state USB device state populated by * cvmx_usb_initialize(). * @param flags Optional pipe flags defined in * cvmx_usb_pipe_flags_t. * @param device_addr * USB device address to open the pipe to * (0-127). * @param endpoint_num * USB endpoint number to open the pipe to * (0-15). * @param device_speed * The speed of the device the pipe is going * to. 
This must match the device's speed, * which may be different than the port speed. * @param max_packet The maximum packet length the device can * transmit/receive (low speed=0-8, full * speed=0-1023, high speed=0-1024). This value * comes from the standard endpoint descriptor * field wMaxPacketSize bits <10:0>. * @param transfer_type * The type of transfer this pipe is for. * @param transfer_dir * The direction the pipe is in. This is not * used for control pipes. * @param interval For ISOCHRONOUS and INTERRUPT transfers, * this is how often the transfer is scheduled * for. All other transfers should specify * zero. The units are in frames (8000/sec at * high speed, 1000/sec for full speed). * @param multi_count * For high speed devices, this is the maximum * allowed number of packet per microframe. * Specify zero for non high speed devices. This * value comes from the standard endpoint descriptor * field wMaxPacketSize bits <12:11>. * @param hub_device_addr * Hub device address this device is connected * to. Devices connected directly to Octeon * use zero. This is only used when the device * is full/low speed behind a high speed hub. * The address will be of the high speed hub, * not and full speed hubs after it. * @param hub_port Which port on the hub the device is * connected. Use zero for devices connected * directly to Octeon. Like hub_device_addr, * this is only used for full/low speed * devices behind a high speed hub. * * @return A non negative value is a pipe handle. Negative * values are failure codes from cvmx_usb_status_t. */ extern int cvmx_usb_open_pipe(cvmx_usb_state_t *state, cvmx_usb_pipe_flags_t flags, int device_addr, int endpoint_num, cvmx_usb_speed_t device_speed, int max_packet, cvmx_usb_transfer_t transfer_type, cvmx_usb_direction_t transfer_dir, int interval, int multi_count, int hub_device_addr, int hub_port); /** * Call to submit a USB Bulk transfer to a pipe. * * @param state USB device state populated by * cvmx_usb_initialize(). 
* @param pipe_handle * Handle to the pipe for the transfer. * @param buffer Physical address of the data buffer in * memory. Note that this is NOT A POINTER, but * the full 64bit physical address of the * buffer. This may be zero if buffer_length is * zero. * @param buffer_length * Length of buffer in bytes. * @param callback Function to call when this transaction * completes. If the return value of this * function isn't an error, then this function * is guaranteed to be called when the * transaction completes. If this parameter is * NULL, then the generic callback registered * through cvmx_usb_register_callback is * called. If both are NULL, then there is no * way to know when a transaction completes. * @param user_data User supplied data returned when the * callback is called. This is only used if * callback in not NULL. * * @return A submitted transaction handle or negative on * failure. Negative values are failure codes from * cvmx_usb_status_t. */ extern int cvmx_usb_submit_bulk(cvmx_usb_state_t *state, int pipe_handle, uint64_t buffer, int buffer_length, cvmx_usb_callback_func_t callback, void *user_data); /** * Call to submit a USB Interrupt transfer to a pipe. * * @param state USB device state populated by * cvmx_usb_initialize(). * @param pipe_handle * Handle to the pipe for the transfer. * @param buffer Physical address of the data buffer in * memory. Note that this is NOT A POINTER, but * the full 64bit physical address of the * buffer. This may be zero if buffer_length is * zero. * @param buffer_length * Length of buffer in bytes. * @param callback Function to call when this transaction * completes. If the return value of this * function isn't an error, then this function * is guaranteed to be called when the * transaction completes. If this parameter is * NULL, then the generic callback registered * through cvmx_usb_register_callback is * called. If both are NULL, then there is no * way to know when a transaction completes. 
* @param user_data User supplied data returned when the * callback is called. This is only used if * callback in not NULL. * * @return A submitted transaction handle or negative on * failure. Negative values are failure codes from * cvmx_usb_status_t. */ extern int cvmx_usb_submit_interrupt(cvmx_usb_state_t *state, int pipe_handle, uint64_t buffer, int buffer_length, cvmx_usb_callback_func_t callback, void *user_data); /** * Call to submit a USB Control transfer to a pipe. * * @param state USB device state populated by * cvmx_usb_initialize(). * @param pipe_handle * Handle to the pipe for the transfer. * @param control_header * USB 8 byte control header physical address. * Note that this is NOT A POINTER, but the * full 64bit physical address of the buffer. * @param buffer Physical address of the data buffer in * memory. Note that this is NOT A POINTER, but * the full 64bit physical address of the * buffer. This may be zero if buffer_length is * zero. * @param buffer_length * Length of buffer in bytes. * @param callback Function to call when this transaction * completes. If the return value of this * function isn't an error, then this function * is guaranteed to be called when the * transaction completes. If this parameter is * NULL, then the generic callback registered * through cvmx_usb_register_callback is * called. If both are NULL, then there is no * way to know when a transaction completes. * @param user_data User supplied data returned when the * callback is called. This is only used if * callback in not NULL. * * @return A submitted transaction handle or negative on * failure. Negative values are failure codes from * cvmx_usb_status_t. */ extern int cvmx_usb_submit_control(cvmx_usb_state_t *state, int pipe_handle, uint64_t control_header, uint64_t buffer, int buffer_length, cvmx_usb_callback_func_t callback, void *user_data); /** * Flags to pass the cvmx_usb_submit_isochronous() function. 
*/ typedef enum { CVMX_USB_ISOCHRONOUS_FLAGS_ALLOW_SHORT = 1<<0, /**< Do not return an error if a transfer is less than the maximum packet size of the device */ CVMX_USB_ISOCHRONOUS_FLAGS_ASAP = 1<<1, /**< Schedule the transaction as soon as possible */ } cvmx_usb_isochronous_flags_t; /** * Call to submit a USB Isochronous transfer to a pipe. * * @param state USB device state populated by * cvmx_usb_initialize(). * @param pipe_handle * Handle to the pipe for the transfer. * @param start_frame * Number of frames into the future to schedule * this transaction. * @param flags Flags to control the transfer. See * cvmx_usb_isochronous_flags_t for the flag * definitions. * @param number_packets * Number of sequential packets to transfer. * "packets" is a pointer to an array of this * many packet structures. * @param packets Description of each transfer packet as * defined by cvmx_usb_iso_packet_t. The array * pointed to here must stay valid until the * complete callback is called. * @param buffer Physical address of the data buffer in * memory. Note that this is NOT A POINTER, but * the full 64bit physical address of the * buffer. This may be zero if buffer_length is * zero. * @param buffer_length * Length of buffer in bytes. * @param callback Function to call when this transaction * completes. If the return value of this * function isn't an error, then this function * is guaranteed to be called when the * transaction completes. If this parameter is * NULL, then the generic callback registered * through cvmx_usb_register_callback is * called. If both are NULL, then there is no * way to know when a transaction completes. * @param user_data User supplied data returned when the * callback is called. This is only used if * callback in not NULL. * * @return A submitted transaction handle or negative on * failure. Negative values are failure codes from * cvmx_usb_status_t. 
*/ extern int cvmx_usb_submit_isochronous(cvmx_usb_state_t *state, int pipe_handle, int start_frame, int flags, int number_packets, cvmx_usb_iso_packet_t packets[], uint64_t buffer, int buffer_length, cvmx_usb_callback_func_t callback, void *user_data); /** * Cancel one outstanding request in a pipe. Canceling a request * can fail if the transaction has already completed before cancel * is called. Even after a successful cancel call, it may take * a frame or two for the cvmx_usb_poll() function to call the * associated callback. * * @param state USB device state populated by * cvmx_usb_initialize(). * @param pipe_handle * Pipe handle to cancel requests in. * @param submit_handle * Handle to transaction to cancel, returned by the submit function. * * @return CVMX_USB_SUCCESS or a negative error code defined in * cvmx_usb_status_t. */ extern cvmx_usb_status_t cvmx_usb_cancel(cvmx_usb_state_t *state, int pipe_handle, int submit_handle); /** * Cancel all outstanding requests in a pipe. Logically all this * does is call cvmx_usb_cancel() in a loop. * * @param state USB device state populated by * cvmx_usb_initialize(). * @param pipe_handle * Pipe handle to cancel requests in. * * @return CVMX_USB_SUCCESS or a negative error code defined in * cvmx_usb_status_t. */ extern cvmx_usb_status_t cvmx_usb_cancel_all(cvmx_usb_state_t *state, int pipe_handle); /** * Close a pipe created with cvmx_usb_open_pipe(). * * @param state USB device state populated by * cvmx_usb_initialize(). * @param pipe_handle * Pipe handle to close. * * @return CVMX_USB_SUCCESS or a negative error code defined in * cvmx_usb_status_t. CVMX_USB_BUSY is returned if the * pipe has outstanding transfers. */ extern cvmx_usb_status_t cvmx_usb_close_pipe(cvmx_usb_state_t *state, int pipe_handle); /** * Register a function to be called when various USB events occur. * * @param state USB device state populated by * cvmx_usb_initialize(). * @param reason Which event to register for. 
* @param callback Function to call when the event occurs. * @param user_data User data parameter to the function. * * @return CVMX_USB_SUCCESS or a negative error code defined in * cvmx_usb_status_t. */ extern cvmx_usb_status_t cvmx_usb_register_callback(cvmx_usb_state_t *state, cvmx_usb_callback_t reason, cvmx_usb_callback_func_t callback, void *user_data); /** * Get the current USB protocol level frame number. The frame * number is always in the range of 0-0x7ff. * * @param state USB device state populated by * cvmx_usb_initialize(). * * @return USB frame number */ extern int cvmx_usb_get_frame_number(cvmx_usb_state_t *state); /** * Poll the USB block for status and call all needed callback * handlers. This function is meant to be called in the interrupt * handler for the USB controller. It can also be called * periodically in a loop for non-interrupt based operation. * * @param state USB device state populated by * cvmx_usb_initialize(). * * @return CVMX_USB_SUCCESS or a negative error code defined in * cvmx_usb_status_t. */ extern cvmx_usb_status_t cvmx_usb_poll(cvmx_usb_state_t *state); /* * The FreeBSD host driver uses these functions to manipulate the toggle to deal * more easily with endpoint management. */ extern void cvmx_usb_set_toggle(cvmx_usb_state_t *state, int endpoint_num, int toggle); extern int cvmx_usb_get_toggle(cvmx_usb_state_t *state, int endpoint_num); #ifdef __cplusplus } #endif #endif /* __CVMX_USB_H__ */
vishalbelsare/jubakit
example/clustering_sklearn_wrapper.py
<gh_stars>10-100 #!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import, division, print_function, unicode_literals """ Using Clustering ======================================== This is a simple example that illustrates Clustering service usage. """ from sklearn.datasets import make_blobs from jubakit.wrapper.clustering import KMeans, GMM, DBSCAN # make blob dataset using sklearn API. X, y = make_blobs(n_samples=200, centers=3, n_features=2, random_state=42) # launch clustering instance clusterings = [ KMeans(k=3, bucket_size=200, embedded=False), GMM(k=3, bucket_size=200, embedded=False), DBSCAN(eps=2.0, bucket_size=200, embedded=False) ] for clustering in clusterings: # fit and predict y_pred = clustering.fit_predict(X) # print result labels = set(y_pred) label_counts = {} for label in labels: label_counts[label] = y_pred.count(label) print('{0}: {1}'.format( clustering.__class__.__name__, label_counts)) # stop clustering service clustering.stop()
sanri1993/sanri-tools-maven
tools-core/src/main/java/com/sanri/tools/modules/core/validation/custom/IdCard18Validator.java
<reponame>sanri1993/sanri-tools-maven<gh_stars>10-100
package com.sanri.tools.modules.core.validation.custom;

import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;

import javax.validation.ConstraintValidator;
import javax.validation.ConstraintValidatorContext;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;

/**
 * Bean Validation constraint validator for {@code @IdCard18}: checks that a
 * string is a valid 18-digit ID card number by delegating to
 * {@link IdCard18Model}, whose constructor throws on invalid input.
 *
 * <p>On initialization it loads the area-code table from the classpath
 * resource {@code /areaCodes} (a comma-separated UTF-8 file) into the shared
 * {@code IdCard18Model.areaCodes} array; if the resource is missing, the
 * validator still runs but without area-code verification.
 */
@Slf4j
public class IdCard18Validator implements ConstraintValidator<IdCard18,String> {

    /**
     * Loads the comma-separated area codes from the {@code /areaCodes}
     * classpath resource into {@code IdCard18Model.areaCodes}.
     *
     * @param constraintAnnotation the annotation instance (unused)
     */
    @Override
    public void initialize(IdCard18 constraintAnnotation) {
        URL resource = IdCard18Validator.class.getResource("/areaCodes");
        if (resource == null) {
            log.warn("身份证地区码未找到,请在资源目录下放置 areaCodes 文件用来验证身份证");
            return;
        }
        // try-with-resources: the original leaked the InputStream on every call
        try (InputStream inputStream = resource.openConnection().getInputStream()) {
            // StandardCharsets.UTF_8 instead of Charset.forName("utf-8"): no lookup, no typo risk
            String areaCodes = IOUtils.toString(inputStream, StandardCharsets.UTF_8);
            if (StringUtils.isNotBlank(areaCodes)) {
                IdCard18Model.areaCodes = StringUtils.split(areaCodes, ',');
            }
        } catch (IOException e) {
            // pass the throwable as the final argument so SLF4J logs the stack trace
            log.error("读取身份证地区码异常,检查资源目录的 areaCodes 文件 {}", e.getMessage(), e);
        }
    }

    /**
     * Validates the candidate ID card number.
     *
     * @param idcard the value under validation; {@code null} is treated as
     *               valid per Bean Validation convention (pair with
     *               {@code @NotNull} to forbid nulls)
     * @param constraintValidatorContext validation context (unused)
     * @return {@code true} if the value parses as a valid 18-digit ID card
     */
    @Override
    public boolean isValid(String idcard, ConstraintValidatorContext constraintValidatorContext) {
        if (idcard == null) {
            // Bean Validation convention: null values are validated by @NotNull, not here.
            return true;
        }
        try {
            // Constructor performs the actual validation and throws on bad input.
            new IdCard18Model(idcard);
            return true;
        } catch (IllegalArgumentException e) {
            log.error(e.getMessage());
            return false;
        }
    }
}
cisco-ie/cisco-proto
codegen/go/xr/65x/cisco_ios_xr_ipv4_pim_oper/pim/standby/vrfs/vrf/topologies/topology/pim_tt_route_bag.pb.go
<gh_stars>1-10 /* Copyright 2019 Cisco Systems Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // Code generated by protoc-gen-go. DO NOT EDIT. // source: pim_tt_route_bag.proto package cisco_ios_xr_ipv4_pim_oper_pim_standby_vrfs_vrf_topologies_topology import ( fmt "fmt" proto "github.com/golang/protobuf/proto" math "math" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. 
// NOTE(review): this file is generated by protoc-gen-go (see file header,
// "DO NOT EDIT") — comments below are review aids and will be lost on
// regeneration.
const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package

// PimTtRouteBag_KEYS carries the schema key leaves that identify a single
// route entry in the PIM topology table: VRF name, source address, group
// address, and the RPT flag. Presumably (source, group, rpt) name an
// (S,G)/(*,G) route within the VRF — TODO confirm against the IOS-XR
// Cisco-IOS-XR-ipv4-pim-oper YANG model.
type PimTtRouteBag_KEYS struct {
	// vrf_name leaf (proto field 1).
	VrfName string `protobuf:"bytes,1,opt,name=vrf_name,json=vrfName,proto3" json:"vrf_name,omitempty"`
	// source_address leaf (proto field 2), rendered as a string.
	SourceAddress string `protobuf:"bytes,2,opt,name=source_address,json=sourceAddress,proto3" json:"source_address,omitempty"`
	// group_address leaf (proto field 3), rendered as a string.
	GroupAddress string `protobuf:"bytes,3,opt,name=group_address,json=groupAddress,proto3" json:"group_address,omitempty"`
	// rpt leaf (proto field 4).
	Rpt uint32 `protobuf:"varint,4,opt,name=rpt,proto3" json:"rpt,omitempty"`
	// XXX_* fields are proto runtime bookkeeping (unknown fields, size cache).
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset restores the message to its zero value.
func (m *PimTtRouteBag_KEYS) Reset() { *m = PimTtRouteBag_KEYS{} }

// String renders the message in proto compact text format.
func (m *PimTtRouteBag_KEYS) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks the type as a protobuf message.
func (*PimTtRouteBag_KEYS) ProtoMessage() {}

// Descriptor returns the compressed file descriptor and this message's index path.
func (*PimTtRouteBag_KEYS) Descriptor() ([]byte, []int) {
	return fileDescriptor_b3d91aa7d312ccab, []int{0}
}

// The XXX_* methods below are proto-runtime plumbing: they delegate
// (un)marshaling, merging, sizing, and unknown-field handling to the
// generated message-info table.
func (m *PimTtRouteBag_KEYS) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_PimTtRouteBag_KEYS.Unmarshal(m, b)
}
func (m *PimTtRouteBag_KEYS) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_PimTtRouteBag_KEYS.Marshal(b, m, deterministic)
}
func (m *PimTtRouteBag_KEYS) XXX_Merge(src proto.Message) {
	xxx_messageInfo_PimTtRouteBag_KEYS.Merge(m, src)
}
func (m *PimTtRouteBag_KEYS) XXX_Size() int {
	return xxx_messageInfo_PimTtRouteBag_KEYS.Size(m)
}
func (m *PimTtRouteBag_KEYS) XXX_DiscardUnknown() {
	xxx_messageInfo_PimTtRouteBag_KEYS.DiscardUnknown(m)
}

var xxx_messageInfo_PimTtRouteBag_KEYS proto.InternalMessageInfo

// GetVrfName returns VrfName, or "" on a nil receiver (generated getters are nil-safe).
func (m *PimTtRouteBag_KEYS) GetVrfName() string {
	if m != nil {
		return m.VrfName
	}
	return ""
}

// GetSourceAddress returns SourceAddress, or "" on a nil receiver.
func (m *PimTtRouteBag_KEYS) GetSourceAddress() string {
	if m != nil {
		return m.SourceAddress
	}
	return ""
}

// GetGroupAddress returns GroupAddress, or "" on a nil receiver.
func (m *PimTtRouteBag_KEYS) GetGroupAddress() string {
	if m != nil {
		return m.GroupAddress
	}
	return ""
}

// GetRpt returns Rpt, or 0 on a nil receiver.
func (m *PimTtRouteBag_KEYS) GetRpt() uint32 {
	if m != nil {
		return m.Rpt
	}
	return 0
}
type PimAddrtype struct { AfName string `protobuf:"bytes,1,opt,name=af_name,json=afName,proto3" json:"af_name,omitempty"` Ipv4Address string `protobuf:"bytes,2,opt,name=ipv4_address,json=ipv4Address,proto3" json:"ipv4_address,omitempty"` Ipv6Address string `protobuf:"bytes,3,opt,name=ipv6_address,json=ipv6Address,proto3" json:"ipv6_address,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *PimAddrtype) Reset() { *m = PimAddrtype{} } func (m *PimAddrtype) String() string { return proto.CompactTextString(m) } func (*PimAddrtype) ProtoMessage() {} func (*PimAddrtype) Descriptor() ([]byte, []int) { return fileDescriptor_b3d91aa7d312ccab, []int{1} } func (m *PimAddrtype) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_PimAddrtype.Unmarshal(m, b) } func (m *PimAddrtype) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_PimAddrtype.Marshal(b, m, deterministic) } func (m *PimAddrtype) XXX_Merge(src proto.Message) { xxx_messageInfo_PimAddrtype.Merge(m, src) } func (m *PimAddrtype) XXX_Size() int { return xxx_messageInfo_PimAddrtype.Size(m) } func (m *PimAddrtype) XXX_DiscardUnknown() { xxx_messageInfo_PimAddrtype.DiscardUnknown(m) } var xxx_messageInfo_PimAddrtype proto.InternalMessageInfo func (m *PimAddrtype) GetAfName() string { if m != nil { return m.AfName } return "" } func (m *PimAddrtype) GetIpv4Address() string { if m != nil { return m.Ipv4Address } return "" } func (m *PimAddrtype) GetIpv6Address() string { if m != nil { return m.Ipv6Address } return "" } type PimTtOleBag struct { InterfaceName string `protobuf:"bytes,1,opt,name=interface_name,json=interfaceName,proto3" json:"interface_name,omitempty"` Uptime uint64 `protobuf:"varint,2,opt,name=uptime,proto3" json:"uptime,omitempty"` Expiry uint64 `protobuf:"varint,3,opt,name=expiry,proto3" json:"expiry,omitempty"` BgpCMcastPruneDelayTimer uint64 
`protobuf:"varint,4,opt,name=bgp_c_mcast_prune_delay_timer,json=bgpCMcastPruneDelayTimer,proto3" json:"bgp_c_mcast_prune_delay_timer,omitempty"` JpTimer int32 `protobuf:"zigzag32,5,opt,name=jp_timer,json=jpTimer,proto3" json:"jp_timer,omitempty"` JpState int32 `protobuf:"zigzag32,6,opt,name=jp_state,json=jpState,proto3" json:"jp_state,omitempty"` AssertTimer int32 `protobuf:"zigzag32,7,opt,name=assert_timer,json=assertTimer,proto3" json:"assert_timer,omitempty"` AssertWinner *PimAddrtype `protobuf:"bytes,8,opt,name=assert_winner,json=assertWinner,proto3" json:"assert_winner,omitempty"` LocalMembers int32 `protobuf:"zigzag32,9,opt,name=local_members,json=localMembers,proto3" json:"local_members,omitempty"` InternalInterestInfo int32 `protobuf:"zigzag32,10,opt,name=internal_interest_info,json=internalInterestInfo,proto3" json:"internal_interest_info,omitempty"` ForwardingState int32 `protobuf:"zigzag32,11,opt,name=forwarding_state,json=forwardingState,proto3" json:"forwarding_state,omitempty"` ImmediateState int32 `protobuf:"zigzag32,12,opt,name=immediate_state,json=immediateState,proto3" json:"immediate_state,omitempty"` LastHop bool `protobuf:"varint,13,opt,name=last_hop,json=lastHop,proto3" json:"last_hop,omitempty"` MldpInbandMdt bool `protobuf:"varint,14,opt,name=mldp_inband_mdt,json=mldpInbandMdt,proto3" json:"mldp_inband_mdt,omitempty"` MldpStale bool `protobuf:"varint,15,opt,name=mldp_stale,json=mldpStale,proto3" json:"mldp_stale,omitempty"` SaPrune bool `protobuf:"varint,16,opt,name=sa_prune,json=saPrune,proto3" json:"sa_prune,omitempty"` AdminBoundary bool `protobuf:"varint,17,opt,name=admin_boundary,json=adminBoundary,proto3" json:"admin_boundary,omitempty"` IgmpMembership bool `protobuf:"varint,18,opt,name=igmp_membership,json=igmpMembership,proto3" json:"igmp_membership,omitempty"` MdtSafiJoin bool `protobuf:"varint,19,opt,name=mdt_safi_join,json=mdtSafiJoin,proto3" json:"mdt_safi_join,omitempty"` MvpnSafiJoin bool 
`protobuf:"varint,20,opt,name=mvpn_safi_join,json=mvpnSafiJoin,proto3" json:"mvpn_safi_join,omitempty"` LocalMdtJoin bool `protobuf:"varint,21,opt,name=local_mdt_join,json=localMdtJoin,proto3" json:"local_mdt_join,omitempty"` DataMdtJoin bool `protobuf:"varint,22,opt,name=data_mdt_join,json=dataMdtJoin,proto3" json:"data_mdt_join,omitempty"` Mvpnv6SafiJoin bool `protobuf:"varint,23,opt,name=mvpnv6_safi_join,json=mvpnv6SafiJoin,proto3" json:"mvpnv6_safi_join,omitempty"` BgpCMcastJoin bool `protobuf:"varint,24,opt,name=bgp_c_mcast_join,json=bgpCMcastJoin,proto3" json:"bgp_c_mcast_join,omitempty"` VrfName string `protobuf:"bytes,25,opt,name=vrf_name,json=vrfName,proto3" json:"vrf_name,omitempty"` ExtranetInterface bool `protobuf:"varint,26,opt,name=extranet_interface,json=extranetInterface,proto3" json:"extranet_interface,omitempty"` InternalInterestInformation string `protobuf:"bytes,27,opt,name=internal_interest_information,json=internalInterestInformation,proto3" json:"internal_interest_information,omitempty"` LocalMembersInformation string `protobuf:"bytes,28,opt,name=local_members_information,json=localMembersInformation,proto3" json:"local_members_information,omitempty"` AssertState bool `protobuf:"varint,29,opt,name=assert_state,json=assertState,proto3" json:"assert_state,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *PimTtOleBag) Reset() { *m = PimTtOleBag{} } func (m *PimTtOleBag) String() string { return proto.CompactTextString(m) } func (*PimTtOleBag) ProtoMessage() {} func (*PimTtOleBag) Descriptor() ([]byte, []int) { return fileDescriptor_b3d91aa7d312ccab, []int{2} } func (m *PimTtOleBag) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_PimTtOleBag.Unmarshal(m, b) } func (m *PimTtOleBag) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_PimTtOleBag.Marshal(b, m, deterministic) } func (m *PimTtOleBag) XXX_Merge(src proto.Message) { 
xxx_messageInfo_PimTtOleBag.Merge(m, src) } func (m *PimTtOleBag) XXX_Size() int { return xxx_messageInfo_PimTtOleBag.Size(m) } func (m *PimTtOleBag) XXX_DiscardUnknown() { xxx_messageInfo_PimTtOleBag.DiscardUnknown(m) } var xxx_messageInfo_PimTtOleBag proto.InternalMessageInfo func (m *PimTtOleBag) GetInterfaceName() string { if m != nil { return m.InterfaceName } return "" } func (m *PimTtOleBag) GetUptime() uint64 { if m != nil { return m.Uptime } return 0 } func (m *PimTtOleBag) GetExpiry() uint64 { if m != nil { return m.Expiry } return 0 } func (m *PimTtOleBag) GetBgpCMcastPruneDelayTimer() uint64 { if m != nil { return m.BgpCMcastPruneDelayTimer } return 0 } func (m *PimTtOleBag) GetJpTimer() int32 { if m != nil { return m.JpTimer } return 0 } func (m *PimTtOleBag) GetJpState() int32 { if m != nil { return m.JpState } return 0 } func (m *PimTtOleBag) GetAssertTimer() int32 { if m != nil { return m.AssertTimer } return 0 } func (m *PimTtOleBag) GetAssertWinner() *PimAddrtype { if m != nil { return m.AssertWinner } return nil } func (m *PimTtOleBag) GetLocalMembers() int32 { if m != nil { return m.LocalMembers } return 0 } func (m *PimTtOleBag) GetInternalInterestInfo() int32 { if m != nil { return m.InternalInterestInfo } return 0 } func (m *PimTtOleBag) GetForwardingState() int32 { if m != nil { return m.ForwardingState } return 0 } func (m *PimTtOleBag) GetImmediateState() int32 { if m != nil { return m.ImmediateState } return 0 } func (m *PimTtOleBag) GetLastHop() bool { if m != nil { return m.LastHop } return false } func (m *PimTtOleBag) GetMldpInbandMdt() bool { if m != nil { return m.MldpInbandMdt } return false } func (m *PimTtOleBag) GetMldpStale() bool { if m != nil { return m.MldpStale } return false } func (m *PimTtOleBag) GetSaPrune() bool { if m != nil { return m.SaPrune } return false } func (m *PimTtOleBag) GetAdminBoundary() bool { if m != nil { return m.AdminBoundary } return false } func (m *PimTtOleBag) GetIgmpMembership() bool { if m != 
nil { return m.IgmpMembership } return false } func (m *PimTtOleBag) GetMdtSafiJoin() bool { if m != nil { return m.MdtSafiJoin } return false } func (m *PimTtOleBag) GetMvpnSafiJoin() bool { if m != nil { return m.MvpnSafiJoin } return false } func (m *PimTtOleBag) GetLocalMdtJoin() bool { if m != nil { return m.LocalMdtJoin } return false } func (m *PimTtOleBag) GetDataMdtJoin() bool { if m != nil { return m.DataMdtJoin } return false } func (m *PimTtOleBag) GetMvpnv6SafiJoin() bool { if m != nil { return m.Mvpnv6SafiJoin } return false } func (m *PimTtOleBag) GetBgpCMcastJoin() bool { if m != nil { return m.BgpCMcastJoin } return false } func (m *PimTtOleBag) GetVrfName() string { if m != nil { return m.VrfName } return "" } func (m *PimTtOleBag) GetExtranetInterface() bool { if m != nil { return m.ExtranetInterface } return false } func (m *PimTtOleBag) GetInternalInterestInformation() string { if m != nil { return m.InternalInterestInformation } return "" } func (m *PimTtOleBag) GetLocalMembersInformation() string { if m != nil { return m.LocalMembersInformation } return "" } func (m *PimTtOleBag) GetAssertState() bool { if m != nil { return m.AssertState } return false } type PimTtRouteBag struct { LimitReached bool `protobuf:"varint,50,opt,name=limit_reached,json=limitReached,proto3" json:"limit_reached,omitempty"` LowMemory bool `protobuf:"varint,51,opt,name=low_memory,json=lowMemory,proto3" json:"low_memory,omitempty"` Protocol string `protobuf:"bytes,52,opt,name=protocol,proto3" json:"protocol,omitempty"` GroupAddressXr *PimAddrtype `protobuf:"bytes,53,opt,name=group_address_xr,json=groupAddressXr,proto3" json:"group_address_xr,omitempty"` SourceAddressXr *PimAddrtype `protobuf:"bytes,54,opt,name=source_address_xr,json=sourceAddressXr,proto3" json:"source_address_xr,omitempty"` Wildcard bool `protobuf:"varint,55,opt,name=wildcard,proto3" json:"wildcard,omitempty"` RptXr bool `protobuf:"varint,56,opt,name=rpt_xr,json=rptXr,proto3" json:"rpt_xr,omitempty"` 
Spt bool `protobuf:"varint,57,opt,name=spt,proto3" json:"spt,omitempty"` Uptime uint64 `protobuf:"varint,58,opt,name=uptime,proto3" json:"uptime,omitempty"` Expiry uint64 `protobuf:"varint,59,opt,name=expiry,proto3" json:"expiry,omitempty"` Alive int32 `protobuf:"zigzag32,60,opt,name=alive,proto3" json:"alive,omitempty"` RegisterReceivedTimer int32 `protobuf:"zigzag32,61,opt,name=register_received_timer,json=registerReceivedTimer,proto3" json:"register_received_timer,omitempty"` RemoteSource bool `protobuf:"varint,62,opt,name=remote_source,json=remoteSource,proto3" json:"remote_source,omitempty"` CrossedThreshold bool `protobuf:"varint,63,opt,name=crossed_threshold,json=crossedThreshold,proto3" json:"crossed_threshold,omitempty"` DataMdtAddrAssigned bool `protobuf:"varint,64,opt,name=data_mdt_addr_assigned,json=dataMdtAddrAssigned,proto3" json:"data_mdt_addr_assigned,omitempty"` RpAddress *PimAddrtype `protobuf:"bytes,65,opt,name=rp_address,json=rpAddress,proto3" json:"rp_address,omitempty"` RpfInterfaceName string `protobuf:"bytes,66,opt,name=rpf_interface_name,json=rpfInterfaceName,proto3" json:"rpf_interface_name,omitempty"` RpfVrfName string `protobuf:"bytes,67,opt,name=rpf_vrf_name,json=rpfVrfName,proto3" json:"rpf_vrf_name,omitempty"` RpfSafi uint32 `protobuf:"varint,68,opt,name=rpf_safi,json=rpfSafi,proto3" json:"rpf_safi,omitempty"` RpfTableName string `protobuf:"bytes,69,opt,name=rpf_table_name,json=rpfTableName,proto3" json:"rpf_table_name,omitempty"` RpfDrop bool `protobuf:"varint,70,opt,name=rpf_drop,json=rpfDrop,proto3" json:"rpf_drop,omitempty"` RpfExtranet bool `protobuf:"varint,71,opt,name=rpf_extranet,json=rpfExtranet,proto3" json:"rpf_extranet,omitempty"` RpfNeighbor *PimAddrtype `protobuf:"bytes,72,opt,name=rpf_neighbor,json=rpfNeighbor,proto3" json:"rpf_neighbor,omitempty"` IsViaLsm bool `protobuf:"varint,73,opt,name=is_via_lsm,json=isViaLsm,proto3" json:"is_via_lsm,omitempty"` SecondaryRpfInterfaceName string 
`protobuf:"bytes,74,opt,name=secondary_rpf_interface_name,json=secondaryRpfInterfaceName,proto3" json:"secondary_rpf_interface_name,omitempty"` SecondaryRpfNeighbor *PimAddrtype `protobuf:"bytes,75,opt,name=secondary_rpf_neighbor,json=secondaryRpfNeighbor,proto3" json:"secondary_rpf_neighbor,omitempty"` RpfRoot *PimAddrtype `protobuf:"bytes,76,opt,name=rpf_root,json=rpfRoot,proto3" json:"rpf_root,omitempty"` Connected bool `protobuf:"varint,77,opt,name=connected,proto3" json:"connected,omitempty"` Proxy bool `protobuf:"varint,78,opt,name=proxy,proto3" json:"proxy,omitempty"` ProxyAddress *PimAddrtype `protobuf:"bytes,79,opt,name=proxy_address,json=proxyAddress,proto3" json:"proxy_address,omitempty"` RpfProxyEnabled bool `protobuf:"varint,80,opt,name=rpf_proxy_enabled,json=rpfProxyEnabled,proto3" json:"rpf_proxy_enabled,omitempty"` MofrrEnabled bool `protobuf:"varint,81,opt,name=mofrr_enabled,json=mofrrEnabled,proto3" json:"mofrr_enabled,omitempty"` RibMoFrrEnabled bool `protobuf:"varint,82,opt,name=rib_mo_frr_enabled,json=ribMoFrrEnabled,proto3" json:"rib_mo_frr_enabled,omitempty"` JpTimer int32 `protobuf:"zigzag32,83,opt,name=jp_timer,json=jpTimer,proto3" json:"jp_timer,omitempty"` JpStatus int32 `protobuf:"zigzag32,84,opt,name=jp_status,json=jpStatus,proto3" json:"jp_status,omitempty"` SuppressRegisters int32 `protobuf:"zigzag32,85,opt,name=suppress_registers,json=suppressRegisters,proto3" json:"suppress_registers,omitempty"` AssumeAlive bool `protobuf:"varint,86,opt,name=assume_alive,json=assumeAlive,proto3" json:"assume_alive,omitempty"` ProbeAlive bool `protobuf:"varint,87,opt,name=probe_alive,json=probeAlive,proto3" json:"probe_alive,omitempty"` ReallyAlive bool `protobuf:"varint,88,opt,name=really_alive,json=reallyAlive,proto3" json:"really_alive,omitempty"` InheritAlive bool `protobuf:"varint,89,opt,name=inherit_alive,json=inheritAlive,proto3" json:"inherit_alive,omitempty"` InheritSpt bool `protobuf:"varint,90,opt,name=inherit_spt,json=inheritSpt,proto3" 
json:"inherit_spt,omitempty"` SignalSources bool `protobuf:"varint,91,opt,name=signal_sources,json=signalSources,proto3" json:"signal_sources,omitempty"` DontCheckConnected bool `protobuf:"varint,92,opt,name=dont_check_connected,json=dontCheckConnected,proto3" json:"dont_check_connected,omitempty"` RegisterReceived bool `protobuf:"varint,93,opt,name=register_received,json=registerReceived,proto3" json:"register_received,omitempty"` LastHop bool `protobuf:"varint,94,opt,name=last_hop,json=lastHop,proto3" json:"last_hop,omitempty"` SendingRegisters bool `protobuf:"varint,95,opt,name=sending_registers,json=sendingRegisters,proto3" json:"sending_registers,omitempty"` SendingNullRegisters bool `protobuf:"varint,96,opt,name=sending_null_registers,json=sendingNullRegisters,proto3" json:"sending_null_registers,omitempty"` SaSent bool `protobuf:"varint,97,opt,name=sa_sent,json=saSent,proto3" json:"sa_sent,omitempty"` SaReceived bool `protobuf:"varint,98,opt,name=sa_received,json=saReceived,proto3" json:"sa_received,omitempty"` SaJoined bool `protobuf:"varint,99,opt,name=sa_joined,json=saJoined,proto3" json:"sa_joined,omitempty"` AnycastRpMatch bool `protobuf:"varint,100,opt,name=anycast_rp_match,json=anycastRpMatch,proto3" json:"anycast_rp_match,omitempty"` AnycastRpRouteTarget string `protobuf:"bytes,101,opt,name=anycast_rp_route_target,json=anycastRpRouteTarget,proto3" json:"anycast_rp_route_target,omitempty"` BgpJoin bool `protobuf:"varint,102,opt,name=bgp_join,json=bgpJoin,proto3" json:"bgp_join,omitempty"` BgpJpTime uint64 `protobuf:"varint,103,opt,name=bgp_jp_time,json=bgpJpTime,proto3" json:"bgp_jp_time,omitempty"` CustomerRoutingType int32 `protobuf:"zigzag32,104,opt,name=customer_routing_type,json=customerRoutingType,proto3" json:"customer_routing_type,omitempty"` ExtranetRoute bool `protobuf:"varint,105,opt,name=extranet_route,json=extranetRoute,proto3" json:"extranet_route,omitempty"` OutgoingInterface []*PimTtOleBag 
`protobuf:"bytes,106,rep,name=outgoing_interface,json=outgoingInterface,proto3" json:"outgoing_interface,omitempty"` MofrrActive bool `protobuf:"varint,107,opt,name=mofrr_active,json=mofrrActive,proto3" json:"mofrr_active,omitempty"` MofrrPrimary bool `protobuf:"varint,108,opt,name=mofrr_primary,json=mofrrPrimary,proto3" json:"mofrr_primary,omitempty"` MofrrBackup bool `protobuf:"varint,109,opt,name=mofrr_backup,json=mofrrBackup,proto3" json:"mofrr_backup,omitempty"` Vxlan bool `protobuf:"varint,110,opt,name=vxlan,proto3" json:"vxlan,omitempty"` OrigSrcAddress *PimAddrtype `protobuf:"bytes,111,opt,name=orig_src_address,json=origSrcAddress,proto3" json:"orig_src_address,omitempty"` KatState bool `protobuf:"varint,112,opt,name=kat_state,json=katState,proto3" json:"kat_state,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *PimTtRouteBag) Reset() { *m = PimTtRouteBag{} } func (m *PimTtRouteBag) String() string { return proto.CompactTextString(m) } func (*PimTtRouteBag) ProtoMessage() {} func (*PimTtRouteBag) Descriptor() ([]byte, []int) { return fileDescriptor_b3d91aa7d312ccab, []int{3} } func (m *PimTtRouteBag) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_PimTtRouteBag.Unmarshal(m, b) } func (m *PimTtRouteBag) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_PimTtRouteBag.Marshal(b, m, deterministic) } func (m *PimTtRouteBag) XXX_Merge(src proto.Message) { xxx_messageInfo_PimTtRouteBag.Merge(m, src) } func (m *PimTtRouteBag) XXX_Size() int { return xxx_messageInfo_PimTtRouteBag.Size(m) } func (m *PimTtRouteBag) XXX_DiscardUnknown() { xxx_messageInfo_PimTtRouteBag.DiscardUnknown(m) } var xxx_messageInfo_PimTtRouteBag proto.InternalMessageInfo func (m *PimTtRouteBag) GetLimitReached() bool { if m != nil { return m.LimitReached } return false } func (m *PimTtRouteBag) GetLowMemory() bool { if m != nil { return m.LowMemory } return false } func (m 
*PimTtRouteBag) GetProtocol() string { if m != nil { return m.Protocol } return "" } func (m *PimTtRouteBag) GetGroupAddressXr() *PimAddrtype { if m != nil { return m.GroupAddressXr } return nil } func (m *PimTtRouteBag) GetSourceAddressXr() *PimAddrtype { if m != nil { return m.SourceAddressXr } return nil } func (m *PimTtRouteBag) GetWildcard() bool { if m != nil { return m.Wildcard } return false } func (m *PimTtRouteBag) GetRptXr() bool { if m != nil { return m.RptXr } return false } func (m *PimTtRouteBag) GetSpt() bool { if m != nil { return m.Spt } return false } func (m *PimTtRouteBag) GetUptime() uint64 { if m != nil { return m.Uptime } return 0 } func (m *PimTtRouteBag) GetExpiry() uint64 { if m != nil { return m.Expiry } return 0 } func (m *PimTtRouteBag) GetAlive() int32 { if m != nil { return m.Alive } return 0 } func (m *PimTtRouteBag) GetRegisterReceivedTimer() int32 { if m != nil { return m.RegisterReceivedTimer } return 0 } func (m *PimTtRouteBag) GetRemoteSource() bool { if m != nil { return m.RemoteSource } return false } func (m *PimTtRouteBag) GetCrossedThreshold() bool { if m != nil { return m.CrossedThreshold } return false } func (m *PimTtRouteBag) GetDataMdtAddrAssigned() bool { if m != nil { return m.DataMdtAddrAssigned } return false } func (m *PimTtRouteBag) GetRpAddress() *PimAddrtype { if m != nil { return m.RpAddress } return nil } func (m *PimTtRouteBag) GetRpfInterfaceName() string { if m != nil { return m.RpfInterfaceName } return "" } func (m *PimTtRouteBag) GetRpfVrfName() string { if m != nil { return m.RpfVrfName } return "" } func (m *PimTtRouteBag) GetRpfSafi() uint32 { if m != nil { return m.RpfSafi } return 0 } func (m *PimTtRouteBag) GetRpfTableName() string { if m != nil { return m.RpfTableName } return "" } func (m *PimTtRouteBag) GetRpfDrop() bool { if m != nil { return m.RpfDrop } return false } func (m *PimTtRouteBag) GetRpfExtranet() bool { if m != nil { return m.RpfExtranet } return false } func (m *PimTtRouteBag) 
GetRpfNeighbor() *PimAddrtype { if m != nil { return m.RpfNeighbor } return nil } func (m *PimTtRouteBag) GetIsViaLsm() bool { if m != nil { return m.IsViaLsm } return false } func (m *PimTtRouteBag) GetSecondaryRpfInterfaceName() string { if m != nil { return m.SecondaryRpfInterfaceName } return "" } func (m *PimTtRouteBag) GetSecondaryRpfNeighbor() *PimAddrtype { if m != nil { return m.SecondaryRpfNeighbor } return nil } func (m *PimTtRouteBag) GetRpfRoot() *PimAddrtype { if m != nil { return m.RpfRoot } return nil } func (m *PimTtRouteBag) GetConnected() bool { if m != nil { return m.Connected } return false } func (m *PimTtRouteBag) GetProxy() bool { if m != nil { return m.Proxy } return false } func (m *PimTtRouteBag) GetProxyAddress() *PimAddrtype { if m != nil { return m.ProxyAddress } return nil } func (m *PimTtRouteBag) GetRpfProxyEnabled() bool { if m != nil { return m.RpfProxyEnabled } return false } func (m *PimTtRouteBag) GetMofrrEnabled() bool { if m != nil { return m.MofrrEnabled } return false } func (m *PimTtRouteBag) GetRibMoFrrEnabled() bool { if m != nil { return m.RibMoFrrEnabled } return false } func (m *PimTtRouteBag) GetJpTimer() int32 { if m != nil { return m.JpTimer } return 0 } func (m *PimTtRouteBag) GetJpStatus() int32 { if m != nil { return m.JpStatus } return 0 } func (m *PimTtRouteBag) GetSuppressRegisters() int32 { if m != nil { return m.SuppressRegisters } return 0 } func (m *PimTtRouteBag) GetAssumeAlive() bool { if m != nil { return m.AssumeAlive } return false } func (m *PimTtRouteBag) GetProbeAlive() bool { if m != nil { return m.ProbeAlive } return false } func (m *PimTtRouteBag) GetReallyAlive() bool { if m != nil { return m.ReallyAlive } return false } func (m *PimTtRouteBag) GetInheritAlive() bool { if m != nil { return m.InheritAlive } return false } func (m *PimTtRouteBag) GetInheritSpt() bool { if m != nil { return m.InheritSpt } return false } func (m *PimTtRouteBag) GetSignalSources() bool { if m != nil { return 
m.SignalSources } return false } func (m *PimTtRouteBag) GetDontCheckConnected() bool { if m != nil { return m.DontCheckConnected } return false } func (m *PimTtRouteBag) GetRegisterReceived() bool { if m != nil { return m.RegisterReceived } return false } func (m *PimTtRouteBag) GetLastHop() bool { if m != nil { return m.LastHop } return false } func (m *PimTtRouteBag) GetSendingRegisters() bool { if m != nil { return m.SendingRegisters } return false } func (m *PimTtRouteBag) GetSendingNullRegisters() bool { if m != nil { return m.SendingNullRegisters } return false } func (m *PimTtRouteBag) GetSaSent() bool { if m != nil { return m.SaSent } return false } func (m *PimTtRouteBag) GetSaReceived() bool { if m != nil { return m.SaReceived } return false } func (m *PimTtRouteBag) GetSaJoined() bool { if m != nil { return m.SaJoined } return false } func (m *PimTtRouteBag) GetAnycastRpMatch() bool { if m != nil { return m.AnycastRpMatch } return false } func (m *PimTtRouteBag) GetAnycastRpRouteTarget() string { if m != nil { return m.AnycastRpRouteTarget } return "" } func (m *PimTtRouteBag) GetBgpJoin() bool { if m != nil { return m.BgpJoin } return false } func (m *PimTtRouteBag) GetBgpJpTime() uint64 { if m != nil { return m.BgpJpTime } return 0 } func (m *PimTtRouteBag) GetCustomerRoutingType() int32 { if m != nil { return m.CustomerRoutingType } return 0 } func (m *PimTtRouteBag) GetExtranetRoute() bool { if m != nil { return m.ExtranetRoute } return false } func (m *PimTtRouteBag) GetOutgoingInterface() []*PimTtOleBag { if m != nil { return m.OutgoingInterface } return nil } func (m *PimTtRouteBag) GetMofrrActive() bool { if m != nil { return m.MofrrActive } return false } func (m *PimTtRouteBag) GetMofrrPrimary() bool { if m != nil { return m.MofrrPrimary } return false } func (m *PimTtRouteBag) GetMofrrBackup() bool { if m != nil { return m.MofrrBackup } return false } func (m *PimTtRouteBag) GetVxlan() bool { if m != nil { return m.Vxlan } return false } func 
(m *PimTtRouteBag) GetOrigSrcAddress() *PimAddrtype { if m != nil { return m.OrigSrcAddress } return nil } func (m *PimTtRouteBag) GetKatState() bool { if m != nil { return m.KatState } return false } func init() { proto.RegisterType((*PimTtRouteBag_KEYS)(nil), "cisco_ios_xr_ipv4_pim_oper.pim.standby.vrfs.vrf.topologies.topology.pim_tt_route_bag_KEYS") proto.RegisterType((*PimAddrtype)(nil), "cisco_ios_xr_ipv4_pim_oper.pim.standby.vrfs.vrf.topologies.topology.pim_addrtype") proto.RegisterType((*PimTtOleBag)(nil), "cisco_ios_xr_ipv4_pim_oper.pim.standby.vrfs.vrf.topologies.topology.pim_tt_ole_bag") proto.RegisterType((*PimTtRouteBag)(nil), "cisco_ios_xr_ipv4_pim_oper.pim.standby.vrfs.vrf.topologies.topology.pim_tt_route_bag") } func init() { proto.RegisterFile("pim_tt_route_bag.proto", fileDescriptor_b3d91aa7d312ccab) } var fileDescriptor_b3d91aa7d312ccab = []byte{ // 1808 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x57, 0x5d, 0x77, 0x1b, 0xb7, 0xd1, 0x3e, 0x7a, 0x6d, 0x4b, 0x22, 0x24, 0x4a, 0x24, 0xac, 0x0f, 0x28, 0xb2, 0xdf, 0x2a, 0x72, 0xd3, 0xa8, 0x75, 0xab, 0xd3, 0x63, 0x3b, 0x6a, 0x9b, 0x7e, 0xb8, 0xb2, 0xec, 0x34, 0x72, 0x22, 0xd7, 0x21, 0x55, 0xc7, 0xe9, 0x17, 0x0a, 0xee, 0x62, 0x49, 0x58, 0xbb, 0x0b, 0x1c, 0x00, 0xa4, 0xc4, 0xd3, 0xd3, 0x9b, 0xde, 0xf4, 0xb6, 0x7f, 0xab, 0x7f, 0xa8, 0xd7, 0x3d, 0x33, 0xc0, 0x2e, 0x97, 0xaa, 0x7b, 0x17, 0xdd, 0xf0, 0x10, 0xcf, 0x3c, 0xc0, 0x00, 0x83, 0xc1, 0x33, 0xb3, 0x64, 0xcb, 0xa8, 0x82, 0x7b, 0xcf, 0xad, 0x1e, 0x7b, 0xc9, 0x07, 0x62, 0x78, 0x68, 0xac, 0xf6, 0x9a, 0x9e, 0x24, 0xca, 0x25, 0x9a, 0x2b, 0xed, 0xf8, 0x95, 0xe5, 0xca, 0x4c, 0x9e, 0x70, 0x60, 0x6a, 0x23, 0xed, 0xa1, 0x51, 0xc5, 0xa1, 0xf3, 0xa2, 0x4c, 0x07, 0xd3, 0xc3, 0x89, 0xcd, 0x1c, 0xfc, 0x1c, 0x7a, 0x6d, 0x74, 0xae, 0x87, 0x4a, 0xba, 0xea, 0xef, 0x74, 0xff, 0x9f, 0x0b, 0x64, 0xf3, 0xfa, 0xfa, 0xfc, 0x8b, 0x17, 0xdf, 0xf4, 0xe9, 0x0e, 0x59, 0x9e, 0xd8, 0x8c, 0x97, 0xa2, 0x90, 0x6c, 0x61, 0x6f, 
0xe1, 0xa0, 0xd5, 0x5b, 0x9a, 0xd8, 0xec, 0x95, 0x28, 0x24, 0xfd, 0x88, 0xac, 0x39, 0x3d, 0xb6, 0x89, 0xe4, 0x22, 0x4d, 0xad, 0x74, 0x8e, 0xfd, 0x1f, 0x12, 0xda, 0x01, 0x3d, 0x0e, 0x20, 0x7d, 0x40, 0xda, 0x43, 0xab, 0xc7, 0xa6, 0x66, 0xdd, 0x42, 0xd6, 0x2a, 0x82, 0x15, 0xa9, 0x43, 0x6e, 0x59, 0xe3, 0xd9, 0xed, 0xbd, 0x85, 0x83, 0x76, 0x0f, 0xfe, 0xee, 0x17, 0x64, 0x15, 0x76, 0x04, 0x93, 0xfc, 0xd4, 0x48, 0xba, 0x4d, 0x96, 0xc4, 0xdc, 0x3e, 0x16, 0x45, 0xd8, 0xc6, 0x87, 0x64, 0x15, 0x4f, 0x3d, 0xbf, 0x89, 0x15, 0xc0, 0xaa, 0xd5, 0x03, 0xe5, 0xe8, 0xda, 0x0e, 0x80, 0x72, 0x14, 0x29, 0xfb, 0xff, 0x6a, 0x91, 0xb5, 0x18, 0x01, 0x9d, 0xe3, 0xf9, 0xe1, 0x7c, 0xaa, 0xf4, 0xd2, 0x66, 0x22, 0x91, 0x4d, 0xc7, 0xed, 0x1a, 0x45, 0xff, 0x5b, 0x64, 0x71, 0x6c, 0xbc, 0x2a, 0x24, 0x7a, 0xbe, 0xdd, 0x8b, 0x23, 0xc0, 0xe5, 0x95, 0x51, 0x76, 0x8a, 0xee, 0x6e, 0xf7, 0xe2, 0x88, 0x3e, 0x25, 0xf7, 0x07, 0x43, 0xc3, 0x13, 0x5e, 0x24, 0xc2, 0x79, 0x6e, 0xec, 0xb8, 0x94, 0x3c, 0x95, 0xb9, 0x98, 0x72, 0x98, 0x67, 0x31, 0x08, 0xb7, 0x7b, 0x6c, 0x30, 0x34, 0x27, 0x67, 0x40, 0x79, 0x0d, 0x8c, 0xe7, 0x40, 0x38, 0x07, 0x3b, 0x5c, 0xc9, 0x3b, 0x13, 0xb9, 0x77, 0xf6, 0x16, 0x0e, 0xba, 0xbd, 0xa5, 0x77, 0xa6, 0x69, 0x72, 0x5e, 0x78, 0xc9, 0x16, 0x2b, 0x53, 0x1f, 0x86, 0x10, 0x03, 0xe1, 0x9c, 0xb4, 0x3e, 0xce, 0x5c, 0x42, 0xf3, 0x4a, 0xc0, 0xc2, 0xec, 0x09, 0x69, 0x47, 0xca, 0xa5, 0x2a, 0x4b, 0x69, 0xd9, 0xf2, 0xde, 0xc2, 0xc1, 0xca, 0xa3, 0xaf, 0x0e, 0xbf, 0x85, 0x14, 0x3b, 0x6c, 0x5e, 0x66, 0x2f, 0x6e, 0xe5, 0x6b, 0x74, 0x03, 0x19, 0x92, 0xeb, 0x44, 0xe4, 0xbc, 0x90, 0xc5, 0x40, 0x5a, 0xc7, 0x5a, 0xb8, 0xb7, 0x55, 0x04, 0xcf, 0x02, 0x46, 0x9f, 0x90, 0x2d, 0x8c, 0x7b, 0x29, 0x72, 0x8e, 0x7f, 0xa4, 0xf3, 0x5c, 0x95, 0x99, 0x66, 0x04, 0xd9, 0x1b, 0x95, 0xf5, 0x34, 0x1a, 0x4f, 0xcb, 0x4c, 0xd3, 0xef, 0x93, 0x4e, 0xa6, 0xed, 0xa5, 0xb0, 0xa9, 0x2a, 0x87, 0x31, 0x30, 0x2b, 0xc8, 0x5f, 0x9f, 0xe1, 0x21, 0x40, 0x1f, 0x93, 0x75, 0x55, 0x14, 0x32, 0x55, 0xc2, 0xcb, 0xc8, 0x5c, 0x45, 0xe6, 0x5a, 0x0d, 0x07, 
0xe2, 0x0e, 0x59, 0xce, 0xe1, 0xe6, 0x46, 0xda, 0xb0, 0xf6, 0xde, 0xc2, 0xc1, 0x72, 0x6f, 0x09, 0xc6, 0x9f, 0x6b, 0x43, 0xbf, 0x47, 0xd6, 0x8b, 0x3c, 0x35, 0x5c, 0x95, 0x03, 0x51, 0xa6, 0xbc, 0x48, 0x3d, 0x5b, 0x43, 0x46, 0x1b, 0xe0, 0x53, 0x44, 0xcf, 0x52, 0x4f, 0xef, 0x13, 0x82, 0x3c, 0xe7, 0x45, 0x2e, 0xd9, 0x3a, 0x52, 0x5a, 0x80, 0xf4, 0x01, 0x00, 0x0f, 0x4e, 0x84, 0xcc, 0x60, 0x9d, 0xe0, 0xc1, 0x09, 0x4c, 0x03, 0x48, 0x4a, 0x91, 0x16, 0xaa, 0xe4, 0x03, 0x3d, 0x2e, 0x53, 0x61, 0xa7, 0xac, 0x1b, 0x1c, 0x20, 0xfa, 0x2c, 0x82, 0x78, 0x98, 0x61, 0x61, 0xaa, 0x88, 0x8e, 0x94, 0x61, 0x14, 0x79, 0x6b, 0x00, 0x9f, 0xd5, 0x28, 0xdd, 0x27, 0xed, 0x22, 0xf5, 0xdc, 0x89, 0x4c, 0xf1, 0x77, 0x5a, 0x95, 0xec, 0x2e, 0xd2, 0x56, 0x8a, 0xd4, 0xf7, 0x45, 0xa6, 0x5e, 0x6a, 0x55, 0xd2, 0xef, 0x92, 0xb5, 0x62, 0x62, 0xca, 0x06, 0x69, 0x03, 0x49, 0xab, 0x80, 0x36, 0x59, 0xf1, 0x16, 0x53, 0x1f, 0x58, 0x9b, 0x81, 0x15, 0xae, 0x31, 0xf5, 0xc8, 0xda, 0x27, 0xed, 0x54, 0x78, 0x31, 0x23, 0x6d, 0x05, 0x7f, 0x00, 0x56, 0x9c, 0x03, 0xd2, 0x81, 0x95, 0x27, 0x47, 0x0d, 0x8f, 0xdb, 0x61, 0xf7, 0x01, 0xaf, 0x7d, 0x7e, 0x4c, 0x3a, 0xcd, 0xb7, 0x84, 0x4c, 0x16, 0xe2, 0x51, 0x3f, 0x1f, 0x24, 0x36, 0x65, 0x6c, 0x67, 0x5e, 0xc6, 0x7e, 0x44, 0xa8, 0xbc, 0xf2, 0x56, 0x94, 0xd2, 0xf3, 0xfa, 0x65, 0xb3, 0x0f, 0x70, 0x95, 0x6e, 0x65, 0x39, 0xad, 0x0c, 0xf4, 0x19, 0xb9, 0xff, 0xfe, 0x3c, 0xb4, 0x85, 0xf0, 0x4a, 0x97, 0x6c, 0x17, 0x97, 0xdf, 0x7d, 0x5f, 0x3a, 0x46, 0x0a, 0xfd, 0x94, 0xec, 0xcc, 0x25, 0xfc, 0xdc, 0xfc, 0x7b, 0x38, 0x7f, 0xbb, 0x99, 0xfc, 0xcd, 0xb9, 0xb3, 0x77, 0x1c, 0x72, 0xf4, 0x7e, 0x88, 0x5f, 0xc0, 0x30, 0x41, 0xf7, 0xff, 0xbd, 0x4d, 0x3a, 0xd7, 0xd5, 0x1c, 0x1f, 0x99, 0x2a, 0x94, 0xe7, 0x56, 0x8a, 0x64, 0x24, 0x53, 0xf6, 0x28, 0xde, 0x0e, 0x80, 0xbd, 0x80, 0x41, 0x5e, 0xe6, 0xfa, 0x12, 0xb6, 0xa5, 0xed, 0x94, 0x3d, 0x0e, 0x79, 0x99, 0xeb, 0xcb, 0x33, 0x04, 0xe8, 0x07, 0x64, 0x19, 0x8b, 0x4e, 0xa2, 0x73, 0xf6, 0x04, 0xb7, 0x59, 0x8f, 0xe9, 0x5f, 0x49, 0x67, 0x4e, 0xe6, 0xf9, 0x95, 0x65, 
0x9f, 0xdc, 0x94, 0x7e, 0xac, 0x35, 0x8b, 0xc7, 0x5b, 0x4b, 0xff, 0x46, 0xba, 0xf3, 0xa5, 0x08, 0xbc, 0x1f, 0xdd, 0x94, 0xf7, 0xf5, 0xb9, 0x02, 0xf7, 0xd6, 0x42, 0x5c, 0x2e, 0x55, 0x9e, 0x26, 0xc2, 0xa6, 0xec, 0x27, 0x18, 0xb4, 0x7a, 0x4c, 0x37, 0xc9, 0xa2, 0x35, 0x1e, 0xf6, 0xf3, 0x53, 0xb4, 0xdc, 0xb1, 0xc6, 0xbf, 0xb5, 0x50, 0xf0, 0x9c, 0xf1, 0xec, 0x67, 0x88, 0xc1, 0xdf, 0x46, 0x1d, 0xf9, 0xf4, 0x7f, 0xd4, 0x91, 0x9f, 0xcf, 0xd5, 0x91, 0x0d, 0x72, 0x47, 0xe4, 0x6a, 0x22, 0xd9, 0x2f, 0x50, 0xa5, 0xc2, 0x80, 0x1e, 0x91, 0x6d, 0x2b, 0x87, 0xca, 0x79, 0x69, 0xb9, 0x95, 0x89, 0x54, 0x13, 0x99, 0x46, 0xc5, 0xff, 0x25, 0xf2, 0x36, 0x2b, 0x73, 0x2f, 0x5a, 0x83, 0xf6, 0x3f, 0x20, 0x6d, 0x2b, 0x0b, 0x0d, 0xd2, 0x87, 0x87, 0x63, 0xbf, 0x0a, 0xe9, 0x11, 0xc0, 0x3e, 0x62, 0xf4, 0x21, 0xe9, 0x26, 0x56, 0x3b, 0x07, 0x4b, 0x8e, 0xac, 0x74, 0x23, 0x9d, 0xa7, 0xec, 0x29, 0x12, 0x3b, 0xd1, 0x70, 0x5e, 0xe1, 0xf4, 0x31, 0xd9, 0xaa, 0x5f, 0x3a, 0x84, 0x8e, 0x0b, 0xe7, 0xd4, 0xb0, 0x94, 0x29, 0xfb, 0x35, 0xce, 0xb8, 0x1b, 0x9f, 0x3c, 0x84, 0xf1, 0x38, 0x9a, 0xa8, 0x21, 0xc4, 0xce, 0x3a, 0x85, 0xe3, 0x9b, 0xba, 0xc1, 0x96, 0xad, 0x3b, 0x8f, 0x1f, 0x12, 0x6a, 0x4d, 0xc6, 0xaf, 0x55, 0xfa, 0x67, 0x98, 0xdd, 0x1d, 0x6b, 0xb2, 0xd3, 0xb9, 0x62, 0xbf, 0x47, 0x56, 0x81, 0x5d, 0x6b, 0xc9, 0x09, 0xf2, 0x88, 0x35, 0xd9, 0x9b, 0x28, 0x27, 0x3b, 0x64, 0x19, 0x18, 0xa0, 0x5c, 0xec, 0x39, 0xb6, 0x33, 0x4b, 0xd6, 0x64, 0xa0, 0x58, 0xa0, 0x90, 0x60, 0xf2, 0x62, 0x90, 0x47, 0x37, 0x2f, 0x42, 0x2b, 0x64, 0x4d, 0x76, 0x0e, 0x60, 0x73, 0x81, 0xd4, 0x6a, 0xc3, 0x3e, 0x0b, 0xe2, 0x6f, 0x4d, 0xf6, 0xdc, 0x6a, 0x03, 0x6f, 0x1f, 0x4c, 0x95, 0x28, 0xb1, 0xdf, 0x84, 0xb7, 0x6f, 0x4d, 0xf6, 0x22, 0x42, 0xd4, 0x07, 0x4a, 0x29, 0xd5, 0x70, 0x34, 0xd0, 0x96, 0x7d, 0x7e, 0x53, 0x21, 0x04, 0xaf, 0xaf, 0xa2, 0x17, 0x7a, 0x8f, 0x10, 0xe5, 0xf8, 0x44, 0x09, 0x9e, 0xbb, 0x82, 0x9d, 0x86, 0x27, 0xa0, 0xdc, 0x1b, 0x25, 0xbe, 0x74, 0x05, 0x7d, 0x4a, 0xee, 0x39, 0x99, 0x68, 0xac, 0x4c, 0xfc, 0x3d, 0xc1, 0x7e, 0x89, 0x51, 
0xd8, 0xa9, 0x39, 0xbd, 0xeb, 0x51, 0xff, 0xc7, 0x02, 0xd9, 0x9a, 0x5f, 0xa1, 0x3e, 0xdf, 0x17, 0x37, 0x75, 0xbe, 0x8d, 0xe6, 0x76, 0xea, 0x83, 0xe6, 0xe1, 0x72, 0xac, 0xd6, 0x9e, 0x7d, 0x79, 0x53, 0xae, 0xe1, 0xbe, 0x7b, 0x5a, 0x7b, 0x7a, 0x8f, 0xb4, 0x12, 0x5d, 0x96, 0x32, 0xf1, 0x32, 0x65, 0x67, 0x41, 0x8d, 0x6b, 0x00, 0x04, 0xc0, 0x58, 0x7d, 0x35, 0x65, 0xaf, 0x82, 0xb0, 0xe0, 0x00, 0x9a, 0x38, 0xfc, 0x53, 0x3f, 0xa2, 0xdf, 0xde, 0x58, 0x13, 0x87, 0x7e, 0xaa, 0x77, 0xf4, 0x03, 0xd2, 0x85, 0xc8, 0x04, 0xdf, 0xb2, 0x84, 0x74, 0x4e, 0xd9, 0x6b, 0xdc, 0xd9, 0xba, 0x35, 0xd9, 0x6b, 0xc0, 0x5f, 0x04, 0x18, 0xc4, 0xa6, 0xd0, 0x99, 0xb5, 0x35, 0xef, 0xab, 0xd8, 0x4f, 0x00, 0x58, 0x91, 0x1e, 0x12, 0x6a, 0xd5, 0x80, 0x17, 0x9a, 0x37, 0x99, 0xbd, 0xb8, 0xa2, 0x1a, 0x9c, 0xe9, 0xcf, 0x66, 0xe4, 0x66, 0x4f, 0xdc, 0x9f, 0xef, 0x89, 0x77, 0x49, 0x2b, 0xf6, 0xc4, 0x63, 0xc7, 0xce, 0xd1, 0xb6, 0x1c, 0x9a, 0xe2, 0xb1, 0x83, 0xe2, 0xef, 0xc6, 0xc6, 0x60, 0xc9, 0xa8, 0x84, 0xd1, 0xb1, 0xdf, 0x21, 0xab, 0x5b, 0x59, 0x7a, 0x95, 0x21, 0x16, 0xdf, 0x71, 0x21, 0x79, 0x90, 0xde, 0x37, 0x75, 0xf1, 0x1d, 0x17, 0xf2, 0x18, 0x05, 0xf8, 0x3b, 0x64, 0xc5, 0x58, 0x3d, 0xa8, 0x18, 0x5f, 0x23, 0x83, 0x20, 0x14, 0x08, 0xf0, 0x88, 0xa5, 0xc8, 0xf3, 0x69, 0x64, 0xbc, 0x8d, 0x8f, 0x18, 0xb1, 0x40, 0x79, 0x40, 0xda, 0xaa, 0x1c, 0x49, 0xab, 0x7c, 0xe4, 0x7c, 0x13, 0xe2, 0x13, 0xc1, 0xda, 0x51, 0x45, 0x82, 0x4a, 0xf2, 0xfb, 0xe0, 0x28, 0x42, 0x7d, 0xe3, 0xf1, 0xfb, 0x4c, 0x0d, 0xa1, 0x4f, 0x09, 0x92, 0xee, 0xd8, 0x1f, 0x42, 0x6b, 0x14, 0xd0, 0xa0, 0xe9, 0x8e, 0xfe, 0x98, 0x6c, 0xa4, 0xba, 0xf4, 0x3c, 0x19, 0xc9, 0xe4, 0x82, 0xcf, 0xf2, 0xed, 0x8f, 0x48, 0xa6, 0x60, 0x3b, 0x01, 0xd3, 0x49, 0x9d, 0x78, 0x0f, 0x49, 0xf7, 0xbf, 0x6a, 0x0c, 0xfb, 0x53, 0x28, 0x03, 0xd7, 0xab, 0xcb, 0x5c, 0xb7, 0xfc, 0xe7, 0xf9, 0x6e, 0xf9, 0x21, 0xe9, 0x3a, 0x59, 0x62, 0x67, 0x3e, 0x8b, 0x3d, 0x0f, 0xeb, 0x44, 0xc3, 0x2c, 0xf4, 0x4f, 0x40, 0x02, 0x02, 0xb9, 0x1c, 0xe7, 0x79, 0x63, 0xc6, 0x5f, 0x70, 0xc6, 0x46, 0xb4, 0xbe, 
0x1a, 0xe7, 0xf9, 0x6c, 0xd6, 0x36, 0x59, 0x72, 0x82, 0x3b, 0x59, 0x7a, 0x26, 0x90, 0xb6, 0xe8, 0x44, 0x5f, 0x96, 0x1e, 0xa2, 0xe7, 0xc4, 0x6c, 0xf7, 0x83, 0x10, 0x3d, 0x27, 0xea, 0x7d, 0xef, 0x92, 0x96, 0x13, 0xd8, 0x51, 0xca, 0x94, 0x25, 0x41, 0xd1, 0x9c, 0x78, 0x89, 0x63, 0xe8, 0x50, 0x45, 0x39, 0xc5, 0x9e, 0xd3, 0x1a, 0x5e, 0x08, 0x9f, 0x8c, 0x58, 0x1a, 0x3a, 0xd4, 0x88, 0xf7, 0xcc, 0x19, 0xa0, 0xf4, 0x13, 0xb2, 0xdd, 0x60, 0x86, 0x76, 0xcc, 0x0b, 0x3b, 0x94, 0x9e, 0x49, 0x94, 0xbd, 0x8d, 0x7a, 0x42, 0x0f, 0x8c, 0xe7, 0x68, 0x83, 0xa8, 0x41, 0x63, 0x8b, 0x0d, 0x6d, 0x16, 0xa2, 0x36, 0x18, 0x1a, 0x6c, 0x65, 0xff, 0x9f, 0xac, 0xa0, 0x29, 0xa4, 0x3b, 0x1b, 0x62, 0x53, 0xd0, 0x02, 0x2b, 0x26, 0x3c, 0x7d, 0x44, 0x36, 0x93, 0xb1, 0xf3, 0xba, 0x80, 0xdb, 0xd1, 0x63, 0x0f, 0x11, 0x83, 0xf7, 0xca, 0x46, 0x98, 0xd5, 0x77, 0x2b, 0x63, 0x2f, 0xd8, 0xce, 0xe1, 0xe3, 0xfa, 0x23, 0xb2, 0x56, 0xf7, 0xc0, 0xb8, 0x47, 0xa6, 0x42, 0xaa, 0x54, 0x28, 0xee, 0x8d, 0xfe, 0x7d, 0x81, 0x50, 0x3d, 0xf6, 0x43, 0x0d, 0x6b, 0xce, 0x7a, 0xe5, 0x77, 0x7b, 0xb7, 0x0e, 0x56, 0x1e, 0xf5, 0xbf, 0x35, 0x85, 0x99, 0x7d, 0x83, 0xf7, 0xba, 0x95, 0xbb, 0x59, 0x03, 0xfe, 0x21, 0x09, 0x3a, 0xc1, 0x45, 0xe2, 0xe1, 0x6d, 0x5c, 0xc4, 0x0f, 0x16, 0xc0, 0x8e, 0x11, 0x9a, 0xe9, 0x8b, 0xb1, 0xaa, 0x80, 0x6f, 0xa4, 0xbc, 0xa1, 0x2f, 0xaf, 0x03, 0x36, 0x5b, 0x67, 0x20, 0x92, 0x8b, 0xb1, 0x61, 0x45, 0x63, 0x9d, 0x67, 0x08, 0x81, 0xc2, 0x4e, 0xae, 0x72, 0x51, 0xb2, 0x32, 0x28, 0x2c, 0x0e, 0xa0, 0xd3, 0xd5, 0x56, 0x0d, 0xb9, 0xb3, 0x49, 0x2d, 0xb2, 0xfa, 0xc6, 0x3a, 0x5d, 0x70, 0xd5, 0xb7, 0x49, 0x25, 0xb3, 0xbb, 0xa4, 0x75, 0x21, 0xaa, 0xde, 0xdf, 0x84, 0xb4, 0xbc, 0x10, 0xa1, 0xf1, 0x1f, 0x2c, 0x62, 0x37, 0xfe, 0xf8, 0x3f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x64, 0x1d, 0x7f, 0xf1, 0x2c, 0x12, 0x00, 0x00, }
victorgabr/pps
dev/dev.py
# Scratch module for ad-hoc experimentation from a notebook or console.
# from pyplanscoring import PyDicomParser
import pydicom

if __name__ == '__main__':
    # Local Monaco plan export used for manual inspection — adjust as needed.
    dicom_path = r"C:\Users\vgalves\Downloads\monacoplangetalldetails\2018CompLung_lmh.dcm"
    # force=True lets pydicom read files lacking the standard DICOM preamble.
    dataset = pydicom.read_file(dicom_path, force=True)
horizonltd/corona
request_Corona_Test/admin.py
from django.contrib import admin

from .models import Material, RequestCoronaTest


class RequestCoronaTestAdmin(admin.ModelAdmin):
    """Admin list/search configuration for corona-test requests."""

    list_display = ['name', 'phone', 'email', 'date', 'preparedDateOfTest']
    list_filter = ['name', 'phone']
    list_per_page = 20
    search_fields = ['name', 'preparedDateOfTest']


class MaterialAdmin(admin.ModelAdmin):
    """Admin list/search configuration for uploaded materials."""

    list_display = ['title', 'description', 'author', 'material', 'date']
    list_filter = ['date', 'title']
    list_per_page = 20
    search_fields = ['title', 'description']


admin.site.register(RequestCoronaTest, RequestCoronaTestAdmin)
admin.site.register(Material, MaterialAdmin)
yez131300/ball
HelloWorld/src/resource.js
// Resource manifest for the HelloWorld scene.

// Path to the splash image used by the HelloWorld layer.
var s_HelloWorld = "res/HelloWorld.png";

// Preload list — presumably fed to the engine's loader at startup
// (confirm against the scene bootstrap). Extend the commented
// categories below as assets of those kinds are added.
var g_ressources = [
    //image
    {src:s_HelloWorld}

    //plist
    //fnt
    //tmx
    //bgm
    //effect
];
tnnfnc/apps
src/main/java/it/tnnfnc/security/algorithm/KeyAlgorithmParameter.java
<reponame>tnnfnc/apps<filename>src/main/java/it/tnnfnc/security/algorithm/KeyAlgorithmParameter.java /* * Copyright (c) 2015, <NAME>. All rights reserved. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by the * Free Software Foundation, either version 3 of the License, or (at your option) * any later version. * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY * or FITNESS FOR A PARTICULAR PURPOSE. * * THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. * EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER * PARTIES PROVIDE THE PROGRAM �AS IS� WITHOUT WARRANTY OF ANY KIND, EITHER * EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE * QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE * DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. */ package it.tnnfnc.security.algorithm; /** * KeyboardKey parameters with a byte array with an offset and a length. * * @author <NAME> * */ public class KeyAlgorithmParameter implements I_AlgorithmParameter { private byte[] key; /** * A key from a byte array. * * @param key * full key array. */ public KeyAlgorithmParameter(byte[] key) { this(key, 0, key.length); } /** * A key from a subset of a byte array of fixed length starting from an * offset. * * @param key * @param keyOff * @param keyLen */ public KeyAlgorithmParameter(byte[] key, int keyOff, int keyLen) { this.key = new byte[keyLen]; System.arraycopy(key, keyOff, this.key, 0, keyLen); } /** * Returns the key array. * * @return the key array. */ public byte[] getKey() { byte k[] = new byte[key.length]; System.arraycopy(key, 0, k, 0, k.length); return k; } }
zhanglong11/vue-manage
src/api/device/equipment/checkRecord.js
import axios from '@/utils/axios'

/*
 * Equipment acceptance check-record API.
 * Task #2824: resolve the project-id problem (Zhang Long, 2020-03-07, code change).
 * The current project id is read from localStorage at call time on every request.
 */

const currentProjectId = () => localStorage.getItem('projectId')

export default {
  // Paged list; caller-supplied params may override the project id.
  list: param =>
    axios.construction.post('/equipmentAcceptance/list', {
      projectId: currentProjectId(),
      ...param
    }),
  // Create; the stored project id always wins over anything in `param`.
  add: param =>
    axios.construction.post('/equipmentAcceptance/save', {
      ...param,
      projectId: currentProjectId()
    }),
  // Update; caller-supplied params may override the project id.
  update: param =>
    axios.construction.post('/equipmentAcceptance/save', {
      projectId: currentProjectId(),
      ...param
    }),
  // Delete a record by id.
  delete: id => axios.construction.get(`/equipmentAcceptance/delete/${id}`),
  // Fetch a single record by id.
  detail: id => axios.construction.get(`/equipmentAcceptance/get/${id}`),
  // Submit a record for review; the stored project id always wins.
  submitAudit: param =>
    axios.construction.post('/equipmentAcceptance/submit', {
      ...param,
      projectId: currentProjectId()
    }),
  // Review (audit) a submitted record; payload passed through unchanged.
  audit: param => axios.construction.post('/equipmentAcceptance/audit', param)
}
nfco/netforce
netforce_cms/netforce_cms/nf_editor/plugins/nf_image/plugin.js
// CKEditor plugin registering the "nf_image" widget: an <img> wrapped in a
// <div class="nf-image">, recognized in existing content via the upcast check.
CKEDITOR.plugins.add('nf_image', {
    requires: 'widget',
    icons: 'nf_image',
    init: function( editor ) {
        editor.widgets.add( 'nf_image', {
            // Markup inserted when a new widget instance is created.
            template:
                '<div class="nf-image">'+
                    '<img src=""/>'+
                '</div>',
            // Promote any existing <div class="nf-image"> to this widget
            // when content is loaded or pasted.
            upcast: function( element ) {
                return element.name == 'div' && element.hasClass( 'nf-image' );
            }
        } );
    }
});
lupudeni/freecycle
src/main/java/com/denisalupu/freecycle/config/SecurityConfig.java
<filename>src/main/java/com/denisalupu/freecycle/config/SecurityConfig.java package com.denisalupu.freecycle.config; import lombok.AllArgsConstructor; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.security.authentication.dao.DaoAuthenticationProvider; import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder; import org.springframework.security.config.annotation.web.builders.HttpSecurity; import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter; import org.springframework.security.core.userdetails.UserDetailsService; import org.springframework.security.crypto.password.PasswordEncoder; @Configuration @AllArgsConstructor public class SecurityConfig extends WebSecurityConfigurerAdapter { private final UserDetailsService authenticationService; private final PasswordEncoder passwordEncoder; @Override protected void configure(HttpSecurity http) throws Exception { http.csrf().disable() .authorizeRequests() .antMatchers("/users/registration", "/doc/**").permitAll() .anyRequest().authenticated() .and() .httpBasic(); } @Override protected void configure(AuthenticationManagerBuilder auth) { auth.authenticationProvider(authProvider()); } @Bean public DaoAuthenticationProvider authProvider() { DaoAuthenticationProvider authProvider = new DaoAuthenticationProvider(); authProvider.setUserDetailsService(authenticationService); authProvider.setPasswordEncoder(passwordEncoder); return authProvider; } }
stepney141/GlukKazan.github.io
control/scripts/filler-dark.js
(function() {

// Chain the model version check: claim the "filler-dark" option as handled
// by this extension and delegate every other option to the base check.
var checkVersion = Dagaz.Model.checkVersion;
Dagaz.Model.checkVersion = function(design, name, value) {
  if (name != "filler-dark") {
      checkVersion(design, name, value);
  }
}

// Wrap CheckInvariants to post-process the generated moves.
// Piece-type conventions inferred from this file (TODO confirm against the
// game definition): 9 = "dark" cover piece, 0..6 = ordinary color pieces,
// 8 = wall piece, 10 = randomize trigger, 11 = wall trigger; player 3 owns
// the neutral/board pieces.
var CheckInvariants = Dagaz.Model.CheckInvariants;
Dagaz.Model.CheckInvariants = function(board) {
  var design = Dagaz.Model.design;
  // Collect every "dark" position (type 9) and keep one such piece as the
  // pattern used to re-cover a revealed region when nothing triggers.
  var dark = [];
  var pattern = null;
  _.each(design.allPositions(), function(pos) {
      var piece = board.getPiece(pos);
      if (piece === null) return;
      if (piece.type != 9) return;
      dark.push(pos);
      pattern = piece;
  });
  if (pattern !== null) {
      // "group": positions adjacent to the dark area holding ordinary
      // pieces (type < 7) — the fringe of the hidden region.
      var group = [];
      _.each(dark, function(pos) {
          _.each(design.allDirections(), function(dir) {
              var p = design.navigate(1, pos, dir);
              if (p === null) return;
              if (_.indexOf(dark, p) >= 0) return;
              if (_.indexOf(group, p) >= 0) return;
              var piece = board.getPiece(p);
              if (piece === null) return;
              if (piece.type >= 7) return;
              group.push(p);
          });
      });
      // "init": positions next to the fringe holding trigger pieces
      // (type >= 10) — each one may fire an effect below.
      var init = [];
      _.each(group, function(pos) {
          _.each(design.allDirections(), function(dir) {
              var p = design.navigate(1, pos, dir);
              if (p === null) return;
              var piece = board.getPiece(p);
              if (piece === null) return;
              if (piece.type >= 10) {
                  init.push(p);
              }
          });
      });
      var f = false; // set once any trigger fires
      while (init.length > 0) {
          var pos = init.pop();
          var start = board.getPiece(pos);
          if (start !== null) {
              // Flood-fill from the trigger: gather, per direction, the
              // connected region of same-type neutral pieces (player >= 3).
              // A type-11 trigger only spreads through dark (type 9) cells.
              var done = [pos];
              _.each(design.allDirections(), function(dir) {
                  var p = design.navigate(1, pos, dir);
                  if ((p === null) || (_.indexOf(done, p) >= 0)) return;
                  var piece = board.getPiece(p);
                  if ((piece === null) || (piece.player < 3)) return;
                  if ((start.type == 11) && (piece.type != 9)) return;
                  // BFS over `g`; `g` grows while being indexed.
                  var g = [p];
                  for (var i = 0; i < g.length; i++) {
                      _.each(design.allDirections(), function(d) {
                          var q = design.navigate(1, g[i], d);
                          if ((q === null) || (_.indexOf(done, q) >= 0)) return;
                          var x = board.getPiece(q);
                          if ((x === null) || (x.player < 3)) return;
                          if (x.type != piece.type) return;
                          done.push(q);
                          g.push(q);
                      });
                  }
              });
              // The affected area also covers the fringe and the dark cells.
              done = _.union(done, group);
              done = _.union(done, dark);
              f = true;
              if (start.type == 10) {
                  // Randomize: refill the area with random color pieces (0..6).
                  var pieces = [];
                  for (var i = 0; i < 7; i++) {
                      pieces.push(Dagaz.Model.createPiece(i, 3));
                  }
                  _.each(board.moves, function(move) {
                      _.each(done, function(pos) {
                          move.movePiece(pos, pos, pieces[_.random(6)]);
                      });
                      move.sound = 10;
                  });
              }
              if (start.type == 11) {
                  // Wall: fill the area with wall pieces (type 8).
                  var wall = Dagaz.Model.createPiece(8, 3);
                  _.each(board.moves, function(move) {
                      _.each(done, function(pos) {
                          move.movePiece(pos, pos, wall);
                      });
                      move.sound = 11;
                  });
              }
          }
      }
      // A trigger fired: skip re-covering AND the base invariant check
      // (early return — NOTE(review): intentional per original control flow).
      if (f) return;
      // No trigger fired: re-cover the fringe with the dark pattern piece.
      _.each(board.moves, function(move) {
          _.each(group, function(pos) {
              move.movePiece(pos, pos, pattern);
          });
      });
  }
  CheckInvariants(board);
}

})();
CardinalDevelopment/cardinal2
src/main/java/in/twizmwaz/cardinal/module/filter/type/LayerFilter.java
/* * Copyright (c) 2016, <NAME> * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package in.twizmwaz.cardinal.module.filter.type; import lombok.AllArgsConstructor; import org.bukkit.Material; import org.bukkit.block.Block; @AllArgsConstructor public class LayerFilter extends ObjectTypeFilter<Block> { private final int layer; private final Coordinate coordinate; @Override public Class<Block> getType() { return Block.class; } @Override public Boolean evaluate(Block block) { return getCoord(block) == layer || block.getWorld().getBlockAt( layerOrBlock(Coordinate.X, block.getX()), layerOrBlock(Coordinate.Y, block.getY()), layerOrBlock(Coordinate.Z, block.getZ())).getType().equals(Material.AIR); } private int layerOrBlock(Coordinate coord, int block) { return coordinate.equals(coord) ? layer : block; } private int getCoord(Block block) { switch (coordinate) { case X: return block.getX(); case Y: return block.getY(); case Z: return block.getZ(); default: return 0; } } public enum Coordinate { X, Y, Z; } }
lossyrob/geotrellis-before-locationtech
geotools/build.sbt
// Build definition for the GeoTrellis <-> GeoTools bridge module.
import Dependencies._

name := "geotrellis-geotools"

// GeoTools artifacts share one version, pinned in Dependencies.Version.
libraryDependencies ++= Seq(
  "org.geotools" % "gt-coverage" % Version.geotools,
  "org.geotools" % "gt-epsg-hsql" % Version.geotools,
  "org.geotools" % "gt-geotiff" % Version.geotools,
  "org.geotools" % "gt-main" % Version.geotools,
  "org.geotools" % "gt-referencing" % Version.geotools,
  jts,
  spire,
  "org.geotools" % "gt-shapefile" % Version.geotools % "test",
  scalatest % "test",
  // This is one finicky dependency. Being explicit in hopes it will stop hurting Travis.
  "javax.media" % "jai_core" % "1.1.3" from "http://download.osgeo.org/webdav/geotools/javax/media/jai_core/1.1.3/jai_core-1.1.3.jar"
)

// Extra repositories hosting GeoTools and JAI artifacts.
resolvers ++= Seq(
  "geosolutions" at "http://maven.geo-solutions.it/",
  "osgeo" at "http://download.osgeo.org/webdav/geotools/"
)

// Tests run in-process and sequentially — presumably because of shared
// JAI/native state; confirm before enabling parallelism.
fork in Test := false

parallelExecution in Test := false

// Convenience imports preloaded into the sbt console.
initialCommands in console :=
  """
  import geotrellis.geotools._
  import geotrellis.raster._
  import geotrellis.vector._
  import com.vividsolutions.jts.{geom => jts}
  import org.geotools.coverage.grid._
  import org.geotools.coverage.grid.io._
  import org.geotools.gce.geotiff._
  """
torukobyte/HRMS_Spring
hrms/src/main/java/torukobyte/hrms/api/controllers/JobSeekerLanguagesController.java
package torukobyte.hrms.api.controllers;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import torukobyte.hrms.business.abstracts.JobSeekerLanguageService;
import torukobyte.hrms.core.utilities.results.DataResult;
import torukobyte.hrms.core.utilities.results.Result;
import torukobyte.hrms.entities.concretes.JobSeekerLanguage;
import torukobyte.hrms.entities.dtos.addDtos.JobSeekerLanguageAddDto;

import java.util.List;

/**
 * REST endpoints for the languages listed on a job seeker's CV.
 */
@RestController
@RequestMapping("/api/jobseekerLanguages")
@CrossOrigin
public class JobSeekerLanguagesController {

    private final JobSeekerLanguageService languageService;

    /** Constructor injection of the language service. */
    @Autowired
    public JobSeekerLanguagesController(JobSeekerLanguageService jobSeekerLanguageService) {
        this.languageService = jobSeekerLanguageService;
    }

    /** Lists the language entries attached to the given curriculum vitae. */
    @GetMapping("/getJobseekerLanguagesByCvId")
    public DataResult<List<JobSeekerLanguage>> getJobseekerLanguagesByCvId(@RequestParam int cvId) {
        return languageService.getJobSeekerLanguageByCurriculaVitaeId(cvId);
    }

    /** Adds a language entry to a CV. */
    @PostMapping("/addJobseekerLanguages")
    public Result addJobseekerLanguage(@RequestBody JobSeekerLanguageAddDto jobSeekerLanguage) {
        return languageService.addJobSeekerLanguage(jobSeekerLanguage);
    }

    /** Deletes a language entry by its id. */
    @DeleteMapping("/deleteJobSeekerLanguageById")
    public Result deleteJobSeekerLanguageById(@RequestParam int id) {
        return languageService.deleteJobSeekerLanguageById(id);
    }
}
ScalablyTyped/SlinkyTyped
o/openui5/src/main/scala/typingsSlinky/openui5/sap/m/TimePicker.scala
package typingsSlinky.openui5.sap.m import org.scalablytyped.runtime.StObject import scala.scalajs.js import scala.scalajs.js.`|` import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess} @js.native trait TimePicker extends MaskInput { /** * Gets current value of property <code>dateValue</code>.Holds a reference to a JavaScript Date Object. * The <code>value</code> (string)property will be set according to it. Alternatively, if the * <code>value</code>and <code>valueFormat</code> pair properties are supplied instead,the * <code>dateValue</code> will be instantiated аccording to the parsed<code>value</code>. * @returns Value of property <code>dateValue</code> */ def getDateValue(): js.Any = js.native /** * Gets current value of property <code>displayFormat</code>.Determines the format, displayed in the * input field and the picker sliders.The default value is the browser's medium time format locale * setting{@link sap.ui.core.LocaleData#getTimePattern}.If data binding with type {@link * sap.ui.model.type.Time} is used for the<code>value</code> property, the <code>displayFormat</code> * propertyis ignored as the information is provided from the binding itself. * @returns Value of property <code>displayFormat</code> */ def getDisplayFormat(): String = js.native /** * Gets current value of property <code>localeId</code>.Defines the locale used to parse string values * representing time.Determines the locale, used to interpret the string, supplied by * the<code>value</code> property.Example: AM in the string "09:04 AM" is locale (language) * dependent.The format comes from the browser language settings if not set explicitly.Used in * combination with 12 hour <code>valueFormat</code> containing 'a', whichstands for day period * string.Default value is taken from browser's locale setting. 
* @returns Value of property <code>localeId</code> */ def getLocaleId(): String = js.native /** * Gets current value of property <code>minutesStep</code>.Sets the minutes slider step.The minutes * slider is populated only by multiples of the step.Default value is <code>1</code>. * @returns Value of property <code>minutesStep</code> */ def getMinutesStep(): Double = js.native /** * Gets current value of property <code>secondsStep</code>.Sets the seconds slider step.The seconds * slider is populated only by multiples of the step.Default value is <code>1</code>. * @returns Value of property <code>secondsStep</code> */ def getSecondsStep(): Double = js.native /** * Gets current value of property <code>title</code>.Displays the text of the general picker label and * is read by screen readers.It is visible only on phone. * @returns Value of property <code>title</code> */ def getTitle(): String = js.native /** * Gets current value of property <code>valueFormat</code>.Determines the format of the value * property.The default value is the browser's medium time format locale setting{@link * sap.ui.core.LocaleData#getTimePattern}.If data binding with type {@link sap.ui.model.type.Time} is * used for the<code>value</code> property, the <code>valueFormat</code> propertyis ignored as the * information is provided from the binding itself. * @returns Value of property <code>valueFormat</code> */ def getValueFormat(): String = js.native /** * Called after the picker closes. */ def onAfterClose(): Unit = js.native /** * Called after the picker appears. */ def onAfterOpen(): Unit = js.native /** * Called before the picker appears. */ def onBeforeOpen(): Unit = js.native /** * Sets the <code>dateValue</code> JavaScript date object.Recommended usage is when <code>value</code> * is not set, as they are mutually exclusive. 
* @param oDate New date object * @returns this instance, used for chaining */ def setDateValue(oDate: js.Any): TimePicker | js.Error = js.native /** * Sets the <code>displayFormat</code>. * @param sDisplayFormat The format of the string inside the input * @returns this instance, used for chaining */ def setDisplayFormat(sDisplayFormat: String): TimePicker = js.native /** * Sets the locale of the control.Used for parsing and formatting the time values in languages * different than English.Necessary for translation and auto-complete of the day periods, such as AM * and PM. * @param sLocaleId A locale identifier like 'en_US' * @returns this instance, used for chaining */ def setLocaleId(sLocaleId: String): TimePicker = js.native /** * Sets the minutes slider step. * @param iStep The step used to generate values for the minutes slider * @returns this */ def setMinutesStep(iStep: js.Any): js.Any = js.native /** * Sets the seconds slider step. * @param iStep The step used to generate values for the seconds slider * @returns this */ def setSecondsStep(iStep: js.Any): js.Any = js.native /** * Sets the <code>valueFormat</code> property. * @param sValueFormat The format of strings that are set as value to the control * @returns this instance, used for chaining */ def setValueFormat(sValueFormat: String): TimePicker = js.native }
talonos2/BruteLoL
src/brutelol/FightTest.java
<reponame>talonos2/BruteLoL
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package brutelol;

import brutelol.characters.instances.Ashe;
import brutelol.characters.instances.GenericChar;
import brutelol.characters.instances.Rammus;
import brutelol.characters.instances.Soraka;
import brutelol.characters.instances.Xayah;
import brutelol.characters.lib.AbstractLolCharacter;
import brutelol.charbuild.Build;
import brutelol.charbuild.ItemSet;
import brutelol.fights.Fight;
import brutelol.items.abstracts.Item;
import brutelol.items.instances.InfinityEdge;
import brutelol.items.instances.Items;
import java.util.ArrayList;
import java.util.List;

/**
 * Ad-hoc driver: sets up one simulated fight (Rammus as the sole ally vs.
 * Ashe, Soraka and a generic enemy, all on empty item sets), prints each
 * character's stats, then runs the fight for 20 time units.
 *
 * @author Talonos
 */
public class FightTest {

    public static void main(String[] args) {
        // Presumably initializes the global item registry — confirm in Items.
        Items.getAllItems();

        List<Item> xayahItems = new ArrayList<>();
        // NOTE(review): duplicate call — the registry was already loaded above.
        // Harmless only if getAllItems() is idempotent; confirm.
        Items.getAllItems();
        xayahItems.add(new InfinityEdge());
        xayahItems.add(new InfinityEdge());
        // NOTE(review): xi and ei are constructed but never used below (the
        // Xayah import is unused too) — leftovers from an earlier experiment?
        ItemSet xi = new ItemSet(xayahItems);

        List<Item> eItems = new ArrayList<>();
        eItems.add(new InfinityEdge());
        ItemSet ei = new ItemSet(eItems);

        // Empty item set shared by every character actually in the fight.
        ItemSet ni = new ItemSet(new ArrayList<Item>());

        Fight f = new Fight();

        AbstractLolCharacter rammus = new Rammus(new Build(ni));
        AbstractLolCharacter ashe = new Ashe(new Build(ni));
        AbstractLolCharacter soraka = new Soraka(new Build(ni));
        AbstractLolCharacter enemy = new GenericChar(new Build(ni));

        // One ally vs. three enemies, every character placed at (1, 1).
        f.addAlly(rammus, 1, 1);
        f.addEnemy(ashe, 1, 1);
        f.addEnemy(soraka, 1, 1);
        f.addEnemy(enemy, 1, 1);

        System.out.println(rammus.getStats());
        System.out.println(ashe.getStats());
        System.out.println(soraka.getStats());
        System.out.println(enemy.getStats());

        // Run the simulation for 20 time units.
        f.setTime(20);
        f.runFight();
    }
}
codeforamerica/michigan-benefits
app/forms/are_you_flint_water_form.rb
<filename>app/forms/are_you_flint_water_form.rb
# Form object backing the "are you Flint water" screen.
#
# Declares a single +flint_water+ attribute stored against the member —
# NOTE(review): the exact persistence semantics of set_attributes_for live
# in the Form base class; confirm there.
class AreYouFlintWaterForm < Form
  set_attributes_for :member, :flint_water
end
staniel359/muffon-api
app/services/muffon/profile/playlists/playlist.rb
<reponame>staniel359/muffon-api<filename>app/services/muffon/profile/playlists/playlist.rb
module Muffon
  module Profile
    class Playlists
      # Serializes a single profile playlist into the hash shape the API
      # returns, optionally including the join-row id for one track.
      class Playlist < Muffon::Profile::Base
        def call
          data
        end

        private

        # Attribute hash for the playlist. Nil values are dropped by
        # #compact (e.g. playlist_track_id when no :track_id was passed).
        def data
          {
            id: playlist.id,
            playlist_track_id: playlist_track&.id,
            title: playlist.title,
            image: playlist.image_data,
            tracks_count: playlist.playlist_tracks_count,
            created: created_formatted
          }.compact
        end

        # Playlist model instance supplied by the caller in @args.
        def playlist
          @args[:playlist]
        end

        # Join row linking this playlist to the requested track, when a
        # :track_id argument was given; nil otherwise.
        def playlist_track
          return if @args[:track_id].blank?

          playlist.playlist_tracks.find_by(
            track_id: @args[:track_id]
          )
        end

        # Creation timestamp rendered via Base#datetime_formatted.
        def created_formatted
          datetime_formatted(
            playlist.created_at
          )
        end
      end
    end
  end
end
caohangwei1314/mall-vue-server
src/main/java/com/caohangwei/mallvueserver/service/impl/OrderServiceImpl.java
<gh_stars>0
package com.caohangwei.mallvueserver.service.impl;

import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.caohangwei.mallvueserver.entity.Order;
import com.caohangwei.mallvueserver.entity.OrderDetail;
import com.caohangwei.mallvueserver.entity.ShoppingCart;
import com.caohangwei.mallvueserver.entity.User;
import com.caohangwei.mallvueserver.exception.BusinessException;
import com.caohangwei.mallvueserver.mapper.OrderMapper;
import com.caohangwei.mallvueserver.request.OrderAddRequest;
import com.caohangwei.mallvueserver.request.OrderRequest;
import com.caohangwei.mallvueserver.response.OrderResponse;
import com.caohangwei.mallvueserver.service.OrderDetailService;
import com.caohangwei.mallvueserver.service.OrderService;
import com.caohangwei.mallvueserver.service.ShoppingCartService;
import com.caohangwei.mallvueserver.util.UserContextHolder;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;

/**
 * Order use-cases: submitting a new order built from the shopping cart and
 * listing the logged-in user's orders with their detail lines.
 */
@Service
@Slf4j
public class OrderServiceImpl extends ServiceImpl<OrderMapper, Order> implements OrderService {

    @Autowired
    private ShoppingCartService shoppingCartService;

    @Autowired
    private OrderDetailService orderDetailService;

    /**
     * Persists a new order for the logged-in user and removes the purchased
     * rows from the shopping cart, all in one transaction.
     *
     * @param orderAddRequest order header/detail data plus the cart row ids to remove
     * @return {@code true} on success
     * @throws BusinessException when the request carries no order payload
     */
    @Override
    @Transactional
    public boolean submit(OrderAddRequest orderAddRequest) {
        OrderRequest orderRequest = orderAddRequest.getOrder();
        if (orderRequest == null) {
            throw new BusinessException("订单为空");
        }
        User user = UserContextHolder.getUser();

        // Clear the purchased rows from the cart. Guarded: MyBatis-Plus renders
        // an empty collection as an invalid "IN ()" clause, so skip when empty.
        Collection<?> cartIds = orderAddRequest.getCart();
        if (cartIds != null && !cartIds.isEmpty()) {
            shoppingCartService
                    .lambdaUpdate()
                    .in(ShoppingCart::getId, cartIds)
                    .remove();
        }

        Order order = new Order();
        BeanUtils.copyProperties(orderRequest, order);
        order.setUserid(user.getUserid());
        // save(..) is inherited from ServiceImpl — no need for the previous
        // self-injected OrderService field (a Spring anti-pattern).
        this.save(order);

        // Persist the detail lines in one batch instead of row-by-row saves.
        // NOTE(review): the details are stored exactly as received — nothing
        // here links them to the generated order id. Confirm the client sends
        // orderId on each OrderDetail, otherwise details are orphaned.
        orderDetailService.saveBatch(orderRequest.getOrderDetail());
        return true;
    }

    /**
     * Returns every order belonging to the logged-in user, each enriched with
     * its detail lines.
     *
     * <p>NOTE(review): this issues one detail query per order (N+1 pattern);
     * consider a join or a single IN query if order volumes grow.
     *
     * @return the user's orders with their details attached
     */
    @Override
    public List<OrderResponse> getOrderList() {
        User user = UserContextHolder.getUser();
        List<Order> orderList = this
                .lambdaQuery()
                .eq(Order::getUserid, user.getUserid())
                .list();
        return orderList.stream()
                .map(this::toResponse)
                .collect(Collectors.toList());
    }

    /** Maps one Order entity to its API response, attaching its detail rows. */
    private OrderResponse toResponse(Order order) {
        OrderResponse orderResponse = new OrderResponse();
        BeanUtils.copyProperties(order, orderResponse);
        orderResponse.setOrderDetail(orderDetailService
                .lambdaQuery()
                .eq(OrderDetail::getOrderId, order.getOrderid())
                .list());
        return orderResponse;
    }
}
KaiyuWei/leetcode
cpp/381-390/Lexicographical Numbers.cpp
// LeetCode 386 — Lexicographical Numbers.
// Emits 1..n in lexicographic (dictionary) order by walking the implicit
// 10-ary digit tree depth-first: visiting node `curr` before its children
// curr*10 .. curr*10+9 yields exactly the lexicographic sequence. O(n) time.
class Solution {
    // Appends `curr` and all of its in-range descendants to result[index...].
    // `curr` is widened to long long so the child computation curr*10 + i
    // cannot overflow even when n is close to INT_MAX (the previous int
    // arithmetic was undefined behavior in that regime).
    void dfs(long long curr, int n, std::vector<int>& result, int& index) {
        if (curr > n) {
            return;
        }
        result[index++] = static_cast<int>(curr);  // curr <= n, so it fits in int
        for (int i = 0; i <= 9; i++) {
            dfs(curr * 10 + i, n, result, index);
        }
    }

public:
    // Returns the numbers 1..n sorted lexicographically.
    // Recursion depth is bounded by the digit count of n (<= 10).
    std::vector<int> lexicalOrder(int n) {
        std::vector<int> result(n);
        int index = 0;
        for (int i = 1; i <= 9; i++) {
            dfs(i, n, result, index);
        }
        return result;
    }
};
dazito/twitterfy
src/main/java/com/dazito/twitterfy/aws/sqs/SqsPublisher.java
package com.dazito.twitterfy.aws.sqs;

import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.services.sqs.AmazonSQSAsync;
import com.amazonaws.services.sqs.AmazonSQSAsyncClientBuilder;
import com.amazonaws.services.sqs.model.SendMessageRequest;
import com.dazito.twitterfy.Publisher;
import com.dazito.twitterfy.configuration.TwitterfyConfiguration;
import com.dazito.twitterfy.model.TweetModel;
import com.dazito.twitterfy.util.AwsUtil;
import com.google.gson.Gson;

/**
 * {@link Publisher} implementation that serializes tweets to JSON and
 * enqueues them on an AWS SQS queue via the async client.
 *
 * <p>Created by daz on 10/05/2017.
 */
public class SqsPublisher implements Publisher {

    private AmazonSQSAsync amazonSQSAsync;

    // NOTE(review): the SQS client is built with the configured *SNS* region
    // (getAwsSnsRegion) — looks like a copy/paste from an SNS publisher.
    // Confirm SQS and SNS really share a region, or add a dedicated setting.
    private String region = TwitterfyConfiguration.getConfiguration().getAwsSnsRegion();

    // Full URL of the destination queue, read once from configuration.
    private final String queueUrl;
    private final AWSCredentialsProvider awsCredentialsProvider;
    // Gson is thread-safe; one instance per publisher is sufficient.
    private final Gson gson;

    /** Builds the async SQS client from the application's AWS configuration. */
    public SqsPublisher() {
        gson = new Gson();

        awsCredentialsProvider = AwsUtil.awsCredentialsProvider();

        amazonSQSAsync = AmazonSQSAsyncClientBuilder
                .standard()
                .withCredentials(awsCredentialsProvider)
                .withRegion(region)
                .build();

        queueUrl = TwitterfyConfiguration.getConfiguration().getAwsSqsQueueUrl();
    }

    /**
     * Serializes the tweet to JSON and sends it to the configured queue.
     * The send is asynchronous and the returned Future is discarded, so
     * delivery failures are not observed by the caller.
     *
     * @param tweetModel tweet to enqueue
     */
    @Override
    public void publish(TweetModel tweetModel) {
        SendMessageRequest sendMessageRequest = new SendMessageRequest()
                .withQueueUrl(queueUrl)
                .withMessageBody(gson.toJson(tweetModel, TweetModel.class));

        amazonSQSAsync.sendMessage(sendMessageRequest);
    }
}
flyingff/droidbot
src/main/java/com/twilight/h264/decoder/AVFrame.java
package com.twilight.h264.decoder; public class AVFrame { public int imageWidthWOEdge; public int imageHeightWOEdge; public int imageWidth; public int imageHeight; // FF_COMMON_FRAME /**\ * pointer to the picture planes.\ * This might be different from the first allocated byte\ * - encoding: \ * - decoding: \ */ public int[][] data_base = new int[4][]; //uint8_t *data[4];\ public int[] data_offset = new int[4]; // Offset in each data_base public int[] linesize = new int[4]; /**\ * pointer to the first allocated byte of the picture. Can be used in get_buffer/release_buffer.\ * This isn't used by libavcodec unless the default get/release_buffer() is used.\ * - encoding: \ * - decoding: \ */ public int[][] base = new int[4][]; //uint8_t *base[4];\ /**\ * 1 -> keyframe, 0-> not\ * - encoding: Set by libavcodec.\ * - decoding: Set by libavcodec.\ */ public int key_frame; /**\ * Picture type of the frame, see ?_TYPE below.\ * - encoding: Set by libavcodec. for coded_picture (and set by user for input).\ * - decoding: Set by libavcodec.\ */ public int pict_type; /**\ * presentation timestamp in time_base units (time when frame should be shown to user)\ * If AV_NOPTS_VALUE then frame_rate = 1/time_base will be assumed.\ * - encoding: MUST be set by user.\ * - decoding: Set by libavcodec.\ */ public long pts; /**\ * picture number in bitstream order\ * - encoding: set by\ * - decoding: Set by libavcodec.\ */ public int coded_picture_number; /**\ * picture number in display order\ * - encoding: set by\ * - decoding: Set by libavcodec.\ */ public int display_picture_number; /**\ * quality (between 1 (good) and FF_LAMBDA_MAX (bad)) \ * - encoding: Set by libavcodec. 
for coded_picture (and set by user for input).\ * - decoding: Set by libavcodec.\ */ public int quality; /**\ * buffer age (1->was last buffer and dint change, 2->..., ...).\ * Set to INT_MAX if the buffer has not been used yet.\ * - encoding: unused\ * - decoding: MUST be set by get_buffer().\ */ public int age; /**\ * is this picture used as reference\ * The values for this are the same as the MpegEncContext.picture_structure\ * variable, that is 1->top field, 2->bottom field, 3->frame/both fields.\ * Set to 4 for delayed, non-reference frames.\ * - encoding: unused\ * - decoding: Set by libavcodec. (before get_buffer() call)).\ */ public int reference; /**\ * QP table\ * - encoding: unused\ * - decoding: Set by libavcodec.\ */ public int[] qscale_table; //int8_t *qscale_table;\ /**\ * QP store stride\ * - encoding: unused\ * - decoding: Set by libavcodec.\ */ public int qstride; /**\ * mbskip_table[mb]>=1 if MB didn't change\ * stride= mb_width = (width+15)>>4\ * - encoding: unused\ * - decoding: Set by libavcodec.\ */ public int[] mbskip_table; //uint8_t *mbskip_table;\ /**\ * motion vector table\ * @code\ * example:\ * int mv_sample_log2= 4 - motion_subsample_log2;\ * int mb_width= (width+15)>>4;\ * int mv_stride= (mb_width << mv_sample_log2) + 1;\ * motion_val[direction][x + y*mv_stride][0->mv_x, 1->mv_y];\ * @endcode\ * - encoding: Set by user.\ * - decoding: Set by libavcodec.\ */ //public int[][][] motion_val = new int[2][][]; // int16_t (*motion_val[2])[2];\ // Change to segment/offset style on motion_val_base[2][offset][2] public int[] motion_val_offset = new int[2]; // int16_t (*motion_val[2])[2];\ /**\ * macroblock type table\ * mb_type_base + mb_width + 2\ * - encoding: Set by user.\ * - decoding: Set by libavcodec.\ */ public int mb_type_offset; //uint32_t *mb_type;\ /**\ * log2 of the size of the block which a single vector in motion_val represents: \ * (4->16x16, 3->8x8, 2-> 4x4, 1-> 2x2)\ * - encoding: unused\ * - decoding: Set by libavcodec.\ */ 
public int motion_subsample_log2; //uint8_t motion_subsample_log2;\ /**\ * for some private data of the user\ * - encoding: unused\ * - decoding: Set by user.\ */ public Object opaque; //void *opaque;\ /**\ * error\ * - encoding: Set by libavcodec. if flags&CODEC_FLAG_PSNR.\ * - decoding: unused\ */ public long[] error = new long[4]; //uint64_t error[4];\ /**\ * type of the buffer (to keep track of who has to deallocate data[*])\ * - encoding: Set by the one who allocates it.\ * - decoding: Set by the one who allocates it.\ * Note: User allocated (direct rendering) & internal buffers cannot coexist currently.\ */ public int type; /**\ * When decoding, this signals how much the picture must be delayed.\ * extra_delay = repeat_pict / (2*fps)\ * - encoding: unused\ * - decoding: Set by libavcodec.\ */ public int repeat_pict; /**\ * \ */ public int qscale_type; /**\ * The content of the picture is interlaced.\ * - encoding: Set by user.\ * - decoding: Set by libavcodec. (default 0)\ */ public int interlaced_frame; /**\ * If the content is interlaced, is top field displayed first.\ * - encoding: Set by user.\ * - decoding: Set by libavcodec.\ */ public int top_field_first; /**\ * Pan scan.\ * - encoding: Set by user.\ * - decoding: Set by libavcodec.\ */ public AVPanScan pan_scan; /**\ * Tell user application that palette has changed from previous frame.\ * - encoding: ??? (no palette-enabled encoder yet)\ * - decoding: Set by libavcodec. (default 0).\ */ public int palette_has_changed; /**\ * codec suggestion on buffer type if != 0\ * - encoding: unused\ * - decoding: Set by libavcodec. 
(before get_buffer() call)).\ */ public int buffer_hints; /**\ * DCT coefficients\ * - encoding: unused\ * - decoding: Set by libavcodec.\ */ public short[] dct_coeff; //short *dct_coeff;\ /**\ * motion reference frame index\ * the order in which these are stored can depend on the codec.\ * - encoding: Set by user.\ * - decoding: Set by libavcodec.\ */ public int[][] ref_index = new int[2][]; //int8_t *ref_index[2];\ /**\ * reordered opaque 64bit (generally an integer or a double precision float\ * PTS but can be anything). \ * The user sets AVCodecContext.reordered_opaque to represent the input at\ * that time,\ * the decoder reorders values as needed and sets AVFrame.reordered_opaque\ * to exactly one of the values provided by the user through AVCodecContext.reordered_opaque \ * - encoding: unused\ * - decoding: Read by user.\ */ public long reordered_opaque; //int64_t reordered_opaque;\ /**\ * hardware accelerator private data (FFmpeg allocated)\ * - encoding: unused\ * - decoding: Set by libavcodec\ */ public Object hwaccel_picture_private; /**\ * reordered pts from the last AVPacket that has been input into the decoder\ * - encoding: unused\ * - decoding: Read by user.\ */ public long pkt_pts; //int64_t pkt_pts;\ /**\ * dts from the last AVPacket that has been input into the decoder\ * - encoding: unused\ * - decoding: Read by user.\ */ public long pkt_dts; //int64_t pkt_dts;\ /** * halfpel luma planes. */ public int[] interpolated = new int[3]; //uint8_t *interpolated[3]; public int[][][] motion_val_base = new int[2][][]; // int16_t (*motion_val_base[2])[2]; public long[] mb_type_base; // uint32_t *mb_type_base; public int[] field_poc = new int[2]; ///< h264 top/bottom POC public int poc; ///< h264 frame POC public int frame_num; ///< h264 frame_num (raw frame_num from slice header) public int mmco_reset; ///< h264 MMCO_RESET set this 1. Reordering code must not mix pictures before and after MMCO_RESET. 
public int pic_id; /**< h264 pic_num (short -> no wrap version of pic_num, pic_num & max_pic_num; long -> long_pic_num) */ public int long_ref; ///< 1->long term reference 0->short term reference public int[][][] ref_poc = new int[2][2][16]; ///< h264 POCs of the frames used as reference (FIXME need per slice) public int[][] ref_count = new int[2][2]; ///< number of entries in ref_poc (FIXME need per slice) public int mbaff; ///< h264 1 -> MBAFF frame 0-> not MBAFF public int mb_var_sum; ///< sum of MB variance for current frame public int mc_mb_var_sum; ///< motion compensated MB variance for current frame public int[] mb_var; ///< Table for MB variances public int[] mc_mb_var; ///< Table for motion compensated MB variances public int[] mb_mean; ///< Table for MB luminance public int[] mb_cmp_score; ///< Table for MB cmp scores, for mb decision FIXME remove /* uint16_t *mb_var; ///< Table for MB variances uint16_t *mc_mb_var; ///< Table for motion compensated MB variances uint8_t *mb_mean; ///< Table for MB luminance int32_t *mb_cmp_score; ///< Table for MB cmp scores, for mb decision FIXME remove */ public int b_frame_score; /* */ public AVFrame copyTo(AVFrame ret) { /////////////??????????????????????????? //To do: Implement this method! 
ret.age = age; ret.b_frame_score = b_frame_score; for(int i=0;i<base.length;i++) ret.base[i] = base[i]; ret.buffer_hints = buffer_hints; ret.coded_picture_number = coded_picture_number; for(int i=0;i<data_base.length;i++) { ret.data_base[i] = data_base[i]; ret.data_offset[i] = data_offset[i]; } // for ret.dct_coeff = dct_coeff; ret.display_picture_number = display_picture_number; System.arraycopy(error, 0, ret.error, 0, error.length); System.arraycopy(field_poc, 0, ret.field_poc, 0, field_poc.length); ret.frame_num = frame_num; ret.imageWidth = imageWidth; ret.imageHeight = imageHeight; ret.imageWidthWOEdge = imageWidthWOEdge; ret.imageHeightWOEdge = imageHeightWOEdge; ret.interlaced_frame = interlaced_frame; System.arraycopy(interpolated, 0, ret.interpolated, 0, interpolated.length); ret.key_frame = key_frame; System.arraycopy(linesize, 0, ret.linesize, 0, linesize.length); ret.long_ref = long_ref; ret.mb_cmp_score = mb_cmp_score; ret.mb_mean = mb_mean; ret.mb_type_base = mb_type_base; ret.mb_type_offset = mb_type_offset; ret.mb_var = mb_var; ret.mb_var_sum = mb_var_sum; ret.mbaff = mbaff; ret.mbskip_table = mbskip_table; ret.mc_mb_var = mc_mb_var; ret.mc_mb_var_sum = mc_mb_var_sum; ret.mmco_reset = mmco_reset; ret.motion_subsample_log2 = motion_subsample_log2; //??????????????? Can we copy it at this depth? 
System.arraycopy(motion_val_base, 0, ret.motion_val_base, 0, motion_val_base.length); System.arraycopy(motion_val_offset, 0, ret.motion_val_offset, 0, motion_val_offset.length); ret.opaque = opaque; ret.palette_has_changed = palette_has_changed; ret.pan_scan = pan_scan; ret.pic_id = pic_id; ret.pict_type = pict_type; ret.pkt_dts = pkt_dts; ret.pkt_pts = pkt_pts; ret.poc = poc; ret.pts = pts; ret.qscale_table = qscale_table; ret.qscale_type = qscale_type; ret.qstride = qstride; ret.quality = quality; for(int i=0;i<ref_count.length;i++) System.arraycopy(ref_count[i], 0, ret.ref_count[i], 0, ref_count[i].length); System.arraycopy(ref_index, 0, ret.ref_index, 0, ref_index.length); for(int i=0;i<ref_poc.length;i++) for(int j=0;j<ref_poc[i].length;j++) System.arraycopy(ref_poc[i][j], 0, ret.ref_poc[i][j], 0, ref_poc[i][j].length); ret.reference = reference; ret.reordered_opaque = reordered_opaque; ret.repeat_pict = repeat_pict; ret.top_field_first = top_field_first; ret.type = type; return ret; } public void resetToZero() { /////////////??????????????????????????? //To do: Implement this method! } public static void pic_as_field(AVFrame pic, int parity) { int i; for (i = 0; i < 4; ++i) { if (parity == MpegEncContext.PICT_BOTTOM_FIELD) pic.data_offset[i] += pic.linesize[i]; pic.reference = parity; pic.linesize[i] *= 2; } pic.poc = pic.field_poc[(parity == MpegEncContext.PICT_BOTTOM_FIELD) ? 1 : 0]; } public static int split_field_copy(AVFrame dest, AVFrame src, int parity, int id_add){ int match = ((src.reference & parity) != 0 ? 1 : 0); if (match != 0) { //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
src.copyTo(dest); if (parity != MpegEncContext.PICT_FRAME) { pic_as_field(dest, parity); dest.pic_id *= 2; dest.pic_id += id_add; } } return match; } public static int build_def_list(AVFrame[] def_base, int def_offset, AVFrame[] in_base, int in_offset, int len, int is_long, int sel){ int[] i = new int[]{0,0}; int index=0; while(i[0]<len || i[1]<len){ while(i[0]<len && !(in_base[in_offset + i[0] ]!=null && (in_base[in_offset + i[0] ].reference & sel)!=0)) i[0]++; while(i[1]<len && !(in_base[in_offset + i[1] ]!=null && (in_base[in_offset + i[1] ].reference & (sel^3))!=0)) i[1]++; if(i[0] < len){ in_base[in_offset + i[0] ].pic_id= (is_long!=0 ? i[0] : in_base[in_offset + i[0] ].frame_num); split_field_copy(def_base[def_offset + index++], in_base[in_offset + i[0]++ ], sel , 1); } if(i[1] < len){ in_base[in_offset + i[1] ].pic_id= (is_long!=0 ? i[1] : in_base[in_offset + i[1] ].frame_num); split_field_copy(def_base[def_offset + index++], in_base[in_offset + i[1]++ ], sel^3, 0); } } return index; } public static int add_sorted(AVFrame[] sorted_base, int sorted_offset, AVFrame[] src_base, int src_offset, int len, int limit, int dir){ int i, best_poc; int out_i= 0; for(;;){ best_poc= (dir!=0 ? Integer.MIN_VALUE : Integer.MAX_VALUE); for(i=0; i<len; i++){ int poc= src_base[src_offset + i].poc; if(((poc > limit) ^ (dir!=0)) && ((poc < best_poc) ^ (dir!=0))){ best_poc= poc; sorted_base[sorted_offset + out_i]= src_base[src_offset + i]; } } if(best_poc == (dir!=0 ? 
Integer.MIN_VALUE : Integer.MAX_VALUE)) break; limit= sorted_base[sorted_offset + out_i++].poc - dir; } return out_i; } /* #define MB_TYPE_INTRA MB_TYPE_INTRA4x4 //default mb_type if there is just one type #define IS_INTRA4x4(a) ((a)&MB_TYPE_INTRA4x4) #define IS_INTRA16x16(a) ((a)&MB_TYPE_INTRA16x16) #define IS_PCM(a) ((a)&MB_TYPE_INTRA_PCM) #define IS_INTRA(a) ((a)&7) #define IS_INTER(a) ((a)&(MB_TYPE_16x16|MB_TYPE_16x8|MB_TYPE_8x16|MB_TYPE_8x8)) #define IS_SKIP(a) ((a)&MB_TYPE_SKIP) #define IS_INTRA_PCM(a) ((a)&MB_TYPE_INTRA_PCM) #define IS_INTERLACED(a) ((a)&MB_TYPE_INTERLACED) #define IS_DIRECT(a) ((a)&MB_TYPE_DIRECT2) #define IS_GMC(a) ((a)&MB_TYPE_GMC) #define IS_16X16(a) ((a)&MB_TYPE_16x16) #define IS_16X8(a) ((a)&MB_TYPE_16x8) #define IS_8X16(a) ((a)&MB_TYPE_8x16) #define IS_8X8(a) ((a)&MB_TYPE_8x8) #define IS_SUB_8X8(a) ((a)&MB_TYPE_16x16) //note reused #define IS_SUB_8X4(a) ((a)&MB_TYPE_16x8) //note reused #define IS_SUB_4X8(a) ((a)&MB_TYPE_8x16) //note reused #define IS_SUB_4X4(a) ((a)&MB_TYPE_8x8) //note reused #define IS_ACPRED(a) ((a)&MB_TYPE_ACPRED) #define IS_QUANT(a) ((a)&MB_TYPE_QUANT) #define IS_DIR(a, part, list) ((a) & (MB_TYPE_P0L0<<((part)+2*(list)))) #define USES_LIST(a, list) ((a) & ((MB_TYPE_P0L0|MB_TYPE_P1L0)<<(2*(list)))) ///< does this mb use listX, note does not work if subMBs #define HAS_CBP(a) ((a)&MB_TYPE_CBP) */ public static AVFrame avcodec_alloc_frame() { AVFrame ret = new AVFrame(); ret.pts = MpegEncContext.AV_NOPTS_VALUE; ret.key_frame = 1; return ret; } }
whebz/ConnectX
src/forzaquattro/view/StatsMenu.java
<filename>src/forzaquattro/view/StatsMenu.java
package forzaquattro.view;

import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.GridLayout;
import java.awt.Toolkit;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;

import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.WindowConstants;
import javax.swing.border.TitledBorder;

import forzaquattro.controller.Controller;
import forzaquattro.controller.Controller.Difficult;
import forzaquattro.controller.StatsManagerImpl;
import forzaquattro.model.GameType;
import forzaquattro.model.GameVariant;
import forzaquattro.model.Stats;

/**
 * Menu showing the statistics of previously played games: one table per game
 * variant for Player-vs-Computer results (split by difficulty) plus a single
 * Player-vs-Player table (split by variant).
 */
public class StatsMenu implements Menu {

    private static final double MENU_PERC_WIDTH = 0.3;
    private static final double MENU_PERC_HEIGHT = 0.8;
    // Number of stacked tables: one per GameVariant (PvsC) plus one PvsP table.
    // NOTE(review): assumes GameVariant has TABLES_NUM - 1 values; revisit if
    // variants are added or removed.
    private static final int TABLES_NUM = 5;

    private final Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize();
    private final Optional<JFrame> frame;
    private final StatsManagerImpl sm;

    /**
     * Builds the statistics window; it stays hidden until {@link #show()}.
     *
     * @param c the controller used to reach the stats manager
     */
    public StatsMenu(final Controller c) {
        this.sm = c.getStatsManager();
        System.out.println("Creating StatsMenu..."); // log
        this.frame = Optional.of(new JFrame("Forza4-5 - Statistics"));
        this.frame.get().setSize((int) (screenSize.getWidth() * MENU_PERC_WIDTH),
                (int) (screenSize.getHeight() * MENU_PERC_HEIGHT));

        final Stats s = sm.getStats();
        final JPanel mainPanel = new JPanel(new GridLayout(1, 1));
        final JPanel panelPvsC = new JPanel(new GridLayout(TABLES_NUM, 1));
        panelPvsC.setBorder(new TitledBorder("Player vs Computer"));

        // One table per variant for the Player-vs-Computer results.
        for (final GameVariant v : GameVariant.values()) {
            panelPvsC.add(buildPvsCTable(s, v));
        }
        // A single table covering every variant for Player-vs-Player results.
        panelPvsC.add(buildPvsPTable(s));

        mainPanel.add(panelPvsC);
        this.frame.get().getContentPane().add(mainPanel, BorderLayout.CENTER);
        this.frame.get().setResizable(true);
        this.frame.get().setLocationRelativeTo(null);
        // Closing this window only hides it; the application keeps running.
        // (The earlier EXIT_ON_CLOSE call in the previous version was dead —
        // it was always superseded by this HIDE_ON_CLOSE call before the
        // frame ever became visible.)
        this.frame.get().setDefaultCloseOperation(WindowConstants.HIDE_ON_CLOSE);
    }

    /** Builds the won/lost/drawn table for one Player-vs-Computer variant. */
    private JScrollPane buildPvsCTable(final Stats s, final GameVariant v) {
        final List<StatsTableLine> lines = new ArrayList<>();
        for (final Difficult d : Controller.Difficult.values()) {
            lines.add(new StatsTableLine(d.toString(),
                    s.getWons(v, GameType.PvsC, d),
                    s.getLosts(v, GameType.PvsC, d),
                    s.getDraws(v, GameType.PvsC, d),
                    s.getWonPerc(v, GameType.PvsC, d)));
        }
        final JTable table = new JTable(new StatsTableModel(lines, GameType.PvsC));
        final JScrollPane pane = new JScrollPane(table);
        pane.setBorder(new TitledBorder(v.toString()));
        return pane;
    }

    /** Builds the Player-vs-Player table, one row per game variant. */
    private JScrollPane buildPvsPTable(final Stats s) {
        final List<StatsTableLine> lines = new ArrayList<>();
        for (final GameVariant v : GameVariant.values()) {
            // Difficulty has no meaning in PvsP; EASY is passed only so the
            // same Stats accessors can be reused for both game types.
            final Difficult d = Difficult.EASY;
            lines.add(new StatsTableLine(v.toString(),
                    s.getWons(v, GameType.PvsP, d),
                    s.getLosts(v, GameType.PvsP, d),
                    s.getDraws(v, GameType.PvsP, d),
                    s.getWonPerc(v, GameType.PvsP, d)));
        }
        final JTable table = new JTable(new StatsTableModel(lines, GameType.PvsP));
        final JScrollPane pane = new JScrollPane(table);
        pane.setBorder(new TitledBorder("Player1 vs Player2"));
        return pane;
    }

    /** Makes the statistics window visible (no-op if the frame is absent). */
    @Override
    public void show() {
        if (this.frame.isPresent()) {
            this.frame.get().setVisible(true);
            System.out.println("Showing StatsMenu..."); // log
        }
    }
}
StarExecMiami/StarExec
src/org/starexec/test/junit/backend/GridEngineBackendTests.java
<gh_stars>10-100
package org.starexec.test.junit.backend;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.BDDMockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.starexec.backend.GridEngineBackend;
import org.starexec.exceptions.StarExecException;
import org.starexec.util.Util;
import org.testng.Assert;

import java.io.IOException;
import java.util.Set;

// NOTE(review): the entire test class below is commented out (disabled).
// It mocks Util.executeCommand via PowerMock to feed canned `qstat` output
// into GridEngineBackend. The reason for disabling is not recorded here —
// confirm with history before deleting or re-enabling.
/*
@RunWith(PowerMockRunner.class)
@PrepareForTest({Util.class})
public class GridEngineBackendTests {
    final GridEngineBackend backend = new GridEngineBackend();

    // Sample `qstat -s a` output: three queued jobs with ids 998, 999, 1000.
    private static final String testSGEOutputString =
        "job-ID  prior   name       user         state submit/start at     queue                          slots ja-task-ID \n"+
        "-----------------------------------------------------------------------------------------------------------------\n" +
        "    998 0.55500 job_768.ba tomcat       qw    12/03/2015 13:18:55                                    1        \n" +
        "    999 0.55500 job_769.ba tomcat       qw    12/03/2015 13:18:55                                    1        \n" +
        "   1000 0.55500 job_770.ba tomcat       qw    12/03/2015 13:18:55                                    1        ";

    @Test
    public void getActiveExecutionIdsTest() throws IOException {
        PowerMockito.mockStatic(Util.class);
        System.setProperty("line.separator", "\n");
        BDDMockito.given(Util.executeCommand("qstat -s a")).willReturn(testSGEOutputString);
        Set<Integer> ids = backend.getActiveExecutionIds();
        Assert.assertTrue(ids.size()==3);
        Assert.assertTrue(ids.contains(998));
        Assert.assertTrue(ids.contains(999));
        Assert.assertTrue(ids.contains(1000));
    }

    // Sample `qconf -sq` style output containing a "slots 1" line.
    final String slotsTestString =
        "qname one_job.q\n"
        +"hostlist @one_jobhosts\n"
        +"seq_no 0\n"
        +"load_thresholds np_load_avg=1.75\n"
        +"suspend_thresholds NONE\n"
        +"nsuspend 1\n"
        +"suspend_interval 00:05:00\n"
        +"priority 0\n"
        +"min_cpu_interval 00:05:00\n"
        +"processors UNDEFINED\n"
        +"qtype BATCH INTERACTIVE\n"
        +"ckpt_list NONE\n"
        +"pe_list make\n"
        +"rerun FALSE\n"
        +"slots 1\n"
        +"tmpdir /tmp\n"
        +"shell /bin/csh\n"
        +"prolog NONE";

    @Test
    public void getSlotsInQueueTest() {
        String testQueueName = "all.q";
        String testCommand = GridEngineBackend.QUEUE_GET_SLOTS_PATTERN.replace(GridEngineBackend.QUEUE_NAME_PATTERN, testQueueName);
        PowerMockito.mockStatic(Util.class);
        try {
            BDDMockito.given(Util.executeCommand(testCommand)).willReturn(slotsTestString);
            Integer slots = backend.getSlotsInQueue(testQueueName);
            Assert.assertEquals(slots, new Integer(1));
        } catch (IOException e) {
            Assert.fail("Caught IOException: " + Util.getStackTrace(e));
        } catch (StarExecException e) {
            Assert.fail("Caught StarExecException: " + Util.getStackTrace(e));
        }
    }
}*/
gharia/kurento-java
kurento-basicroom/src/main/java/org/kurento/basicroom/RoomHandler.java
/*
 * (C) Copyright 2014 Kurento (http://kurento.org/)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package org.kurento.basicroom;

import java.io.IOException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import javax.annotation.PreDestroy;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.socket.CloseStatus;
import org.springframework.web.socket.TextMessage;
import org.springframework.web.socket.WebSocketSession;
import org.springframework.web.socket.handler.TextWebSocketHandler;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonObject;

/**
 * WebSocket endpoint for the basic-room demo. Parses each incoming text
 * frame as a JSON message and dispatches on its "id" field:
 * "receiveVideoFrom" (negotiate media from another participant),
 * "joinRoom" and "leaveRoom". The participant bound to a session is kept
 * in the session attributes under {@link #USER}.
 *
 * @author <NAME> (<EMAIL>)
 * @author <NAME> (<EMAIL>)
 * @since 1.0.0
 */
public class RoomHandler extends TextWebSocketHandler {

    // Session-attribute key under which the RoomParticipant is stored.
    private static final String USER = "user";

    private static final Logger log = LoggerFactory.getLogger(RoomHandler.class);

    private static final Gson gson = new GsonBuilder().create();

    // Base name given to the current thread between request phases, so
    // thread dumps show what each worker is doing.
    private static final String HANDLER_THREAD_NAME = "handler";

    // Pool used to run SDP negotiation off the WebSocket I/O thread.
    private static final ExecutorService executor = Executors.newFixedThreadPool(10);

    @Autowired
    private RoomManager roomManager;

    /** Shuts the worker pool down when the Spring context is destroyed. */
    @PreDestroy
    public void close() {
        executor.shutdown();
    }

    @Override
    public void handleTextMessage(WebSocketSession session, TextMessage message) throws Exception {
        final JsonObject jsonMessage = gson.fromJson(message.getPayload(), JsonObject.class);

        // FIXME: Hack to ignore real userName sent by browser
        // if (jsonMessage.get("id").getAsString().equals("joinRoom")) {
        // jsonMessage.add("name", new JsonPrimitive(UUID.randomUUID()
        // .toString()));
        // }

        // user is null until the session has processed a "joinRoom".
        final RoomParticipant user = (RoomParticipant) session.getAttributes().get(USER);

        if (user != null) {
            log.debug("Incoming message from user '{}': {}", user.getName(), jsonMessage);
        } else {
            log.debug("Incoming message from new user: {}", jsonMessage);
        }

        switch (jsonMessage.get("id").getAsString()) {
        case "receiveVideoFrom":
            // NOTE(review): if a client sends this before joining, `user` is
            // null and the runnable throws NPE at user.getName() — confirm
            // whether the protocol guarantees join-first.
            executor.submit(new Runnable() {
                @Override
                public void run() {
                    updateThreadName("rv:" + user.getName());
                    receiveVideoFrom(user, jsonMessage);
                    updateThreadName(HANDLER_THREAD_NAME);
                }
            });
            break;
        case "joinRoom":
            joinRoom(jsonMessage, session);
            break;
        case "leaveRoom":
            leaveRoom(user);
            break;
        default:
            break;
        }
        updateThreadName(HANDLER_THREAD_NAME);
    }

    /**
     * Starts receiving media in {@code user} from the participant named in
     * the message ("sender"), using the supplied SDP offer. Logs a warning
     * if the sender is not (or no longer) in the room.
     */
    private void receiveVideoFrom(final RoomParticipant user, final JsonObject jsonMessage) {
        final String senderName = jsonMessage.get("sender").getAsString();
        final String sdpOffer = jsonMessage.get("sdpOffer").getAsString();

        Room room = user.getRoom();
        final RoomParticipant sender = room.getParticipant(senderName);

        if (sender != null) {
            user.receiveVideoFrom(sender, sdpOffer);
        } else {
            log.warn("PARTICIPANT {}: Requesting send video for user {} in room {} but it is not found",
                    user.getName(), senderName, user.getRoom().getName());
        }
    }

    /**
     * Joins the room named in the message. The actual join runs on the
     * room's own executor (room.execute) so room mutations are serialized;
     * the resulting participant is stored in the session attributes.
     */
    private void joinRoom(JsonObject jsonMessage, final WebSocketSession session)
            throws IOException, InterruptedException, ExecutionException {
        final String roomName = jsonMessage.get("room").getAsString();
        final String userName = jsonMessage.get("name").getAsString();

        updateThreadName(userName);

        log.debug("PARTICIPANT {}: trying to join room {}", userName, roomName);

        final Room room = roomManager.getRoom(roomName);

        if (!room.isClosed()) {
            room.execute(new Runnable() {
                @Override
                public void run() {
                    updateThreadName("r>" + userName);
                    final RoomParticipant user = room.join(userName, session);
                    session.getAttributes().put(USER, user);
                    updateThreadName("r>" + HANDLER_THREAD_NAME);
                }
            });
        } else {
            log.warn("Trying to join from room {} but it is closed", room.getName());
        }
    }

    /**
     * Removes the participant from its room (on the room's executor) and
     * tears the room down if it becomes empty.
     */
    private void leaveRoom(final RoomParticipant user)
            throws IOException, InterruptedException, ExecutionException {
        final Room room = user.getRoom();
        final String threadName = Thread.currentThread().getName();
        if (!room.isClosed()) {
            room.execute(new Runnable() {
                @Override
                public void run() {
                    updateThreadName("room>" + threadName);
                    room.leave(user);
                    if (room.getParticipants().isEmpty()) {
                        roomManager.removeRoom(room);
                    }
                    updateThreadName("room>" + HANDLER_THREAD_NAME);
                }
            });
        } else {
            log.warn("Trying to leave from room {} but it is closed", room.getName());
        }
    }

    /** Treats an abruptly closed connection as an implicit "leaveRoom". */
    @Override
    public void afterConnectionClosed(WebSocketSession session, CloseStatus status) throws Exception {
        RoomParticipant user = (RoomParticipant) session.getAttributes().get(USER);
        if (user != null) {
            updateThreadName(user.getName() + "|wsclosed");
            leaveRoom(user);
            updateThreadName(HANDLER_THREAD_NAME);
        }
    }

    /** Logs transport errors, but only for sessions with a live participant. */
    @Override
    public void handleTransportError(WebSocketSession session, Throwable exception) throws Exception {
        RoomParticipant user = (RoomParticipant) session.getAttributes().get(USER);
        if (user != null && !user.isClosed()) {
            log.warn("Transport error", exception);
        }
    }

    // Renames the current thread to "user:<name>" for easier diagnostics.
    private void updateThreadName(final String name) {
        Thread.currentThread().setName("user:" + name);
    }
}
karussell/bach
deprecated/src-20190611/demo/jigsaw-quick-start/greetings-world-with-main-and-test/src/test/com.greetings/module-info.java
open module com.greetings { // main requires org.astro; // test requires org.apiguardian.api; requires org.junit.jupiter.api; }
bocahrokok/ipfs-app
node_modules/xor-distance/index.js
<reponame>bocahrokok/ipfs-app<gh_stars>10-100 module.exports = dist function dist (a, b) { if (a.length !== b.length) throw new Error('Inputs should have the same length') var result = Buffer.allocUnsafe(a.length) for (var i = 0; i < a.length; i++) result[i] = a[i] ^ b[i] return result } dist.compare = function compare (a, b) { if (a.length !== b.length) throw new Error('Inputs should have the same length') for (var i = 0; i < a.length; i++) { if (a[i] === b[i]) continue return a[i] < b[i] ? -1 : 1 } return 0 } dist.gt = function gt (a, b) { return dist.compare(a, b) === 1 } dist.lt = function lt (a, b) { return dist.compare(a, b) === -1 } dist.eq = function eq (a, b) { return dist.compare(a, b) === 0 }
GinSmile/LeetCode
src/Algorithms/P179_LargestNumber.java
/* * Given a list of non negative integers, arrange them such that they form the largest number. For example, given [3, 30, 34, 5, 9], the largest formed number is 9534330. Note: The result may be very large, so you need to return a string instead of an integer. Credits: Special thanks to @ts for adding this problem and creating all test cases. 这道题的思路就是重写compare函数,规定两个字符串之间的大小。 * */ package Algorithms; import java.util.Arrays; import java.util.Comparator; public class P179_LargestNumber { public static void main(String...args){ int[] nums = {0,0}; String resString = largestNumber(nums); System.out.print(resString); } public static String largestNumber(int[] nums) { String[] arr = new String[nums.length]; for(int i=0; i<nums.length; i++){ arr[i] = String.valueOf(nums[i]); } Comparator myCom = new MyComparator(); Arrays.sort(arr,myCom);//从小到大排列,这里的大小必须重新定义 String reString=""; for(int i=0; i<nums.length; i++){ reString += arr[i]; } if(arr[0].equals("0")) return "0";//注意特殊情况 return reString; } static class MyComparator implements Comparator<String>{ //重写compare函数,让最终的结果为9排在前面,注意这里!9是最小的,0是最大的 //返回1代表o1>o2,比如0>999 //返回-1代表o1<o2 @Override public int compare(String o1, String o2) { //把字符串转换为整形数组 char[] c1 = o1.toCharArray(); char[] c2= o2.toCharArray(); int[] d1 = new int[c1.length]; int[] d2 = new int[c2.length]; for(int i=0; i<c1.length; i++){ d1[i] = c1[i] - '0'; } for(int i=0; i<c2.length; i++){ d2[i] = c2[i] - '0'; } //比较大小 /* * 大小规定为: * 1>2>3>...>9 * 53>5 * 55==5,53==53535353 * 56<5 * * 即: * 比较第一个,如果数值上d1[i] > d2[i] ,说明d1要排在前面,返回-1 * * */ int i = 0; int j = 0; int loop1 = 0; int loop2 = 0; while(loop1 < 2 || loop2 < 2){ if(d1[i] > d2[j]){ return -1; }else if(d1[i] < d2[j]){ return 1; } if(i != d1.length - 1){ i++; }else{ i = 0;//如果d1为最后一个数,那么i赋0,因为5353==53535353 loop1++; } if(j != d2.length - 1){ j++; }else{ j = 0; loop2++; } } return 0; } } }
kindone/proptest
PropertyContext.cpp
#include "PropertyContext.hpp"
#include "PropertyBase.hpp"
#include "util/std.hpp"

namespace proptest {

// Prints a failure as `condition (file:line)`, appending `with <detail>`
// when the failure captured extra stream output.
ostream& operator<<(ostream& os, const Failure& f)
{
    auto detail = f.str.str();
    if (detail.empty())
        os << f.condition << " (" << f.filename << ":" << f.lineno << ")";
    else
        os << f.condition << " (" << f.filename << ":" << f.lineno << ") with " << f.str.str();
    return os;
}

// Installs this context as the active one, remembering the previous context
// so nested PropertyContext scopes restore correctly.
PropertyContext::PropertyContext() : lastStreamExists(false), oldContext(PropertyBase::getContext())
{
    PropertyBase::setContext(this);
}

// Restores whichever context was active before this one.
PropertyContext::~PropertyContext()
{
    PropertyBase::setContext(oldContext);
}

// Records one occurrence of `key`=`value` for the run summary. Counts are
// kept per (key, value); first occurrence remembers the file/line.
void PropertyContext::tag(const char* file, int lineno, string key, string value)
{
    auto itr = tags.find(key);
    // key already exists
    if (itr != tags.end()) {
        auto& valueMap = itr->second;
        auto valueItr = valueMap.find(value);
        // value already exists
        if (valueItr != valueMap.end()) {
            auto& tag = valueItr->second;
            tag.count++;
        } else {
            valueMap.insert(pair<string, Tag>(value, Tag(file, lineno, value)));
        }
    } else {
        map<string, Tag> valueMap;
        valueMap.insert(pair<string, Tag>(value, Tag(file, lineno, value)));
        tags.insert(pair<string, map<string, Tag>>(key, valueMap));
    }
}

// A success records nothing, but clears lastStreamExists so getLastStream()
// won't hand out the stream of a previous failure.
void PropertyContext::succeed(const char*, int, const char*, const stringstream&)
{
    // DO NOTHING
    lastStreamExists = false;
}

// Appends a failure record and marks its stream as the "last" one, so the
// caller can keep appending detail via getLastStream().
void PropertyContext::fail(const char* filename, int lineno, const char* condition, const stringstream& str)
{
    failures.push_back(Failure(filename, lineno, condition, str));
    lastStreamExists = true;
}

// Returns the stream of the most recent failure, or a shared throwaway
// stream when there is none. NOTE(review): the static fallback stream is
// shared across all contexts/threads — confirm single-threaded use.
stringstream& PropertyContext::getLastStream()
{
    static stringstream defaultStr;
    if (failures.empty() || !lastStreamExists)
        return defaultStr;
    return failures.back().str;
}

// Drains all recorded failures into one comma/newline-separated stream,
// indenting every failure after the first; clears the failure list.
stringstream PropertyContext::flushFailures(int indent)
{
    // `+` forces conversion to a plain function pointer (no captures).
    const auto doIndent = +[](stringstream& str, int indent) {
        for (int i = 0; i < indent; i++)
            str << " ";
    };

    stringstream allFailures;
    auto itr = failures.begin();
    if (itr != failures.end()) {
        // doIndent(allFailures, indent);
        allFailures << *itr++;
    }
    for (; itr != failures.end(); ++itr) {
        allFailures << "," << endl;
        doIndent(allFailures, indent);
        allFailures << *itr;
    }
    failures.clear();
    return allFailures;
}

// Prints, per tag key, each value's count as "count/total (pct%)".
void PropertyContext::printSummary()
{
    for (auto tagKV : tags) {
        auto& key = tagKV.first;
        auto& valueMap = tagKV.second;
        cout << "  " << key << ": " << endl;
        // First pass: total occurrences across all values of this key.
        size_t total = 0;
        for (auto valueKV : valueMap) {
            auto tag = valueKV.second;
            total += tag.count;
        }

        for (auto valueKV : valueMap) {
            auto value = valueKV.first;
            auto tag = valueKV.second;
            cout << "    " << value << ": " << tag.count << "/" << total << " ("
                 << static_cast<double>(tag.count) / total * 100 << "%)" << endl;
        }
    }
}

}  // namespace proptest
Brimes7/Synaps-FE
src/actions/userActions.js
import {action} from './action';
import {createAxios, createAxiosAuth} from '../utilities/createAxios.js';
import firebase from '../config/firebase/FirebaseConfig.js';

export const SIGNED_IN = 'SIGNED_IN';
export const SIGNIN_FAILED = 'SIGNIN_FAILED';
export const SIGNOUT = 'SIGNOUT';
export const ATTEMPT_SIGNIN = 'ATTEMPT_SIGNIN';

/**
 * Signed In
 *
 * @description Call this function when the user is already signed in but the
 *   app just loads or when the user first signs in.
 *
 * @category Actions
 * @function
 * @name signedIn
 * @param {User} user
 * @returns {function}
 */
export const signedIn = (user) => dispatch => {
  dispatch(action(SIGNED_IN, user)); //calls reducer
  //checkUserRegistered(user.uid, dispatch);
};

/**
 * Sign out
 *
 * Signs the user out of firebase, then dispatches SIGNOUT.
 *
 * @category Actions
 * @function
 * @name signOut
 * returns {function}
 */
export const signOut = () => dispatch => {
  firebase
    .auth()
    .signOut()
    .then(() => {
      dispatch(action(SIGNOUT));
    });
};

export const EMAIL_PROVIDER = 'EMAIL_PROVIDER';
export const GOOGLE_PROVIDER = 'GOOGLE_PROVIDER';

/**
 * @typedef {("GOOGLE_PROVIDER" | "EMAIL_PROVIDER")} AuthType
 */

/**
 * Sign In
 *
 * Dispatches ATTEMPT_SIGNIN, then routes to the provider-specific flow.
 * NOTE(review): an unrecognized authType silently does nothing after
 * ATTEMPT_SIGNIN — confirm that is intentional.
 *
 * @category Actions
 * @function
 * @name signIn
 * @param {AuthType} authType
 * @param {string} [email]
 * @param {string} [password]
 * @returns {function}
 */
export const signIn = (authType, email, password) => dispatch => {
  dispatch(action(ATTEMPT_SIGNIN));
  if (authType === EMAIL_PROVIDER) {
    dispatch(RegisterWithGoogleEmailAndPassword(email, password));
  } else if (authType === GOOGLE_PROVIDER) {
    dispatch(signInWithGoogleAuthProvider());
  }
};

/**
 * Sign In With Google Auth
 *
 * Opens the Google sign-in popup; on success dispatches signedIn and then
 * checkUser (which registers the user in the backend if needed).
 *
 * @category Actions
 * @function
 * @name signInWithGoogleAuthProvider
 * @returns {function}
 */
const signInWithGoogleAuthProvider = () => dispatch => {
  firebase
    .auth()
    .signInWithPopup(new firebase.auth.GoogleAuthProvider())
    .then(res => {
      dispatch(signedIn(res.user));
      dispatch(checkUser(res.user));
    })
    .catch(err => {
      dispatch(action(SIGNIN_FAILED, err.message));
      console.log(err);
    });
};

/**
 * Register with google email and password.
 *
 * Tries to create the firebase account, then signs in; if the address is
 * already registered, falls through to a plain sign-in instead.
 *
 * @category Actions
 * @function
 * @name RegisterWithGoogleEmailAndPassword
 * @property {string} email
 * @property {string} password
 * @returns {function(*): Promise<firebase.auth.UserCredential>}
 * @param email
 * @param password
 */
const RegisterWithGoogleEmailAndPassword = (email, password) => dispatch => {
  return firebase
    .auth()
    .createUserWithEmailAndPassword(email, password)
    .then(() => {
      dispatch(signInWithEmailAndPassword(email, password));
    })
    .catch(error => {
      // "email-already-in-use" means the account exists: sign in instead.
      if (error.code.includes('email-already-in-use')) {
        dispatch(signInWithEmailAndPassword(email, password));
      } else {
        dispatch(action(SIGNIN_FAILED, error.message));
      }
    });
};

/**
 * Sign in with email and password.
 *
 * @param {string} email
 * @param {string} password
 * @returns {function(*): Promise<firebase.auth.UserCredential>}
 */
const signInWithEmailAndPassword = (email, password) => dispatch => {
  return firebase
    .auth()
    .signInWithEmailAndPassword(email, password)
    .then(res => {
      dispatch(signedIn(res.user));
    })
    .catch(err => {
      dispatch(action(SIGNIN_FAILED, err.message));
    });
};

export const CHECK_USER_REGISTERED = 'CHECK_USER_REGISTERED';
export const USER_ATTEMPT_REGISTER = 'USER_ATTEMPT_REGISTER';
export const USER_REGISTER_FAILED = 'USER_REGISTER_FAILED';
export const USER_REGISTER_COMPLETE = 'USER_REGISTER_COMPLETE';

//functions for registering. Need to use .then to check database.
//registers user

/**
 * Registers the firebase user with the backend (POST /api/register).
 * On failure, signs the user out again.
 */
export const register = (user) => dispatch => {
  dispatch(action(USER_ATTEMPT_REGISTER));
  const userR = {uid: user.uid, username: user.email};
  createAxios()
    .post('/api/register', userR)
    .then(res => {
      if (res.status === 201) {
        dispatch(action(USER_REGISTER_COMPLETE));
      } else {
        dispatch(action(USER_REGISTER_FAILED));
      }
    })
    .catch(err => {
      console.log(err);
      dispatch(action(USER_REGISTER_FAILED, err.message));
      // NOTE(review): signOut takes no arguments; the extra `dispatch` here
      // is ignored — probably a leftover from a non-thunk version.
      dispatch(signOut(dispatch));
    });
};

/**
 * Checks whether the signed-in user exists in the backend (GET
 * /api/users/me); registers them when the lookup does not return 200.
 */
export const checkUser = (user) => dispatch => {
  dispatch(action(CHECK_USER_REGISTERED));
  createAxiosAuth(user.uid)
    .get('/api/users/me')
    .then(res => {
      if (res.status === 200) {
        // User already registered — intentionally nothing to do.
        // TODO(review): confirm no "already registered" action is needed here.
      } else {
        dispatch(register(user));
      }
    })
    .catch(err => {
      console.log(err);
      // NOTE(review): register takes a single argument; the extra
      // `dispatch` is ignored.
      dispatch(register(user, dispatch));
    });
};
dongy6/type-inference
ICC/SOOT-Nightly/jasmin-github/lib/jas/src/jas/Pair.java
<reponame>dongy6/type-inference<filename>ICC/SOOT-Nightly/jasmin-github/lib/jas/src/jas/Pair.java package jas; public class Pair<T, U> { protected T o1; protected U o2; public Pair() { o1 = null; o2 = null; } public Pair( T o1, U o2 ) { this.o1 = o1; this.o2 = o2; } public int hashCode() { return o1.hashCode() + o2.hashCode(); } public boolean equals( Object other ) { if( other instanceof Pair) { Pair p = (Pair) other; return o1.equals( p.o1 ) && o2.equals( p.o2 ); } else return false; } public String toString() { return "<"+o1+","+o2+">"; } public T getO1() { return o1; } public U getO2() { return o2; } }
miklosgergely/cloudbreak
core/src/main/java/com/sequenceiq/cloudbreak/converter/v4/stacks/StackValidationV4RequestToStackValidationConverter.java
package com.sequenceiq.cloudbreak.converter.v4.stacks; import static com.sequenceiq.cloudbreak.converter.util.ExceptionMessageFormatterUtil.formatAccessDeniedMessage; import java.util.Collection; import java.util.HashSet; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.function.Predicate; import javax.inject.Inject; import org.springframework.stereotype.Component; import com.sequenceiq.cloudbreak.api.endpoint.v4.stacks.request.HostGroupV4Request; import com.sequenceiq.cloudbreak.api.endpoint.v4.stacks.request.StackValidationV4Request; import com.sequenceiq.cloudbreak.api.endpoint.v4.stacks.request.network.NetworkV4Request; import com.sequenceiq.cloudbreak.api.util.ConverterUtil; import com.sequenceiq.cloudbreak.cloud.PlatformParametersConsts; import com.sequenceiq.cloudbreak.cloud.model.Platform; import com.sequenceiq.cloudbreak.cloud.model.SpecialParameters; import com.sequenceiq.cloudbreak.common.mappable.CloudPlatform; import com.sequenceiq.cloudbreak.converter.AbstractConversionServiceAwareConverter; import com.sequenceiq.cloudbreak.converter.v4.environment.network.EnvironmentNetworkConverter; import com.sequenceiq.cloudbreak.domain.Blueprint; import com.sequenceiq.cloudbreak.domain.Network; import com.sequenceiq.cloudbreak.domain.stack.StackValidation; import com.sequenceiq.cloudbreak.domain.stack.cluster.host.HostGroup; import com.sequenceiq.cloudbreak.domain.stack.instance.InstanceGroup; import com.sequenceiq.cloudbreak.dto.credential.Credential; import com.sequenceiq.cloudbreak.exception.BadRequestException; import com.sequenceiq.cloudbreak.service.CloudbreakRestRequestThreadLocalService; import com.sequenceiq.cloudbreak.service.blueprint.BlueprintService; import com.sequenceiq.cloudbreak.service.environment.EnvironmentClientService; import com.sequenceiq.cloudbreak.service.environment.credential.CredentialClientService; import com.sequenceiq.cloudbreak.service.environment.credential.CredentialConverter; import 
com.sequenceiq.cloudbreak.service.network.NetworkService; import com.sequenceiq.cloudbreak.service.stack.CloudParameterCache; import com.sequenceiq.cloudbreak.service.user.UserService; import com.sequenceiq.cloudbreak.service.workspace.WorkspaceService; import com.sequenceiq.cloudbreak.workspace.model.User; import com.sequenceiq.cloudbreak.workspace.model.Workspace; import com.sequenceiq.environment.api.v1.credential.model.response.CredentialResponse; import com.sequenceiq.environment.api.v1.environment.model.response.DetailedEnvironmentResponse; @Component public class StackValidationV4RequestToStackValidationConverter extends AbstractConversionServiceAwareConverter<StackValidationV4Request, StackValidation> { @Inject private BlueprintService blueprintService; @Inject private NetworkService networkService; @Inject private CredentialClientService credentialClientService; @Inject private EnvironmentClientService environmentClientService; @Inject private ConverterUtil converterUtil; @Inject private CloudParameterCache cloudParameterCache; @Inject private WorkspaceService workspaceService; @Inject private UserService userService; @Inject private CloudbreakRestRequestThreadLocalService restRequestThreadLocalService; @Inject private Map<CloudPlatform, EnvironmentNetworkConverter> environmentNetworkConverterMap; @Inject private CredentialConverter credentialConverter; @Override public StackValidation convert(StackValidationV4Request stackValidationRequest) { StackValidation stackValidation = new StackValidation(); Set<InstanceGroup> instanceGroups = converterUtil.convertAllAsSet(stackValidationRequest.getInstanceGroups(), InstanceGroup.class); stackValidation.setInstanceGroups(instanceGroups); stackValidation.setEnvironmentCrn(stackValidationRequest.getEnvironmentCrn()); stackValidation.setHostGroups(convertHostGroupsFromJson(instanceGroups, stackValidationRequest.getHostGroups())); User user = userService.getOrCreate(restRequestThreadLocalService.getCloudbreakUser()); 
Workspace workspace = workspaceService.get(restRequestThreadLocalService.getRequestedWorkspaceId(), user); formatAccessDeniedMessage( () -> validateBlueprint(stackValidationRequest, stackValidation, workspace), "blueprint", stackValidationRequest.getBlueprintName() ); DetailedEnvironmentResponse environment = environmentClientService.getByCrn(stackValidation.getEnvironmentCrn()); CredentialResponse credentialResponse = environment.getCredential(); formatAccessDeniedMessage( () -> validateCredential(stackValidation, credentialResponse), "credential", Optional.ofNullable(credentialResponse).map(CredentialResponse::getName).orElse("NULL") ); formatAccessDeniedMessage( () -> validateNetwork(stackValidationRequest.getNetworkId(), stackValidationRequest.getNetwork(), stackValidation, environment), "network", stackValidationRequest.getNetworkId() ); return stackValidation; } private void validateBlueprint(StackValidationV4Request stackValidationRequest, StackValidation stackValidation, Workspace workspace) { Set<Blueprint> allAvailableInWorkspace = blueprintService.getAllAvailableInWorkspace(workspace); if (stackValidationRequest.getBlueprintName() == null) { throw new BadRequestException("Cluster definition is not configured for the validation request!"); } if (stackValidationRequest.getBlueprintName() != null) { selectBlueprint(allAvailableInWorkspace, stackValidation, cd -> cd.getName().equals(stackValidationRequest.getBlueprintName())); } } private void validateCredential(StackValidation stackValidation, CredentialResponse credentialResponse) { if (credentialResponse != null) { Credential credential = credentialConverter.convert(credentialResponse); stackValidation.setCredential(credential); } else if (stackValidation.getCredential() == null) { throw new BadRequestException("Credential is not configured for the validation request!"); } } private void validateNetwork(Long networkId, NetworkV4Request networkRequest, StackValidation stackValidation, 
DetailedEnvironmentResponse environment) { SpecialParameters specialParameters = cloudParameterCache.getPlatformParameters().get(Platform.platform(stackValidation.getCredential().cloudPlatform())).specialParameters(); if (networkId != null) { Network network = networkService.get(networkId); stackValidation.setNetwork(network); } else { if (environment != null && environment.getNetwork() != null) { CloudPlatform cloudPlatform = CloudPlatform.valueOf(environment.getCloudPlatform()); EnvironmentNetworkConverter environmentNetworkConverter = environmentNetworkConverterMap.get(cloudPlatform); if (environmentNetworkConverter != null) { Network network = environmentNetworkConverter.convertToLegacyNetwork(environment.getNetwork()); stackValidation.setNetwork(network); } } else if (networkRequest != null) { Network network = converterUtil.convert(networkRequest, Network.class); stackValidation.setNetwork(network); } else if (specialParameters.getSpecialParameters().get(PlatformParametersConsts.NETWORK_IS_MANDATORY)) { throw new BadRequestException("Network is not configured for the validation request!"); } } } private Set<HostGroup> convertHostGroupsFromJson(Collection<InstanceGroup> instanceGroups, Iterable<HostGroupV4Request> hostGroupsJsons) { Set<HostGroup> hostGroups = new HashSet<>(); for (HostGroupV4Request json : hostGroupsJsons) { HostGroup hostGroup = new HostGroup(); hostGroup.setName(json.getName()); String instanceGroupName = json.getInstanceGroupName(); if (instanceGroupName != null) { Optional<InstanceGroup> instanceGroup = instanceGroups.stream().filter(instanceGroup1 -> instanceGroup1.getGroupName().equals(instanceGroupName)).findFirst(); if (!instanceGroup.isPresent()) { throw new BadRequestException(String.format("Cannot find instance group named '%s' in instance group list", instanceGroupName)); } } hostGroups.add(hostGroup); } return hostGroups; } private void selectBlueprint(Set<Blueprint> blueprints, StackValidation stackValidation, 
Predicate<Blueprint> predicate) { blueprints.stream() .filter(predicate) .findFirst().ifPresent(stackValidation::setBlueprint); } }
fagarine/dangkang
dk-game-core/src/main/java/cn/laoshini/dk/server/channel/JsonMessageChannelReader.java
<gh_stars>10-100 package cn.laoshini.dk.server.channel; import io.netty.channel.ChannelHandlerContext; import cn.laoshini.dk.domain.msg.ReqMessage; /** * JSON格式消息到达读取处理 * * @author fagarine */ public class JsonMessageChannelReader implements INettyChannelReader<ReqMessage<?>> { private LastChannelReader delegate = new LastChannelReader(); @Override public void channelRead(ChannelHandlerContext ctx, ReqMessage<?> msg) { delegate.channelRead(ctx, msg); } }
ctomc/jboss-jstl-api_spec
src/main/java/org/apache/taglibs/standard/lang/jstl/Constants.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.taglibs.standard.lang.jstl;

import java.util.MissingResourceException;
import java.util.ResourceBundle;

/**
 * <p>This contains all of the non-public constants, including
 * message strings read from the resource file.
 *
 * @author <NAME> - Art Technology Group
 * @author <NAME>
 */
public class Constants {
    //-------------------------------------
    // Resources
    // Bundle holding all EL evaluator message strings; loading it at class
    // initialization means a missing bundle fails fast.

    static ResourceBundle sResources =
        ResourceBundle.getBundle("org.apache.taglibs.standard.lang.jstl.Resources");

    //-------------------------------------
    // Messages from the resource bundle
    //-------------------------------------

    // -- BeanInfo / expression parsing --
    public static final String EXCEPTION_GETTING_BEANINFO =
        getStringResource("EXCEPTION_GETTING_BEANINFO");

    public static final String NULL_EXPRESSION_STRING =
        getStringResource("NULL_EXPRESSION_STRING");

    public static final String PARSE_EXCEPTION =
        getStringResource("PARSE_EXCEPTION");

    // -- Property access --
    public static final String CANT_GET_PROPERTY_OF_NULL =
        getStringResource("CANT_GET_PROPERTY_OF_NULL");

    public static final String NO_SUCH_PROPERTY =
        getStringResource("NO_SUCH_PROPERTY");

    public static final String NO_GETTER_METHOD =
        getStringResource("NO_GETTER_METHOD");

    public static final String ERROR_GETTING_PROPERTY =
        getStringResource("ERROR_GETTING_PROPERTY");

    // -- Indexed access (lists, arrays, maps) --
    public static final String CANT_GET_INDEXED_VALUE_OF_NULL =
        getStringResource("CANT_GET_INDEXED_VALUE_OF_NULL");

    public static final String CANT_GET_NULL_INDEX =
        getStringResource("CANT_GET_NULL_INDEX");

    public static final String NULL_INDEX =
        getStringResource("NULL_INDEX");

    public static final String BAD_INDEX_VALUE =
        getStringResource("BAD_INDEX_VALUE");

    public static final String EXCEPTION_ACCESSING_LIST =
        getStringResource("EXCEPTION_ACCESSING_LIST");

    public static final String EXCEPTION_ACCESSING_ARRAY =
        getStringResource("EXCEPTION_ACCESSING_ARRAY");

    public static final String CANT_FIND_INDEX =
        getStringResource("CANT_FIND_INDEX");

    // -- Type coercion --
    public static final String TOSTRING_EXCEPTION =
        getStringResource("TOSTRING_EXCEPTION");

    public static final String BOOLEAN_TO_NUMBER =
        getStringResource("BOOLEAN_TO_NUMBER");

    public static final String STRING_TO_NUMBER_EXCEPTION =
        getStringResource("STRING_TO_NUMBER_EXCEPTION");

    public static final String COERCE_TO_NUMBER =
        getStringResource("COERCE_TO_NUMBER");

    public static final String BOOLEAN_TO_CHARACTER =
        getStringResource("BOOLEAN_TO_CHARACTER");

    public static final String EMPTY_STRING_TO_CHARACTER =
        getStringResource("EMPTY_STRING_TO_CHARACTER");

    public static final String COERCE_TO_CHARACTER =
        getStringResource("COERCE_TO_CHARACTER");

    public static final String NULL_TO_BOOLEAN =
        getStringResource("NULL_TO_BOOLEAN");

    public static final String STRING_TO_BOOLEAN =
        getStringResource("STRING_TO_BOOLEAN");

    public static final String COERCE_TO_BOOLEAN =
        getStringResource("COERCE_TO_BOOLEAN");

    public static final String COERCE_TO_OBJECT =
        getStringResource("COERCE_TO_OBJECT");

    public static final String NO_PROPERTY_EDITOR =
        getStringResource("NO_PROPERTY_EDITOR");

    public static final String PROPERTY_EDITOR_ERROR =
        getStringResource("PROPERTY_EDITOR_ERROR");

    // -- Operators --
    public static final String ARITH_OP_NULL =
        getStringResource("ARITH_OP_NULL");

    public static final String ARITH_OP_BAD_TYPE =
        getStringResource("ARITH_OP_BAD_TYPE");

    public static final String ARITH_ERROR =
        getStringResource("ARITH_ERROR");

    public static final String ERROR_IN_EQUALS =
        getStringResource("ERROR_IN_EQUALS");

    public static final String UNARY_OP_BAD_TYPE =
        getStringResource("UNARY_OP_BAD_TYPE");

    // -- Evaluation environment --
    public static final String NAMED_VALUE_NOT_FOUND =
        getStringResource("NAMED_VALUE_NOT_FOUND");

    public static final String CANT_GET_INDEXED_PROPERTY =
        getStringResource("CANT_GET_INDEXED_PROPERTY");

    public static final String COMPARABLE_ERROR =
        getStringResource("COMPARABLE_ERROR");

    public static final String BAD_IMPLICIT_OBJECT =
        getStringResource("BAD_IMPLICIT_OBJECT");

    public static final String ATTRIBUTE_EVALUATION_EXCEPTION =
        getStringResource("ATTRIBUTE_EVALUATION_EXCEPTION");

    public static final String ATTRIBUTE_PARSE_EXCEPTION =
        getStringResource("ATTRIBUTE_PARSE_EXCEPTION");

    // -- Functions --
    public static final String UNKNOWN_FUNCTION =
        getStringResource("UNKNOWN_FUNCTION");

    public static final String INAPPROPRIATE_FUNCTION_ARG_COUNT =
        getStringResource("INAPPROPRIATE_FUNCTION_ARG_COUNT");

    public static final String FUNCTION_INVOCATION_ERROR =
        getStringResource("FUNCTION_INVOCATION_ERROR");

    //-------------------------------------
    // Getting resources
    //-------------------------------------

    /**
     * Looks up a message string in the shared resource bundle.
     * Logs to stderr and rethrows MissingResourceException when the key is
     * missing, so a broken bundle is noticed at class-initialization time.
     *
     * @param pResourceName the bundle key to look up
     * @return the message string for the key (never null)
     * @throws MissingResourceException if the key is not in the bundle
     */
    public static String getStringResource(String pResourceName)
        throws MissingResourceException {
        try {
            String ret = sResources.getString(pResourceName);
            if (ret == null) {
                String str = "ERROR: Unable to load resource " + pResourceName;
                System.err.println(str);
                throw new MissingResourceException
                    (str,
                     "org.apache.taglibs.standard.lang.jstl.Constants",
                     pResourceName);
            } else {
                return ret;
            }
        } catch (MissingResourceException exc) {
            System.err.println("ERROR: Unable to load resource " +
                    pResourceName + ": " + exc);
            throw exc;
        }
    }

    //-------------------------------------
}
dcycle/scrapers-ca
ca_on_vaughan/people.py
from utils import CanadianScraper, CanadianPerson as Person

import re

# Landing page listing all members of Vaughan city council.
COUNCIL_PAGE = 'http://www.vaughan.ca/council/Pages/default.aspx'


class VaughanPersonScraper(CanadianScraper):
    """Scrapes mayor and councillor contact details from vaughan.ca."""

    def scrape(self):
        # Regional councillors are at-large, so seats are numbered in the
        # order the councillors appear on the council page.
        regional_councillor_seat_number = 1

        page = self.lxmlize(COUNCIL_PAGE)
        # Each member is linked via an image inside the council table.
        councillors = page.xpath('//table[@class="ms-rteTable-0"]//a[./img]')
        assert len(councillors), 'No councillors found'
        for councillor in councillors:
            url = councillor.attrib['href']
            page = self.lxmlize(url)
            title = page.xpath('//div[@class="PL_Title"]')[0].text_content()
            if "Councillor" in title:
                # Title looks like "<Ward> Councillor <Name>"; split on the
                # keyword to recover both parts.
                district, name = re.split(r'Councillor', title)
                role = 'Councillor'
                if "Regional" in district:
                    role = 'Regional Councillor'
                    district = "Vaughan (seat {})".format(regional_councillor_seat_number)
                    regional_councillor_seat_number += 1
            else:
                # Mayor's page has no "Councillor" in the title; the name is
                # pulled from the page's keywords meta tag instead.
                name = re.search(r'Mayor ([^,]+)', page.xpath('//meta[@name="keywords"]/@content')[0]).group(1)
                district = 'Vaughan'
                role = 'Mayor'
            name = name.strip()

            contact_details_url = None
            if role == 'Mayor':
                # The mayor's contact info lives on a separate page.
                contact_details_url = page.xpath('//a[contains(@href,"/Contact-the-Mayor")]/@href')[0]
                detail = self.lxmlize(contact_details_url)
                contact_info = detail.xpath('//div[@id="ctl00_PlaceHolderMain_RichHtmlField1__ControlWrapper_RichHtmlField"]')[0]
            else:
                # Councillor contact info is in a SharePoint web part whose id
                # varies; fall back to the fixed "WebPartWPQ3" id if the
                # phone-containing part is not found.
                contact_node = page.xpath('//div[contains(@id, "WebPartWPQ")][contains(., "Phone")]')
                if contact_node:
                    contact_info = contact_node[0]
                else:
                    contact_info = page.xpath('//div[@id="WebPartWPQ3"]')[0]

            # First phone-with-extension match is the office line; the second
            # bare number in the text is assumed to be the fax line.
            phone = re.findall(r'[0-9]{3}-[0-9]{3}-[0-9]{4} ext\. [0-9]{4}', contact_info.text_content())[0].replace('ext. ', 'x')
            fax = re.findall(r'[0-9]{3}-[0-9]{3}-[0-9]{4}', contact_info.text_content())[1]
            email = self.get_email(contact_info)

            p = Person(primary_org='legislature', name=name, district=district.strip(), role=role)
            p.add_source(COUNCIL_PAGE)
            if contact_details_url:
                p.add_source(contact_details_url)
            p.add_source(url)
            p.add_contact('voice', phone, 'legislature')
            p.add_contact('fax', fax, 'legislature')
            p.add_contact('email', email)
            image = page.xpath('//img[contains(@alt, "Councillor")]/@src')
            if image:
                p.image = image[0]
            # Optional social-media links, added only when present.
            if page.xpath('.//a[contains(@href,"facebook")]'):
                p.add_link(page.xpath('.//a[contains(@href,"facebook")]')[0].attrib['href'])
            if page.xpath('.//a[contains(@href,"twitter")]'):
                p.add_link(page.xpath('.//a[contains(@href,"twitter")]')[0].attrib['href'])
            if page.xpath('.//a[contains(@href,"youtube")]'):
                p.add_link(page.xpath('.//a[contains(@href, "youtube")]')[0].attrib['href'])
            yield p
asheraryam/ezEngine
Code/Engine/Foundation/Reflection/Implementation/ArrayProperty.h
#pragma once /// \file #include <Foundation/Reflection/Implementation/AbstractProperty.h> class ezRTTI; /// \brief Do not cast into this class or any of its derived classes, use ezTypedArrayProperty instead. template <typename Type> class ezTypedArrayProperty : public ezAbstractArrayProperty { public: ezTypedArrayProperty(const char* szPropertyName) : ezAbstractArrayProperty(szPropertyName) { m_Flags = ezPropertyFlags::GetParameterFlags<Type>(); EZ_CHECK_AT_COMPILETIME_MSG( !std::is_pointer<Type>::value || ezVariant::TypeDeduction<typename ezTypeTraits<Type>::NonConstReferencePointerType>::value == ezVariantType::Invalid, "Pointer to standard types are not supported."); } virtual const ezRTTI* GetSpecificType() const override { return ezGetStaticRTTI<typename ezTypeTraits<Type>::NonConstReferencePointerType>(); } }; /// \brief Specialization of ezTypedArrayProperty to retain the pointer in const char*. template <> class ezTypedArrayProperty<const char*> : public ezAbstractArrayProperty { public: ezTypedArrayProperty(const char* szPropertyName) : ezAbstractArrayProperty(szPropertyName) { m_Flags = ezPropertyFlags::GetParameterFlags<const char*>(); } virtual const ezRTTI* GetSpecificType() const override { return ezGetStaticRTTI<const char*>(); } }; template <typename Class, typename Type> class ezAccessorArrayProperty : public ezTypedArrayProperty<Type> { public: using RealType = typename ezTypeTraits<Type>::NonConstReferenceType; using GetCountFunc = ezUInt32 (Class::*)() const; using GetValueFunc = Type (Class::*)(ezUInt32 uiIndex) const; using SetValueFunc = void (Class::*)(ezUInt32 uiIndex, Type value); using InsertFunc = void (Class::*)(ezUInt32 uiIndex, Type value); using RemoveFunc = void (Class::*)(ezUInt32 uiIndex); ezAccessorArrayProperty( const char* szPropertyName, GetCountFunc getCount, GetValueFunc getter, SetValueFunc setter, InsertFunc insert, RemoveFunc remove) : ezTypedArrayProperty<Type>(szPropertyName) { EZ_ASSERT_DEBUG(getCount != nullptr, "The 
get count function of an array property cannot be nullptr."); EZ_ASSERT_DEBUG(m_Getter != nullptr, "The get value function of an array property cannot be nullptr."); m_GetCount = getCount; m_Getter = getter; m_Setter = setter; m_Insert = insert; m_Remove = remove; if (m_Setter == nullptr) ezAbstractArrayProperty::m_Flags.Add(ezPropertyFlags::ReadOnly); } virtual ezUInt32 GetCount(const void* pInstance) const override { return (static_cast<const Class*>(pInstance)->*m_GetCount)(); } virtual void GetValue(const void* pInstance, ezUInt32 uiIndex, void* pObject) const override { EZ_ASSERT_DEBUG(uiIndex < GetCount(pInstance), "GetValue: uiIndex ('{0}') is out of range ('{1}')", uiIndex, GetCount(pInstance)); *static_cast<RealType*>(pObject) = (static_cast<const Class*>(pInstance)->*m_Getter)(uiIndex); } virtual void SetValue(void* pInstance, ezUInt32 uiIndex, const void* pObject) override { EZ_ASSERT_DEBUG(uiIndex < GetCount(pInstance), "SetValue: uiIndex ('{0}') is out of range ('{1}')", uiIndex, GetCount(pInstance)); EZ_ASSERT_DEBUG(m_Setter != nullptr, "The property '{0}' has no setter function, thus it is read-only.", ezAbstractProperty::GetPropertyName()); (static_cast<Class*>(pInstance)->*m_Setter)(uiIndex, *static_cast<const RealType*>(pObject)); } virtual void Insert(void* pInstance, ezUInt32 uiIndex, const void* pObject) override { EZ_ASSERT_DEBUG(uiIndex <= GetCount(pInstance), "Insert: uiIndex ('{0}') is out of range ('{1}')", uiIndex, GetCount(pInstance)); EZ_ASSERT_DEBUG(m_Insert != nullptr, "The property '{0}' has no insert function, thus it is read-only.", ezAbstractProperty::GetPropertyName()); (static_cast<Class*>(pInstance)->*m_Insert)(uiIndex, *static_cast<const RealType*>(pObject)); } virtual void Remove(void* pInstance, ezUInt32 uiIndex) override { EZ_ASSERT_DEBUG(uiIndex < GetCount(pInstance), "Remove: uiIndex ('{0}') is out of range ('{1}')", uiIndex, GetCount(pInstance)); EZ_ASSERT_DEBUG(m_Remove != nullptr, "The property '{0}' has no setter 
function, thus it is read-only.", ezAbstractProperty::GetPropertyName()); (static_cast<Class*>(pInstance)->*m_Remove)(uiIndex); } virtual void Clear(void* pInstance) override { SetCount(pInstance, 0); } virtual void SetCount(void* pInstance, ezUInt32 uiCount) override { EZ_ASSERT_DEBUG(m_Insert != nullptr && m_Remove != nullptr, "The property '{0}' has no remove and insert function, thus it is fixed-size.", ezAbstractProperty::GetPropertyName()); while (uiCount < GetCount(pInstance)) { Remove(pInstance, GetCount(pInstance) - 1); } while (uiCount > GetCount(pInstance)) { RealType elem = RealType(); Insert(pInstance, GetCount(pInstance), &elem); } } private: GetCountFunc m_GetCount; GetValueFunc m_Getter; SetValueFunc m_Setter; InsertFunc m_Insert; RemoveFunc m_Remove; }; template <typename Class, typename Container, Container Class::*Member> struct ezArrayPropertyAccessor { using ContainerType = typename ezTypeTraits<Container>::NonConstReferenceType; using Type = typename ezTypeTraits<typename ezContainerSubTypeResolver<ContainerType>::Type>::NonConstReferenceType; static const ContainerType& GetConstContainer(const Class* pInstance) { return (*pInstance).*Member; } static ContainerType& GetContainer(Class* pInstance) { return (*pInstance).*Member; } }; template <typename Class, typename Container, typename Type> class ezMemberArrayProperty : public ezTypedArrayProperty<typename ezTypeTraits<Type>::NonConstReferenceType> { public: using RealType = typename ezTypeTraits<Type>::NonConstReferenceType; using GetConstContainerFunc = const Container& (*)(const Class* pInstance); using GetContainerFunc = Container& (*)(Class* pInstance); ezMemberArrayProperty(const char* szPropertyName, GetConstContainerFunc constGetter, GetContainerFunc getter) : ezTypedArrayProperty<RealType>(szPropertyName) { EZ_ASSERT_DEBUG(constGetter != nullptr, "The const get count function of an array property cannot be nullptr."); m_ConstGetter = constGetter; m_Getter = getter; if (m_Getter == 
nullptr) ezAbstractArrayProperty::m_Flags.Add(ezPropertyFlags::ReadOnly); } virtual ezUInt32 GetCount(const void* pInstance) const override { return m_ConstGetter(static_cast<const Class*>(pInstance)).GetCount(); } virtual void GetValue(const void* pInstance, ezUInt32 uiIndex, void* pObject) const override { EZ_ASSERT_DEBUG(uiIndex < GetCount(pInstance), "GetValue: uiIndex ('{0}') is out of range ('{1}')", uiIndex, GetCount(pInstance)); *static_cast<RealType*>(pObject) = m_ConstGetter(static_cast<const Class*>(pInstance))[uiIndex]; } virtual void SetValue(void* pInstance, ezUInt32 uiIndex, const void* pObject) override { EZ_ASSERT_DEBUG(uiIndex < GetCount(pInstance), "SetValue: uiIndex ('{0}') is out of range ('{1}')", uiIndex, GetCount(pInstance)); EZ_ASSERT_DEBUG(m_Getter != nullptr, "The property '{0}' has no non-const array accessor function, thus it is read-only.", ezAbstractProperty::GetPropertyName()); m_Getter(static_cast<Class*>(pInstance))[uiIndex] = *static_cast<const RealType*>(pObject); } virtual void Insert(void* pInstance, ezUInt32 uiIndex, const void* pObject) override { EZ_ASSERT_DEBUG(uiIndex <= GetCount(pInstance), "Insert: uiIndex ('{0}') is out of range ('{1}')", uiIndex, GetCount(pInstance)); EZ_ASSERT_DEBUG(m_Getter != nullptr, "The property '{0}' has no non-const array accessor function, thus it is read-only.", ezAbstractProperty::GetPropertyName()); m_Getter(static_cast<Class*>(pInstance)).Insert(*static_cast<const RealType*>(pObject), uiIndex); } virtual void Remove(void* pInstance, ezUInt32 uiIndex) override { EZ_ASSERT_DEBUG(uiIndex < GetCount(pInstance), "Remove: uiIndex ('{0}') is out of range ('{1}')", uiIndex, GetCount(pInstance)); EZ_ASSERT_DEBUG(m_Getter != nullptr, "The property '{0}' has no non-const array accessor function, thus it is read-only.", ezAbstractProperty::GetPropertyName()); m_Getter(static_cast<Class*>(pInstance)).RemoveAtAndCopy(uiIndex); } virtual void Clear(void* pInstance) override { EZ_ASSERT_DEBUG(m_Getter != 
nullptr, "The property '{0}' has no non-const array accessor function, thus it is read-only.", ezAbstractProperty::GetPropertyName()); m_Getter(static_cast<Class*>(pInstance)).Clear(); } virtual void SetCount(void* pInstance, ezUInt32 uiCount) override { EZ_ASSERT_DEBUG(m_Getter != nullptr, "The property '{0}' has no non-const array accessor function, thus it is read-only.", ezAbstractProperty::GetPropertyName()); m_Getter(static_cast<Class*>(pInstance)).SetCount(uiCount); } private: GetConstContainerFunc m_ConstGetter; GetContainerFunc m_Getter; };
neurodebian/htcondor
src/condor_procd/proc_family_io.cpp
<reponame>neurodebian/htcondor /*************************************************************** * * Copyright (C) 1990-2007, Condor Team, Computer Sciences Department, * University of Wisconsin-Madison, WI. * * Licensed under the Apache License, Version 2.0 (the "License"); you * may not use this file except in compliance with the License. You may * obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ***************************************************************/ #include "condor_common.h" #include "proc_family_io.h" // IMPORTANT: these string constants must match the proc_family_error_t // enumeration (in proc_family_io.h) in both number and order // static const char* proc_family_error_strings[] = { "SUCCESS", "ERROR: Invalid root PID", "ERROR: Invalid watcher PID", "ERROR: Invalid snapshot interval", "ERROR: A family with the given root PID is already registered", "ERROR: No family with the given PID is registered", "ERROR: The given PID is not found on the system", "ERROR: The given PID is not part of the family tree", "ERROR: Attempt to unregister root family", "ERROR: Bad environment tracking information", "ERROR: Bad login tracking information", "ERROR: Bad information for using GLExec", "ERROR: No group ID available for tracking", "ERROR: This ProcD is not able to use GLExec", "ERROR: No cgroup available for tracking" }; // helper for looking up error strings // const char* proc_family_error_lookup(proc_family_error_t error_code) { if ((error_code < 0) || (error_code >= PROC_FAMILY_ERROR_MAX)) { return NULL; } return proc_family_error_strings[error_code]; }
htsiah/HRLab
public/javascripts/leavereport.view.js
<reponame>htsiah/HRLab<filename>public/javascripts/leavereport.view.js $(function(){ $("#navReports").addClass("active open"); switch(path) { case "/report/myleaverequest": $("#navMyLeaveRequest").addClass("active"); break; case "/report/myteamleaverequest": $("#navMyTeamLeaveRequest").addClass("active"); break; case "/report/allstaffleaverequest": $("#navAllStaffLeaveRequest").addClass("active"); break; }; });
ouyanggao/aikexue
src/src/extra/pzg/doExp1.js
// Interactive polarization experiment layer ("doExp1"): the user drags two
// rotation handles to turn two polarizer sheets and watches the transmitted
// light change. Built on cocos2d-js; relies on project helpers such as
// loadPlist/loadNode/createTouchEvent/addPeople/addContent (defined elsewhere).
var doExp1 = myLayer.extend({
    sprite: null,
    changeDelete: true,
    layerName: "doExp1",
    preLayer: "doLayer",
    ctor: function() {
        this._super()
        this.expCtor()
        this.initPeople()
        this.initUI()
        return true
    },
    // Builds the whole experiment scene: sprites, touch handlers, toggles.
    initUI:function(){
        var self = this
        loadPlist("do_plist")
        // Named nodes to pull out of the exported scene JSON.
        var uiList = [
            "select_1","select_2","select_3","select_4","select_5",
            "showLight","light","pian1","pian2","lightLine_1","lightLine_2",
            "lightLine_3","pot1","pot2","lightfx","guangping","rotaBg2"
        ]
        var node = loadNode(res.pzg_doExp1_json,uiList)
        self.inside_node.addChild(node)
        // Narrator character speaks the opening tip once shown.
        self.nodebs.show(function(){
            self.nodebs.say({key:"do_tip1"})
        })
        // "Result" button replays the conclusion text.
        var btn_result = new ccui.Button(res.btn_result_normal,res.btn_result_select)
        btn_result.setPosition(1050,460)
        self.addChild(btn_result)
        btn_result.addClickEventListener(function(){
            self.nodebs.say({key:"result"})
        })
        // Small helper: create a sprite at `pos` under `father`.
        var createSp = function(img,pos,father){
            var sp = new cc.Sprite(img)
            sp.setPosition(pos)
            father.addChild(sp)
            return sp
        }
        // The four direction arrows drawn on the light rays.
        var arrowList = []
        arrowList[0] = createSp("#jiantou01.png",cc.p(90,15),node.lightLine_2)
        arrowList[1] = createSp("#jiantou01.png",cc.p(185,30),node.lightLine_2)
        arrowList[2] = createSp("#jiantou01.png",cc.p(90,15),node.lightLine_3)
        arrowList[3] = createSp("#jiantou01.png",cc.p(196,32),node.lightLine_3)
        // Drag handles (pot1/pot2) that rotate the two polarizer sheets.
        var potList = [node.pot1,node.pot2]
        for(var i = 0 ; i < 2 ; i++){
            var sp = potList[i]
            sp.index = i
            createTouchEvent({
                item:sp,
                begin:function(data){
                    var item = data.item
                    var pos = data.pos
                    //item.curSp = 1
                    // Ignore drags while the handle's parent sheet is hidden.
                    if(!item.getParent().isVisible()) return false
                    return true
                },
                move:function(data){
                    var item = data.item
                    var delta = data.delta
                    var posx = item.x + delta.x
                    var index = item.index
                    // Handle travels between x=10 and x=150; the x position is
                    // mapped to a sprite-frame index (rotation angle step).
                    if(posx >= 10 && posx <= 150){
                        item.x = posx
                        item.curSp = Math.floor((item.x-6)/4)
                        //cc.log(item.curSp)
                        // NOTE(review): second half of the travel re-bases the
                        // frame index — presumably the frame sequence wraps; the
                        // magic offsets 68/6/4 come from the art assets.
                        if(item.x >= 78) item.curSp = Math.floor((item.x - 68 -6)/4)
                        switch(index){
                            case 0:
                                // First polarizer: update its sheet frame and all arrows.
                                node.pian1.setSpriteFrame(sprintf("pian%02d.png",item.curSp))
                                for(var j = 0 ; j < arrowList.length ; j++){
                                    arrowList[j].setSpriteFrame(sprintf("jiantou%02d.png",item.curSp))
                                    // Scale the downstream arrows (and the output
                                    // light when two sheets are active) to show
                                    // attenuation: linear falloff from the angle step.
                                    if(j > 1){
                                        item.num = item.curSp
                                        if(item.curSp > 9 && item.curSp <= 18) item.num = 18 - item.curSp
                                        item.num = -1/8 * item.num + 9/8
                                        //cc.log(item.num)
                                        if(item.num > 1) item.num = 1
                                        arrowList[j].setScale(item.num)
                                        if(node.judge){
                                            if(j == 3){
                                                node.light.curOp = item.num * 255
                                                node.light.curScal = item.num
                                                node.light.setOpacity(node.light.curOp)
                                                node.light.setScale(node.light.curScal)
                                            }
                                        }
                                    }
                                }
                                break
                            case 1:
                                // Second polarizer: only the arrows after it (j >= 2)
                                // and the output light are affected.
                                node.pian2.setSpriteFrame(sprintf("pian%02d.png",item.curSp))
                                for(var j = 2 ; j < arrowList.length ; j++){
                                    if(j > 1){
                                        item.num = item.curSp
                                        if(item.curSp > 9 && item.curSp <= 18) item.num = 18 - item.curSp
                                        item.num = -1/8 * item.num + 9/8
                                        //cc.log(item.num)
                                        if(item.num > 1) item.num = 1
                                        arrowList[j].setScale(item.num)
                                        if(j == 3){
                                            node.light.curOp = item.num * 255
                                            node.light.curScal = item.num
                                            node.light.setOpacity(node.light.curOp)
                                            node.light.setScale(node.light.curScal)
                                        }
                                    }
                                }
                                break
                        }
                    }
                }
            })
        }
        // Five selectable apparatus parts: label sprite, unselected sprite,
        // selected (highlight) node, and — where applicable — the scene object
        // itself, each wired so tapping one deselects the other four.
        var wenziList = []
        var normalList = []
        var selectList = []
        var judgeList = [node.showLight,node.pian1,null,node.pian2,node.guangping]
        for(var i = 0 ; i < 5 ; i++){
            wenziList[i] = createSp(sprintf("#wenzi_%d.png",i+1),cc.p(500,50),self)
            wenziList[i].setVisible(false)
            var img = sprintf("#normal_%d.png",i+1)
            var sp = createSp(img,node[uiList[i]].getPosition(),self)
            normalList[i] = sp
            selectList[i] = node[uiList[i]]
            sp.index = i
            createTouchEvent({
                item:sp,
                begin:function(data){
                    var item = data.item
                    var index = item.index
                    // Item 3 (second polarizer) is selectable only in two-sheet mode.
                    if(!node.judge && index == 3) return false
                    for(var j = 0 ; j < 5 ; j++){
                        if(index == j){
                            self.nodebs.say({key:self.addList[j].key,force:true})
                            selectList[j].setVisible(true)
                            normalList[j].setVisible(false)
                            wenziList[j].setVisible(true)
                        }else{
                            selectList[j].setVisible(false)
                            normalList[j].setVisible(true)
                            wenziList[j].setVisible(false)
                            // Special case: keep item 3 hidden in one-sheet mode.
                            if(!node.judge && j == 3) normalList[j].setVisible(false)
                        }
                    }
                    return true
                }
            })
            // Tapping the scene object itself triggers the same selection.
            var judge = judgeList[i]
            if(judge){
                judge.index = i
                createTouchEvent({
                    item:judge,
                    begin:function(data){
                        var item = data.item
                        var index = item.index
                        if(judgeOpInPos(data)){
                            if(!node.judge && index == 3) return false
                            for(var j = 0 ; j < 5 ; j++){
                                if(index == j && item){
                                    self.nodebs.say({key:self.addList[j].key,force:true})
                                    selectList[j].setVisible(true)
                                    normalList[j].setVisible(false)
                                    wenziList[j].setVisible(true)
                                }else{
                                    selectList[j].setVisible(false)
                                    normalList[j].setVisible(true)
                                    wenziList[j].setVisible(false)
                                    // Special case: keep item 3 hidden in one-sheet mode.
                                    if(!node.judge && j == 3) normalList[j].setVisible(false)
                                }
                            }
                            return true
                        }
                        return false
                    }
                })
            }
        }
        normalList[3].setVisible(false)
        // One-sheet / two-sheet mode toggle buttons.
        var touchList = []
        for(var i = 0 ; i < 2 ; i++){
            var img = sprintf("#touch_%d.png",2*i+1)
            var sp = createSp(img,cc.p(150*i+100,480),self)
            sp.index = i
            touchList[i] = sp
            var img2 = sprintf("#touch_%d.png",2*i+2)
            touchList[2+i] = createSp(img2,cc.p(150*i+100,480),self)
            touchList[2+i].setVisible(false)
            createTouchEvent({
                item:sp,
                begin:function(data){
                    var item = data.item
                    var index = item.index
                    for(var j = 0 ; j < 2 ; j++){
                        if(index == j){
                            touchList[2+j].setVisible(true)
                            touchList[j].setVisible(false)
                            if(index == 0) twoFun(false)
                            else{
                                twoFun(true)
                                // Speak the two-sheet tip only the first time.
                                if(!node.say2){
                                    node.say2 = true
                                    self.nodebs.say({key:"do_tip2",force:true})
                                }
                            }
                        }else{
                            touchList[2+j].setVisible(false)
                            touchList[j].setVisible(true)
                        }
                    }
                    return true
                }
            })
        }
        touchList[2].setVisible(true)
        touchList[0].setVisible(false)
        var lightList = [node.lightLine_1,node.lightLine_2,node.lightLine_3,node.lightfx]
        // Checkbox that shows/hides the drawn light path.
        var judgeShow = createSp("#judgeShow.png",cc.p(100,400),self)
        judgeShow.gou = createSp("#img_gou.png",cc.p(10,20),judgeShow)
        judgeShow.gou.setVisible(false)
        createTouchEvent({
            item:judgeShow,
            begin:function(data){
                var item = data.item
                if(item.gou.isVisible()){
                    item.gou.setVisible(false)
                    for(var i = 0 ; i < lightList.length ; i++){
                        lightList[i].setVisible(false)
                    }
                }else{
                    item.gou.setVisible(true)
                    for(var i = 0 ; i < lightList.length ; i++){
                        lightList[i].setVisible(true)
                    }
                }
            }
        })
        node.judge = false // whether one (false) or two (true) polarizer sheets are active
        node.light.curScal = 0.7
        node.light.curOp = 200
        // Switch between one-sheet and two-sheet mode; restores the stored
        // output-light scale/opacity when the second sheet is active.
        var twoFun = function(flag){
            node.judge = flag
            normalList[3].setVisible(flag)
            selectList[3].setVisible(false)
            selectList[2].getChildren()[1].setVisible(flag)
            node.rotaBg2.setVisible(flag)
            node.pian2.setVisible(flag)
            if(flag){
                node.light.setScale(node.light.curScal)
                node.light.setOpacity(node.light.curOp)
            }else{
                node.light.setScale(1)
                node.light.setOpacity(255)
            }
        }
    },
    // Creates the narrator character and registers all of its voice lines.
    initPeople : function(){
        this.nodebs = addPeople({
            id: "student",
            pos: cc.p(1000, 130)
        })
        this.addChild(this.nodebs,99)
        // Per-apparatus explanation lines (indexed by selection slot).
        var addList = [
            {key:"do_tip4",sound:res.do_sound4},
            {key:"do_tip5",sound:res.do_sound5},
            {key:"do_tip6",sound:res.do_sound6},
            {key:"do_tip7",sound:res.do_sound7},
            {key:"do_tip8",sound:res.do_sound8},
        ]
        this.addList = addList
        for (var i = 0 ; i < addList.length ; i++){
            addContent({
                people: this.nodebs,
                key: addList[i].key,
                sound: addList[i].sound,
            })
        }
        // Intro tips with accompanying images.
        var addList2 = [
            {key:"do_tip1",img: res.do_tip1,sound:res.do_sound1},
            {key:"do_tip2",img: res.do_tip2,sound:res.do_sound2},
        ]
        this.addList = addList
        for (var i = 0 ; i < addList2.length ; i++){
            addContent({
                people: this.nodebs,
                key: addList2[i].key,
                img: addList2[i].img,
                sound: addList2[i].sound,
            })
        }
        // Conclusion line triggered by the result button.
        addContent({
            people: this.nodebs,
            key: "result",
            img: res.do_tip3,
            sound: res.do_sound3,
            id: "result"
        })
    },
})
mwkohout/cassandra-bloomfilters
test/unit/org/apache/cassandra/service/pager/PagingStateTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.cassandraBloomFilters.service.pager; import java.nio.ByteBuffer; import org.junit.Test; import org.apache.cassandraBloomFilters.config.CFMetaData; import org.apache.cassandraBloomFilters.config.ColumnDefinition; import org.apache.cassandraBloomFilters.cql3.ColumnIdentifier; import org.apache.cassandraBloomFilters.db.*; import org.apache.cassandraBloomFilters.db.rows.*; import org.apache.cassandraBloomFilters.db.marshal.*; import org.apache.cassandraBloomFilters.transport.Server; import org.apache.cassandraBloomFilters.utils.ByteBufferUtil; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; public class PagingStateTest { private PagingState makeSomePagingState(int protocolVersion) { CFMetaData metadata = CFMetaData.Builder.create("ks", "tbl") .addPartitionKey("k", AsciiType.instance) .addClusteringColumn("c1", AsciiType.instance) .addClusteringColumn("c1", Int32Type.instance) .addRegularColumn("myCol", AsciiType.instance) .build(); ByteBuffer pk = ByteBufferUtil.bytes("someKey"); ColumnDefinition def = metadata.getColumnDefinition(new ColumnIdentifier("myCol", false)); Clustering c = Clustering.make(ByteBufferUtil.bytes("c1"), 
ByteBufferUtil.bytes(42)); Row row = BTreeRow.singleCellRow(c, BufferCell.live(def, 0, ByteBufferUtil.EMPTY_BYTE_BUFFER)); PagingState.RowMark mark = PagingState.RowMark.create(metadata, row, protocolVersion); return new PagingState(pk, mark, 10, 0); } @Test public void testSerializationBackwardCompatibility() { /* * Tests that the serialized paging state for the native protocol V3 is backward compatible * with what old nodes generate. For that, it compares the serialized format to the hard-coded * value of the same state generated on a 2.1. For the curious, said hardcoded value has been * generated by the following code: * ByteBuffer pk = ByteBufferUtil.bytes("someKey"); * CellName cn = CellNames.compositeSparse(new ByteBuffer[]{ ByteBufferUtil.bytes("c1"), ByteBufferUtil.bytes(42) }, * new ColumnIdentifier("myCol", false), * false); * PagingState state = new PagingState(pk, cn.toByteBuffer(), 10); * System.out.println("PagingState = " + ByteBufferUtil.bytesToHex(state.serialize())); */ PagingState state = makeSomePagingState(Server.VERSION_3); String serializedState = ByteBufferUtil.bytesToHex(state.serialize(Server.VERSION_3)); // Note that we don't assert exact equality because we know 3.0 nodes include the "remainingInPartition" number // that is not present on 2.1/2.2 nodes. We know this is ok however because we know that 2.1/2.2 nodes will ignore // anything remaining once they have properly deserialized a paging state. 
assertTrue(serializedState.startsWith("0007736f6d654b65790014000263310000040000002a0000056d79636f6c000000000a")); } @Test public void testSerializeDeserializeV3() { PagingState state = makeSomePagingState(Server.VERSION_3); ByteBuffer serialized = state.serialize(Server.VERSION_3); assertEquals(serialized.remaining(), state.serializedSize(Server.VERSION_3)); assertEquals(state, PagingState.deserialize(serialized, Server.VERSION_3)); } @Test public void testSerializeDeserializeV4() { PagingState state = makeSomePagingState(Server.VERSION_4); ByteBuffer serialized = state.serialize(Server.VERSION_4); assertEquals(serialized.remaining(), state.serializedSize(Server.VERSION_4)); assertEquals(state, PagingState.deserialize(serialized, Server.VERSION_4)); } }
zuxqoj/cdap
cdap-api/src/main/java/io/cdap/cdap/api/dataset/DatasetContext.java
<reponame>zuxqoj/cdap /* * Copyright © 2015 <NAME>, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package io.cdap.cdap.api.dataset; import io.cdap.cdap.api.annotation.Beta; /** * Provides access to the context for a dataset including its environment and configuration */ @Beta public class DatasetContext { private final String namespaceId; private DatasetContext(String namespaceId) { this.namespaceId = namespaceId; } /** * Returns the namespace for a dataset * * @return the dataset's namespace */ public String getNamespaceId() { return namespaceId; } @Override public String toString() { return "DatasetContext{" + "namespaceId='" + namespaceId + '\'' + '}'; } /** * Constructs a new {@link DatasetContext} containing the specified namespace id * * @param namespaceId the namespace id to construct the {@link DatasetContext} with * @return a new {@link DatasetContext} containing the specified namespace id */ public static DatasetContext from(String namespaceId) { if (namespaceId == null) { throw new IllegalArgumentException("Namespace Id can not be null"); } return new DatasetContext(namespaceId); } }
TISparta/competitive-programming-solutions
Codeforces/Round-184-Div2/C.cpp
<gh_stars>1-10 // Tags: Greedy // Difficulty: 3.6 // Priority: 1 // Date: 22-05-2021 #include <bits/stdc++.h> #define all(A) begin(A), end(A) #define rall(A) rbegin(A), rend(A) #define sz(A) int(A.size()) #define pb push_back #define mp make_pair using namespace std; typedef long long ll; typedef pair <int, int> pii; typedef pair <ll, ll> pll; typedef vector <int> vi; typedef vector <ll> vll; typedef vector <pii> vpii; typedef vector <pll> vpll; int main () { ios::sync_with_stdio(false); cin.tie(0); int n; cin >> n; map <ll, int> z; for (int i = 0; i < n; i++) { ll a; cin >> a; z[a] += 1; } ll cnt = 0; ll mx = 0; for (auto [key, value]: z) { if (value & 1) cnt += 1; if (value / 2) z[key + 1] += value / 2; mx = key; } cout << mx - cnt + 1 << '\n'; return (0); }
vorburger/flowsim
flowsim-ui/app/scripts/controllers/verify.js
<reponame>vorburger/flowsim 'use strict'; /** * @ngdoc function * @name flowsimUiApp.controller:VerifyCtrl * @description * # VerifyCtrl * Controller of the flowsimUiApp */ angular.module('flowsimUiApp') .controller('VerifyCtrl', function ($scope, $stateParams, Subscriber) { function clearErrors() { $scope.errorMsg = ''; } clearErrors(); Subscriber.verify($stateParams.token, function(err) { if(err) { $scope.errorMsg = err.message; console.log(err.details); } else { $scope.success = true; } }); });
sergio-fry/youtube-fetcher
spec/features/search_spec.rb
require 'rails_helper' RSpec.feature "Search", type: :feature do scenario 'Type query and sea results' do podcast = FactoryGirl.create :podcast, title: 'Peter Episode' episode = FactoryGirl.create :episode, title: 'Moscow Episode' visit '/' fill_in 'Search', with: 'Peter' click_on 'Submit' expect(page).to have_content 'Peter Episode' end end
madettmann/DCS-Discover
staticfiles/jmol/jsmol/j2s/JS/UnitCellIterator.js
// Machine-generated code: appears to be java2script (Clazz) output transpiled
// from the Jmol Java class JS.UnitCellIterator — do not hand-edit logic here;
// fix the upstream Java source instead. Iterates atoms of a crystallographic
// unit cell (expanded by symmetry operations) over the translated cells that
// can fall within a given distance of a center point.
Clazz.declarePackage ("JS");
Clazz.load (["J.api.AtomIndexIterator"], "JS.UnitCellIterator", ["JU.Lst", "$.P3", "$.P3i", "JU.BoxInfo", "$.Logger", "$.Point3fi"], function () {
// Field defaults; ipt = Integer.MIN_VALUE marks "not started".
c$ = Clazz.decorateAsClass (function () {
this.atoms = null;
this.center = null;
this.translation = null;
this.nFound = 0;
this.maxDistance2 = 0;
this.distance2 = 0;
this.unitCell = null;
this.minXYZ = null;
this.maxXYZ = null;
this.t = null;
this.p = null;
this.ipt = -2147483648;
this.unitList = null;
this.done = false;
this.nAtoms = 0;
this.listPt = 0;
Clazz.instantialize (this, arguments);
}, JS, "UnitCellIterator", null, J.api.AtomIndexIterator);
Clazz.makeConstructor (c$, function () {
});
// Initialize with a unit cell and atom set; a positive distance also sets the search center.
Clazz.defineMethod (c$, "set", function (unitCell, atom, atoms, bsAtoms, distance) {
this.unitCell = unitCell;
this.atoms = atoms;
this.addAtoms (bsAtoms);
this.p = new JU.P3 ();
if (distance > 0) this.setCenter (atom, distance);
return this;
}, "J.api.SymmetryInterface,JM.Atom,~A,JU.BS,~N");
Clazz.overrideMethod (c$, "setModel", function (modelSet, modelIndex, zeroBase, atomIndex, center, distance, rd) {
}, "JM.ModelSet,~N,~N,~N,JU.T3,~N,J.atomdata.RadiusData");
// Compute the fractional-coordinate bounding box of a sphere of `distance`
// around `center`, giving the range of cell translations to scan.
Clazz.overrideMethod (c$, "setCenter", function (center, distance) {
if (distance == 0) return;
this.maxDistance2 = distance * distance;
this.center = center;
this.translation = new JU.P3 ();
var pts = JU.BoxInfo.unitCubePoints;
var min = JU.P3.new3 (3.4028235E38, 3.4028235E38, 3.4028235E38);
var max = JU.P3.new3 (-3.4028235E38, -3.4028235E38, -3.4028235E38);
this.p = new JU.P3 ();
var ptC = new JU.P3 ();
ptC.setT (center);
this.unitCell.toFractional (ptC, true);
for (var i = 0; i < 8; i++) {
this.p.scaleAdd2 (-2.0, pts[i], pts[7]);
this.p.scaleAdd2 (distance, this.p, center);
this.unitCell.toFractional (this.p, true);
if (min.x > this.p.x) min.x = this.p.x;
if (max.x < this.p.x) max.x = this.p.x;
if (min.y > this.p.y) min.y = this.p.y;
if (max.y < this.p.y) max.y = this.p.y;
if (min.z > this.p.z) min.z = this.p.z;
if (max.z < this.p.z) max.z = this.p.z;
}
this.minXYZ = JU.P3i.new3 (Clazz.doubleToInt (Math.floor (min.x)), Clazz.doubleToInt (Math.floor (min.y)), Clazz.doubleToInt (Math.floor (min.z)));
this.maxXYZ = JU.P3i.new3 (Clazz.doubleToInt (Math.ceil (max.x)), Clazz.doubleToInt (Math.ceil (max.y)), Clazz.doubleToInt (Math.ceil (max.z)));
if (JU.Logger.debugging) JU.Logger.info ("UnitCellIterator minxyz/maxxyz " + this.minXYZ + " " + this.maxXYZ);
this.t = JU.P3i.new3 (this.minXYZ.x - 1, this.minXYZ.y, this.minXYZ.z);
this.nextCell ();
}, "JU.T3,~N");
// Expand the atom set by all symmetry operations into unitList, deduplicating
// positions via a string key of coordinates rounded to 0.01.
Clazz.overrideMethod (c$, "addAtoms", function (bsAtoms) {
this.done = (bsAtoms == null);
if (this.done) return;
this.unitList = new JU.Lst ();
var cat = "";
var ops = this.unitCell.getSymmetryOperations ();
var nOps = ops.length;
for (var i = bsAtoms.nextSetBit (0); i >= 0; i = bsAtoms.nextSetBit (i + 1)) {
var a = this.atoms[i];
for (var j = 0; j < nOps; j++) {
var pt = new JU.P3 ();
pt.setT (a);
if (j > 0) {
this.unitCell.toFractional (pt, false);
ops[j].rotTrans (pt);
this.unitCell.unitize (pt);
this.unitCell.toCartesian (pt, false);
} else {
this.unitCell.toUnitCell (pt, null);
}var key = "_" + Clazz.floatToInt (pt.x * 100) + "_" + Clazz.floatToInt (pt.y * 100) + "_" + Clazz.floatToInt (pt.z * 100) + "_";
if (cat.indexOf (key) >= 0) continue;
cat += key;
this.unitList.addLast ( Clazz.newArray (-1, [a, pt]));
}
}
this.nAtoms = this.unitList.size ();
this.done = (this.nAtoms == 0);
if (JU.Logger.debugging) JU.Logger.info ("UnitCellIterator " + this.nAtoms + " unique points found");
}, "JU.BS");
// Advance until a translated point lies within maxDistance2 of the center
// (excluding near-zero distances, i.e. the center atom itself).
Clazz.overrideMethod (c$, "hasNext", function () {
while ((this.ipt < this.nAtoms || this.nextCell ())) {
this.p.add2 (this.unitList.get (this.listPt = this.ipt++)[1], this.translation);
if ((this.distance2 = this.p.distanceSquared (this.center)) < this.maxDistance2 && this.distance2 > 0.1) {
this.nFound++;
return true;
}}
return false;
});
// Step the cell-translation counter t through [minXYZ, maxXYZ) in x/y/z order.
Clazz.defineMethod (c$, "nextCell", function () {
if (this.done) return false;
if (++this.t.x >= this.maxXYZ.x) {
this.t.x = this.minXYZ.x;
if (++this.t.y >= this.maxXYZ.y) {
this.t.y = this.minXYZ.y;
if (++this.t.z >= this.maxXYZ.z) {
this.done = true;
this.ipt = this.nAtoms;
return false;
}}}this.translation.set (this.t.x, this.t.y, this.t.z);
this.unitCell.toCartesian (this.translation, false);
this.ipt = 0;
return true;
});
Clazz.overrideMethod (c$, "next", function () {
return (this.done || this.ipt < 0 ? -1 : this.getAtom ().i);
});
Clazz.defineMethod (c$, "getAtom", function () {
return (this.unitList.get (this.listPt)[0]);
});
// Squared distance of the last hit, or Float.MAX_VALUE if none found yet.
Clazz.overrideMethod (c$, "foundDistance2", function () {
return (this.nFound > 0 ? this.distance2 : 3.4028235E38);
});
// Return the atom itself when the current point coincides with it; otherwise
// a lightweight Point3fi carrying the translated position and element number.
Clazz.overrideMethod (c$, "getPosition", function () {
var a = this.getAtom ();
if (JU.Logger.debugging) JU.Logger.info ("draw ID p_" + this.nFound + " " + this.p + " //" + a + " " + this.t);
if (this.p.distanceSquared (a) < 0.0001) return a;
var p = new JU.Point3fi ();
p.setT (this.p);
p.i = a.i;
p.sD = a.getElementNumber ();
return p;
});
Clazz.overrideMethod (c$, "release", function () {
this.atoms = null;
this.center = null;
this.translation = null;
});
});
Laurens-makel/iaf
core/src/test/java/nl/nn/adapterframework/configuration/digester/FrankDigesterRulesTest.java
<reponame>Laurens-makel/iaf<filename>core/src/test/java/nl/nn/adapterframework/configuration/digester/FrankDigesterRulesTest.java
/*
   Copyright 2020 WeAreFrank!

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*/
package nl.nn.adapterframework.configuration.digester;

import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.junit.Test;
import org.mockito.Mockito;
import org.xml.sax.SAXException;

import nl.nn.adapterframework.core.Resource;
import nl.nn.adapterframework.util.XmlUtils;

/**
 * Verifies that the bundled {@code digester-rules.xml} parses cleanly and
 * yields at least the expected minimum number of digester patterns.
 */
public class FrankDigesterRulesTest extends Mockito {

	/** SAX handler that collects every parsed {@link DigesterRule} so the test can count them. */
	private class DummyDigesterRulesParser extends DigesterRulesHandler {
		private List<DigesterRule> rules = new ArrayList<>();

		@Override
		protected void handle(DigesterRule rule) {
			rules.add(rule);
		}

		/** Number of rules collected so far. */
		public int size() {
			return rules.size();
		}
	}

	@Test
	public void parseDigesterRulesXml() {
		DummyDigesterRulesParser handler = new DummyDigesterRulesParser();
		// digester-rules.xml is resolved from the (test) classpath.
		Resource digesterRules = Resource.getResource("digester-rules.xml");

		try {
			XmlUtils.parseXml(digesterRules.asInputSource(), handler);
		} catch (IOException e) {
			throw new IllegalStateException("unable to open digesterRules file", e);
		} catch (SAXException e) {
			throw new IllegalStateException("unable to parse digesterRules file", e);
		}

		// 33 is the pattern count at the time of writing; the file is only expected to grow.
		assertTrue("must at least have 33 patterns", handler.size() >= 33);
	}
}
baratrumus/job4j-study
chapter_006/src/main/java/ru/job4j/io/files/FileSearch.java
<reponame>baratrumus/job4j-study<filename>chapter_006/src/main/java/ru/job4j/io/files/FileSearch.java package ru.job4j.io.files; import ru.job4j.io.Search; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.PrintWriter; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import static java.nio.charset.StandardCharsets.UTF_8; /** * 1. Создать программу для поиска файла. * 2. Программа должна искать данные в заданном каталоге и подкаталогах. * 3. Имя файла может задаваться, целиком, по маске, по регулярному выражение(не обязательно). * 4. Программа должна собираться в jar и запускаться через java -jar find.jar -d c:/ -n *.txt -m -o log.txt * -d d:/#Java/Programms/test -n *.txt -m -o log.txt * Ключи * -d - директория в которая начинать поиск. * -n - имя файл, маска, либо регулярное выражение. * -m - искать по макс, либо -f - полное совпадение имени. -r регулярное выражение. * -o - результат записать в файл. * 5. Программа должна записывать результат в файл. * 6. В программе должна быть валидация ключей и подсказка. 
*/ public class FileSearch { private Search search = new Search(); String separator = File.separator; private static final String LS = System.getProperty("line.separator"); /** * allFiles - лист всех файлов из директории поиска, c поддиректориями */ public void findFiles(Args args) throws IOException { List<File> resList = new ArrayList<>(); String beginDir = args.directory(); List<File> allFiles = search.files(beginDir); String maskOfFile = args.filename(); File targetFile = new File(args.output()); String searchType = args.searchMaskOrFullName(); resList = findByMask(beginDir, allFiles, maskOfFile, searchType); writeResult(resList, args.output()); } private void writeResult(List<File> list, String logFileName) throws IOException { File logDir = new File(System.getProperty("java.io.tmpdir") + separator + "SearchLog"); String logFile = logDir.getAbsolutePath() + separator + logFileName; //try { logDir.mkdir(); //logFile.createNewFile(); // } catch (IOException ex) { // System.out.println(ex.getMessage()); //} try (PrintWriter out = new PrintWriter(new FileOutputStream(logFile))) { for (File f : list) { out.println(f.getAbsolutePath()); } } catch (Exception e) { e.printStackTrace(); } } private List<File> findByMask(String beginDir, List<File> allFiles, String maskOfFile, String searchType) { List<File> resList = new ArrayList<>(); Pattern regEx = Pattern.compile(maskOfFile, Pattern.CASE_INSENSITIVE); if ("m".equals(searchType)) { /* try (DirectoryStream<Path> dir = Files.newDirectoryStream( Paths.get(beginDir), maskOfFile)) { for (Path entry : dir) { resList.add(entry.toFile()); } } catch (IOException ex) { System.err.println("An IOException was caught: " + ex.getMessage()); ex.printStackTrace(); }*/ for (File file : allFiles) { String fileName = file.getName(); if (fileName.contains(maskOfFile) || fileName.equalsIgnoreCase(maskOfFile) || fileName.endsWith(maskOfFile) || fileName.startsWith(maskOfFile)) { resList.add(file); } } } else if ("f".equals(searchType)) { 
for (File file : allFiles) { if (file.getName().contains(maskOfFile)) { resList.add(file); } } } else if ("r".equals(searchType)) { for (File file : allFiles) { Matcher matcher = regEx.matcher(file.getName()); if (matcher.matches()) { resList.add(file); } } } else if (searchType != null) { System.out.println("Отсутствует ключ типа поиска"); } return resList; } public static void main(String[] args) throws org.apache.commons.cli.ParseException, IOException { Args arguments = new Args(args); FileSearch filesearch = new FileSearch(); filesearch.findFiles(arguments); } }
ShinjiKatoA16/aoj-itp
itp1_5d.py
#!/usr/bin/python3
# -*- coding: utf-8 -*-

# AOJ ITP1_5_D: print every i in [1, n] that is a multiple of 3 or contains
# the digit 3, each prefixed with a space, all on a single line.

import sys

n = int(sys.stdin.readline())
line = ''.join(
    ' {}'.format(i)
    for i in range(1, n + 1)
    if i % 3 == 0 or '3' in str(i)
)
print(line)
drtierney/hyperskill-problems
Python/Topics/Stack in Python/Reversing a string/main.py
from collections import deque

# Read n lines from stdin, then emit them in reverse order by draining a
# LIFO stack (deque used as a stack: append on the right, pop from the right).
n = int(input())
stack = deque(input() for _ in range(n))
while stack:
    print(stack.pop())
odys-z/hello
challenge/leet/medium/q1079.py
'''
1079. Letter Tile Possibilities
https://leetcode.com/problems/letter-tile-possibilities/

You have n tiles, where each tile has one letter tiles[i] printed on it.

Return the number of possible non-empty sequences of letters you can make
using the letters printed on those tiles.

Example 1:
Input: tiles = "AAB"
Output: 8
Explanation: The possible sequences are
"A", "B", "AA", "AB", "BA", "AAB", "ABA", "BAA".

Example 2:
Input: tiles = "AAABBC"
Output: 188

Example 3:
Input: tiles = "V"
Output: 1

Constraints:
    1 <= tiles.length <= 7
    tiles consists of uppercase English letters.

Created on 20 Mar 2021

@author: <NAME>
'''
from itertools import permutations
from unittest import TestCase


class Solution:
    def numTilePossibilities(self, tiles: str) -> int:
        """Count the distinct non-empty letter sequences buildable from tiles.

        Enumerates every permutation of every length 1..len(tiles) and
        deduplicates with a set. Duplicate letters in `tiles` produce equal
        tuples that the set collapses, which is exactly the required
        semantics. With len(tiles) <= 7 this is at most sum(P(7, r)) = 13699
        tuples, so brute force is more than fast enough and far simpler than
        the original hand-rolled set-union backtracking.

        :param tiles: uppercase letters printed on the tiles
        :return: number of distinct non-empty sequences
        """
        return len({
            seq
            for length in range(1, len(tiles) + 1)
            for seq in permutations(tiles, length)
        })


if __name__ == '__main__':
    t = TestCase()
    s = Solution()
    t.assertEqual(8, s.numTilePossibilities('AAB'))
    t.assertEqual(1, s.numTilePossibilities('V'))
    t.assertEqual(188, s.numTilePossibilities('AAABBC'))
    print('OK!')
ONSdigital/dp-import-reporter
config/config.go
package config import ( "encoding/json" "time" "github.com/kelseyhightower/envconfig" "github.com/pkg/errors" ) // KafkaSecProtocolTLS informs service to use TLS protocol for kafka const KafkaSecProtocolTLS = "TLS" // Config struct to hold application configuration. type Config struct { BindAddress string `envconfig:"BIND_ADDR"` DatasetAPIURL string `envconfig:"DATASET_API_URL"` DatasetAPIAuthToken string `envconfig:"DATASET_API_AUTH_TOKEN" json:"-"` CacheSize int `envconfig:"CACHE_SIZE"` CacheExpiry int `envconfig:"CACHE_EXPIRY"` GracefulShutdownTimeout time.Duration `envconfig:"GRACEFUL_SHUTDOWN_TIMEOUT"` ServiceAuthToken string `envconfig:"SERVICE_AUTH_TOKEN" json:"-"` ZebedeeURL string `envconfig:"ZEBEDEE_URL"` ReportEventTopic string `envconfig:"CONSUMER_TOPIC"` ReportEventGroup string `envconfig:"CONSUMER_GROUP"` KafkaBrokers []string `envconfig:"KAFKA_ADDR"` KafkaVersion string `envconfig:"KAFKA_VERSION"` KafkaSecProtocol string `envconfig:"KAFKA_SEC_PROTO"` KafkaSecCACerts string `envconfig:"KAFKA_SEC_CA_CERTS"` KafkaSecClientCert string `envconfig:"KAFKA_SEC_CLIENT_CERT"` KafkaSecClientKey string `envconfig:"KAFKA_SEC_CLIENT_KEY" json:"-"` KafkaSecSkipVerify bool `envconfig:"KAFKA_SEC_SKIP_VERIFY"` KafkaOffsetOldest bool `envconfig:"KAFKA_OFFSET_OLDEST"` } var config *Config var processConfig func(prefix string, spec interface{}) error = envconfig.Process // Get configures the application and returns the configuration func Get() (*Config, error) { if config != nil { return config, nil } config = &Config{ BindAddress: ":22200", DatasetAPIURL: "http://localhost:22000", DatasetAPIAuthToken: "<PASSWORD>", CacheSize: 100 * 1024 * 1024, CacheExpiry: 60, GracefulShutdownTimeout: time.Second * 5, ServiceAuthToken: "<PASSWORD>", ZebedeeURL: "http://localhost:8082", ReportEventTopic: "report-events", ReportEventGroup: "dp-import-reporter", KafkaBrokers: []string{"localhost:9092"}, KafkaVersion: "1.0.2", KafkaOffsetOldest: true, } if err := processConfig("", config); 
err != nil { return nil, errors.Wrap(err, "config: error while attempting to load environment config") } if config.KafkaSecProtocol != "" && config.KafkaSecProtocol != KafkaSecProtocolTLS { return nil, errors.New("KAFKA_SEC_PROTO has invalid value") } config.ServiceAuthToken = "Bearer " + config.ServiceAuthToken return config, nil } // String is implemented to prevent sensitive fields being logged. // The config is returned as JSON with sensitive fields omitted. func (config Config) String() string { json, _ := json.Marshal(config) return string(json) }
akquinet/drools
drools-compiler/src/main/java/org/drools/rule/builder/dialect/asm/ASMEvalBuilder.java
<gh_stars>1-10
package org.drools.rule.builder.dialect.asm;

import org.drools.WorkingMemory;
import org.drools.rule.Declaration;
import org.drools.rule.builder.RuleBuildContext;
import org.drools.spi.CompiledInvoker;
import org.drools.spi.EvalExpression;
import org.drools.spi.Tuple;
import org.mvel2.asm.MethodVisitor;

import java.util.Map;

import static org.drools.rule.builder.dialect.asm.InvokerGenerator.createInvokerClassGenerator;
import static org.mvel2.asm.Opcodes.ACC_PUBLIC;
import static org.mvel2.asm.Opcodes.ACONST_NULL;
import static org.mvel2.asm.Opcodes.ALOAD;
import static org.mvel2.asm.Opcodes.ARETURN;
import static org.mvel2.asm.Opcodes.INVOKESTATIC;
import static org.mvel2.asm.Opcodes.IRETURN;

/**
 * Generates, via ASM bytecode emission, an {@link EvalExpression} invoker class
 * for a rule's eval() condition. The generated class implements:
 * <ul>
 *   <li>{@code createContext()} — returns null (no per-evaluation context),</li>
 *   <li>{@code clone()} — returns {@code this} (the invoker is stateless),</li>
 *   <li>{@code replaceDeclaration(...)} — empty body,</li>
 *   <li>{@code evaluate(...)} — loads the tuple's declarations and the working-memory
 *       globals onto the stack and delegates to the rule class's static eval method.</li>
 * </ul>
 * NOTE(review): bytecode emission is order-sensitive — the operand-stack layout built
 * here must match the generated static method's descriptor exactly; edit with care.
 */
public class ASMEvalBuilder extends AbstractASMEvalBuilder {

    protected byte[] createEvalBytecode(final RuleBuildContext ruleContext, final Map vars) {
        final InvokerDataProvider data = new InvokerContext(vars);
        final ClassGenerator generator = createInvokerClassGenerator(data, ruleContext)
                .setInterfaces(EvalExpression.class, CompiledInvoker.class);

        generator.addMethod(ACC_PUBLIC, "createContext", generator.methodDescr(Object.class), new ClassGenerator.MethodBody() {
            public void body(MethodVisitor mv) {
                // return null;
                mv.visitInsn(ACONST_NULL);
                mv.visitInsn(ARETURN);
            }
        }).addMethod(ACC_PUBLIC, "clone", generator.methodDescr(EvalExpression.class), new ClassGenerator.MethodBody() {
            public void body(MethodVisitor mv) {
                // return this;
                mv.visitVarInsn(ALOAD, 0);
                mv.visitInsn(ARETURN);
            }
        }).addMethod(ACC_PUBLIC, "replaceDeclaration", generator.methodDescr(null, Declaration.class, Declaration.class)
        ).addMethod(ACC_PUBLIC, "evaluate", generator.methodDescr(Boolean.TYPE, Tuple.class, Declaration[].class, WorkingMemory.class, Object.class), new String[]{"java/lang/Exception"}, new GeneratorHelper.EvaluateMethod() {
            public void body(MethodVisitor mv) {
                final Declaration[] declarations = (Declaration[])vars.get("declarations");
                final String[] globals = (String[])vars.get("globals");
                final String[] globalTypes = (String[])vars.get("globalTypes");

                objAstorePos = 5;
                int[] declarationsParamsPos = parseDeclarations(declarations, 2, 1, 3, true);

                // @{ruleClassName}.@{methodName}(@foreach{declarations}, @foreach{globals})
                StringBuilder evalMethodDescr = new StringBuilder("(");
                for (int i = 0; i < declarations.length; i++) {
                    load(declarationsParamsPos[i]); // declarations[i]
                    evalMethodDescr.append(typeDescr(declarations[i].getTypeName()));
                }

                // @foreach{type : globalTypes, identifier : globals} @{type} @{identifier} = ( @{type} ) workingMemory.getGlobal( "@{identifier}" );
                parseGlobals(globals, globalTypes, 3, evalMethodDescr);

                evalMethodDescr.append(")Z");
                mv.visitMethodInsn(INVOKESTATIC, data.getInternalRuleClassName(), data.getMethodName(), evalMethodDescr.toString());
                mv.visitInsn(IRETURN);
            }
        });

        return generator.generateBytecode();
    }
}
progital/test-redwood
web/src/pages/HomePage/HomePage.js
<filename>web/src/pages/HomePage/HomePage.js import { Flex, Box } from 'theme-ui'; import Button from 'components/ui/ButtonOutline'; import DefaultLayout from 'layouts/DefaultLayout'; import NewUser from 'components/account/NewUser'; import LoginUser from 'components/account/LoginUser'; import { useState } from 'react'; import { Redirect, routes } from '@redwoodjs/router'; import { useAuth } from '@redwoodjs/auth'; const HomePage = () => { // login, newuser const [status, setStatus] = useState('login'); const { isAuthenticated } = useAuth(); let actionLabel = null; let ActionForm = null; switch (status) { case 'login': { actionLabel = 'Create Account'; ActionForm = LoginUser; break; } case 'newuser': { actionLabel = 'Sign In'; ActionForm = NewUser; break; } } const actionHanlder = () => { switch (status) { case 'login': { setStatus('newuser'); break; } case 'newuser': { setStatus('login'); break; } } }; if (isAuthenticated) { return <Redirect to={routes.dashboard()} />; } return ( <DefaultLayout headerAction={() => ( <> <Box mx="auto" /> <Button onClick={actionHanlder}>{actionLabel}</Button> </> )} > <Flex sx={{ alignItems: 'center', justifyContent: 'center', flex: '1 1 auto', }} as="main" > <ActionForm /> </Flex> </DefaultLayout> ); }; export default HomePage;
Xibo-Yue/WanAndroid
app/src/main/java/com/example/lengary_l/wanandroid/data/source/CategoriesDataSource.java
<filename>app/src/main/java/com/example/lengary_l/wanandroid/data/source/CategoriesDataSource.java<gh_stars>0 package com.example.lengary_l.wanandroid.data.source; import android.support.annotation.NonNull; import com.example.lengary_l.wanandroid.data.CategoryDetailData; import java.util.List; import io.reactivex.Observable; /** * Created by CoderLengary */ public interface CategoriesDataSource { Observable<List<CategoryDetailData>> getCategories(@NonNull boolean forceUpdate); }
all3g/pieces
requests/requests-cookies.py
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Demonstrates reading response cookies and sending request cookies with
the third-party ``requests`` library."""

import requests

url = 'https://www.metasploit.com'
r = requests.get(url)
print(r.cookies)

# A response's cookies act like a dict. Use .get() so a missing key yields
# None instead of raising KeyError -- this site may not set a 'value' cookie.
print(r.cookies.get('value'))

# To send your own cookies to the server, pass the `cookies` parameter.
url = 'http://httpbin.org/cookies'
cookies = dict(cookies_are='working')
# cookies = old_cookies.update(new_cookies)
# Fixed: the original called `r.requests.get(...)`, which raises
# AttributeError -- a Response object has no `requests` attribute.
r = requests.get(url, cookies=cookies)
print(r.cookies)
coolslow/Algorithm
src/com/coolslow/playground/binary_tree/NO129_SumRootToLeafNumbers.java
package com.coolslow.playground.binary_tree; /** * 129. 求根节点到叶子节点的数字之和 * 给你一个二叉树的根节点 root ,树中每个节点都存放有一个 0 到 9 之间的数字。 * 每条从根节点到叶节点的路径都代表一个数字: * - 例如,从根节点到叶节点的路径 1 -> 2 -> 3 表示数字 123 。 * 计算从根节点到叶节点生成的 所有数字之和 。 * * 叶节点 是指没有子节点的节点。 * * 示例: * 1 * / \ * 2 3 * 输入:root = [1, 2, 3] * 输出:25 * * 解释: * 从根到叶子节点路径 1 -> 2代表数字12 * 从根到叶子节点路径 1 -> 3代表数字13 * 因此:数字总和为:12 + 13 = 25 */ public class NO129_SumRootToLeafNumbers { // public int sumNumbers(TreeNode root) { // int height = getTreeHeight(root); // } // 递归求解,无需去求树的高度 // private int getTreeHeight(TreeNode root) { // if (root == null) { // return 0; // } // int leftHeight = getTreeHeight(root.left); // int rightHeight = getTreeHeight(root.right); // int res = Math.max(leftHeight, rightHeight) + 1; // return res; // } public int sumNumbers(TreeNode root) { return helper(root, 0); } public int helper(TreeNode root, int sum) { if (root == null) return 0; int tmp = sum * 10 + root.val; if(root.left == null && root.right == null) { return tmp; } else { return helper(root.left, tmp) + helper(root.right, tmp); } } }
pymma/drools47jdk8
drools-solver/drools-solver-core/src/main/java/org/drools/solver/config/localsearch/LocalSearchSolverConfig.java
<reponame>pymma/drools47jdk8<filename>drools-solver/drools-solver-core/src/main/java/org/drools/solver/config/localsearch/LocalSearchSolverConfig.java
package org.drools.solver.config.localsearch;

import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.List;

import com.thoughtworks.xstream.annotations.XStreamAlias;
import com.thoughtworks.xstream.annotations.XStreamImplicit;
import org.apache.commons.io.IOUtils;
import org.drools.RuleBase;
import org.drools.RuleBaseFactory;
import org.drools.compiler.DroolsParserException;
import org.drools.compiler.PackageBuilder;
import org.drools.solver.config.localsearch.decider.accepter.AccepterConfig;
import org.drools.solver.config.localsearch.decider.forager.ForagerConfig;
import org.drools.solver.config.localsearch.decider.selector.SelectorConfig;
import org.drools.solver.config.localsearch.evaluation.scorecalculator.ScoreCalculatorConfig;
import org.drools.solver.config.localsearch.finish.FinishConfig;
import org.drools.solver.core.evaluation.EvaluationHandler;
import org.drools.solver.core.localsearch.DefaultLocalSearchSolver;
import org.drools.solver.core.localsearch.LocalSearchSolver;
import org.drools.solver.core.localsearch.bestsolution.BestSolutionRecaller;
import org.drools.solver.core.localsearch.decider.Decider;
import org.drools.solver.core.localsearch.decider.DefaultDecider;

/**
 * XStream-mapped configuration for a local-search solver: holds the score DRL
 * resources plus sub-configs for score calculation, finish criterion, move
 * selection, acceptance and foraging, and builds the runtime
 * {@link LocalSearchSolver} from them. {@link #inherit(LocalSearchSolverConfig)}
 * merges values from a parent config into any unset fields.
 *
 * @author <NAME>
 */
@XStreamAlias("localSearchSolver")
public class LocalSearchSolverConfig {

    // Seed for the solver's random generator; null falls back to 0 in buildSolver().
    private Long randomSeed = null;

    @XStreamImplicit(itemFieldName = "scoreDrl")
    private List<String> scoreDrlList = null;
    @XStreamAlias("scoreCalculator")
    private ScoreCalculatorConfig scoreCalculatorConfig = new ScoreCalculatorConfig();
    @XStreamAlias("finish")
    private FinishConfig finishConfig = new FinishConfig(); // TODO this new is pointless due to xstream
    @XStreamAlias("selector")
    private SelectorConfig selectorConfig = new SelectorConfig();
    @XStreamAlias("accepter")
    private AccepterConfig accepterConfig = new AccepterConfig();
    @XStreamAlias("forager")
    private ForagerConfig foragerConfig = new ForagerConfig();

    public Long getRandomSeed() {
        return randomSeed;
    }

    public void setRandomSeed(Long randomSeed) {
        this.randomSeed = randomSeed;
    }

    public List<String> getScoreDrlList() {
        return scoreDrlList;
    }

    public void setScoreDrlList(List<String> scoreDrlList) {
        this.scoreDrlList = scoreDrlList;
    }

    public ScoreCalculatorConfig getScoreCalculatorConfig() {
        return scoreCalculatorConfig;
    }

    public void setScoreCalculatorConfig(ScoreCalculatorConfig scoreCalculatorConfig) {
        this.scoreCalculatorConfig = scoreCalculatorConfig;
    }

    public FinishConfig getFinishConfig() {
        return finishConfig;
    }

    public void setFinishConfig(FinishConfig finishConfig) {
        this.finishConfig = finishConfig;
    }

    public SelectorConfig getSelectorConfig() {
        return selectorConfig;
    }

    public void setSelectorConfig(SelectorConfig selectorConfig) {
        this.selectorConfig = selectorConfig;
    }

    public AccepterConfig getAccepterConfig() {
        return accepterConfig;
    }

    public void setAccepterConfig(AccepterConfig accepterConfig) {
        this.accepterConfig = accepterConfig;
    }

    public ForagerConfig getForagerConfig() {
        return foragerConfig;
    }

    public void setForagerConfig(ForagerConfig foragerConfig) {
        this.foragerConfig = foragerConfig;
    }

    // ************************************************************************
    // Builder methods
    // ************************************************************************

    /**
     * Assembles the runtime solver from this configuration.
     */
    public LocalSearchSolver buildSolver() {
        DefaultLocalSearchSolver localSearchSolver = new DefaultLocalSearchSolver();
        if (randomSeed != null) {
            localSearchSolver.setRandomSeed(randomSeed);
        } else {
            // Fixed default seed keeps runs reproducible when none is configured.
            localSearchSolver.setRandomSeed(0L);
        }
        localSearchSolver.setEvaluationHandler(buildEvaluationHandler());
        localSearchSolver.setBestSolutionRecaller(new BestSolutionRecaller());
        localSearchSolver.setFinish(finishConfig.buildFinish());
        localSearchSolver.setDecider(buildDecider());
        return localSearchSolver;
    }

    protected EvaluationHandler buildEvaluationHandler() {
        EvaluationHandler evaluationHandler = new EvaluationHandler();
        RuleBase ruleBase = buildRuleBase();
        evaluationHandler.setRuleBase(ruleBase);
        evaluationHandler.setScoreCalculator(scoreCalculatorConfig.buildScoreCalculator());
        return evaluationHandler;
    }

    // Compiles every configured score DRL classpath resource into one RuleBase.
    private RuleBase buildRuleBase() {
        PackageBuilder packageBuilder = new PackageBuilder();
        for (String scoreDrl : scoreDrlList) {
            InputStream scoreDrlIn = getClass().getResourceAsStream(scoreDrl);
            if (scoreDrlIn == null) {
                throw new IllegalArgumentException("scoreDrl (" + scoreDrl
                        + ") does not exist as a classpath resource.");
            }
            try {
                packageBuilder.addPackageFromDrl(new InputStreamReader(scoreDrlIn, "utf-8"));
            } catch (DroolsParserException e) {
                throw new IllegalArgumentException("scoreDrl (" + scoreDrl + ") could not be loaded.", e);
            } catch (IOException e) {
                throw new IllegalArgumentException("scoreDrl (" + scoreDrl + ") could not be loaded.", e);
            } finally {
                IOUtils.closeQuietly(scoreDrlIn);
            }
        }
        RuleBase ruleBase = RuleBaseFactory.newRuleBase();
        try {
            ruleBase.addPackage(packageBuilder.getPackage());
        } catch (Exception e) { // TODO remove me if removed in drools 4.0.1
            throw new IllegalArgumentException("scoreDrlList (" + scoreDrlList + ") could not be loaded.", e);
        }
        return ruleBase;
    }

    private Decider buildDecider() {
        DefaultDecider decider = new DefaultDecider();
        decider.setMoveFactory(selectorConfig.buildMoveFactory());
        decider.setAccepter(accepterConfig.buildAccepter());
        decider.setForager(foragerConfig.buildForager());
        return decider;
    }

    /**
     * Merges the inherited (parent) config into this one: unset fields are
     * taken from the parent, set sub-configs recursively inherit, and the
     * parent's scoreDrl entries are appended without duplicates.
     */
    public void inherit(LocalSearchSolverConfig inheritedConfig) {
        if (randomSeed == null) {
            randomSeed = inheritedConfig.getRandomSeed();
        }
        if (scoreDrlList == null) {
            scoreDrlList = inheritedConfig.getScoreDrlList();
        } else {
            List<String> inheritedScoreDrlList = inheritedConfig.getScoreDrlList();
            if (inheritedScoreDrlList != null) {
                for (String inheritedScoreDrl : inheritedScoreDrlList) {
                    if (!scoreDrlList.contains(inheritedScoreDrl)) {
                        scoreDrlList.add(inheritedScoreDrl);
                    }
                }
            }
        }
        if (scoreCalculatorConfig == null) {
            scoreCalculatorConfig = inheritedConfig.getScoreCalculatorConfig();
        } else if (inheritedConfig.getScoreCalculatorConfig() != null) {
            scoreCalculatorConfig.inherit(inheritedConfig.getScoreCalculatorConfig());
        }
        if (finishConfig == null) {
            finishConfig = inheritedConfig.getFinishConfig();
        } else if (inheritedConfig.getFinishConfig() != null) {
            finishConfig.inherit(inheritedConfig.getFinishConfig());
        }
        if (selectorConfig == null) {
            selectorConfig = inheritedConfig.getSelectorConfig();
        } else if (inheritedConfig.getSelectorConfig() != null) {
            selectorConfig.inherit(inheritedConfig.getSelectorConfig());
        }
        if (accepterConfig == null) {
            accepterConfig = inheritedConfig.getAccepterConfig();
        } else if (inheritedConfig.getAccepterConfig() != null) {
            accepterConfig.inherit(inheritedConfig.getAccepterConfig());
        }
        if (foragerConfig == null) {
            foragerConfig = inheritedConfig.getForagerConfig();
        } else if (inheritedConfig.getForagerConfig() != null) {
            foragerConfig.inherit(inheritedConfig.getForagerConfig());
        }
    }
}
millken/kwan
src/filter/const.go
<reponame>millken/kwan<gh_stars>0
package filter

// Per-request status bit flags, combined into a single bitmask.
// NOTE(review): the flags are built with `2 << n` (values 2, 4, 8, 16)
// rather than the conventional `1 << n`; the values are still distinct
// powers of two, so bitwise combination works — confirm this was intentional.
const (
	STATUS_DDOS       = 2 << 0 // DDoS mode
	STATUS_BLACKLIST  = 2 << 1 // present in the blacklist
	STATUS_WHITELIST  = 2 << 2 // present in the whitelist
	STATUS_CACHE_HITS = 2 << 3 // cache hit
)
openmcac/basechurch
db/migrate/20141107024218_add_group_id_to_bulletins.rb
class AddGroupIdToBulletins < ActiveRecord::Migration def change add_column :basechurch_bulletins, :group_id, :integer end end
imranpopz/android_bootable_recovery-1
tests/unit/commands_test.cpp
/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// Unit tests for the update-engine transfer-list parser (private/commands.h):
// Command type parsing, target/source/stash info parsing, SourceInfo overlap
// detection and block reading, and whole TransferList parsing.

#include <algorithm>
#include <string>

#include <android-base/strings.h>
#include <gtest/gtest.h>
#include <openssl/sha.h>

#include "otautil/print_sha1.h"
#include "otautil/rangeset.h"
#include "private/commands.h"

// Every known command keyword maps to its Command::Type.
TEST(CommandsTest, ParseType) {
  ASSERT_EQ(Command::Type::ZERO, Command::ParseType("zero"));
  ASSERT_EQ(Command::Type::NEW, Command::ParseType("new"));
  ASSERT_EQ(Command::Type::ERASE, Command::ParseType("erase"));
  ASSERT_EQ(Command::Type::MOVE, Command::ParseType("move"));
  ASSERT_EQ(Command::Type::BSDIFF, Command::ParseType("bsdiff"));
  ASSERT_EQ(Command::Type::IMGDIFF, Command::ParseType("imgdiff"));
  ASSERT_EQ(Command::Type::STASH, Command::ParseType("stash"));
  ASSERT_EQ(Command::Type::FREE, Command::ParseType("free"));
  ASSERT_EQ(Command::Type::COMPUTE_HASH_TREE, Command::ParseType("compute_hash_tree"));
}

// Unknown keywords parse to the LAST sentinel.
TEST(CommandsTest, ParseType_InvalidCommand) {
  ASSERT_EQ(Command::Type::LAST, Command::ParseType("foo"));
  ASSERT_EQ(Command::Type::LAST, Command::ParseType("bar"));
}

// Source blocks come entirely from the source image (no stashes).
// NOTE(review): "<KEY>" below looks like a redacted hash literal introduced by
// data extraction — confirm against the upstream AOSP test before relying on it.
TEST(CommandsTest, ParseTargetInfoAndSourceInfo_SourceBlocksOnly) {
  const std::vector<std::string> tokens{
    "4,569884,569904,591946,592043",
    "117",
    "4,566779,566799,591946,592043",
  };
  TargetInfo target;
  SourceInfo source;
  std::string err;
  ASSERT_TRUE(Command::ParseTargetInfoAndSourceInfo(
      tokens, "<KEY>", &target, "1d74d1a60332fd38cf9405f1bae67917888da6cb", &source, &err));
  ASSERT_EQ(TargetInfo("1d74d1a60332fd38cf9405f1bae67917888da6cb",
                       RangeSet({ { 569884, 569904 }, { 591946, 592043 } })),
            target);
  ASSERT_EQ(SourceInfo("1d74d1a60332fd38cf9405f1bae67917888da6cb",
                       RangeSet({ { 566779, 566799 }, { 591946, 592043 } }), {}, {}),
            source);
  ASSERT_EQ(117, source.blocks());
}

// Source comes entirely from a stash ("-" means no on-disk source ranges).
TEST(CommandsTest, ParseTargetInfoAndSourceInfo_StashesOnly) {
  const std::vector<std::string> tokens{
    "2,350729,350731",
    "2",
    "-",
    "6ebcf8cf1f6be0bc49e7d4a864214251925d1d15:2,0,2",
  };
  TargetInfo target;
  SourceInfo source;
  std::string err;
  ASSERT_TRUE(Command::ParseTargetInfoAndSourceInfo(
      tokens, "6ebcf8cf1f6be0bc49e7d4a864214251925d1d15", &target,
      "1c25ba04d3278d6b65a1b9f17abac78425ec8b8d", &source, &err));
  ASSERT_EQ(
      TargetInfo("6ebcf8cf1f6be0bc49e7d4a864214251925d1d15", RangeSet({ { 350729, 350731 } })),
      target);
  ASSERT_EQ(
      SourceInfo("1c25ba04d3278d6b65a1b9f17abac78425ec8b8d", {}, {},
                 {
                     StashInfo("6ebcf8cf1f6be0bc49e7d4a864214251925d1d15", RangeSet({ { 0, 2 } })),
                 }),
      source);
  ASSERT_EQ(2, source.blocks());
}

// Mixed case: some blocks from the source image, some from a stash.
TEST(CommandsTest, ParseTargetInfoAndSourceInfo_SourceBlocksAndStashes) {
  const std::vector<std::string> tokens{
    "4,611641,611643,636981,637075",
    "96",
    "4,636981,637075,770665,770666",
    "4,0,94,95,96",
    "9eedf00d11061549e32503cadf054ec6fbfa7a23:2,94,95",
  };
  TargetInfo target;
  SourceInfo source;
  std::string err;
  ASSERT_TRUE(Command::ParseTargetInfoAndSourceInfo(
      tokens, "4734d1b241eb3d0f993714aaf7d665fae43772b6", &target,
      "a6cbdf3f416960f02189d3a814ec7e9e95c44a0d", &source, &err));
  ASSERT_EQ(TargetInfo("4734d1b241eb3d0f993714aaf7d665fae43772b6",
                       RangeSet({ { 611641, 611643 }, { 636981, 637075 } })),
            target);
  ASSERT_EQ(SourceInfo(
                "a6cbdf3f416960f02189d3a814ec7e9e95c44a0d",
                RangeSet({ { 636981, 637075 }, { 770665, 770666 } }),  // source ranges
                RangeSet({ { 0, 94 }, { 95, 96 } }),                   // source location
                {
                    StashInfo("9eedf00d11061549e32503cadf054ec6fbfa7a23", RangeSet({ { 94, 95 } })),
                }),
            source);
  ASSERT_EQ(96, source.blocks());
}

// Parsing must reject malformed token lists: mismatched block counts, excess
// stashes, and truncated argument lists.
TEST(CommandsTest, ParseTargetInfoAndSourceInfo_InvalidInput) {
  const std::vector<std::string> tokens{
    "4,611641,611643,636981,637075",
    "96",
    "4,636981,637075,770665,770666",
    "4,0,94,95,96",
    "9eedf00d11061549e32503cadf054ec6fbfa7a23:2,94,95",
  };
  TargetInfo target;
  SourceInfo source;
  std::string err;

  // Mismatching block count.
  {
    std::vector<std::string> tokens_copy(tokens);
    tokens_copy[1] = "97";
    ASSERT_FALSE(Command::ParseTargetInfoAndSourceInfo(
        tokens_copy, "1d74d1a60332fd38cf9405f1bae67917888da6cb", &target,
        "1d74d1a60332fd38cf9405f1bae67917888da6cb", &source, &err));
  }

  // Excess stashes (causing block count mismatch).
  {
    std::vector<std::string> tokens_copy(tokens);
    tokens_copy.push_back("e145a2f83a33334714ac65e34969c1f115e54a6f:2,0,22");
    ASSERT_FALSE(Command::ParseTargetInfoAndSourceInfo(
        tokens_copy, "1d74d1a60332fd38cf9405f1bae67917888da6cb", &target,
        "1d74d1a60332fd38cf9405f1bae67917888da6cb", &source, &err));
  }

  // Invalid args.
  for (size_t i = 0; i < tokens.size(); i++) {
    TargetInfo target;
    SourceInfo source;
    std::string err;
    ASSERT_FALSE(Command::ParseTargetInfoAndSourceInfo(
        std::vector<std::string>(tokens.cbegin() + i + 1, tokens.cend()),
        "1d74d1a60332fd38cf9405f1bae67917888da6cb", &target,
        "1d74d1a60332fd38cf9405f1bae67917888da6cb", &source, &err));
  }
}

TEST(CommandsTest, Parse_EmptyInput) {
  std::string err;
  ASSERT_FALSE(Command::Parse("", 0, &err));
  ASSERT_EQ("invalid type", err);
}

// "abort" is only accepted when the test hook abort_allowed_ is set.
TEST(CommandsTest, Parse_ABORT_Allowed) {
  Command::abort_allowed_ = true;
  const std::string input{ "abort" };
  std::string err;
  Command command = Command::Parse(input, 0, &err);
  ASSERT_TRUE(command);
  ASSERT_EQ(TargetInfo(), command.target());
  ASSERT_EQ(SourceInfo(), command.source());
  ASSERT_EQ(StashInfo(), command.stash());
  ASSERT_EQ(PatchInfo(), command.patch());
}

TEST(CommandsTest, Parse_ABORT_NotAllowed) {
  const std::string input{ "abort" };
  std::string err;
  Command command = Command::Parse(input, 0, &err);
  ASSERT_FALSE(command);
}

TEST(CommandsTest, Parse_BSDIFF) {
  const std::string input{
    "bsdiff 0 148 "
    "f201a4e04bd3860da6ad47b957ef424d58a58f8c 9d5d223b4bc5c45dbd25a799c4f1a98466731599 "
    "4,565704,565752,566779,566799 "
    "68 4,64525,64545,565704,565752"
  };
  std::string err;
  Command command = Command::Parse(input, 1, &err);
  ASSERT_TRUE(command);

  ASSERT_EQ(Command::Type::BSDIFF, command.type());
  ASSERT_EQ(1, command.index());
  ASSERT_EQ(input, command.cmdline());

  ASSERT_EQ(TargetInfo("9d5d223b4bc5c45dbd25a799c4f1a98466731599",
                       RangeSet({ { 565704, 565752 }, { 566779, 566799 } })),
            command.target());
  ASSERT_EQ(SourceInfo("f201a4e04bd3860da6ad47b957ef424d58a58f8c",
                       RangeSet({ { 64525, 64545 }, { 565704, 565752 } }), RangeSet(), {}),
            command.source());
  ASSERT_EQ(StashInfo(), command.stash());
  ASSERT_EQ(PatchInfo(0, 148), command.patch());
}

TEST(CommandsTest, Parse_ERASE) {
  const std::string input{ "erase 2,5,10" };
  std::string err;
  Command command = Command::Parse(input, 2, &err);
  ASSERT_TRUE(command);

  ASSERT_EQ(Command::Type::ERASE, command.type());
  ASSERT_EQ(2, command.index());
  ASSERT_EQ(input, command.cmdline());

  ASSERT_EQ(TargetInfo("unknown-hash", RangeSet({ { 5, 10 } })), command.target());
  ASSERT_EQ(SourceInfo(), command.source());
  ASSERT_EQ(StashInfo(), command.stash());
  ASSERT_EQ(PatchInfo(), command.patch());
}

TEST(CommandsTest, Parse_FREE) {
  const std::string input{ "free hash1" };
  std::string err;
  Command command = Command::Parse(input, 3, &err);
  ASSERT_TRUE(command);

  ASSERT_EQ(Command::Type::FREE, command.type());
  ASSERT_EQ(3, command.index());
  ASSERT_EQ(input, command.cmdline());

  ASSERT_EQ(TargetInfo(), command.target());
  ASSERT_EQ(SourceInfo(), command.source());
  ASSERT_EQ(StashInfo("hash1", RangeSet()), command.stash());
  ASSERT_EQ(PatchInfo(), command.patch());
}

TEST(CommandsTest, Parse_IMGDIFF) {
  const std::string input{
    "imgdiff 29629269 185 "
    "a6b1c49aed1b57a2aab1ec3e1505b945540cd8db 51978f65035f584a8ef7afa941dacb6d5e862164 "
    "2,90851,90852 "
    "1 2,90851,90852"
  };
  std::string err;
  Command command = Command::Parse(input, 4, &err);
  ASSERT_TRUE(command);

  ASSERT_EQ(Command::Type::IMGDIFF, command.type());
  ASSERT_EQ(4, command.index());
  ASSERT_EQ(input, command.cmdline());

  ASSERT_EQ(TargetInfo("51978f65035f584a8ef7afa941dacb6d5e862164", RangeSet({ { 90851, 90852 } })),
            command.target());
  ASSERT_EQ(SourceInfo("a6b1c49aed1b57a2aab1ec3e1505b945540cd8db", RangeSet({ { 90851, 90852 } }),
                       RangeSet(), {}),
            command.source());
  ASSERT_EQ(StashInfo(), command.stash());
  ASSERT_EQ(PatchInfo(29629269, 185), command.patch());
}

TEST(CommandsTest, Parse_MOVE) {
  const std::string input{
    "move 1d74d1a60332fd38cf9405f1bae67917888da6cb "
    "4,569884,569904,591946,592043 117 4,566779,566799,591946,592043"
  };
  std::string err;
  Command command = Command::Parse(input, 5, &err);
  ASSERT_TRUE(command);

  ASSERT_EQ(Command::Type::MOVE, command.type());
  ASSERT_EQ(5, command.index());
  ASSERT_EQ(input, command.cmdline());

  ASSERT_EQ(TargetInfo("1d74d1a60332fd38cf9405f1bae67917888da6cb",
                       RangeSet({ { 569884, 569904 }, { 591946, 592043 } })),
            command.target());
  ASSERT_EQ(SourceInfo("1d74d1a60332fd38cf9405f1bae67917888da6cb",
                       RangeSet({ { 566779, 566799 }, { 591946, 592043 } }), RangeSet(), {}),
            command.source());
  ASSERT_EQ(StashInfo(), command.stash());
  ASSERT_EQ(PatchInfo(), command.patch());
}

TEST(CommandsTest, Parse_NEW) {
  const std::string input{ "new 4,3,5,10,12" };
  std::string err;
  Command command = Command::Parse(input, 6, &err);
  ASSERT_TRUE(command);

  ASSERT_EQ(Command::Type::NEW, command.type());
  ASSERT_EQ(6, command.index());
  ASSERT_EQ(input, command.cmdline());

  ASSERT_EQ(TargetInfo("unknown-hash", RangeSet({ { 3, 5 }, { 10, 12 } })), command.target());
  ASSERT_EQ(SourceInfo(), command.source());
  ASSERT_EQ(StashInfo(), command.stash());
  ASSERT_EQ(PatchInfo(), command.patch());
}

TEST(CommandsTest, Parse_STASH) {
  const std::string input{ "stash hash1 2,5,10" };
  std::string err;
  Command command = Command::Parse(input, 7, &err);
  ASSERT_TRUE(command);

  ASSERT_EQ(Command::Type::STASH, command.type());
  ASSERT_EQ(7, command.index());
  ASSERT_EQ(input, command.cmdline());

  ASSERT_EQ(TargetInfo(), command.target());
  ASSERT_EQ(SourceInfo(), command.source());
  ASSERT_EQ(StashInfo("hash1", RangeSet({ { 5, 10 } })), command.stash());
  ASSERT_EQ(PatchInfo(), command.patch());
}

TEST(CommandsTest, Parse_ZERO) {
  const std::string input{ "zero 2,1,5" };
  std::string err;
  Command command = Command::Parse(input, 8, &err);
  ASSERT_TRUE(command);

  ASSERT_EQ(Command::Type::ZERO, command.type());
  ASSERT_EQ(8, command.index());
  ASSERT_EQ(input, command.cmdline());

  ASSERT_EQ(TargetInfo("unknown-hash", RangeSet({ { 1, 5 } })), command.target());
  ASSERT_EQ(SourceInfo(), command.source());
  ASSERT_EQ(StashInfo(), command.stash());
  ASSERT_EQ(PatchInfo(), command.patch());
}

TEST(CommandsTest, Parse_COMPUTE_HASH_TREE) {
  const std::string input{ "compute_hash_tree 2,0,1 2,3,4 sha1 unknown-salt unknown-root-hash" };
  std::string err;
  Command command = Command::Parse(input, 9, &err);
  ASSERT_TRUE(command);

  ASSERT_EQ(Command::Type::COMPUTE_HASH_TREE, command.type());
  ASSERT_EQ(9, command.index());
  ASSERT_EQ(input, command.cmdline());

  HashTreeInfo expected_info(RangeSet({ { 0, 1 } }), RangeSet({ { 3, 4 } }), "sha1", "unknown-salt",
                             "unknown-root-hash");
  ASSERT_EQ(expected_info, command.hash_tree_info());
  ASSERT_EQ(TargetInfo(), command.target());
  ASSERT_EQ(SourceInfo(), command.source());
  ASSERT_EQ(StashInfo(), command.stash());
  ASSERT_EQ(PatchInfo(), command.patch());
}

TEST(CommandsTest, Parse_InvalidNumberOfArgs) {
  Command::abort_allowed_ = true;

  // Note that the case of having excess args in BSDIFF, IMGDIFF and MOVE is covered by
  // ParseTargetInfoAndSourceInfo_InvalidInput.
  std::vector<std::string> inputs{
    "abort foo",
    "bsdiff",
    "compute_hash_tree, 2,0,1 2,0,1 unknown-algorithm unknown-salt",
    "erase",
    "erase 4,3,5,10,12 hash1",
    "free",
    "free id1 id2",
    "imgdiff",
    "move",
    "new",
    "new 4,3,5,10,12 hash1",
    "stash",
    "stash id1",
    "stash id1 4,3,5,10,12 id2",
    "zero",
    "zero 4,3,5,10,12 hash2",
  };
  for (const auto& input : inputs) {
    std::string err;
    ASSERT_FALSE(Command::Parse(input, 0, &err));
  }
}

// Overlap detection between a command's source and target block ranges.
TEST(SourceInfoTest, Overlaps) {
  ASSERT_TRUE(SourceInfo("1d74d1a60332fd38cf9405f1bae67917888da6cb",
                         RangeSet({ { 7, 9 }, { 16, 20 } }), {}, {})
                  .Overlaps(TargetInfo("1d74d1a60332fd38cf9405f1bae67917888da6cb",
                                       RangeSet({ { 7, 9 }, { 16, 20 } }))));

  ASSERT_TRUE(SourceInfo("1d74d1a60332fd38cf9405f1bae67917888da6cb",
                         RangeSet({ { 7, 9 }, { 16, 20 } }), {}, {})
                  .Overlaps(TargetInfo("1d74d1a60332fd38cf9405f1bae67917888da6cb",
                                       RangeSet({ { 4, 7 }, { 16, 23 } }))));

  ASSERT_FALSE(SourceInfo("1d74d1a60332fd38cf9405f1bae67917888da6cb",
                          RangeSet({ { 7, 9 }, { 16, 20 } }), {}, {})
                   .Overlaps(TargetInfo("1d74d1a60332fd38cf9405f1bae67917888da6cb",
                                        RangeSet({ { 9, 16 } }))));
}

TEST(SourceInfoTest, Overlaps_EmptySourceOrTarget) {
  ASSERT_FALSE(SourceInfo().Overlaps(TargetInfo()));

  ASSERT_FALSE(SourceInfo().Overlaps(
      TargetInfo("1d74d1a60332fd38cf9405f1bae67917888da6cb", RangeSet({ { 7, 9 }, { 16, 20 } }))));

  ASSERT_FALSE(SourceInfo("1d74d1a60332fd38cf9405f1bae67917888da6cb",
                          RangeSet({ { 7, 9 }, { 16, 20 } }), {}, {})
                   .Overlaps(TargetInfo()));
}

// Stashed blocks do not participate in overlap detection — only on-disk ranges do.
TEST(SourceInfoTest, Overlaps_WithStashes) {
  ASSERT_FALSE(SourceInfo("a6cbdf3f416960f02189d3a814ec7e9e95c44a0d",
                          RangeSet({ { 81, 175 }, { 265, 266 } }),  // source ranges
                          RangeSet({ { 0, 94 }, { 95, 96 } }),      // source location
                          { StashInfo("9eedf00d11061549e32503cadf054ec6fbfa7a23",
                                      RangeSet({ { 94, 95 } })) })
                   .Overlaps(TargetInfo("1d74d1a60332fd38cf9405f1bae67917888da6cb",
                                        RangeSet({ { 175, 265 } }))));

  ASSERT_TRUE(SourceInfo("a6cbdf3f416960f02189d3a814ec7e9e95c44a0d",
                         RangeSet({ { 81, 175 }, { 265, 266 } }),  // source ranges
                         RangeSet({ { 0, 94 }, { 95, 96 } }),      // source location
                         { StashInfo("9eedf00d11061549e32503cadf054ec6fbfa7a23",
                                     RangeSet({ { 94, 95 } })) })
                  .Overlaps(TargetInfo("1d74d1a60332fd38cf9405f1bae67917888da6cb",
                                       RangeSet({ { 265, 266 } }))));
}

// The block size should be specified by the caller of ReadAll (i.e. from Command instance during
// normal run).
constexpr size_t kBlockSize = 4096;

// ReadAll fills the buffer from the block reader; result is verified via SHA-1.
TEST(SourceInfoTest, ReadAll) {
  // "2727756cfee3fbfe24bf5650123fd7743d7b3465" is the SHA-1 hex digest of 8192 * 'a'.
  const SourceInfo source("2727756cfee3fbfe24bf5650123fd7743d7b3465", RangeSet({ { 0, 2 } }), {},
                          {});
  auto block_reader = [](const RangeSet& src, std::vector<uint8_t>* block_buffer) -> int {
    std::fill_n(block_buffer->begin(), src.blocks() * kBlockSize, 'a');
    return 0;
  };
  auto stash_reader = [](const std::string&, std::vector<uint8_t>*) -> int { return 0; };
  std::vector<uint8_t> buffer(source.blocks() * kBlockSize);
  ASSERT_TRUE(source.ReadAll(&buffer, kBlockSize, block_reader, stash_reader));
  ASSERT_EQ(source.blocks() * kBlockSize, buffer.size());

  uint8_t digest[SHA_DIGEST_LENGTH];
  SHA1(buffer.data(), buffer.size(), digest);
  ASSERT_EQ(source.hash(), print_sha1(digest));
}

// ReadAll interleaves block-reader data and stash-reader data per source location.
TEST(SourceInfoTest, ReadAll_WithStashes) {
  const SourceInfo source(
      // SHA-1 hex digest of 8192 * 'a' + 4096 * 'b'.
      "ee3ebea26130769c10ad13604712100346d48660", RangeSet({ { 0, 2 } }), RangeSet({ { 0, 2 } }),
      { StashInfo("1e41f7a59e80c6eb4dc043caae80d273f130bed8", RangeSet({ { 2, 3 } })) });
  auto block_reader = [](const RangeSet& src, std::vector<uint8_t>* block_buffer) -> int {
    std::fill_n(block_buffer->begin(), src.blocks() * kBlockSize, 'a');
    return 0;
  };
  auto stash_reader = [](const std::string&, std::vector<uint8_t>* stash_buffer) -> int {
    std::fill_n(stash_buffer->begin(), kBlockSize, 'b');
    return 0;
  };
  std::vector<uint8_t> buffer(source.blocks() * kBlockSize);
  ASSERT_TRUE(source.ReadAll(&buffer, kBlockSize, block_reader, stash_reader));
  ASSERT_EQ(source.blocks() * kBlockSize, buffer.size());

  uint8_t digest[SHA_DIGEST_LENGTH];
  SHA1(buffer.data(), buffer.size(), digest);
  ASSERT_EQ(source.hash(), print_sha1(digest));
}

TEST(SourceInfoTest, ReadAll_BufferTooSmall) {
  const SourceInfo source("2727756cfee3fbfe24bf5650123fd7743d7b3465", RangeSet({ { 0, 2 } }), {},
                          {});
  auto block_reader = [](const RangeSet&, std::vector<uint8_t>*) -> int { return 0; };
  auto stash_reader = [](const std::string&, std::vector<uint8_t>*) -> int { return 0; };
  std::vector<uint8_t> buffer(source.blocks() * kBlockSize - 1);
  ASSERT_FALSE(source.ReadAll(&buffer, kBlockSize, block_reader, stash_reader));
}

// A failure from either reader propagates as a ReadAll failure.
TEST(SourceInfoTest, ReadAll_FailingReader) {
  const SourceInfo source(
      "ee3ebea26130769c10ad13604712100346d48660", RangeSet({ { 0, 2 } }), RangeSet({ { 0, 2 } }),
      { StashInfo("1e41f7a59e80c6eb4dc043caae80d273f130bed8", RangeSet({ { 2, 3 } })) });
  std::vector<uint8_t> buffer(source.blocks() * kBlockSize);
  auto failing_block_reader = [](const RangeSet&, std::vector<uint8_t>*) -> int { return -1; };
  auto stash_reader = [](const std::string&, std::vector<uint8_t>*) -> int { return 0; };
  ASSERT_FALSE(source.ReadAll(&buffer, kBlockSize, failing_block_reader, stash_reader));

  auto block_reader = [](const RangeSet&, std::vector<uint8_t>*) -> int { return 0; };
  auto failing_stash_reader = [](const std::string&, std::vector<uint8_t>*) -> int { return -1; };
  ASSERT_FALSE(source.ReadAll(&buffer, kBlockSize, block_reader, failing_stash_reader));
}

// Whole transfer-list parsing: 4 header lines followed by commands.
TEST(TransferListTest, Parse) {
  std::vector<std::string> input_lines{
    "4",  // version
    "2",  // total blocks
    "1",  // max stashed entries
    "1",  // max stashed blocks
    "stash 1d74d1a60332fd38cf9405f1bae67917888da6cb 2,0,1",
    "move 1d74d1a60332fd38cf9405f1bae67917888da6cb 2,0,1 1 2,0,1",
  };

  std::string err;
  TransferList transfer_list = TransferList::Parse(android::base::Join(input_lines, '\n'), &err);
  ASSERT_TRUE(static_cast<bool>(transfer_list));
  ASSERT_EQ(4, transfer_list.version());
  ASSERT_EQ(2, transfer_list.total_blocks());
  ASSERT_EQ(1, transfer_list.stash_max_entries());
  ASSERT_EQ(1, transfer_list.stash_max_blocks());
  ASSERT_EQ(2U, transfer_list.commands().size());
  ASSERT_EQ(Command::Type::STASH, transfer_list.commands()[0].type());
  ASSERT_EQ(Command::Type::MOVE, transfer_list.commands()[1].type());
}

TEST(TransferListTest, Parse_InvalidCommand) {
  std::vector<std::string> input_lines{
    "4",  // version
    "2",  // total blocks
    "1",  // max stashed entries
    "1",  // max stashed blocks
    "stash 1d74d1a60332fd38cf9405f1bae67917888da6cb 2,0,1",
    "move 1d74d1a60332fd38cf9405f1bae67917888da6cb 2,0,1 1",
  };

  std::string err;
  TransferList transfer_list = TransferList::Parse(android::base::Join(input_lines, '\n'), &err);
  ASSERT_FALSE(static_cast<bool>(transfer_list));
}

TEST(TransferListTest, Parse_ZeroTotalBlocks) {
  std::vector<std::string> input_lines{
    "4",  // version
    "0",  // total blocks
    "0",  // max stashed entries
    "0",  // max stashed blocks
  };

  std::string err;
  TransferList transfer_list = TransferList::Parse(android::base::Join(input_lines, '\n'), &err);
  ASSERT_TRUE(static_cast<bool>(transfer_list));
  ASSERT_EQ(4, transfer_list.version());
  ASSERT_EQ(0, transfer_list.total_blocks());
  ASSERT_EQ(0, transfer_list.stash_max_entries());
  ASSERT_EQ(0, transfer_list.stash_max_blocks());
  ASSERT_TRUE(transfer_list.commands().empty());
}
dimitar9111/JavaScript-Core
JS Basics Exercises/08. Print Numbers in Reversed Order.js
<filename>JS Basics Exercises/08. Print Numbers in Reversed Order.js function printReversed(args) { let numbers = args.map(Number); for (let i = numbers.length - 1; i >= 0; i--){ console.log(numbers[i]); } }
adehtiarov/intellij-community
platform/platform-impl/src/com/intellij/internal/statistic/collectors/legacy/plugins/LegacyNonBundledPluginsUsagesCollector.java
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.internal.statistic.collectors.legacy.plugins; import com.intellij.ide.plugins.IdeaPluginDescriptor; import com.intellij.ide.plugins.PluginManagerCore; import com.intellij.internal.statistic.UsagesCollector; import com.intellij.internal.statistic.beans.GroupDescriptor; import com.intellij.internal.statistic.beans.UsageDescriptor; import com.intellij.util.containers.ContainerUtil; import org.jetbrains.annotations.NotNull; import java.util.List; import java.util.Set; @Deprecated // to be removed in 2018.2 public class LegacyNonBundledPluginsUsagesCollector extends UsagesCollector { private static final String GROUP_ID = "non-bundled-plugins"; @NotNull public GroupDescriptor getGroupId() { return GroupDescriptor.create(GROUP_ID, GroupDescriptor.HIGHER_PRIORITY); } @NotNull public Set<UsageDescriptor> getUsages() { final IdeaPluginDescriptor[] plugins = PluginManagerCore.getPlugins(); final List<IdeaPluginDescriptor> nonBundledEnabledPlugins = ContainerUtil.filter(plugins, d -> d.isEnabled() && !d.isBundled() && d.getPluginId() != null); return ContainerUtil.map2Set(nonBundledEnabledPlugins, descriptor -> new UsageDescriptor(descriptor.getPluginId().getIdString(), 1)); } }
FateRevoked/mage
Mage.Sets/src/mage/cards/m/MaelstromBlockade.java
package mage.cards.m; import java.util.UUID; import mage.abilities.effects.common.ExileTargetEffect; import mage.cards.CardImpl; import mage.cards.CardSetInfo; import mage.constants.CardType; import mage.target.common.TargetAttackingCreature; /** * * @author NinthWorld */ public final class MaelstromBlockade extends CardImpl { public MaelstromBlockade(UUID ownerId, CardSetInfo setInfo) { super(ownerId, setInfo, new CardType[]{CardType.INSTANT}, "{2}{W/B}"); // Exile target attacking creature. this.getSpellAbility().addEffect(new ExileTargetEffect()); this.getSpellAbility().addTarget(new TargetAttackingCreature()); } public MaelstromBlockade(final MaelstromBlockade card) { super(card); } @Override public MaelstromBlockade copy() { return new MaelstromBlockade(this); } }
acrucetta/Chicago_COVI_WebApp
env/lib/python3.8/site-packages/plotly/graph_objs/splom/dimension/__init__.py
<filename>env/lib/python3.8/site-packages/plotly/graph_objs/splom/dimension/__init__.py<gh_stars>1000+ import sys if sys.version_info < (3, 7): from ._axis import Axis else: from _plotly_utils.importers import relative_import __all__, __getattr__, __dir__ = relative_import(__name__, [], ["._axis.Axis"])
kdonovan/searchbot
lib/searchbot/sources/acquisitions_direct/listings_page.rb
<reponame>kdonovan/searchbot class Searchbot::Sources::AcquisitionsDirect::ListingsPage < Searchbot::Generic::ListingsPage def raw_listings doc.css(".portfolio-item:not(.sold):not(.under-loi)").select do |l| !l.at('h2').text.include?('(Coming Soon)') end end def more_pages_available? false end end
zbx911/CHRLINE
CHRLINE/poll.py
<gh_stars>0 # -*- coding: utf-8 -*- import os, sys, threading, time, traceback class Poll(object): def __init__(self): pass def __fetchOps(self, count=100): fetchOps = self.fetchOps if self.DEVICE_TYPE in self.SYNC_SUPPORT: fetchOps = self.sync ops = fetchOps(self.revision, count) if 'error' in ops: raise Exception(ops['error']) for op in ops: opType = self.checkAndGetValue(op, 'type', 3) if opType != -1: self.setRevision(self.checkAndGetValue(op, 'revision', 1)) yield op def __execute(self, op, func): try: func(op, self) except Exception as e: self.log(traceback.format_exc()) def setRevision(self, revision): if revision is None: self.log(f'revision is None!!') revision = 0 self.revision = max(revision, self.revision) def trace(self, func, isThreading=True): while self.is_login: for op in self.__fetchOps(): opType = self.checkAndGetValue(op, 'type', 'val_3', 3) if opType != 0 and opType != -1: if isThreading: _td = threading.Thread(target=self.__execute, args=(op, func)) _td.daemon = True _td.start() else: self.__execute(op, func)
nanaze/error-prone
core/src/test/java/com/google/errorprone/bugpatterns/ClassNamedLikeTypeParameterTest.java
/* * Copyright 2018 The Error Prone Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.errorprone.bugpatterns; import com.google.errorprone.CompilationTestHelper; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for {@link ClassNamedLikeTypeParameter} */ @RunWith(JUnit4.class) public class ClassNamedLikeTypeParameterTest { private final CompilationTestHelper compilationHelper = CompilationTestHelper.newInstance(ClassNamedLikeTypeParameter.class, getClass()); @Test public void positiveCases() { compilationHelper .addSourceLines( "Test.java", "public class Test {", " // BUG: Diagnostic contains: ", " static class A {}", " // BUG: Diagnostic contains: ", " static class B2 {}", " // BUG: Diagnostic contains: ", " static class FooT {}", " // BUG: Diagnostic contains: ", " static class X {}", "}") .doTest(); } @Test public void negativeCases() { compilationHelper .addSourceLines( "Test.java", "public class Test {", " static class CAPITALT {}", " static class MyClass {}", " static class FooBar {}", " static class HasGeneric<X> {", " public <T> void genericMethod(X foo, T bar) {}", " }", "}") .doTest(); } }
davidlrichmond/macports-ports
net/airctl/files/patch-airctl.c
<filename>net/airctl/files/patch-airctl.c<gh_stars>1000+ --- airctl.c Tue Jun 3 03:13:35 2003 +++ airctl.c.new Tue Apr 6 20:54:40 2004 @@ -28,7 +28,6 @@ #include <sys/time.h> /* select */ #include <sys/types.h> /* select */ -#include <sys/select.h> /* select */ #include <sys/socket.h> #include <netinet/in.h>
Ludaxord/ConceptEngine
ConceptEngine/ConceptEngine/Concept-Engine/Core/Platform/Windows/Cursor/CEWindowsCursor.h
#pragma once #include "../../Generic/Cursor/CECursor.h" #include <Windows.h> namespace ConceptEngine::Core::Platform::Windows::Cursor { class CEWindowsCursor final : public Generic::Cursor::CECursor { public: CEWindowsCursor(); ~CEWindowsCursor() override; bool Init(LPCWSTR cursorName); virtual void* GetNativeHandle() override { return reinterpret_cast<void*>(Cursor); } HCURSOR GetHandle() const { return Cursor; } static CECursor* Create(LPCWSTR cursorName); private: HCURSOR Cursor; LPCWSTR CursorName; }; }
ucsd-progsys/csolve-bak
tests/postests/glib-2.24.0/int-glist.c
#include <stdlib.h> #include <csolve.h> typedef struct _SortedList SortedList; typedef SortedList * NNSTART NNREF(PSIZE(12)) NNREF(V > 0) sortedlistptr; struct _SortedList { int FINAL d; sortedlistptr NNSTART NNREF((DEREF([V]): int) >= d) n; sortedlistptr NNSTART NNREF((DEREF([V]): int) <= d) p; }; extern void assert_sorted (SortedList *) OKEXTERN; #define _g_list_alloc0() malloc(sizeof (GList)) #define _g_list_alloc() malloc(sizeof (GList)) typedef struct _GList GList; struct _GList { int data; GList *next; GList *prev; }; GList* LOC(L) g_list_remove (GList * LOC(L) list, int data) { GList *tmp; tmp = list; while (tmp) { if (tmp->data != data) tmp = tmp->next; else { if (tmp->prev) tmp->prev->next = tmp->next; if (tmp->next) tmp->next->prev = tmp->prev; if (list == tmp) list = list->next; // pmr: wretched polymorphism? /* _g_list_free1 (tmp); */ break; } } return list; } GList * LOC(L) g_list_remove_all (GList * LOC(L) list, int data) { GList *tmp = list; while (tmp) { if (tmp->data != data) tmp = tmp->next; else { GList *next = tmp->next; if (tmp->prev) tmp->prev->next = next; else list = next; if (next) next->prev = tmp->prev; // pmr: ditto polymorphism /* _g_list_free1 (tmp); */ tmp = next; } } return list; } GList * LOC(L) g_list_remove_link (GList * LOC(L) list, GList * LOC(L) link) { if (link) { if (link->prev) link->prev->next = link->next; if (link->next) link->next->prev = link->prev; if (link == list) list = list->next; link->next = NULL; link->prev = NULL; } return list; } GList * LOC(L) g_list_nth (GList * LOC(L) list, int n) { while ((n-- > 0) && list) list = list->next; return list; } GList * LOC(L) g_list_nth_prev (GList * LOC(L) list, int n) { while ((n-- > 0) && list) list = list->prev; return list; } GList * LOC(L) g_list_insert_sorted (GList * LOC(L) list, int data) { GList *tmp_list = list; GList *new_list; int cmp; if (!list) { new_list = _g_list_alloc0 (); new_list->data = data; return new_list; } cmp = data > tmp_list->data; while 
((tmp_list->next) && (cmp > 0)) { tmp_list = tmp_list->next; cmp = data > tmp_list->data; } new_list = _g_list_alloc0 (); new_list->data = data; if ((!tmp_list->next) && (cmp > 0)) { tmp_list->next = new_list; new_list->prev = tmp_list; return list; } if (tmp_list->prev) { // pmr: This whole block is annotation, for which I introduct the notation: // !{ // pmr: Fold workaround (can't assume about tmp_list->prev directly) GList *p = tmp_list->prev; int a = csolve_assume (p->data <= data); p->next = new_list; new_list->prev = p; // }! } new_list->next = tmp_list; tmp_list->prev = new_list; if (tmp_list == list) return new_list; else return list; } void main () { GList *head = NULL; while (1) { switch (nondet ()) { case 0: head = g_list_insert_sorted (head, nondet ()); break; case 1: head = g_list_nth (head, nondet ()); break; case 2: head = g_list_nth_prev (head, nondet ()); break; case 3: head = g_list_remove (head, nondet ()); break; case 4: head = g_list_remove_all (head, nondet ()); break; case 5: head = g_list_remove_link (head, g_list_nth (head, nondet ())); break; default: } if (head) { assert_sorted (head); } } }
kushaldas/sunder
src/components/Modal.js
<gh_stars>100-1000 import React, { Component } from 'react'; import PropTypes from 'prop-types'; import Panel from './Panel'; import './Modal.scss'; export default class Modal extends Component { static propTypes = { children: PropTypes.node, onClose: PropTypes.func } render() { return ( <div className="modal overlay" onClick={this.props.onClose}> <Panel title="" onClick={(event) => event.stopPropagation()}> {this.props.children} </Panel> </div> ); } }
Ifdevil/RPC
rpc_config/rpc_config_api/src/main/java/com/rpc/config/RegistryConfig.java
package com.rpc.config; import com.rpc.common.util.StringUtils; import com.rpc.config.supports.Parameter; /** * 注册中心配置 */ public class RegistryConfig { private String address; // ======================== constructor ======================== public RegistryConfig(){ } public RegistryConfig(String address){ setAddress(address); } // ==================== method =============================== @Parameter(excluded = true) public boolean isValid() { // empty protocol will default to 'dubbo' return !StringUtils.isEmpty(address); } // ======================getter and setter ======================= public String getAddress() { return address; } public void setAddress(String address) { this.address = address; } }
iris-dni/iris-frontend
src/reducers/images.js
import { assign } from 'lodash/object';
import { SUBMITTING_IMAGE, CREATED_IMAGE } from 'actions/actionTypes';

// Tracks uploaded image ids plus an in-flight flag.
const initialState = {
  images: [],
  isLoading: false
};

/**
 * Images reducer.
 *
 * SUBMITTING_IMAGE -> marks an upload as in progress.
 * CREATED_IMAGE    -> appends the created file's id and clears the flag.
 * Anything else    -> state unchanged.
 */
export default function images (state = initialState, action) {
  switch (action.type) {
    case SUBMITTING_IMAGE:
      return assign({}, state, { isLoading: true });
    case CREATED_IMAGE: {
      // Braces give the `const` its own block scope: a lexical declaration
      // directly inside a switch case is shared across all cases
      // (ESLint no-case-declarations) and can throw on fall-through.
      const appendedImages = state.images.concat([action.file.id]);
      return assign({}, state, {
        images: appendedImages,
        isLoading: false
      });
    }
    default:
      return state;
  }
}
m1nuz/ironforge
ironforge/src/core/game.cpp
<gh_stars>0
// Game bootstrap and main loop: initializes engine subsystems from a JSON
// config, runs a fixed-timestep update/render loop, and tears everything down.
#include <vector>
#include <memory>
#include <algorithm>
#include <functional>
#include <SDL2/SDL.h>
#include <SDL2/SDL_ttf.h>
#include <xargs.hpp>
#include <core/journal.hpp>
#include <core/json.hpp>
#include <core/assets.hpp>
#include <core/input.hpp>
#include <core/game.hpp>
#include <video/video.hpp>
#include <renderer/renderer.hpp>
#include <scene/scene.hpp>
#include "game_detail.hpp"
#include "config.h"

namespace game {

// Drain the SDL event queue: ESC clears app.running (exit request); every
// event is also forwarded to the input subsystem and the current scene.
static auto process_events(instance_t &app) -> void {
    SDL_Event ev = {};

    while (SDL_PollEvent(&ev)) {
        if (ev.type == SDL_KEYDOWN)
            if (ev.key.keysym.sym == SDLK_ESCAPE)
                app.running = false; // Exit when esc

        input::process_event(app, ev);
        //ui::process_event(in.uic, ev);
        scene::process_event(app.current_scene(), ev);
    }
}

// Shut subsystems down: renderer first, then video, scenes, and assets.
static auto cleanup_all(instance_t &app) -> void {
    app.render.reset(nullptr);
    video::cleanup(app.vi);
    scene::cleanup_all(app.scenes);
    assets::cleanup(app.asset_instance);
}

// One fixed-timestep simulation step: input, pending asset work, scene
// logic, then hand finished asset resources over to the video subsystem.
static auto update(instance_t &app, const float dt) -> void {
    input::update(app);
    assets::process(app.asset_instance);
    scene::update(app.current_scene(), dt);
    video::process_resources(app.asset_instance, app.vi);
}

// Render the current scene. `interpolation` is the fraction of a timestep
// left in the accumulator (see launch()).
static auto present(instance_t &app, const float interpolation) -> void {
    using std::placeholders::_1;

    //ui::present(in.uic, std::bind(&renderer::instance::dispath, in.render.get(), _1));
    //scene::present(current_scene(), inst.render, interpolation);
    scene::present(app.vi, app.current_scene(), app.render, interpolation);
}

// Emergency exit path. NOTE(review): logs "Unexpected exit" as critical yet
// terminates with EXIT_SUCCESS — confirm that status code is intentional.
auto quit() noexcept -> void {
    journal::critical(journal::_GAME, "%", "Unexpected exit");

    exit(EXIT_SUCCESS);
}

// Apply `locale` to both the C runtime and the C++ global locale.
// An empty string selects the locale from the environment (C standard).
static auto setup_locale(const char *locale) -> void {
    std::setlocale(LC_ALL, locale);
    std::locale::global(std::locale(locale));
}

// Build a fully initialized engine instance from the JSON config at
// `conf_path`. Returns the instance, or the error code of the first
// subsystem (platform, assets, video, scene, input, renderer) that failed.
auto create(const int argc, const char* argv[], std::string_view conf_path, const bool fullpath_only) -> game_result {
    using namespace std;

    (void)argc;
    (void)argv;

    journal::info( journal::_GAME, "IRONFORGE Engine %", IRONFORGE_ENGINE_VERSION);

    // Use conf_path verbatim when it contains no ".." and fullpath_only is
    // false; otherwise prepend the base path. NOTE(review): this does not
    // quite match the original comment ("if relative and not fullpath_only
    // add base_path") — verify which behavior is intended.
    const auto cpath = (conf_path.find("..") == string::npos) && !fullpath_only ? string{conf_path} : detail::get_base_path() + string{conf_path};

    auto contents = assets::get_config(cpath);
    if (!contents)
        return make_error_code(errc::init_conf);

    auto j = json::parse(contents.value());

    // SDL and SDL_ttf are released through atexit handlers.
    if (SDL_Init(SDL_INIT_EVERYTHING) < 0)
        return make_error_code(errc::init_platform);
    atexit(SDL_Quit);

    if (TTF_Init() < 0)
        return make_error_code(errc::init_platform);
    atexit(TTF_Quit);

    instance_t ctx;

    // Assets: open every archive listed under "assets" (base path prepended).
    {
        auto asset_inst = assets::create_instance(assets::create_default_readers());
        if (holds_alternative<error_code>(asset_inst)) {
            journal::error(journal::_GAME, "%", "Can't append readers");
            return make_error_code(errc::init_assets);
        }

        ctx.asset_instance = move(get<assets::instance_t>(asset_inst));

        if (j.find("assets") == j.end())
            return make_error_code(errc::read_assets);

        for (auto &a : j["assets"]) {
            journal::debug(journal::_GAME, "%", a.get<string>());

            if (!assets::open(ctx.asset_instance, detail::get_base_path() + a.get<string>()))
                return make_error_code(std::errc::io_error);
        }
    }

    // Video: initialized from the "video" section of the config.
    {
        if (j.find("video") == j.end())
            return make_error_code(errc::init_video);

        const auto video_info = j["video"];

        auto vc = video::init(ctx.asset_instance, video_info);
        if (!video::is_ok(vc))
            return get<error_code>(vc);

        ctx.vi = get<video::instance_t>(vc);
    }

    // Scene: load only the scene named by "start_scene" from the "scenes" list.
    {
        if (j.find("scenes") == j.end())
            return make_error_code(errc::read_scenes);

        string start_scene;
        if (j.find("start_scene") != j.end()) {
            start_scene = j["start_scene"].get<string>();
        }

        if (start_scene.empty())
            return make_error_code(errc::no_start_scene);

        scene::reset_scripts_engine();

        auto any_loaded = false;
        for (auto &sc : j["scenes"])
            if (start_scene == sc.get<string>()) {
                auto res = scene::load(ctx.asset_instance, ctx.vi, start_scene);
                if (!scene::is_ok(res))
                    return get<error_code>(res);
                any_loaded = true;
                ctx.scenes.push_back(get<scene::instance_t>(res));
            }

        // NOTE(review): setup_bindings() runs before the any_loaded check —
        // if no scene matched, current_scene() is called on an empty scene
        // list. Verify that current_scene() tolerates that.
        scene::setup_bindings(ctx.current_scene());

        if (!any_loaded)
            return make_error_code(std::errc::io_error);
    }

    // Input (game controllers).
    if (!input::init(ctx))
        return make_error_code(errc::init_gamecontrollers);

    // Renderer: type from "renderer.type" (default "null"); optional
    // render-to-texture flag from "renderer.to_texture".
    {
        if (j.find("renderer") == j.end())
            return make_error_code(std::errc::io_error);

        const auto video_info = j["video"]; // NOTE(review): appears unused in this block
        const auto renderer_info = j["renderer"];
        const auto renderer_type = renderer_info.find("type") != renderer_info.end() ? renderer_info["type"].get<string>() : "null";
        const bool is_offscreen_rendering = renderer_info.find("to_texture") != renderer_info.end() ? renderer_info["to_texture"].get<bool>() : false;

        uint32_t renderer_falgs = 0;
        if (is_offscreen_rendering)
            renderer_falgs |= renderer::RENDER_TO_TEXTURE_BIT;

        ctx.render = renderer::create_renderer(renderer_type, ctx.vi, renderer_falgs);
    }

    // UI
    // {
    //     const auto ui_info = j["ui"];
    //     const auto ui_type = ui_info.find("type") != ui_info.end() ? ui_info["type"].get<string>() : string{};
    //     if (ui_type == "immediate") {
    //         const auto styles = detail::read_ui_styles(ctx.asset_instance, ui_info);
    //         if (auto imui = imui::create(ctx.vi, styles); imui)
    //             ctx.imui = imui.value();
    //     }
    // }

    // Locale: empty string picks up the user's environment locale.
    setup_locale("");

    return std::move(ctx);
}

// Run the main loop until app.running is cleared: accumulate real frame
// time (capped at 0.2 s per frame so slow frames cannot spiral), consume it
// in fixed `timestep` slices via update(), then present() with the leftover
// fraction as interpolation. `timestep` is the fixed step length defined
// elsewhere — presumably game_detail.hpp; confirm. Returns EXIT_SUCCESS.
auto launch(game::instance_t &app) -> int {
    using namespace std;

    /*auto loader = std::thread(assets::process_load, std::ref(app.asset_instance));

    assets::get_text(app.asset_instance, "gamecontrollerdb.txt", [] (const std::optional<assets::text_data_t> res) {
        if (res) {
            journal::info(journal::_GAME, "%", res.value());
        }
    });*/

    app.current_time = 0ull;
    app.last_time = 0ull;
    app.timesteps = 0ull;
    app.delta_accumulator = 0.0f;

    while (app.running) {
        process_events(app);

        app.last_time = app.current_time;
        app.current_time = SDL_GetPerformanceCounter();

        // Frame delta in seconds from the high-resolution performance counter.
        const auto freq = SDL_GetPerformanceFrequency();
        const auto dt = static_cast<float>(static_cast<double>(app.current_time - app.last_time) / static_cast<double>(freq));

        app.delta_accumulator += clamp(dt, 0.f, 0.2f);

        while (app.delta_accumulator >= timestep) {
            app.delta_accumulator -= timestep;
            update(app, timestep);
            app.timesteps++;
        }

        present(app, app.delta_accumulator / timestep);
    }

    cleanup_all(app);

    /*if (loader.joinable())
        loader.join();*/

    return EXIT_SUCCESS;
}

} // namespace game
MothOnMars/search-gov
db/migrate/20150804163342_delete_public_key_upload_notification_template.rb
class DeletePublicKeyUploadNotificationTemplate < ActiveRecord::Migration
  # Removes the obsolete 'public_key_upload_notification' email template.
  # Scopes with `where` before `destroy_all`: passing conditions directly
  # to destroy_all is deprecated (and removed in Rails 5+); the scoped form
  # behaves identically and runs destroy callbacks per record.
  def up
    EmailTemplate.where(name: 'public_key_upload_notification').destroy_all
  end

  # Irreversible: the destroyed template's contents cannot be restored here.
  def down
  end
end
cloudscheduler/cloudscheduler
cloudscheduler-core/src/main/java/io/github/cloudscheduler/service/JobServiceImpl.java
/* * Copyright (c) 2018. cloudscheduler * All rights reserved. * * Permission is hereby granted, free of charge, to any person obtaining * a copy of this software and associated documentation files (the * "Software"), to deal in the Software without restriction, including * without limitation the rights to use, copy, modify, merge, publish, * distribute, sublicense, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject to * the following conditions: * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ package io.github.cloudscheduler.service; import static java.util.stream.Collectors.collectingAndThen; import io.github.cloudscheduler.EventType; import io.github.cloudscheduler.JobException; import io.github.cloudscheduler.Node; import io.github.cloudscheduler.codec.EntityCodecProvider; import io.github.cloudscheduler.codec.EntityDecoder; import io.github.cloudscheduler.model.JobDefinition; import io.github.cloudscheduler.model.JobDefinitionState; import io.github.cloudscheduler.model.JobDefinitionStatus; import io.github.cloudscheduler.model.JobInstance; import io.github.cloudscheduler.model.JobInstanceState; import io.github.cloudscheduler.model.JobRunStatus; import io.github.cloudscheduler.util.CompletableFutureUtils; import io.github.cloudscheduler.util.ZooKeeperUtils; import io.github.cloudscheduler.util.retry.RetryStrategy; import java.io.IOException; import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Predicate; import java.util.function.Supplier; import java.util.stream.Collectors; import org.apache.zookeeper.CreateMode; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.Transaction; import org.apache.zookeeper.ZooKeeper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * JobService implementation. This class will initial zookeeper znodes in constructor. Since using * async zookeeper operation, it extends CompletableFuture, once it complete means zookeeper znodes * initialized, so we can chain other operations with it. 
* * @author <NAME> */ public class JobServiceImpl implements JobService { private static final Logger logger = LoggerFactory.getLogger(JobServiceImpl.class); static final String ZK_ROOT_KEY = "cloud.scheduler.zookeeper.chroot"; static final String ZK_ROOT_DEFAULT = "/scheduler"; static final String JOB_DEF_ROOT = "/jobDefs"; static final String JOB_INSTANCE_ROOT = "/jobInstances"; static final String WORKER_NODE_ROOT = "/workers"; static final String STATUS_PATH = "status"; private final Supplier<ZooKeeper> zooKeeperSupplier; private final RetryStrategy retryStrategy; private final String jobDefRoot; private final String jobInstanceRoot; private final String workerNodeRoot; private final EntityCodecProvider codecProvider; private final CompletableFuture<Void> _self; /** * Constructor. * * @param zooKeeper zooKeeper */ public JobServiceImpl(ZooKeeper zooKeeper) { this(() -> zooKeeper); } /** * Constructor. * * @param zooKeeper zooKeeper supplier */ public JobServiceImpl(Supplier<ZooKeeper> zooKeeper) { this( zooKeeper, RetryStrategy.newBuilder() .fibonacci(250L) .random() .maxDelay(3000L) .maxRetry(15) .retryOn(Collections.singletonList(KeeperException.class)) .stopAt( Arrays.asList( KeeperException.NoAuthException.class, KeeperException.SessionExpiredException.class)) .build()); } JobServiceImpl(Supplier<ZooKeeper> zooKeeperSupplier, RetryStrategy retryStrategy) { Objects.requireNonNull(zooKeeperSupplier, "ZooKeeper is mandatory"); Objects.requireNonNull(retryStrategy, "RetryStrategy is mandatory"); logger.trace("New JobServiceImpl instance with zk: {}", zooKeeperSupplier.get()); this.zooKeeperSupplier = zooKeeperSupplier; this.retryStrategy = retryStrategy; String zkRoot = System.getProperty(ZK_ROOT_KEY, ZK_ROOT_DEFAULT); jobDefRoot = zkRoot + JOB_DEF_ROOT; jobInstanceRoot = zkRoot + JOB_INSTANCE_ROOT; workerNodeRoot = zkRoot + WORKER_NODE_ROOT; codecProvider = EntityCodecProvider.getCodecProvider(); _self = new CompletableFuture<>(); CompletableFuture.allOf( 
retryStrategy.call( () -> ZooKeeperUtils.createZnodes(zooKeeperSupplier.get(), jobDefRoot)), retryStrategy.call( () -> ZooKeeperUtils.createZnodes(zooKeeperSupplier.get(), jobInstanceRoot)), retryStrategy.call( () -> ZooKeeperUtils.createZnodes(zooKeeperSupplier.get(), workerNodeRoot))) .whenComplete( (v, cause) -> { if (cause != null) { _self.completeExceptionally(cause); } else { _self.complete(null); } }); } @Override public CompletableFuture<Node> registerWorkerAsync(Node node) { logger.debug("Register worker node: {}", node.getId()); return retryOperation( () -> ZooKeeperUtils.exists(zooKeeperSupplier.get(), getWorkerNodePath(node.getId())) .thenCompose( version -> { if (version == null) { return ZooKeeperUtils.createEphemeralZnode( zooKeeperSupplier.get(), getWorkerNodePath(node.getId()), null) .thenApply(s -> node); } else { return CompletableFuture.completedFuture(node); } })); } @Override public CompletableFuture<Node> unregisterWorkerAsync(Node node) { logger.debug("Unregister worker node: {}", node.getId()); return retryOperation( () -> ZooKeeperUtils.exists(zooKeeperSupplier.get(), getWorkerNodePath(node.getId())) .thenCompose( version -> { if (version != null) { return ZooKeeperUtils.deleteIfExists( zooKeeperSupplier.get(), getWorkerNodePath(node.getId())) .thenApply(s -> node); } else { return CompletableFuture.completedFuture(node); } })); } @Override public CompletableFuture<List<UUID>> getCurrentWorkersAsync() { return getCurrentWorkersAsync(null); } @Override public CompletableFuture<List<UUID>> getCurrentWorkersAsync(Consumer<EventType> listener) { return retryOperation( () -> ZooKeeperUtils.getChildren(zooKeeperSupplier.get(), workerNodeRoot, listener) .thenApply( children -> children.stream().map(UUID::fromString).collect(Collectors.toList()))); } @Override public CompletableFuture<JobDefinition> saveJobDefinitionAsync(JobDefinition jobDef) { Objects.requireNonNull(jobDef, "JobDefinition is mandatory"); logger.debug("Saving job definition with 
id: {}", jobDef.getId()); return retryOperation( () -> ZooKeeperUtils.exists(zooKeeperSupplier.get(), getJobDefPath(jobDef.getId())) .thenCompose( version -> { if (version != null) { return CompletableFutureUtils.exceptionalCompletableFuture( new IllegalArgumentException( "JobDefinition with id " + jobDef.getId() + " already exist.")); } else { return ZooKeeperUtils.transactionalOperation( zooKeeperSupplier.get(), transaction -> { try { JobDefinitionStatus jobDefinitionStatus = new JobDefinitionStatus(jobDef.getId()); transaction.create( getJobDefPath(jobDef.getId()), codecProvider .getEntityEncoder(JobDefinition.class) .encode(jobDef), ZooKeeperUtils.DEFAULT_ACL, CreateMode.PERSISTENT); transaction.create( getJobDefStatusPath(jobDefinitionStatus.getId()), codecProvider .getEntityEncoder(JobDefinitionStatus.class) .encode(jobDefinitionStatus), ZooKeeperUtils.DEFAULT_ACL, CreateMode.PERSISTENT); return CompletableFuture.completedFuture(jobDef); } catch (IOException e) { throw new RuntimeException(e); } }); } })); } @Override public CompletableFuture<JobDefinition> getJobDefinitionByIdAsync(UUID id) { Objects.requireNonNull(id, "JobDefinition ID is mandatory"); logger.debug("Getting job definition by id: {}", id); return retryOperation( () -> ZooKeeperUtils.readEntity( zooKeeperSupplier.get(), getJobDefPath(id), codecProvider.getEntityDecoder(JobDefinition.class))) .thenApply(n -> n == null ? 
null : n.getEntity()); } @Override public CompletableFuture<Void> deleteJobDefinitionAsync(JobDefinition jobDef) { Objects.requireNonNull(jobDef, "JobDefinition ID is mandatory"); return retryOperation( () -> ZooKeeperUtils.exists(zooKeeperSupplier.get(), getJobDefPath(jobDef.getId())) .thenCompose( version -> { if (version != null) { return ZooKeeperUtils.transactionalOperation( zooKeeperSupplier.get(), transaction -> { CompletableFuture<Void> removeInstanceFuture = getJobInstancesByJobDefAsync(jobDef) .thenCompose( jobIns -> CompletableFuture.allOf( jobIns.stream() .map( jobIn -> ZooKeeperUtils.exists( zooKeeperSupplier.get(), getJobInstancePath( jobIn.getId())) .thenAccept( v -> { if (v != null) { transaction.delete( getJobInstancePath( jobIn.getId()), v); } })) .collect(Collectors.toList()) .toArray(new CompletableFuture[0]))); CompletableFuture<Void> removeStatusFuture = ZooKeeperUtils.exists( zooKeeperSupplier.get(), getJobDefStatusPath(jobDef.getId())) .thenAccept( v -> { if (v != null) { transaction.delete( getJobDefStatusPath(jobDef.getId()), v); } }); return removeInstanceFuture.thenCombine( removeStatusFuture, (__, ___) -> { transaction.delete(getJobDefPath(jobDef.getId()), version); return null; }); }); } else { return CompletableFuture.completedFuture(null); } })); } @Override public CompletableFuture<JobDefinitionStatus> getJobStatusByIdAsync(UUID id) { return getJobStatusByIdAsync(id, null); } @Override public CompletableFuture<JobDefinitionStatus> getJobStatusByIdAsync( UUID id, Consumer<EventType> listener) { Objects.requireNonNull(id, "JobDefinitionStatus ID is mandatory"); logger.debug("Getting JobDefinition status by id: {}", id); return retryOperation( () -> ZooKeeperUtils.readEntity( zooKeeperSupplier.get(), getJobDefStatusPath(id), codecProvider.getEntityDecoder(JobDefinitionStatus.class), listener)) .thenApply(n -> n == null ? 
null : n.getEntity()); } @Override public CompletableFuture<JobInstance> getJobInstanceByIdAsync(UUID id) { return getJobInstanceByIdAsync(id, null); } @Override public CompletableFuture<JobInstance> getJobInstanceByIdAsync( UUID id, Consumer<EventType> listener) { Objects.requireNonNull(id, "JobInstance ID is mandatory"); logger.debug("Getting JobInstance by id: {}", id); return retryOperation( () -> ZooKeeperUtils.readEntity( zooKeeperSupplier.get(), getJobInstancePath(id), codecProvider.getEntityDecoder(JobInstance.class), listener) .thenApply(n -> n == null ? null : n.getEntity())); } @Override public CompletableFuture<Void> deleteJobInstanceAsync(UUID jobInstanceId) { Objects.requireNonNull(jobInstanceId, "JobDefinition ID is mandatory"); logger.debug("Deleting JobInstance by id: {}", jobInstanceId); return retryOperation( () -> ZooKeeperUtils.deleteIfExists( zooKeeperSupplier.get(), getJobInstancePath(jobInstanceId))); } @Override public CompletableFuture<List<UUID>> listAllJobInstanceIdsAsync() { return listAllEntityIdsAsync(jobInstanceRoot); } @Override public CompletableFuture<List<JobInstance>> listAllJobInstancesAsync() { return listJobInstancesAsync(null, null); } @Override public CompletableFuture<List<JobInstance>> listAllJobInstancesAsync( Consumer<EventType> listener) { return listJobInstancesAsync(null, listener); } @Override public CompletableFuture<List<JobInstance>> getJobInstancesByJobDefAsync(JobDefinition jobDef) { return listJobInstancesAsync(job -> jobDef.getId().equals(job.getJobDefId()), null); } private CompletableFuture<List<JobInstance>> listJobInstancesAsync( Predicate<JobInstance> filter, Consumer<EventType> listener) { return listEntitiesAsync( filter, listener, codecProvider.getEntityDecoder(JobInstance.class), jobInstanceRoot, this::getJobInstancePath); } @Override public CompletableFuture<List<UUID>> listAllJobDefinitionIdsAsync() { return listAllEntityIdsAsync(jobDefRoot); } @Override public 
CompletableFuture<List<JobDefinition>> listJobDefinitionsByNameAsync(String name) { return listJobDefinitionsAsync(job -> name.equals(job.getName()), null); } @Override public CompletableFuture<Map<JobDefinition, JobDefinitionStatus>> listJobDefinitionsWithStatusAsync() { return listJobDefinitionsAsync(null, null) .thenCompose( js -> { Map<JobDefinition, JobDefinitionStatus> jobs = new HashMap<>(js.size()); List<CompletableFuture<?>> fs = new ArrayList<>(js.size()); js.forEach( j -> fs.add(getJobStatusByIdAsync(j.getId()).thenAccept(s -> jobs.put(j, s)))); return CompletableFuture.allOf(fs.toArray(new CompletableFuture[0])) .thenApply(v -> jobs); }); } @Override public CompletableFuture<List<JobDefinition>> listAllJobDefinitionsAsync() { return listJobDefinitionsAsync(null, null); } @Override public CompletableFuture<List<JobDefinition>> listAllJobDefinitionsAsync( Consumer<EventType> listener) { return listJobDefinitionsAsync(null, listener); } private CompletableFuture<List<JobDefinition>> listJobDefinitionsAsync( Predicate<JobDefinition> filter, Consumer<EventType> listener) { return listEntitiesAsync( filter, listener, codecProvider.getEntityDecoder(JobDefinition.class), jobDefRoot, this::getJobDefPath); } private <T> CompletableFuture<List<T>> listEntitiesAsync( Predicate<T> filter, Consumer<EventType> listener, EntityDecoder<T> decoder, String rootPath, Function<String, String> pathResolver) { logger.debug("Listing entities{}", filter == null ? " with filter" : ""); return retryOperation( () -> ZooKeeperUtils.getChildren(zooKeeperSupplier.get(), rootPath, listener) .thenCompose( list -> { logger.trace("List entities get total {} records", list.size()); return list.stream() .map( id -> ZooKeeperUtils.readEntity( zooKeeperSupplier.get(), pathResolver.apply(id), decoder) .thenApply(n -> n == null ? 
null : n.getEntity())) .collect( collectingAndThen( Collectors.toList(), l -> CompletableFuture.allOf(l.toArray(new CompletableFuture[0])) .thenApply( __ -> l.stream() .map(CompletableFuture::join) .filter( j -> (j != null && (filter == null || filter.test(j)))) .collect(Collectors.toList())))); })); } @Override public CompletableFuture<JobInstance> scheduleJobInstanceAsync(JobDefinition jobDef) { return scheduleJobInstanceAsync(jobDef, Instant.now()); } @Override public CompletableFuture<JobInstance> scheduleJobInstanceAsync( JobDefinition jobDef, Instant scheduledTime) { Objects.requireNonNull(jobDef, "JobDefinition is mandatory"); Objects.requireNonNull(scheduledTime, "ScheduledTime is mandatory"); logger.debug( "Scheduling JobInstance for JobDefinition with id: {} at {}", jobDef, scheduledTime); return retryOperation( () -> ZooKeeperUtils.readEntity( zooKeeperSupplier.get(), getJobDefStatusPath(jobDef.getId()), codecProvider.getEntityDecoder(JobDefinitionStatus.class)) .thenCompose( jsh -> { if (jsh != null) { return ZooKeeperUtils.transactionalOperation( zooKeeperSupplier.get(), transaction -> { JobInstance jobInstance = new JobInstance(jobDef.getId()); jobInstance.setScheduledTime(scheduledTime); jobInstance.setJobState(JobInstanceState.SCHEDULED); CompletableFuture<Void> f; if (jobDef.isGlobal()) { f = getCurrentWorkersAsync() .thenAccept( nodeIds -> nodeIds.forEach( nodeId -> jobInstance .getRunStatus() .put( nodeId, new JobRunStatus(nodeId)))); } else { f = CompletableFuture.completedFuture(null); } return f.thenCompose( v -> { try { logger.trace( "Create JobInstance, JobInstance id: {}", jobInstance.getId()); transaction.create( getJobInstancePath(jobInstance.getId()), codecProvider .getEntityEncoder(JobInstance.class) .encode(jobInstance), ZooKeeperUtils.DEFAULT_ACL, CreateMode.PERSISTENT); JobDefinitionStatus status = jsh.getEntity(); status .getJobInstanceState() .put(jobInstance.getId(), JobInstanceState.SCHEDULED); 
status.setLastScheduleTime(scheduledTime); status.setRunCount(status.getRunCount() + 1); transaction.setData( getJobDefStatusPath(status.getId()), codecProvider .getEntityEncoder(JobDefinitionStatus.class) .encode(status), jsh.getVersion()); return CompletableFuture.completedFuture(jobInstance); } catch (IOException e) { CompletableFuture<JobInstance> future = new CompletableFuture<>(); future.completeExceptionally(e); return future; } }); }); } else { logger.trace("Cannot find JobDefinition status with id: {}", jobDef); return CompletableFutureUtils.exceptionalCompletableFuture( new IllegalArgumentException( "Cannot find job definition status by id: " + jobDef)); } })); } @Override public CompletableFuture<JobInstance> startProcessJobInstanceAsync( UUID jobInstanceId, UUID nodeId) { return startProcessJobInstanceAsync(jobInstanceId, nodeId, Instant.now()); } @Override public CompletableFuture<JobInstance> startProcessJobInstanceAsync( UUID jobInstanceId, UUID nodeId, Instant startTime) { Objects.requireNonNull(jobInstanceId, "JobInstance ID is mandatory"); Objects.requireNonNull(nodeId, "Node ID is mandatory"); Objects.requireNonNull(startTime, "StartTime is mandatory"); logger.debug("Node: {} start process JobInstance {} at {}", nodeId, jobInstanceId, startTime); return retryOperation( () -> ZooKeeperUtils.readEntity( zooKeeperSupplier.get(), getJobInstancePath(jobInstanceId), codecProvider.getEntityDecoder(JobInstance.class)) .thenCompose( jih -> { if (jih == null) { return CompletableFutureUtils.exceptionalCompletableFuture( new IllegalStateException( "Cannot bind job instance by id: " + jobInstanceId)); } else { JobInstance instance = jih.getEntity(); Map<UUID, JobRunStatus> map = instance.getRunStatus(); JobRunStatus status = map.computeIfAbsent(nodeId, JobRunStatus::new); status.setState(JobInstanceState.RUNNING); status.setStartTime(startTime); status.setFinishTime(null); return ZooKeeperUtils.updateEntity( zooKeeperSupplier.get(), 
getJobInstancePath(instance.getId()), instance, codecProvider.getEntityEncoder(JobInstance.class), jih.getVersion()); } })); } @Override public CompletableFuture<JobInstance> completeJobInstanceAsync( UUID jobInstanceId, UUID nodeId, JobInstanceState state) { return completeJobInstanceAsync(jobInstanceId, nodeId, Instant.now(), state); } @Override public CompletableFuture<JobInstance> completeJobInstanceAsync( UUID jobInstanceId, UUID nodeId, Instant endTime, JobInstanceState state) { Objects.requireNonNull(jobInstanceId, "JobInstance ID is mandatory"); Objects.requireNonNull(nodeId, "Node ID is mandatory"); Objects.requireNonNull(endTime, "EndTime is mandatory"); logger.debug( "Node: {} complete process JobInstance {} at {}, state {}", nodeId, jobInstanceId, endTime, state); return retryOperation( () -> ZooKeeperUtils.readEntity( zooKeeperSupplier.get(), getJobInstancePath(jobInstanceId), codecProvider.getEntityDecoder(JobInstance.class)) .thenCompose( jih -> { if (jih == null) { return CompletableFutureUtils.exceptionalCompletableFuture( new IllegalArgumentException( "Cannot find JobInstance by id: " + jobInstanceId)); } logger.trace("Got JobInstance by id: {}, update it.", jobInstanceId); JobInstance instance = jih.getEntity(); return ZooKeeperUtils.readEntity( zooKeeperSupplier.get(), getJobDefPath(instance.getJobDefId()), codecProvider.getEntityDecoder(JobDefinition.class)) .thenCompose( jdh -> { if (jdh == null) { return CompletableFutureUtils.exceptionalCompletableFuture( new IllegalStateException( "Cannot find JobDefinition for JobInstance, instance id: " + jobInstanceId + ", definition id: " + instance.getJobDefId())); } JobDefinition jobDef = jdh.getEntity(); JobRunStatus status = instance.getRunStatus().get(nodeId); if (status == null) { return CompletableFutureUtils.exceptionalCompletableFuture( new IllegalStateException( "JobInstance(" + jobInstanceId + ") for node(" + nodeId + ") doesn't exist")); } if (status.getState().isComplete(jobDef.isGlobal())) 
{ logger.trace( "JobInstance {}, Node: {} is already complete.", jobInstanceId, nodeId); return CompletableFuture.completedFuture(null); } status.setState(state); status.setFinishTime(endTime); boolean global = jobDef.isGlobal(); boolean complete = global; for (JobRunStatus s : instance.getRunStatus().values()) { boolean c = s.getState().isComplete(global); if (!c && global) { complete = false; break; } else if (c && !global) { complete = true; break; } } if (complete) { instance.setJobState(JobInstanceState.COMPLETE); return ZooKeeperUtils.readEntity( zooKeeperSupplier.get(), getJobDefStatusPath(instance.getJobDefId()), codecProvider.getEntityDecoder(JobDefinitionStatus.class)) .thenCompose( jsh -> { if (jsh == null) { return CompletableFutureUtils .exceptionalCompletableFuture( new IllegalStateException( "Cannot find JobDefinition " + "status by id: " + jobInstanceId)); } JobDefinitionStatus jobDefinitionStatus = jsh.getEntity(); JobInstanceState s = jobDefinitionStatus .getJobInstanceState() .get(instance.getId()); if (s == null) { return CompletableFutureUtils .exceptionalCompletableFuture( new IllegalStateException( "JobInstance state not exist in " + "JobDefinition jobDefinitionStatus." 
+ " JobInstance id: " + jobInstanceId)); } jobDefinitionStatus .getJobInstanceState() .put(instance.getId(), JobInstanceState.COMPLETE); jobDefinitionStatus.setLastCompleteTime(endTime); return ZooKeeperUtils.transactionalOperation( zooKeeperSupplier.get(), transaction -> { try { transaction.setData( getJobInstancePath(instance.getId()), codecProvider .getEntityEncoder(JobInstance.class) .encode(instance), jih.getVersion()); transaction.setData( getJobDefStatusPath( jobDefinitionStatus.getId()), codecProvider .getEntityEncoder( JobDefinitionStatus.class) .encode(jobDefinitionStatus), jsh.getVersion()); return CompletableFuture.completedFuture( instance); } catch (IOException e) { return CompletableFutureUtils .exceptionalCompletableFuture(e); } }); }); } else { return ZooKeeperUtils.updateEntity( zooKeeperSupplier.get(), getJobInstancePath(instance.getId()), instance, codecProvider.getEntityEncoder(JobInstance.class), jih.getVersion()); } }); })); } @Override public CompletableFuture<JobDefinition> pauseJobAsync(UUID id, boolean mayInterrupt) { Objects.requireNonNull(id, "JobDefinition ID is mandatory"); return updateJobDefinitionStatus( id, JobDefinitionState.PAUSED, JobDefinitionState::isActive, "JobDefinition already completed or paused"); } @Override public CompletableFuture<JobDefinition> resumeJobAsync(UUID id) { Objects.requireNonNull(id, "JobDefinition ID is mandatory"); return updateJobDefinitionStatus( id, JobDefinitionState.CREATED, JobDefinitionState.PAUSED::equals, "JobDefinition not paused"); } private CompletableFuture<JobDefinition> updateJobDefinitionStatus( UUID jobDefId, JobDefinitionState state, Predicate<JobDefinitionState> condition, String conditionErrorMessage) { Objects.requireNonNull(jobDefId, "JobDefinition ID is mandatory"); logger.debug("Update JobDefinition state to {} with id: {}", state, jobDefId); return retryOperation( () -> ZooKeeperUtils.readEntity( zooKeeperSupplier.get(), getJobDefPath(jobDefId), 
codecProvider.getEntityDecoder(JobDefinition.class)) .thenCompose( jdh -> { if (jdh == null) { return CompletableFutureUtils.exceptionalCompletableFuture( new IllegalArgumentException( "Cannot find JobDefinition by id: " + jobDefId)); } JobDefinition jobDef = jdh.getEntity(); return ZooKeeperUtils.readEntity( zooKeeperSupplier.get(), getJobDefStatusPath(jobDefId), codecProvider.getEntityDecoder(JobDefinitionStatus.class)) .thenCompose( jsh -> { if (jsh == null) { return CompletableFutureUtils.exceptionalCompletableFuture( new IllegalArgumentException( "Cannot find JobDefinition status by id: " + jobDefId)); } JobDefinitionStatus jobDefinitionStatus = jsh.getEntity(); if (condition.test(jobDefinitionStatus.getState())) { jobDefinitionStatus.setState(state); return ZooKeeperUtils.updateEntity( zooKeeperSupplier.get(), getJobDefStatusPath(jobDefinitionStatus.getId()), jobDefinitionStatus, codecProvider.getEntityEncoder(JobDefinitionStatus.class), jsh.getVersion()) .thenApply(s -> jobDef); } else { return CompletableFutureUtils.exceptionalCompletableFuture( new JobException(conditionErrorMessage)); } }); })); } @Override public CompletableFuture<List<JobInstance>> cleanUpJobInstances(JobDefinition jobDef) { Objects.requireNonNull(jobDef, "JobDefinition is mandatory"); logger.debug("Cleanup JobInstances for JobDefinition: {}", jobDef.getId()); return retryOperation( () -> ZooKeeperUtils.readEntity( zooKeeperSupplier.get(), getJobDefStatusPath(jobDef.getId()), codecProvider.getEntityDecoder(JobDefinitionStatus.class)) .thenCompose( jsh -> { if (jsh == null) { return CompletableFutureUtils.exceptionalCompletableFuture( new IllegalArgumentException( "Cannot find JobDefinition status by id: " + jobDef.getId())); } JobDefinitionStatus jobDefStatus = jsh.getEntity(); List<UUID> completeJobInstance = jsh.getEntity().getJobInstanceState().entrySet().stream() .map( entry -> Optional.ofNullable( entry.getValue().isComplete(jobDef.isGlobal()) ? 
entry.getKey() : null)) .filter(Optional::isPresent) .map(Optional::get) .collect(Collectors.toList()); if (!completeJobInstance.isEmpty()) { return ZooKeeperUtils.transactionalOperation( zooKeeperSupplier.get(), transaction -> { jobDefStatus .getJobInstanceState() .keySet() .removeAll(completeJobInstance); try { transaction.setData( getJobDefStatusPath(jobDefStatus.getId()), codecProvider .getEntityEncoder(JobDefinitionStatus.class) .encode(jobDefStatus), jsh.getVersion()); List<JobInstance> instances = new ArrayList<>(completeJobInstance.size()); return CompletableFuture.allOf( completeJobInstance.stream() .map( id -> removeJobInstance(id, transaction) .thenAccept( ji -> { if (ji != null) { instances.add(ji); } })) .toArray(CompletableFuture[]::new)) .thenApply(__ -> instances); } catch (IOException exp) { CompletableFuture<List<JobInstance>> future = new CompletableFuture<>(); future.completeExceptionally(exp); return future; } }); } else { return CompletableFuture.completedFuture(Collections.emptyList()); } })); } private CompletableFuture<JobInstance> removeJobInstance(UUID id, Transaction transaction) { return ZooKeeperUtils.getChildren(zooKeeperSupplier.get(), getJobInstancePath(id)) .thenCompose( children -> { if (children.isEmpty()) { logger.trace("JobInstance {} has no children, remove it", id); return ZooKeeperUtils.readEntity( zooKeeperSupplier.get(), getJobInstancePath(id), codecProvider.getEntityDecoder(JobInstance.class)) .thenApply( jih -> { if (jih == null) { return null; } transaction.delete(getJobInstancePath(id), jih.getVersion()); return jih.getEntity(); }); } else { CompletableFuture<JobInstance> future = new CompletableFuture<>(); future.completeExceptionally( new IllegalStateException("JobInstance " + id + " has children.")); return future; } }); } @Override public CompletableFuture<JobDefinitionStatus> completeJobDefinitionAsync(JobDefinition jobDef) { Objects.requireNonNull(jobDef, "JobDefinition is mandatory"); logger.debug("Complete 
JobDefinition: {}", jobDef.getId()); return retryOperation( () -> ZooKeeperUtils.readEntity( zooKeeperSupplier.get(), getJobDefStatusPath(jobDef.getId()), codecProvider.getEntityDecoder(JobDefinitionStatus.class)) .thenCompose( jsh -> { if (jsh == null) { return CompletableFutureUtils.exceptionalCompletableFuture( new IllegalArgumentException( "Cannot find JobDefinition status by id: " + jobDef.getId())); } JobDefinitionStatus jobDefinitionStatus = jsh.getEntity(); logger.trace("Set jobDefinitionStatus state to FINISHED"); jobDefinitionStatus.setState(JobDefinitionState.FINISHED); return ZooKeeperUtils.updateEntity( zooKeeperSupplier.get(), getJobDefStatusPath(jobDef.getId()), jobDefinitionStatus, codecProvider.getEntityEncoder(JobDefinitionStatus.class), jsh.getVersion()); })); } private String getJobDefPath(UUID id) { return getJobDefPath(id.toString()); } private String getJobDefPath(String id) { return jobDefRoot + "/" + id; } private String getJobDefStatusPath(UUID id) { return getJobDefPath(id) + "/" + STATUS_PATH; } @Override public String getJobInstancePath(UUID id) { return getJobInstancePath(id.toString()); } private String getJobInstancePath(String id) { return jobInstanceRoot + "/" + id; } private String getWorkerNodePath(UUID id) { return workerNodeRoot + "/" + id.toString(); } /** * Retry logic. Retry based on exception type. Will retry if exception is KeeperException and not * NOAUTH or SESSIONEXPIRED exception. If function throw any other exception, or * KeeperException.NOAUTH or KeeperException.SESSIONEXPIRED it will not retry * * @param supplier User provide function. 
Need to return a CompletableFuture * @param <T> Entity type * @return CompletableFuture */ private <T> CompletableFuture<T> retryOperation(Supplier<CompletableFuture<T>> supplier) { return _self.thenCompose(v -> retryStrategy.call(supplier)); } private CompletableFuture<List<UUID>> listAllEntityIdsAsync(String rootPath) { return retryOperation( () -> ZooKeeperUtils.getChildren(zooKeeperSupplier.get(), rootPath) .thenApply( list -> { List<UUID> result = new ArrayList<>(list.size()); list.forEach(s -> result.add(UUID.fromString(s))); return result; })); } }
AlexRogalskiy/website-4
src/pages/docs.js
import React from 'react' import Head from '../components/Head' import DocsHeader from '../components/DocsHeader' import Footer from '../components/Footer' import Icon from '../components/Icon' import Link from '../components/Link' import Button from '../components/Button' import SearchBox from '../components/SearchBox' const PageTile = ({ title, subtitle, image, link }) => ( <Link to={link} colorClassName="text-black hover:text-qripink"> <div className='bg-white shadow-md rounded-lg text-center p-8 flex items-center justify-center sm:block' style={{ boxShadow: '0px 0px 5px rgba(0, 0, 0, 0.15)' }}> <img src={image} className='h-12 w-12 mr-5 sm:mx-auto sm:mb-4'/> <div> <div className='text-md font-bold mb-2 sm:text-3xl'>{title}</div> <div className='font-light text-sm text-qrigray-400 sm:text-md'>{subtitle}</div> </div> </div> </Link> ) const featuredTutorials = [ { title: 'How Qri Defines a Dataset', description: 'Qri is a distributed version control and sharing platform for datasets', slug: '/docs/concepts/understanding-qri/what-is-qri', colorClass: 'text-qripink-600' }, { title: 'How Qri Dataset Version Control Works', description: 'In Qri, \'Dataset\' means more than just the data. 
Learn more about the Qri Data Model', slug: '/docs/concepts/understanding-qri/how-qri-defines-a-dataset', colorClass: 'text-qrigreen-600' }, { title: 'Scrape Data from a Website', description: 'Use Starlark\'s html package to extract data from HTML', slug: '/docs/guides', colorClass: 'text-qrigreen-600' } ] const DocsPage = ({ onSearchClick, location }) => ( <div style={{ background: 'url("/img/new-docs/dot.svg")' }}> <div className='flex flex-col' style={{ backgroundImage: 'url("/img/new-docs/yellow-aura-bubble.svg")', backgroundRepeat: 'no-repeat', backgroundPositionY: 1200, backgroundPositionX: 'right' }}> <Head data={{ title: 'Qri Docs', description: 'Documentation for Qri Desktop and Qri CLI' }} /> <div className='flex-grow flex-shrink-0'> <img src='/img/new-docs/yellow-aura.svg' className='hidden sm:block absolute z-0 w-full'/> <img src='/img/new-docs/yellow-aura-mobile.svg' className='block sm:hidden absolute z-0' style={{ transform: 'scale(3)' }}/> <DocsHeader onSearchClick={onSearchClick} location={location} transparent sticky={false} border={false} /> <div className='z-10 relative'> <div className='py-24 max-w-screen-lg mx-auto'> {/* Start Splash */} <div className='text-center relative'> <div className='absolute h-0 hidden md:block'> <img src='/img/new-docs/blob-green.svg' className='relative -bottom-4 -left-4'/> </div> <div className='w-full px-5 font-black text-4xl text-qritile-600 mb-10 inline-block md:text-6xl sm:px-0 sm:w-auto'> <div className='mb-9'> <div className='absolute h-0 hidden md:block'> <img src='/img/new-docs/nodes-1.svg' className='relative -top-20 -left-16'/> </div> How can we <span className='text-qripink-600'>help</span> you? 
</div> <div onClick={onSearchClick} > <SearchBox size='lg' disabled/> </div> <div className='absolute h-0 bottom-20 -right-16 hidden md:block'> <img src='/img/new-docs/blob-orange.svg'/> </div> </div> </div> {/* End Splash */} {/* Start Popular Pages Section */} <div className='text-center mb-20 relative'> <div className='text-xl text-center font-bold'>Popular Pages</div> <div className='my-2 mx-auto'> <div className="w-full flex flex-wrap px-5 md:px-0 overflow-hidden"> <div className="my-3 px-1 md:px-6 py-3 w-full md:w-1/3 overflow-hidden"> <PageTile image='/img/new-docs/page-clock.svg' title='Quickstart' subtitle='Dive in and start making datasets' link='/docs/guides/transforms/scrape-data-from-a-website' /> </div> <div className="my-3 px-1 md:px-6 py-3 w-full md:w-1/3 overflow-hidden"> <PageTile image='/img/new-docs/page-cloud.svg' title='What is Qri?' subtitle='Learn Qri Core Concepts and Terms' link='/docs/concepts/understanding-qri/what-is-qri' /> </div> <div className="my-3 px-1 md:px-6 py-3 w-full md:w-1/3 overflow-hidden"> <PageTile image='/img/new-docs/page-reference.svg' title='Reference' subtitle='Explore technical docs and APIs' link='/docs/reference' /> </div> </div> </div> <div className='absolute h-0 bottom-40 -right-36 hidden md:block'> <img src='/img/new-docs/nodes-2.svg'/> </div> </div> {/* End Popular Pages Section */} {/* Start Recommended Tutorials */} <div className='px-5 mx-auto relative lg:px-0'> <div className=' w-full mb-16 md:w-1/2'> <div className='font-bold text-3xl md:text-4xl text-qritile-600 mb-4'> Featured Docs </div> <div className='text-lg text-qrigray-1000'> Start with these pages to learn more about the value of versioned datasets and how Qri works. 
</div> </div> {featuredTutorials.map(({ title, description, slug, colorClass }) => ( <Link key={slug} to={slug}> <div className='text-qrigray-400 bg-white rounded-lg border-solid border-2 border-qrigray-100 box-border px-6 py-6 flex mb-6' > <Icon icon='docsRing' size='xs' className={`mt-1.5 mr-3 ${colorClass}`} /> <div className='flex-grow'> <div className='font-bold text-lg text-black mb-1'>{title}</div> <div className='text-sm'>{description}</div> </div> <div className='flex items-center'> <Icon icon='caretRight' size='xl' className='' /> </div> </div> </Link> ))} </div> {/* End Recommended Tutorials */} {/* Start Still Need Help? */} <div className='px-5 lg:px-0 mx-auto py-52'> <div className='bg-qritile-600 rounded-lg block text-center md:text-left md:flex text-white px-8 pt-10 pb-32 md:p-14'> <div className='w-full md:w-3/5'> <div className='font-bold text-3xl md:text-4xl mb-5'>Still have questions?</div> <div className='font-base text-lg mb-5'>If you need answers, come join our community chat on Discord. Our staff and other Qri users can help you get going!.</div> <Link to='https://discordapp.com/invite/thkJHKj'> <Button size='lg' type='secondary'>Come Hang Out!</Button> </Link> </div> <div className=''> <div className='absolute h-0 hidden md:block z-0'> <img src='/img/new-docs/docs-help.svg' className='relative -top-32 -left-16'/> </div> <div className='relative h-0 w-full block mx-auto md:hidden z-0'> <img src='/img/new-docs/docs-help.svg' className='absolute top-8 mx-auto'/> </div> </div> </div> </div> {/* End Still Need Help? */} </div> </div> </div> <div className='flex-shrink-0'> <Footer /> </div> </div> </div> ) export default DocsPage
MarselAhmetov/ITIS-2c
GameJavaFX/src/main/java/attributes/Args.java
package attributes;

import com.beust.jcommander.Parameter;

import java.util.ArrayList;
import java.util.List;

/**
 * Command-line arguments for the game client, parsed by JCommander.
 *
 * <p>Fields are {@code public} and non-final because JCommander populates
 * them reflectively after {@code JCommander.newBuilder().addObject(args)}.
 * Note: a stray {@code <filename>...} scaffold line preceding the package
 * declaration was removed — it made the file uncompilable.
 */
public class Args {

    /** Positional (unnamed) parameters left over after option parsing. */
    @Parameter
    public List<String> parameters = new ArrayList<>();

    /** Game server address; defaults to the local host. */
    @Parameter(names = {"--address"})
    public String address = "127.0.0.1";

    /** Game server TCP port; defaults to 4321. */
    @Parameter(names = {"--port"})
    public Integer port = 4321;
}
eguneys/pipo
modules/memo/src/main/Env.scala
package oyun.memo

import com.softwaremill.macwire._
import io.methvin.play.autoconfig._
import play.api.Configuration

import oyun.common.config._

// Typed view of the "memo" section of the Play configuration.
// Each @ConfigName maps a field to its dotted key under "memo" in the config file.
final class MemoConfig(
    @ConfigName("collection.cache") val cacheColl: CollName,
    @ConfigName("collection.config") val configColl: CollName,
)

// Dependency-injection module for the memo subsystem. Wiring happens at
// compile time via macwire's `wire` macro, which resolves constructor
// dependencies from the vals and parameters in scope.
@Module
final class Env(
    appConfig: Configuration,
    mode: play.api.Mode,
    db: oyun.db.Db
)(implicit ec: scala.concurrent.ExecutionContext, system: akka.actor.ActorSystem) {

  // ConfigLoader for MemoConfig is derived automatically by play-autoconfig.
  private val config = appConfig.get[MemoConfig]("memo")(AutoConfig.loader)

  // Lazily constructed cache facade; macwire fills in its constructor args.
  lazy val cacheApi = wire[CacheApi]
}
SaadYum/SquarePad
screens/Interests.js
import React from "react"; import { StyleSheet, Dimensions, ScrollView, Image, ImageBackground, Platform, Alert, AsyncStorage, SafeAreaView, KeyboardAvoidingView, FlatList, ActivityIndicator, } from "react-native"; import { Block, Text, theme, Input } from "galio-framework"; import { Card, ListItem, CheckBox } from "react-native-elements"; import { Button, Icon } from "../components"; import { IconButton } from "../components/IconButton"; import { Images, argonTheme } from "../constants"; import { HeaderHeight } from "../constants/utils"; import * as firebase from "firebase"; import * as ImagePicker from "expo-image-picker"; import * as Permissions from "expo-permissions"; import Constants from "expo-constants"; import { TouchableOpacity, TouchableWithoutFeedback, } from "react-native-gesture-handler"; import { getPosts } from "../constants/Images"; const { width, height } = Dimensions.get("screen"); const thumbMeasure = (width - 48 - 32) / 2; // const userID = firebase.auth().currentUser.uid; class Interests extends React.Component { user = firebase.auth().currentUser.uid; firestoreUserRef = firebase.firestore().collection("users").doc(this.user); firestoreCurrentInterestsRef = firebase .firestore() .collection("users") .doc(this.user) .collection("interests") .doc(this.user); // firestorePostRef = firebase.firestore().collection("posts").doc(this.user.uid) // .collection("userPosts"); storageRef = firebase.storage().ref(); state = { searchWord: "", searchResults: [], profilePic: Images.ProfilePicture, foundUser: "", found: false, checked: true, interestsCompleted: false, itemCount: 0, interestsArr: [], users: [ { name: "Saad", avatar: Images.ProfilePicture }, { name: "Saad1", avatar: Images.ProfilePicture }, { name: "Saad2", avatar: Images.ProfilePicture }, { name: "Saad3", avatar: Images.ProfilePicture }, { name: "Saad4", avatar: Images.ProfilePicture }, ], items: [ { name: "Photography", id: 1, selected: false, iconName: "camera", iconFamily: "EvilIcons", 
color: "tomato", }, { name: "Research", id: 2, selected: false, iconName: "location", iconFamily: "EvilIcons", color: "#3FA7D6", }, { name: "Music", id: 3, selected: false, iconName: "customerservice", iconFamily: "Antdesign", color: "#8EE269", }, { name: "Vlogs", id: 4, selected: false, iconName: "videocamera", iconFamily: "AntDesign", color: "#9B77F3", }, { name: "Tech", id: 5, selected: false, iconName: "laptop", iconFamily: "antdesign", color: "#F5C851", }, { name: "Food", id: 6, selected: false, iconName: "drink", iconFamily: "Entypo", color: "#FAC05E", }, { name: "Parks", id: 7, selected: false, iconName: "light-up", iconFamily: "Entypo", color: "#8EE269", }, { name: "Cafes", id: 8, selected: false, iconName: "rest", iconFamily: "AntDesign", color: "#3FA7D6", }, { name: "Beauty", id: 9, selected: false, iconName: "basecamp", iconFamily: "Entypo", color: "#FF5964", }, { name: "Books", id: 10, selected: false, iconName: "book", iconFamily: "Entypo", color: "#F06543", }, { name: "Cars", id: 11, selected: false, iconName: "car", iconFamily: "AntDesign", color: "#E01A4F", }, { name: "Movies", id: 12, selected: false, iconName: "folder-video", iconFamily: "Entypo", color: "#F9C22E", }, { name: "Education", id: 13, selected: false, iconName: "book", iconFamily: "AntDesign", color: "#53B3CB", }, { name: "Shopping", id: 14, selected: false, iconName: "gift", iconFamily: "AntDesign", color: "#809FFC", }, { name: "Sports", id: 15, selected: false, iconName: "dribbble", iconFamily: "Entypo", color: "#8EE269", }, { name: "Culture", id: 16, selected: false, iconName: "globe", iconFamily: "Entypo", color: "#35A7FF", }, ], }; onSelect = (id) => { const itemIndex = this.state.items.findIndex((item) => item.id === id); const item = this.state.items.find((item) => item.id === id); let count = this.state.itemCount; // console.log(item); let itemsCopy = this.state.items; if (!itemsCopy[itemIndex].selected) { count++; this.setState({ itemCount: count }); } else { count--; 
this.setState({ itemCount: count }); } itemsCopy[itemIndex].selected = !itemsCopy[itemIndex].selected; this.setState( { items: itemsCopy, }, () => { let interestsArr = []; this.state.items.forEach((interest) => { if (interest.selected) { interestsArr.push(interest.name); } }); this.setState({ interestsArr: interestsArr }); } ); }; onUpdate = () => { if (this.state.itemCount < 3) { alert("Please Select atleast 3 tags!"); } else { this.state.interestsCompleted = true; this.firestoreUserRef .set( { interests: this.state.items, interestsArr: this.state.interestsArr, interested: true, }, { merge: true } ) .then(() => { this.firestoreCurrentInterestsRef .set({ interestsArr: this.state.interestsArr, }) .then(() => { alert("Updated!"); }); this.storeToken(); try { this.props.navigation.goBack(); } finally { this.props.navigation.navigate("SignedIn"); } }); } }; getInterests = () => { this.firestoreUserRef.get().then((doc) => { if (doc.data().interested) { // console.log(doc.data().interests) let interestsArr = this.searchArray(doc.data().interests); this.setState({ items: doc.data().interests, interestsCompleted: true, itemCount: interestsArr.length, }); console.log(this.state.itemCount); } else { this.setState({ interestsCompleted: true }); } }); }; searchArray = (myArray) => { let interestsArr = []; for (var i = 0; i < myArray.length; i++) { if (myArray[i].selected) { interestsArr.push(myArray[i]); } } return interestsArr; }; //AsynchStorage for pre loggin for already logged in users storeToken = async () => { this.firestoreUserRef.onSnapshot(async (doc) => { try { await AsyncStorage.setItem("userData", JSON.stringify(doc.data())); } catch (error) { console.log("Something went wrong", error); } }); }; componentDidMount = () => { this.storeToken(); this.getInterests(); }; UNSAFE_componentWillMount = () => { if (this.user) { this.getInterests(); } }; renderInterests = () => { const items = this.state.items; // console.log(this.state.items); return ( <SafeAreaView> <Block 
center shadow style={{ width: width * 0.9, backgroundColor: "#f5f5f5", borderRadius: 5, }} > <FlatList showsVerticalScrollIndicator={false} data={items} numColumns={2} renderItem={({ item }) => ( <IconButton item={item} onSelect={this.onSelect} /> )} keyExtractor={(item) => item.id} extraData={(item) => item.selected} /> </Block> </SafeAreaView> ); }; textInput = (word) => { this.setState({ searchWord: word }); this.searchUser(word); }; renderSearchBar = () => { const { navigation } = this.props; return ( <Block row flex style={styles.inputContainer}> <Block left flex={4}> <Input color="black" style={styles.todoInput} placeholder="Add new todo.." placeholderTextColor={"#8898AA"} // onFocus={() => navigation.navigate('Pro')} onChangeText={(word) => this.textInput(word)} value={this.state.searchWord} /> </Block> <Block right flex={1}> <Button onlyIcon icon="pluscircle" iconFamily="antdesign" iconSize={25} color="#f5f5f5" iconColor="#fff" style={{ width: 30, height: 30, marginTop: 12, marginRight: 8 }} ></Button> </Block> </Block> ); }; render() { return ( <Block flex center> {/* <Block flex={1}/> */} {/* <Block flex={2}> {this.renderSearchBar()} </Block> */} <Block flex={1} style={{ marginTop: 10, marginBottom: 5 }}> <Text h4>Select atleast 3 interests: </Text> </Block> <Block flex={14}> {this.state.interestsCompleted && this.renderInterests()} </Block> <Block flex={2} style={{ marginTop: 12 }}> <Button round style={{ width: width * 0.7 }} color="tomato" onPress={this.onUpdate} > Update </Button> </Block> {/* {this.state.interestsCompleted && <Block flex={2}> <ActivityIndicator size="large"/> </Block> } */} </Block> ); } } const styles = StyleSheet.create({ inputContainer: { height: 48, width: width - 36, marginHorizontal: 16, borderWidth: 1, borderRadius: 10, backgroundColor: "#ebebeb", borderColor: "#ebebeb", }, todoInput: { height: 40, width: width * 0.6, marginHorizontal: 10, borderWidth: 1, borderRadius: 10, backgroundColor: "#ebebeb", borderColor: "#ebebeb", }, 
item: { backgroundColor: "#ff6347", borderRadius: 5, padding: 20, height: 180, width: width * 0.4, marginVertical: 8, marginHorizontal: 10, }, title: { fontSize: 32, }, }); export default Interests;
consulo/consulo-lua
src/main/java/com/sylvanaar/idea/Lua/codeInsight/LuaMethodSeparatorMarkerProvider.java
/* * Copyright 2011 <NAME> (Sylvanaar) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.sylvanaar.idea.Lua.codeInsight; import com.intellij.codeHighlighting.Pass; import com.intellij.codeInsight.daemon.DaemonCodeAnalyzerSettings; import com.intellij.codeInsight.daemon.LineMarkerInfo; import com.intellij.codeInsight.daemon.LineMarkerProvider; import com.intellij.openapi.editor.colors.CodeInsightColors; import com.intellij.openapi.editor.colors.EditorColorsManager; import com.intellij.openapi.editor.colors.EditorColorsScheme; import com.intellij.openapi.editor.markup.GutterIconRenderer; import com.intellij.openapi.editor.markup.SeparatorPlacement; import com.intellij.openapi.util.TextRange; import com.intellij.psi.PsiElement; import com.intellij.util.NullableFunction; import com.sylvanaar.idea.Lua.lang.luadoc.psi.api.LuaDocComment; import com.sylvanaar.idea.Lua.lang.luadoc.psi.api.LuaDocCommentOwner; import com.sylvanaar.idea.Lua.lang.psi.LuaFunctionDefinition; import consulo.annotation.access.RequiredReadAction; import javax.annotation.Nonnull; import javax.annotation.Nullable; public class LuaMethodSeparatorMarkerProvider implements LineMarkerProvider { private final DaemonCodeAnalyzerSettings myDaemonSettings; private final EditorColorsManager myColorsManager; public LuaMethodSeparatorMarkerProvider(DaemonCodeAnalyzerSettings daemonSettings, EditorColorsManager colorsManager) { myDaemonSettings = daemonSettings; myColorsManager = colorsManager; } @RequiredReadAction 
@Nullable @Override public LineMarkerInfo getLineMarkerInfo(@Nonnull PsiElement element) { if(myDaemonSettings.SHOW_METHOD_SEPARATORS) { if(element instanceof LuaDocComment) { LuaDocCommentOwner owner = ((LuaDocComment) element).getOwner(); if(owner instanceof LuaFunctionDefinition) { TextRange range = new TextRange(element.getTextOffset(), owner.getTextRange().getEndOffset()); LineMarkerInfo<PsiElement> info = new LineMarkerInfo<>(element, range, null, Pass.UPDATE_ALL, NullableFunction.NULL, null, GutterIconRenderer.Alignment.RIGHT); EditorColorsScheme scheme = myColorsManager.getGlobalScheme(); info.separatorColor = scheme.getColor(CodeInsightColors.METHOD_SEPARATORS_COLOR); info.separatorPlacement = SeparatorPlacement.TOP; return info; } } } return null; } }
Shaptic/py-stellar-base
stellar_sdk/xdr/change_trust_op.py
# This is an automatically generated file.
# DO NOT EDIT or your changes may be overwritten
# (fix applied here to __str__; the same fix belongs in the code generator)
import base64
from xdrlib import Packer, Unpacker

from ..type_checked import type_checked
from .change_trust_asset import ChangeTrustAsset
from .int64 import Int64

__all__ = ["ChangeTrustOp"]


@type_checked
class ChangeTrustOp:
    """
    XDR Source Code::

        struct ChangeTrustOp
        {
            ChangeTrustAsset line;

            // if limit is set to 0, deletes the trust line
            int64 limit;
        };
    """

    def __init__(
        self,
        line: ChangeTrustAsset,
        limit: Int64,
    ) -> None:
        self.line = line
        self.limit = limit

    def pack(self, packer: Packer) -> None:
        """Serialize this operation's fields, in XDR declaration order."""
        self.line.pack(packer)
        self.limit.pack(packer)

    @classmethod
    def unpack(cls, unpacker: Unpacker) -> "ChangeTrustOp":
        """Deserialize a ChangeTrustOp; fields are read in declaration order."""
        line = ChangeTrustAsset.unpack(unpacker)
        limit = Int64.unpack(unpacker)
        return cls(
            line=line,
            limit=limit,
        )

    def to_xdr_bytes(self) -> bytes:
        """Return the raw XDR encoding of this operation."""
        packer = Packer()
        self.pack(packer)
        return packer.get_buffer()

    @classmethod
    def from_xdr_bytes(cls, xdr: bytes) -> "ChangeTrustOp":
        """Build a ChangeTrustOp from raw XDR bytes."""
        unpacker = Unpacker(xdr)
        return cls.unpack(unpacker)

    def to_xdr(self) -> str:
        """Return the base64-encoded XDR representation."""
        xdr_bytes = self.to_xdr_bytes()
        return base64.b64encode(xdr_bytes).decode()

    @classmethod
    def from_xdr(cls, xdr: str) -> "ChangeTrustOp":
        """Build a ChangeTrustOp from a base64-encoded XDR string."""
        xdr_bytes = base64.b64decode(xdr.encode())
        return cls.from_xdr_bytes(xdr_bytes)

    def __eq__(self, other: object):
        if not isinstance(other, self.__class__):
            return NotImplemented
        return self.line == other.line and self.limit == other.limit

    def __str__(self):
        out = [
            f"line={self.line}",
            f"limit={self.limit}",
        ]
        # Fix: was f"<ChangeTrustOp {[', '.join(out)]}>", which formats a
        # *list literal* and so rendered as <ChangeTrustOp ['line=..., limit=...']>
        # (with brackets and quotes). Intended output is <ChangeTrustOp [line=..., limit=...]>.
        return f"<ChangeTrustOp [{', '.join(out)}]>"
FanHuaRan/quick-start
java/akka/simple/src/main/java/com/fhr/akka/minirpg/MsgHandler.java
package com.fhr.akka.minirpg;

import akka.actor.UntypedActor;
import com.fhr.akka.minirpg.request.AddExpRequest;
import com.fhr.akka.minirpg.request.CreatePlayerRequest;
import com.fhr.akka.minirpg.request.GetPlayerInfoRequest;
import com.fhr.akka.minirpg.request.LevelUpRequest;
import com.fhr.akka.minirpg.response.AddExpResponse;
import com.fhr.akka.minirpg.response.CreatePlayerResponse;
import com.fhr.akka.minirpg.response.GetPlayerInfoResponse;
import com.fhr.akka.minirpg.response.LevelUpResponse;

import java.util.ArrayList;
import java.util.List;

/**
 * @author <NAME>
 * created on 2018/11/28
 * @description 游戏消息处理器 (game message handler actor)
 */
public class MsgHandler extends UntypedActor {
    // All created players. Ids handed out by createPlayer are 1-based
    // (size() + 1), so the player with id N lives at list index N - 1.
    private final List<Player> players = new ArrayList<>();

    @Override
    public void onReceive(Object msg) {
        // Dispatch by request type; every branch replies to the sender.
        if (msg instanceof CreatePlayerRequest) {
            int newPlayerId = createPlayer((CreatePlayerRequest) msg);
            getSender().tell(new CreatePlayerResponse(newPlayerId), getSelf());
        } else if (msg instanceof AddExpRequest) {
            int newExp = addExpToPlayer((AddExpRequest) msg);
            getSender().tell(new AddExpResponse(newExp), getSelf());
        } else if (msg instanceof LevelUpRequest) {
            int newLevel = levelUpPlayer((LevelUpRequest) msg);
            getSender().tell(new LevelUpResponse(newLevel), getSelf());
        } else if (msg instanceof GetPlayerInfoRequest) {
            PlayerInfo playerInfo = getPlayerInfo((GetPlayerInfoRequest) msg);
            getSender().tell(new GetPlayerInfoResponse(playerInfo), getSelf());
        }
    }

    /**
     * Look up a player by its public (1-based) id.
     *
     * <p>Fix: the lookups previously called {@code players.get(playerId)}
     * directly, but {@code createPlayer} assigns 1-based ids while
     * {@code List#get} is 0-based — every lookup hit the wrong player and the
     * newest player's id threw {@link IndexOutOfBoundsException}.
     */
    private Player findPlayer(int playerId) {
        return players.get(playerId - 1);
    }

    /** Create a new level-1 player and return its (1-based) id. */
    private int createPlayer(CreatePlayerRequest req) {
        int playerId = players.size() + 1;
        Player newPlayer = new Player();
        newPlayer.setId(playerId);
        newPlayer.setLevel(1);
        newPlayer.setName(req.getPlayerName());
        players.add(newPlayer);
        return playerId;
    }

    /** Add experience to the requested player and return the new total. */
    private int addExpToPlayer(AddExpRequest req) {
        Player player = findPlayer(req.getPlayerId());
        player.addExp(req.getExp());
        return player.getExp();
    }

    /** Level up the requested player and return the new level. */
    private int levelUpPlayer(LevelUpRequest req) {
        Player player = findPlayer(req.getPlayerId());
        player.levelUp();
        return player.getLevel();
    }

    /** Snapshot of the requested player's public info. */
    private PlayerInfo getPlayerInfo(GetPlayerInfoRequest req) {
        Player player = findPlayer(req.getPlayerId());
        return new PlayerInfo(player.getId(), player.getName(), player.getExp(), player.getLevel());
    }
}
john-james-ai/ml-studio
ml_studio/model_evaluation/validity.py
# =========================================================================== #
#                                VALIDITY                                     #
# =========================================================================== #
# Project: Visualate            License: Modified BSD                         #
# Copyright (c) 2019 Decision Scients                                         #
# =========================================================================== #
"""Model validation and verification module."""
#%%
import math
from math import erf

import numpy as np
from scipy.stats import norm

# --------------------------------------------------------------------------- #
#                                 LEVERAGE                                    #
# --------------------------------------------------------------------------- #
def leverage(X):
    """Computes leverage.

    Leverage is a measure of how far away an independent variable values of
    an observation are from those of other observations.

    Parameters
    ----------
    X : ndarray of shape (n, m)
        Design matrix of n observations and m features. Must have full
        column rank (X'X is inverted).

    Returns
    -------
    ndarray of shape (n,)
        The diagonal of the hat matrix H = X (X'X)^-1 X'.
    """
    hat = X.dot(np.linalg.inv(X.T.dot(X)).dot(X.T))
    hii = np.diagonal(hat)
    return hii

# Fix: removed the stray module-level ``print(leverage.__doc__)`` — it dumped
# the docstring to stdout every time this module was imported.

# --------------------------------------------------------------------------- #
#                             RESIDUAL ANALYSIS                               #
# --------------------------------------------------------------------------- #
def standardized_residuals(model, X, y, return_predictions=False):
    """Computes standardized residuals.

    Standardized residuals (sometimes referred to as "internally studentized
    residuals") are defined for each observation, i = 1, ..., n as an
    ordinary residual divided by an estimate of its standard deviation:

    .. math:: r_i = \\frac{e_i}{\\sqrt{MSE(1-h_{ii})}}

    Parameters
    ----------
    model : Estimator or BaseEstimator
        ML Studio or Scikit Learn estimator (anything with ``predict``)
    X : ndarray or DataFrame of shape n x m
        A matrix of n instances with m features
    y : ndarray or Series of length n
        An array or series of target or class values
    return_predictions : bool, optional
        When True, also return the model predictions.

    Returns
    -------
    ndarray, or (ndarray, ndarray) when ``return_predictions`` is True.
    """
    # Ordinary residuals
    y_pred = model.predict(X)
    residuals = y - y_pred

    # Leverage (diagonal of the hat matrix)
    hii = leverage(X)

    # Degrees of freedom = n - rank(X); MSE = e'e / df
    rank = np.linalg.matrix_rank(X)
    df = X.shape[0] - rank
    mse = np.matmul(residuals, residuals) / df

    standardized_residuals = residuals / np.sqrt(mse * (1 - hii))

    if return_predictions:
        return standardized_residuals, y_pred
    return standardized_residuals


def studentized_residuals(model, X, y, return_predictions=False):
    """Computes studentized residuals.

    Studentized residuals are just a deleted residual divided by its estimated
    standard deviation, equivalent to the ordinary residual divided by a
    factor that includes the mean square error based on the estimated model
    with the ith observation deleted, MSE(i), and the leverage, hii:

    .. math:: r_i = \\frac{e_i}{\\sqrt{MSE_{(i)}(1-h_{ii})}}

    Parameters
    ----------
    model : Estimator or BaseEstimator
        ML Studio or Scikit Learn estimator (anything with ``predict``)
    X : ndarray or DataFrame of shape n x m
        A matrix of n instances with m features
    y : ndarray or Series of length n
        An array or series of target or class values
    return_predictions : bool, optional
        When True, also return the model predictions.

    Returns
    -------
    ndarray, or (ndarray, ndarray) when ``return_predictions`` is True.
    """
    y_pred = model.predict(X)

    # Formulation from https://newonlinecourses.science.psu.edu/stat462/node/247/
    n = X.shape[0]
    k = X.shape[1]
    r = standardized_residuals(model=model, X=X, y=y)
    studentized_residuals = r * np.sqrt((n - k - 2) / (n - k - 1 - np.square(r)))

    if return_predictions:
        return studentized_residuals, y_pred
    return studentized_residuals

# --------------------------------------------------------------------------- #
#                 STANDARD NORMAL DISTRIBUTION FUNCTIONS                      #
# --------------------------------------------------------------------------- #
def quantile(p):
    """Standard normal cumulative distribution function.

    Despite its name (kept for backward compatibility), this function
    computes the *forward* CDF of the standard normal distribution,

    .. math:: \\Phi(p) = \\frac{1 + \\mathrm{erf}(p / \\sqrt{2})}{2}

    i.e. the probability that a standard normal variable is <= p. The
    original docstring described the inverse CDF; an inverse implementation
    (``normal_CDF_inverse`` below) exists in this module but is not called
    here because it only handled probabilities in (0, 1).

    Parameters
    ----------
    p : float or array-like
        Point(s) at which to evaluate the CDF.

    Returns
    -------
    float or ndarray
        Phi(p), in [0, 1].
    """
    return (1.0 + erf(p / np.sqrt(2.0))) / 2.0


def rational_approximation(t):
    """Abramowitz and Stegun formula 26.2.23 (helper for normal_CDF_inverse)."""
    c = [2.515517, 0.802853, 0.010328]
    d = [1.432788, 0.189269, 0.001308]
    numerator = (c[2] * t + c[1]) * t + c[0]
    denominator = ((d[2] * t + d[1]) * t + d[0]) * t + 1.0
    return t - numerator / denominator


def normal_CDF_inverse(p):
    """Inverse standard normal CDF for p in (0, 1).

    Algorithm courtesy of John D. Cook,
    https://www.johndcook.com/blog/python_phi_inverse/
    """
    assert p > 0.0 and p < 1
    if p < 0.5:
        # F^-1(p) = - G^-1(p)
        return -rational_approximation(math.sqrt(-2.0 * math.log(p)))
    else:
        # F^-1(p) = G^-1(1-p)
        return rational_approximation(math.sqrt(-2.0 * math.log(1.0 - p)))
xiayingfeng/sarl
main/coreplugins/io.sarl.lang.ui/src/io/sarl/lang/ui/contentassist/templates/SARLTemplateProposalProvider.java
/* * $Id$ * * SARL is an general-purpose agent programming language. * More details on http://www.sarl.io * * Copyright (C) 2014-2021 the original authors or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.sarl.lang.ui.contentassist.templates; import javax.inject.Inject; import org.eclipse.jdt.internal.ui.JavaPlugin; import org.eclipse.jdt.internal.ui.JavaPluginImages; import org.eclipse.jface.text.templates.ContextTypeRegistry; import org.eclipse.jface.text.templates.Template; import org.eclipse.jface.text.templates.persistence.TemplateStore; import org.eclipse.swt.graphics.Image; import org.eclipse.xtext.ui.editor.templates.ContextTypeIdHelper; import org.eclipse.xtext.xbase.ui.templates.XbaseTemplateProposalProvider; /** Provider of templates for SARL. * * <p>This provider is overridden for provided the correct template's icon. * * @author $Author: sgalland$ * @version $FullVersion$ * @mavengroupid $GroupId$ * @mavenartifactid $ArtifactId$ */ public class SARLTemplateProposalProvider extends XbaseTemplateProposalProvider { /** Construct the provider. * * @param templateStore the store of templates. * @param registry the template context registry. * @param helper the context helper. 
*/ @Inject public SARLTemplateProposalProvider(TemplateStore templateStore, ContextTypeRegistry registry, ContextTypeIdHelper helper) { super(templateStore, registry, helper); } @Override public Image getImage(Template template) { return JavaPlugin.getImageDescriptorRegistry().get(JavaPluginImages.DESC_OBJS_TEMPLATE); } }
aaronjwood/alb-sdk
go/models/upgrade_status_info.go
<filename>go/models/upgrade_status_info.go // Copyright 2021 VMware, Inc. // SPDX-License-Identifier: Apache License 2.0 package models // This file is auto-generated. // UpgradeStatusInfo upgrade status info // swagger:model UpgradeStatusInfo type UpgradeStatusInfo struct { // UNIX time since epoch in microseconds. Units(MICROSECONDS). // Read Only: true LastModified *string `json:"_last_modified,omitempty"` // Backward compatible abort function name. Field introduced in 18.2.10, 20.1.1. AfterRebootRollbackFnc *string `json:"after_reboot_rollback_fnc,omitempty"` // Backward compatible task dict name. Field introduced in 18.2.10, 20.1.1. AfterRebootTaskName *string `json:"after_reboot_task_name,omitempty"` // Flag for clean installation. Field introduced in 18.2.10, 20.1.1. Clean *bool `json:"clean,omitempty"` // Duration of Upgrade operation in seconds. Field introduced in 18.2.6. Duration *int32 `json:"duration,omitempty"` // Check if the patch rollback is possible on this node. Field introduced in 18.2.6. EnablePatchRollback *bool `json:"enable_patch_rollback,omitempty"` // Check if the rollback is possible on this node. Field introduced in 18.2.6. EnableRollback *bool `json:"enable_rollback,omitempty"` // End time of Upgrade operation. Field introduced in 18.2.6. EndTime *string `json:"end_time,omitempty"` // Enqueue time of Upgrade operation. Field introduced in 18.2.6. EnqueueTime *string `json:"enqueue_time,omitempty"` // Fips mode for the entire system. Field introduced in 20.1.5. FipsMode *bool `json:"fips_mode,omitempty"` // Record of past operations on this node. Field introduced in 20.1.4. History []*OpsHistory `json:"history,omitempty"` // Image path of current base image. Field introduced in 18.2.10, 20.1.1. ImagePath *string `json:"image_path,omitempty"` // Image uuid for identifying the current base image. It is a reference to an object of type Image. Field introduced in 18.2.6. 
ImageRef *string `json:"image_ref,omitempty"` // Name of the system such as cluster name, se group name and se name. Field introduced in 18.2.6. Name *string `json:"name,omitempty"` // Type of the system such as controller_cluster, se_group or se. Enum options - NODE_CONTROLLER_CLUSTER, NODE_SE_GROUP, NODE_SE_TYPE. Field introduced in 18.2.6. NodeType *string `json:"node_type,omitempty"` // Cloud that this object belongs to. It is a reference to an object of type Cloud. Field introduced in 18.2.6. ObjCloudRef *string `json:"obj_cloud_ref,omitempty"` // Parameters associated with the Upgrade operation. Field introduced in 18.2.6. Params *UpgradeOpsParam `json:"params,omitempty"` // Image path of current patch image. Field introduced in 18.2.10, 20.1.1. PatchImagePath *string `json:"patch_image_path,omitempty"` // Image uuid for identifying the current patch.Example Base-image is 18.2.6 and a patch 6p1 is applied, then this field will indicate the 6p1 value. . It is a reference to an object of type Image. Field introduced in 18.2.6. PatchImageRef *string `json:"patch_image_ref,omitempty"` // List of patches applied to this node. Example Base-image is 18.2.6 and a patch 6p1 is applied, then a patch 6p5 applied. This field will indicate the [{'6p1', '6p1_image_uuid'}, {'6p5', '6p5_image_uuid'}] value. Field introduced in 18.2.8, 20.1.1. PatchList []*PatchData `json:"patch_list,omitempty"` // Flag for patch op with reboot. Field introduced in 18.2.10, 20.1.1. PatchReboot *bool `json:"patch_reboot,omitempty"` // Current patch version applied to this node. Example Base-image is 18.2.6 and a patch 6p1 is applied, then this field will indicate the 6p1 value. . Field introduced in 18.2.6. PatchVersion *string `json:"patch_version,omitempty"` // Image path of previous base image. Field introduced in 18.2.10, 20.1.1. PrevImagePath *string `json:"prev_image_path,omitempty"` // Image path of previous patch image. Field introduced in 18.2.10, 20.1.1. 
PrevPatchImagePath *string `json:"prev_patch_image_path,omitempty"` // Image uuid for identifying previous base image.Example Base-image was 18.2.5 and an upgrade was done to 18.2.6, then this field will indicate the 18.2.5 value. . It is a reference to an object of type Image. Field introduced in 18.2.6. PreviousImageRef *string `json:"previous_image_ref,omitempty"` // Image uuid for identifying previous patch.Example Base-image was 18.2.6 with a patch 6p1. Upgrade was initiated to 18.2.8 with patch 8p1. The previous_image field will contain 18.2.6 and this field will indicate the 6p1 value. . It is a reference to an object of type Image. Field introduced in 18.2.6. PreviousPatchImageRef *string `json:"previous_patch_image_ref,omitempty"` // List of patches applied to this node on previous major version. Field introduced in 18.2.8, 20.1.1. PreviousPatchList []*PatchData `json:"previous_patch_list,omitempty"` // Previous patch version applied to this node.Example Base-image was 18.2.6 with a patch 6p1. Upgrade was initiated to 18.2.8 with patch 8p1. The previous_image field will contain 18.2.6 and this field will indicate the 6p1 value. . Field introduced in 18.2.6. PreviousPatchVersion *string `json:"previous_patch_version,omitempty"` // Previous version prior to upgrade.Example Base-image was 18.2.5 and an upgrade was done to 18.2.6, then this field will indicate the 18.2.5 value. . Field introduced in 18.2.6. PreviousVersion *string `json:"previous_version,omitempty"` // Upgrade operations progress which holds value between 0-100. Allowed values are 0-100. Field introduced in 18.2.8, 20.1.1. Unit is PERCENT. Progress *int32 `json:"progress,omitempty"` // Image path of se patch image.(required in case of reimage and upgrade + patch). Field introduced in 18.2.10, 20.1.1. SePatchImagePath *string `json:"se_patch_image_path,omitempty"` // Image uuid for identifying the current se patch required in case of system upgrade(re-image) with se patch. . 
It is a reference to an object of type Image. Field introduced in 18.2.10, 20.1.1. SePatchImageRef *string `json:"se_patch_image_ref,omitempty"` // ServiceEngineGroup upgrade errors. Field introduced in 18.2.6. SeUpgradeEvents []*SeUpgradeEvents `json:"se_upgrade_events,omitempty"` // se_patch may be different from the controller_patch. It has to be saved in the journal for subsequent consumption. The SeGroup params will be saved in the controller entry as seg_params. . Field introduced in 18.2.10, 20.1.1. SegParams *UpgradeOpsParam `json:"seg_params,omitempty"` // Detailed SeGroup status. Field introduced in 18.2.6. SegStatus *SeGroupStatus `json:"seg_status,omitempty"` // Start time of Upgrade operation. Field introduced in 18.2.6. StartTime *string `json:"start_time,omitempty"` // Current status of the Upgrade operation. Field introduced in 18.2.6. State *UpgradeOpsState `json:"state,omitempty"` // Flag is set only in the cluster if the upgrade is initiated as a system-upgrade. . Field introduced in 18.2.6. System *bool `json:"system,omitempty"` // Completed set of tasks in the Upgrade operation. Field introduced in 18.2.6. TasksCompleted *int32 `json:"tasks_completed,omitempty"` // Tenant that this object belongs to. It is a reference to an object of type Tenant. Field introduced in 18.2.6. TenantRef *string `json:"tenant_ref,omitempty"` // Total number of tasks in the Upgrade operation. Field introduced in 18.2.6. TotalTasks *int32 `json:"total_tasks,omitempty"` // Events performed for Upgrade operation. Field introduced in 18.2.6. UpgradeEvents []*EventMap `json:"upgrade_events,omitempty"` // Upgrade operations requested. Enum options - UPGRADE, PATCH, ROLLBACK, ROLLBACKPATCH, SEGROUP_RESUME. Field introduced in 18.2.6. UpgradeOps *string `json:"upgrade_ops,omitempty"` // url // Read Only: true URL *string `json:"url,omitempty"` // UUID Identifier for the system such as cluster, se group and se. Field introduced in 18.2.6. 
UUID *string `json:"uuid,omitempty"` // Current base image applied to this node. Field introduced in 18.2.6. Version *string `json:"version,omitempty"` }
kalamay/eddy
lib/bpt.c
#include "eddy-private.h" _Static_assert(sizeof(EdBpt) == PAGESIZE, "EdBpt size invalid"); _Static_assert(offsetof(EdBpt, data) % 8 == 0, "EdBpt data not 8-byte aligned"); #define BRANCH_KEY_SIZE 8 #define BRANCH_PTR_SIZE (sizeof(EdPgno)) #define BRANCH_ENTRY_SIZE (BRANCH_PTR_SIZE + BRANCH_KEY_SIZE) #define BRANCH_NEXT(pg) ((EdPgno *)((uint8_t *)(pg) + BRANCH_ENTRY_SIZE)) #define BRANCH_ORDER \ (((sizeof(((EdBpt *)0)->data) - BRANCH_PTR_SIZE) / BRANCH_ENTRY_SIZE) + 1) #define LEAF_ORDER(esize) \ (sizeof(((EdBpt *)0)->data) / (esize)) #define IS_BRANCH(n) ((n)->base.type == ED_PG_BRANCH) #define IS_BRANCH_FULL(n) ((n)->nkeys == (BRANCH_ORDER-1)) #define IS_LEAF_FULL(n, esize) ((n)->nkeys == LEAF_ORDER(esize)) #define IS_FULL(n, esize) (IS_BRANCH(n) ? IS_BRANCH_FULL(n) : IS_LEAF_FULL(n, esize)) static inline uint64_t branch_key(EdBpt *b, uint16_t idx) { assert(idx <= b->nkeys); if (idx == 0) { return 0; } return ed_fetch64(b->data + idx*BRANCH_ENTRY_SIZE - BRANCH_KEY_SIZE); } static inline EdPgno branch_ptr(EdBpt *b, uint16_t idx) { assert(idx <= b->nkeys); return ed_fetch32(b->data + idx*BRANCH_ENTRY_SIZE); } static inline void branch_set_key(EdBpt *b, uint16_t idx, uint64_t val) { assert(idx <= b->nkeys); if (idx == 0) { return; } memcpy(b->data + idx*BRANCH_ENTRY_SIZE - BRANCH_KEY_SIZE, &val, sizeof(val)); } static inline void branch_set_ptr(EdBpt *b, uint16_t idx, EdPgno val) { assert(idx <= b->nkeys); memcpy(b->data + idx*BRANCH_ENTRY_SIZE, &val, sizeof(val)); } static inline uint16_t branch_index(EdBpt *b, EdPgno *ptr) { assert(b->data <= (uint8_t *)ptr); assert((uint8_t *)ptr < b->data + (BRANCH_ORDER*BRANCH_ENTRY_SIZE - BRANCH_KEY_SIZE)); return ((uint8_t *)ptr - b->data) / BRANCH_ENTRY_SIZE; } static EdPgno * branch_search(EdBpt *b, uint64_t key) { // TODO: binary search or SIMD EdPgno *ptr = (EdPgno *)b->data; uint8_t *bkey = b->data + BRANCH_PTR_SIZE; for (uint32_t i = 0, n = b->nkeys; i < n; i++, bkey += BRANCH_ENTRY_SIZE) { uint64_t cmp = 
ed_fetch64(bkey); if (key < cmp) { break; } ptr = BRANCH_NEXT(ptr); if (key == cmp) { break; } } return ptr; } static inline uint64_t leaf_key(EdBpt *l, uint16_t idx, size_t esize) { assert(idx < l->nkeys); return ed_fetch64(l->data + idx*esize); } size_t ed_branch_order(void) { return BRANCH_ORDER; } size_t ed_leaf_order(size_t esize) { return LEAF_ORDER(esize); } size_t ed_bpt_capacity(size_t esize, size_t depth) { return llround(pow(BRANCH_ORDER, depth-1) * LEAF_ORDER(esize)); } int ed_bpt_find(EdTxn *txn, unsigned db, uint64_t key, void **ent) { if (txn->state != ED_TXN_OPEN) { // FIXME: return proper error code return ed_esys(EINVAL); } EdTxnDb *dbp = ed_txn_db(txn, db, true); int rc = 0; uint32_t i = 0, n = 0; uint8_t *data = NULL; size_t esize = dbp->entry_size; uint64_t kmin = 0, kmax = UINT64_MAX; EdNode *node = dbp->root; if (node == NULL) { dbp->nsplits = 1; goto done; } // The root node needs two pages when splitting. dbp->nsplits = IS_FULL(node->tree, esize); // Search down the branches of the tree. while (IS_BRANCH(node->tree)) { if (IS_BRANCH_FULL(node->tree)) { dbp->nsplits++; } else { dbp->nsplits = 0; } EdPgno *ptr = branch_search(node->tree, key); uint16_t bidx = branch_index(node->tree, ptr); EdPgno no = ed_fetch32(node->tree->data + bidx*BRANCH_ENTRY_SIZE); EdNode *next; rc = ed_txn_map(txn, no, node, bidx, &next); if (rc < 0) { goto done; } if (bidx > 0) { kmin = branch_key(node->tree, bidx); } if (bidx < node->tree->nkeys) { kmax = branch_key(node->tree, bidx + 1) - 1; } node = next; dbp->find = node; } if (IS_LEAF_FULL(node->tree, esize)) { dbp->nsplits++; } else { dbp->nsplits = 0; } // Search the leaf node. 
data = node->tree->data; for (i = 0, n = node->tree->nkeys; i < n; i++, data += esize) { uint64_t cmp = ed_fetch64(data); if (key == cmp) { kmax = cmp; rc = 1; break; } else if (key < cmp) { kmax = cmp; break; } kmin = cmp; } done: if (rc >= 0) { dbp->key = key; dbp->kmin = kmin; dbp->kmax = kmax; dbp->entry = dbp->start = data; dbp->entry_index = i; dbp->nmatches = rc; dbp->nloops = 0; dbp->hasfind = true; dbp->haskey = true; if (rc == 1) { dbp->hasentry = true; if (ent) { *ent = data; } } else { dbp->hasentry = false; if (ent) { *ent = NULL; } } } dbp->match = rc; return rc; } static uint64_t find_kmin(EdNode *node) { if (node->parent == NULL) { return 0; } if (node->pindex > 0) { return branch_key(node->parent->tree, node->pindex-1) - 1; } return find_kmin(node->parent); } static uint64_t find_kmax(EdNode *node) { if (node->parent == NULL) { return UINT64_MAX; } if (node->pindex < node->parent->tree->nkeys) { return branch_key(node->parent->tree, node->pindex+1) - 1; } return find_kmax(node->parent); } /** * @brief Move ths db find to the first entry from a start point * @param txn Transaction object * @param dbp Transaction database object * @param from Node to move from * @param kmin The current minimum key value range * @param kmax The current maximum key value range * @return 0 on succces, <0 on error */ static int move_first(EdTxn *txn, EdTxnDb *dbp, EdNode *from, uint64_t kmin, uint64_t kmax) { int rc = 0; if (from == NULL) { goto done; } while (IS_BRANCH(from->tree)) { EdPgno no = branch_ptr(from->tree, 0); EdNode *next; rc = ed_txn_map(txn, no, from, 0, &next); if (rc < 0) { goto done; } kmax = branch_key(from->tree, 1) - 1; from = next; } dbp->find = from; if (from->tree->nkeys > 0) { kmax = leaf_key(from->tree, 0, dbp->entry_size); } done: if (rc >= 0) { dbp->kmin = kmin; dbp->kmax = kmax; dbp->hasentry = true; dbp->entry = dbp->find->tree->data; dbp->entry_index = 0; } dbp->match = rc; dbp->nmatches = 0; return rc; } /** * @brief Move ths db find to 
the last entry from a start point * @param txn Transaction object * @param dbp Transaction database object * @param from Node to move from * @param kmin The current minimum key value range * @param kmax The current maximum key value range * @return 0 on succces, <0 on error */ static int move_last(EdTxn *txn, EdTxnDb *dbp, EdNode *from, uint64_t kmin, uint64_t kmax) { int rc = 0; if (from == NULL) { goto done; } while (IS_BRANCH(from->tree)) { EdPgno no = branch_ptr(from->tree, from->tree->nkeys); EdNode *next; rc = ed_txn_map(txn, no, from, from->tree->nkeys, &next); if (rc < 0) { goto done; } kmin = branch_key(from->tree, from->tree->nkeys); from = next; } dbp->find = from; if (from->tree->nkeys > 0) { kmin = leaf_key(from->tree, from->tree->nkeys - 1, dbp->entry_size); } done: if (rc >= 0) { dbp->kmin = kmin; dbp->kmax = kmax; dbp->hasentry = true; dbp->entry = dbp->find->tree->data + (dbp->find->tree->nkeys - 1) * dbp->entry_size; dbp->entry_index = dbp->find->tree->nkeys - 1; } dbp->match = rc; dbp->nmatches = 0; return rc; } /** * @brief Moves the db find to a right sibling node * @param txn Transaction object * @param dbp Transaction database object * @param from Node to move from * @return 0 on succces, <0 on error */ static int move_right(EdTxn *txn, EdTxnDb *dbp, EdNode *from) { assert(from->page->type == ED_PG_LEAF); uint64_t kmin, kmax; // Traverse up the nearest node that isn't the last key of its parent. do { // When at the root, wrapping is the only option. if (from->parent == NULL) { kmin = 0; kmax = UINT64_MAX; break; } // If a child node is not the last key, load the sibling. if (from->pindex < from->parent->tree->nkeys) { EdPgno no = branch_ptr(from->parent->tree, from->pindex+1); int rc = ed_txn_map(txn, no, from->parent, from->pindex+1, &from); if (rc < 0) { return rc; } kmin = branch_key(from->parent->tree, from->pindex); kmax = find_kmax(from); break; } from = from->parent; } while (1); // Traverse down to the left-most leaf. 
return move_first(txn, dbp, from, kmin, kmax); } /** * @brief Moves the db find to a left sibling node * @param txn Transaction object * @param dbp Transaction database object * @param from Node to move from * @return 0 on succces, <0 on error */ static int move_left(EdTxn *txn, EdTxnDb *dbp, EdNode *from) { assert(from->page->type == ED_PG_LEAF); uint64_t kmin, kmax; // Traverse up the nearest node that isn't the last key of its parent. do { // When at the root, wrapping is the only option. if (from->parent == NULL) { kmin = 0; kmax = UINT64_MAX; break; } // If a child node is not the first key, load the sibling. if (from->pindex > 0) { EdPgno no = branch_ptr(from->parent->tree, from->pindex-1); int rc = ed_txn_map(txn, no, from->parent, from->pindex-1, &from); if (rc < 0) { return rc; } kmin = find_kmin(from); kmax = branch_key(from->parent->tree, from->pindex); break; } from = from->parent; } while (1); // Traverse down to the left-most leaf. return move_last(txn, dbp, from, kmin, kmax); } int ed_bpt_first(EdTxn *txn, unsigned db, void **ent) { EdTxnDb *dbp = &txn->db[db]; int rc = move_first(txn, dbp, dbp->root, 0, UINT64_MAX); if (rc == 0) { dbp->start = dbp->entry; dbp->nloops = 0; dbp->hasfind = true; if (ent) { *ent = dbp->entry; } } return rc; } int ed_bpt_last(EdTxn *txn, unsigned db, void **ent) { EdTxnDb *dbp = &txn->db[db]; int rc = move_last(txn, dbp, dbp->root, 0, UINT64_MAX); if (rc == 0) { dbp->start = dbp->entry; dbp->nloops = 0; dbp->hasfind = true; if (ent) { *ent = dbp->entry; } } return rc; } int ed_bpt_next(EdTxn *txn, unsigned db, void **ent) { EdTxnDb *dbp = &txn->db[db]; if (!dbp->hasfind) { return ED_EINDEX_KEY_MATCH; } if (dbp->find == NULL) { return 0; } int rc = 0; uint32_t i = dbp->entry_index; if (dbp->hasentry) { i++; } if (i >= dbp->find->tree->nkeys) { rc = move_right(txn, dbp, dbp->find); if (rc < 0) { goto error; } } else if (dbp->hasentry) { dbp->entry = (uint8_t *)dbp->entry + dbp->entry_size; dbp->entry_index++; dbp->kmin = 
dbp->kmax; dbp->kmax = ed_fetch64(dbp->entry); } else { dbp->hasentry = true; } if (dbp->haskey) { if (dbp->key == ed_fetch64(dbp->entry)) { dbp->nmatches++; rc = 1; } else { dbp->haskey = false; } } if (ent) { *ent = dbp->entry; } if (dbp->entry == dbp->start) { dbp->nloops++; } dbp->match = rc; return rc; error: dbp->match = 0; txn->error = rc; return rc; } int ed_bpt_prev(EdTxn *txn, unsigned db, void **ent) { EdTxnDb *dbp = &txn->db[db]; if (!dbp->hasfind) { return ED_EINDEX_KEY_MATCH; } if (dbp->find == NULL) { return 0; } int rc = 0; uint32_t i = dbp->entry_index; if (i == 0) { rc = move_left(txn, dbp, dbp->find); if (rc < 0) { goto error; } } else { dbp->entry = (uint8_t *)dbp->entry - dbp->entry_size; dbp->entry_index--; dbp->kmax = dbp->kmin; dbp->kmin = ed_fetch64(dbp->entry); dbp->hasentry = true; } if (dbp->haskey) { if (dbp->key == ed_fetch64(dbp->entry)) { dbp->nmatches++; rc = 1; } else { dbp->haskey = false; } } if (ent) { *ent = dbp->entry; } if (dbp->entry == dbp->start) { dbp->nloops++; } dbp->match = rc; return rc; error: dbp->match = 0; txn->error = rc; return rc; } int ed_bpt_loop(const EdTxn *txn, unsigned db) { return txn->db[db].nloops; } static int set_node(EdTxn *txn, EdTxnDb *dbp, EdNode *node) { EdNode *parent = node->parent; if (parent == NULL) { dbp->root = node; return 0; } assert(parent->page->type == ED_PG_BRANCH); if (parent->tree->xid < txn->xid) { EdNode *src = parent; int rc = ed_txn_clone(txn, src, &parent); if (rc < 0) { return rc; } memcpy(parent->tree->data, src->tree->data, src->tree->nkeys*BRANCH_ENTRY_SIZE + BRANCH_PTR_SIZE); node->parent = parent; } if (node->tree->xid == txn->xid) { branch_set_ptr(parent->tree, node->pindex, node->page->no); } return set_node(txn, dbp, parent); } static int set_leaf(EdTxn *txn, EdTxnDb *dbp, EdNode *leaf, uint32_t eidx) { int rc = set_node(txn, dbp, leaf); if (rc == 0 && eidx == 0 && leaf->pindex > 0) { branch_set_key(leaf->parent->tree, leaf->pindex, ed_fetch64(leaf->tree->data)); } 
return 0; } static int insert_into_parent(EdTxn *txn, EdTxnDb *dbp, EdNode *l, EdNode *r, uint64_t rkey) { assert(r->pindex == l->pindex + 1); EdNode *branch = l->parent; uint32_t eidx = l->pindex; // When the branch is NULL, we have a new root of the tree. if (branch == NULL) { int rc = ed_txn_alloc(txn, NULL, 0, &branch); if (rc < 0) { return rc; } branch->page->type = ED_PG_BRANCH; branch->tree->next = ED_PG_NONE; branch->tree->nkeys = 1; } // If the branch is full, it needs to be split. This splitting approach is // less efficent than the way leaves are split: entry positions are fully // copied and then shifted at the insertion point. But given the additional // complexity of splitting branch data, and the relatively infrequent need to // do so, this hasn't been improved. else if (IS_BRANCH_FULL(branch->tree)) { int mid = (branch->tree->nkeys+1) / 2; size_t off = mid * BRANCH_ENTRY_SIZE; uint64_t rbkey = branch_key(branch->tree, mid); EdNode *left = branch, *right; int rc = ed_txn_alloc(txn, left->parent, left->pindex + 1, &right); if (rc < 0) { return rc; } if (branch->tree->xid < txn->xid) { rc = ed_txn_clone(txn, branch, &left); if (rc < 0) { return rc; } // Copy all entries left of the mid point. memcpy(left->tree->data, branch->tree->data, off - BRANCH_KEY_SIZE); } right->page->type = ED_PG_BRANCH; right->tree->next = ED_PG_NONE; right->tree->nkeys = branch->tree->nkeys - mid; left->tree->nkeys = mid - 1; // Copy all entries right of the mid point. memcpy(right->tree->data, branch->tree->data+off, sizeof(branch->tree->data) - off); // The new entry goes on the left-side branch. if (rkey < rbkey) { branch = left; } // The new entry goes on the right-side branch. else { branch = right; eidx -= mid; l->pindex = eidx; r->pindex = eidx + 1; } // Shift the new branch entries to make room for the new entry. 
size_t pos = BRANCH_PTR_SIZE + eidx*BRANCH_ENTRY_SIZE; memmove(branch->tree->data + pos + BRANCH_ENTRY_SIZE, branch->tree->data + pos, (branch->tree->nkeys - eidx) * BRANCH_ENTRY_SIZE); branch->tree->nkeys++; rc = insert_into_parent(txn, dbp, left, right, rbkey); if (rc < 0) { return rc; } } // Otherwie we expand the current node. else { size_t pos = BRANCH_PTR_SIZE + eidx*BRANCH_ENTRY_SIZE; EdNode *src = branch; // The source node must be cloned if was from a previous transaction. This // node could have already been modified from a prior operation within the // current transaction. if (src->tree->xid < txn->xid) { int rc = ed_txn_clone(txn, src, &branch); if (rc < 0) { return rc; } memcpy(branch->tree->data, src->tree->data, pos); } // Shift all entries after the new index over. The src node must be part of // the current transaction at this point. If the node was cloned, this will // copy the remaining data from the previous version. Otherwise, this will // shift the entries in place. memmove(branch->tree->data + pos + BRANCH_ENTRY_SIZE, src->tree->data + pos, (src->tree->nkeys - eidx) * BRANCH_ENTRY_SIZE); branch->tree->nkeys++; } l->parent = branch; r->parent = branch; set_node(txn, dbp, l); branch_set_key(r->parent->tree, r->pindex, rkey); set_node(txn, dbp, r); return 0; } static int split_point(EdTxnDb *dbp, EdBpt *l) { uint16_t n = l->nkeys, mid = n/2, min, max; uint64_t key; size_t esize = dbp->entry_size; // The split cannot be between repeated keys. // If the searched index is around the mid point, use the key and search position. if (dbp->nmatches > 0 && mid <= dbp->entry_index && mid >= dbp->entry_index - dbp->nmatches + 1) { key = dbp->key; min = dbp->entry_index - dbp->nmatches + 1; max = dbp->entry_index + 1; } // Otherwise search back for the start of any repeat sequence. 
else { key = leaf_key(l, mid, esize); if (key == dbp->key) { min = dbp->entry_index - dbp->nmatches + 1; } else { for (min = mid; min > 0 && leaf_key(l, min-1, esize) == key; min--) {} } max = mid + 1; } // If repeat keys span the mid point, pick the larger side to split on. if (min != mid) { for (; max < n && leaf_key(l, max, esize) == key; max++) {} if (min == 0 && max == n) { return ED_EINDEX_DUPKEY; } mid = min >= n - max ? min : max; } return mid; } static int split_leaf(EdTxn *txn, EdTxnDb *dbp, EdNode *leaf, int mid) { size_t esize = dbp->entry_size; uint32_t eidx = dbp->entry_index; size_t off = mid * esize; // If the new key will be the first entry on right, use the search key. uint64_t rkey = (uint16_t)mid == eidx ? dbp->key : ed_fetch64(leaf->tree->data+off); assert(ed_fetch64(leaf->tree->data + off - esize) < rkey); assert(rkey <= ed_fetch64(leaf->tree->data + off)); EdNode *left = leaf, *right; int rc = ed_txn_alloc(txn, leaf->parent, leaf->pindex + 1, &right); if (rc < 0) { return rc; } if (leaf->tree->xid < txn->xid) { rc = ed_txn_clone(txn, leaf, &left); if (rc < 0) { return rc; } } right->page->type = ED_PG_LEAF; right->tree->next = ED_PG_NONE; right->tree->nkeys = leaf->tree->nkeys - mid; left->tree->nkeys = mid; // The new entry goes on the left-side leaf. if (eidx < (uint16_t)mid) { dbp->find = left; // Copy entries after the mid point to new right leaf. memcpy(right->tree->data, leaf->tree->data+off, sizeof(leaf->tree->data) - off); // If left is a newly cloned node, copy the entries left of the new index. if (left != leaf) { memcpy(left->tree->data, leaf->tree->data, eidx*esize); } // Shift entries before the mid point over in left leaf. memmove(left->tree->data + (eidx+1)*esize, leaf->tree->data + eidx*esize, (left->tree->nkeys - eidx) * esize); } // The new entry goes on the right-side leaf. 
else { eidx -= mid; if (left != leaf) { memcpy(left->tree->data, leaf->tree->data, off); } // Copy entries after the mid point but before the new index to new right leaf. memcpy(right->tree->data, leaf->tree->data+off, eidx*esize); // Copy entries after the mid point and after the new index to new right leaf. memcpy(right->tree->data + (eidx+1)*esize, leaf->tree->data + off + eidx*esize, (right->tree->nkeys - eidx) * esize); dbp->entry_index = eidx; dbp->find = right; } dbp->entry = dbp->find->tree->data + eidx*esize; return insert_into_parent(txn, dbp, left, right, rkey); } static int insert_into_leaf(EdTxn *txn, EdTxnDb *dbp, const void *ent, bool replace) { EdNode *leaf = dbp->find; size_t esize = dbp->entry_size; uint32_t eidx = dbp->entry_index; // When the leaf is NULL, we have a brand new tree. if (leaf == NULL) { int rc = ed_txn_alloc(txn, NULL, 0, &leaf); if (rc < 0) { return rc; } leaf->page->type = ED_PG_LEAF; leaf->tree->next = ED_PG_NONE; leaf->tree->nkeys = 1; dbp->entry = leaf->tree->data; dbp->entry_index = 0; dbp->find = leaf; } // If the leaf is full, it needs to be split. else if (!replace && IS_LEAF_FULL(leaf->tree, esize)) { int mid = split_point(dbp, leaf->tree); if (mid < 0) { return mid; } int rc = split_leaf(txn, dbp, leaf, mid); if (rc < 0) { return rc; } leaf = dbp->find; leaf->tree->nkeys++; } // Otherwie we expand the current node. else { EdNode *src = leaf; // The source node must be cloned if was from a previous transaction. This // node could have already been modified from a prior operation within the // current transaction. if (src->tree->xid < txn->xid) { int rc = ed_txn_clone(txn, src, &leaf); if (rc < 0) { return rc; } dbp->find = leaf; dbp->entry = leaf->tree->data + eidx*esize; // When replacing, copy the full data. Otherwise only copy left of then new // insertion index. The following memmove will copy the right side. memcpy(leaf->tree->data, src->tree->data, replace ? 
sizeof(src->tree->data) : eidx*esize); } if (!replace) { // Shift all entries after the new index over. The src node must be part of // the current transaction at this point. If the node was cloned, this will // copy the remaining data from the previous version. Otherwise, this will // shift the entries in place. memmove(leaf->tree->data + eidx*esize + esize, src->tree->data + eidx*esize, (src->tree->nkeys - eidx)*esize); leaf->tree->nkeys++; } } // The insert location is now available for assignment and it is part of the // current transaction. memcpy(dbp->entry, ent, esize); return set_leaf(txn, dbp, leaf, eidx); } int ed_bpt_set(EdTxn *txn, unsigned db, const void *ent, bool replace) { if (ed_txn_isrdonly(txn)) { return ED_EINDEX_RDONLY; } EdTxnDb *dbp = ed_txn_db(txn, db, false); uint64_t key = ed_fetch64(ent); if (!dbp->hasfind || key < dbp->kmin || key > dbp->kmax) { return ED_EINDEX_KEY_MATCH; } int rc = insert_into_leaf(txn, dbp, ent, replace && dbp->match == 1); if (rc < 0) { txn->error = rc; return rc; } dbp->key = key; dbp->kmax = key; dbp->nsplits = 0; dbp->match = 1; dbp->nmatches = 1; dbp->nloops = 0; dbp->haskey = true; dbp->hasentry = true; return 0; } int ed_bpt_del(EdTxn *txn, unsigned db) { // FIXME: this function is terrible if (ed_txn_isrdonly(txn)) { return ED_EINDEX_RDONLY; } EdTxnDb *dbp = ed_txn_db(txn, db, false); if (!dbp->hasfind) { return ED_EINDEX_RDONLY; } if (!dbp->hasentry) { return 0; } EdNode *leaf = dbp->find; size_t esize = dbp->entry_size; uint32_t eidx = dbp->entry_index; if (leaf->tree->xid < txn->xid) { EdNode *src = leaf; int rc = ed_txn_clone(txn, src, &leaf); if (rc < 0) { return rc; } memcpy(leaf->tree->data, src->tree->data, sizeof(leaf->tree->data)); rc = set_node(txn, dbp, leaf); if (rc < 0) { return rc; } dbp->entry = leaf->tree->data + esize*eidx; dbp->find = leaf; } memmove(dbp->entry, (uint8_t *)dbp->entry + esize, (leaf->tree->nkeys - eidx - 1) * esize); if (leaf->tree->nkeys == 1) { leaf->tree->nkeys = 0; } else 
{ leaf->tree->nkeys--; #if 0 if (leaf->parent && eidx == 0) { branch_set_key(leaf->parent->tree, leaf->pindex, ed_fetch64(dbp->entry)); } #endif } if (dbp->entry_index == leaf->tree->nkeys) { dbp->kmax = find_kmax(leaf); } else { dbp->kmax = ed_fetch64(dbp->entry); } dbp->nmatches = 0; dbp->hasentry = false; return 1; } static int bpt_mark_children(EdIdx *idx, EdStat *stat, EdBpt *brch, int depth, int *max) { EdPgno *ptr = (EdPgno *)brch->data; for (uint32_t i = 0; i <= brch->nkeys; i++, ptr = BRANCH_NEXT(ptr)) { EdPgno no = ed_fetch32(ptr); int rc = ed_stat_mark(stat, no); if (rc < 0) { return rc; } if (depth < *max) { EdBpt *chld = ed_pg_map(idx->fd, no, 1, true); if (chld == MAP_FAILED) { return ED_ERRNO; } if (chld->base.type == ED_PG_LEAF) { *max = depth; } else { rc = bpt_mark_children(idx, stat, chld, depth+1, max); } ed_pg_unmap(chld, 1); if (rc < 0) { return rc; } } } return 0; } int ed_bpt_mark(EdIdx *idx, EdStat *stat, EdBpt *bpt) { int rc = ed_stat_mark(stat, bpt->base.no); if (rc < 0 || bpt->base.type == ED_PG_LEAF) { return rc; } int max = 8; return bpt_mark_children(idx, stat, bpt, 1, &max); } #define HBAR "╌" #define VBAR "┆" static const char tl[] = "╭", tc[] = "┬", tr[] = "╮", ml[] = "├", mc[] = "┼", mr[] = "┤", bl[] = "╰", bc[] = "┴", br[] = "╯", hbar[] = HBAR HBAR HBAR HBAR HBAR HBAR HBAR HBAR HBAR HBAR HBAR HBAR HBAR HBAR HBAR HBAR HBAR HBAR HBAR HBAR HBAR HBAR HBAR HBAR; #define COLS 6 #define COLW ((sizeof(hbar)-1)/(sizeof(HBAR)-1)) static const char space[COLW] = " "; static void print_page(int fd, size_t esize, uint8_t *p, FILE *out, EdBptPrint print, bool *stack, int top); static int print_value(const void *value, char *buf, size_t len) { return snprintf(buf, len, "%" PRIu64, ed_fetch64(value)); } static void print_tree_branches(FILE *out, bool *stack, int top) { for (int i = 0; i < top; i++) { if (stack[i]) { static const char s[] = " "; fwrite(s, 1, sizeof(s)-1, out); } else { static const char s[] = "│ "; fwrite(s, 1, sizeof(s)-1, out); 
} } } static void print_tree(FILE *out, bool *stack, int top) { print_tree_branches(out, stack, top); if (stack[top]) { static const char s[] = "└ "; fwrite(s, 1, sizeof(s)-1, out); } else { static const char s[] = "├ "; fwrite(s, 1, sizeof(s)-1, out); } } static void print_box(FILE *out, uint32_t i, uint32_t n, bool *stack, int top) { if (n == 0 || i > n) { return; } bool last = false; uint32_t end = 0; if (i == n) { last = true; uint32_t c = n % COLS; if (c == 0) { c = COLS; } end = i + c; } else if (i % COLS == 0) { end = n < COLS ? n : i + COLS; } if (i < end) { if (i) { fprintf(out, VBAR); } fputc('\n', out); print_tree_branches(out, stack, top); fwrite(i == 0 ? tl : (i < n ? ml : bl), 1, sizeof(tl)-1, out); fwrite(hbar, 1, sizeof(hbar)-1, out); for (i++; i < end; i++) { fwrite(!last && i < COLS ? tc : (i <= n ? mc : bc), 1, sizeof(tc)-1, out); fwrite(hbar, 1, sizeof(hbar)-1, out); } fwrite(!last && i <= COLS ? tr : (i <= n ? mr : br), 1, sizeof(tr)-1, out); fputc('\n', out); if (last) { return; } print_tree_branches(out, stack, top); } fprintf(out, VBAR); } static void print_leaf(int fd, size_t esize, EdBpt *leaf, FILE *out, EdBptPrint print, bool *stack, int top) { fprintf(out, "leaf p%u, xid=%" PRIu64 ", nkeys=%u/%zu", leaf->base.no, leaf->xid, leaf->nkeys, LEAF_ORDER(esize)); uint32_t n = leaf->nkeys; if (n == 0) { fputc('\n', out); return; } uint8_t *p = leaf->data; for (uint32_t i = 0; i < n; i++, p += esize) { char buf[COLW+1]; int len = print(p, buf, sizeof(buf)); if (len < 0 || len > (int)COLW) { len = 0; } print_box(out, i, n, stack, top); fwrite(buf, 1, len, out); fwrite(space, 1, COLW-len, out); } print_box(out, n, n, stack, top); if (leaf->next != ED_PG_NONE) { EdBpt *next = ed_pg_map(fd, leaf->next, 1, true); print_tree(out, stack, top-1); fprintf(out, "= %" PRIu64 ", ", ed_fetch64(next->data)); print_leaf(fd, esize, next, out, print, stack, top); ed_pg_unmap(next, 1); } } static void print_branch(int fd, size_t esize, EdBpt *branch, FILE *out, 
EdBptPrint print, bool *stack, int top) { fprintf(out, "branch p%u, xid=%" PRIu64 ", nkeys=%u/%zu\n", branch->base.no, branch->xid, branch->nkeys, BRANCH_ORDER-1); uint32_t end = branch->nkeys; uint8_t *p = branch->data + BRANCH_PTR_SIZE; stack[top] = end == 0; print_tree(out, stack, top); fprintf(out, "< %" PRIu64 ", ", ed_fetch64(p)); print_page(fd, esize, p-BRANCH_PTR_SIZE, out, print, stack, top+1); for (uint32_t i = 1; i <= end; i++, p += BRANCH_ENTRY_SIZE) { stack[top] = i == end; print_tree(out, stack, top); fprintf(out, "≥ %" PRIu64 ", ", ed_fetch64(p)); print_page(fd, esize, p+BRANCH_KEY_SIZE, out, print, stack, top+1); } } static void print_node(int fd, size_t esize, EdBpt *t, FILE *out, EdBptPrint print, bool *stack, int top) { switch (t->base.type) { case ED_PG_LEAF: print_leaf(fd, esize, t, out, print, stack, top); break; case ED_PG_BRANCH: print_branch(fd, esize, t, out, print, stack, top); break; } } static void print_page(int fd, size_t esize, uint8_t *p, FILE *out, EdBptPrint print, bool *stack, int top) { EdBpt *t = ed_pg_map(fd, ed_fetch32(p), 1, true); if (t == MAP_FAILED) { fprintf(out, "MAP FAILED (%s)\n", strerror(errno)); return; } print_node(fd, esize, t, out, print, stack, top); ed_pg_unmap(t, 1); } static int verify_leaf(int fd, size_t esize, EdBpt *l, FILE *out, uint64_t min, uint64_t max) { if (l->nkeys == 0) { return 0; } uint8_t *p = l->data; uint64_t last; for (uint32_t i = 0; i < l->nkeys; i++, p += esize) { uint64_t key = ed_fetch64(p); if (key < min || key > max) { if (out != NULL) { fprintf(out, "leaf key out of range: %" PRIu64 ", %" PRIu64 "...%" PRIu64 "\n", key, min, max); bool stack[16] = {0}; print_leaf(fd, esize, l, out, print_value, stack, 0); } return -1; } if (i > 0 && key < last) { if (out != NULL) { fprintf(out, "leaf key out of order: %" PRIu64 "\n", key); bool stack[16] = {0}; print_leaf(fd, esize, l, out, print_value, stack, 0); } return -1; } last = key; } return 0; } static int verify_node(int fd, size_t esize, 
EdBpt *t, FILE *out, uint64_t min, uint64_t max) { if (t->base.type == ED_PG_LEAF) { return verify_leaf(fd, esize, t, out, min, max); } uint8_t *p = t->data; uint64_t nmin = min; EdBpt *chld; int rc; for (uint16_t i = 0; i < t->nkeys; i++, p += BRANCH_ENTRY_SIZE) { uint64_t nmax = ed_fetch64(p + BRANCH_PTR_SIZE); if (nmax < min || nmax > max) { if (out != NULL) { fprintf(out, "branch key out of range: %" PRIu64 ", %" PRIu64 "...%" PRIu64 "\n", nmax, min, max); bool stack[16] = {0}; print_branch(fd, esize, t, out, print_value, stack, 0); } return -1; } chld = ed_pg_map(fd, ed_fetch32(p), 1, true); if (chld == MAP_FAILED) { return ED_ERRNO; } rc = verify_node(fd, esize, chld, out, nmin, nmax - 1); ed_pg_unmap(chld, 1); if (rc < 0) { return rc; } nmin = nmax; } chld = ed_pg_map(fd, ed_fetch32(p), 1, true); if (chld == MAP_FAILED) { return ED_ERRNO; } rc = verify_node(fd, esize, chld, out, nmin, max); ed_pg_unmap(chld, 1); if (rc < 0) { return rc; } return 0; } void ed_bpt_print(EdBpt *t, int fd, size_t esize, FILE *out, EdBptPrint print) { if (t == NULL) { return; } if (out == NULL) { out = stdout; } if (print == NULL) { print = print_value; } bool stack[16] = {1}; fwrite(space, 1, 3, out); print_node(fd, esize, t, out, print, stack, 1); } int ed_bpt_verify(EdBpt *t, int fd, size_t esize, FILE *out) { if (t == NULL) { return 0; } return verify_node(fd, esize, t, out, 0, UINT64_MAX); }
Caprowni/concourse
testflight/regression_test.go
package testflight_test import ( . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" "github.com/onsi/gomega/gexec" ) var _ = Describe("Regression tests", func() { Describe("issue 7282", func() { It("does not error when resources emit long metadata strings", func() { setAndUnpausePipeline("fixtures/long-metadata.yml") watch := fly("trigger-job", "-j", inPipeline("job"), "-w") Expect(watch).To(gexec.Exit(0)) }) }) })