repo_name
stringlengths
6
101
path
stringlengths
4
300
text
stringlengths
7
1.31M
brunolauze/openpegasus-providers-old
src/Providers/UNIXProviders/VendorPolicyAction/UNIX_VendorPolicyActionPrivate.h
/*
 * Platform-dispatch header for the UNIX_VendorPolicyAction provider.
 * Selects the OS-specific private header according to which PEGASUS_OS_*
 * macro the build defines; unknown platforms fall back to the stub header.
 */
#if defined(PEGASUS_OS_HPUX)
#	include "UNIX_VendorPolicyActionPrivate_HPUX.h"
#elif defined(PEGASUS_OS_LINUX)
#	include "UNIX_VendorPolicyActionPrivate_LINUX.h"
#elif defined(PEGASUS_OS_DARWIN)
#	include "UNIX_VendorPolicyActionPrivate_DARWIN.h"
#elif defined(PEGASUS_OS_AIX)
#	include "UNIX_VendorPolicyActionPrivate_AIX.h"
#elif defined(PEGASUS_OS_FREEBSD)
#	include "UNIX_VendorPolicyActionPrivate_FREEBSD.h"
#elif defined(PEGASUS_OS_SOLARIS)
#	include "UNIX_VendorPolicyActionPrivate_SOLARIS.h"
#elif defined(PEGASUS_OS_ZOS)
#	include "UNIX_VendorPolicyActionPrivate_ZOS.h"
#elif defined(PEGASUS_OS_VMS)
#	include "UNIX_VendorPolicyActionPrivate_VMS.h"
#elif defined(PEGASUS_OS_TRU64)
#	include "UNIX_VendorPolicyActionPrivate_TRU64.h"
#else
/* No recognized platform macro: use the empty stub implementation. */
#	include "UNIX_VendorPolicyActionPrivate_STUB.h"
#endif
Roxbili/kws-demo
freeze_and_tflite.py
import tensorflow as tf
import os, sys
import argparse
from tensorflow.python.tools import freeze_graph
from tensorflow.contrib.framework.python.ops import audio_ops as contrib_audio
from tensorflow.python.framework import graph_util

import input_data
import models
from convert_to_tflite import convert as convert2


def convert_frozen_graph_to_tflite(save_path=None, graph_name='frozen_graph.pb',
                                   input_array='fingerprint_input',
                                   output_array='logits/SoftMax',
                                   output_tflite_file="swiftnet-uint8.lite",
                                   quantisize_type=None,
                                   post_training_quantize=True,
                                   enable_dummy_quant=True):
    """Convert a frozen graph .pb into a TFLite model (optionally quantized).

    :param save_path: directory containing the frozen graph; the .tflite file
        is written to the same directory.
    :param graph_name: file name of the frozen graph inside save_path.
    :param input_array: input node name, or a list of input node names.
    :param output_array: output node name, or a list of output node names.
    :param output_tflite_file: output tflite file name.
    :param quantisize_type: None (float model), 'weight' (weight-only 8-bit)
        or 'all' (full uint8 inference).
    :param post_training_quantize: kept for interface compatibility; the
        official post-training path is disabled below (see comment).
    :param enable_dummy_quant: when quantisize_type == 'all', supply dummy
        default ranges for ops without recorded min/max statistics.
    """
    # Normalize node names to lists, as the converter API expects.
    input_arrays = input_array if isinstance(input_array, list) else [input_array]
    output_arrays = output_array if isinstance(output_array, list) else [output_array]

    graph_def_file = os.path.join(save_path, graph_name)
    try:
        converter = tf.contrib.lite.TFLiteConverter.from_frozen_graph(
            graph_def_file, input_arrays, output_arrays)
    except Exception:
        # tf >= 1.14 moved the converter out of contrib.
        converter = tf.lite.TFLiteConverter.from_frozen_graph(
            graph_def_file, input_arrays, output_arrays)

    # Official pipeline not working...
    # converter.optimizations = [tf.lite.Optimize.OPTIMIZE_FOR_SIZE]
    if quantisize_type == 'weight':
        converter.optimizations = [tf.lite.Optimize.DEFAULT]  # just quantisize weight to 8bit
    elif quantisize_type == 'all':
        # converter.post_training_quantize = True  # True flag is used to convert float model
        # converter.optimizations = [tf.lite.Optimize.DEFAULT]  # just quantisize weight to 8bit
        converter.inference_type = tf.uint8
        # (mean, std) for the input tensor; (0.0, 1.0) leaves values unscaled.
        converter.quantized_input_stats = {input_arrays[0]: (0.0, 1.0)}
        if enable_dummy_quant:
            converter.default_ranges_stats = (0, 6)

    tflite_model = converter.convert()
    tflite_output_path = os.path.join(save_path, output_tflite_file)
    # FIX: use a context manager so the file handle is closed deterministically
    # (the original `open(...).write(...)` leaked the handle).
    with open(tflite_output_path, "wb") as f:
        f.write(tflite_model)


def save_to_graphpb_and_freeze(sess, output_node_names, model_dir,
                               graph_name="frozen_graph.pb"):
    """Write the graph as .pbtxt, checkpoint the session, and freeze both into
    a single binary .pb under model_dir.

    :param sess: live session holding the trained variables.
    :param output_node_names: comma-separated output node names to keep.
    :param model_dir: directory for the .pbtxt, the checkpoint and the frozen .pb.
    :param graph_name: file name of the resulting frozen graph.
    """
    pb_txt_name = graph_name + ".pbtxt"
    pb_txt_path = os.path.join(model_dir, pb_txt_name)
    pb_output_path = os.path.join(model_dir, graph_name)
    tf.train.write_graph(sess.graph_def, model_dir, pb_txt_name, as_text=True)

    # Construct a copy for debug
    saver = tf.train.Saver()
    save_path_ = os.path.join(model_dir, "model-lite.ckpt")
    saver.save(sess, save_path_)

    ckpt_path = tf.train.latest_checkpoint(model_dir)
    print(ckpt_path)
    freeze_graph.freeze_graph(input_graph=pb_txt_path,
                              input_saver='',
                              input_binary=False,
                              input_checkpoint=ckpt_path,
                              output_node_names=output_node_names,
                              restore_op_name='save/restore_all',
                              filename_tensor_name='save/Const:0',
                              output_graph=pb_output_path,
                              clear_devices=True,
                              initializer_nodes='')


def freeze(sess, model_dir, graph_name="frozen_graph.pb"):
    """Freeze the session graph by inlining all variables as constants."""
    pb_output_path = os.path.join(model_dir, graph_name)
    # Turn all the variables into inline constants inside the graph and save it.
    frozen_graph_def = graph_util.convert_variables_to_constants(
        sess, sess.graph_def, ['prediction'])
    tf.train.write_graph(
        frozen_graph_def, model_dir, graph_name, as_text=False)
    tf.logging.info('Saved frozen graph to %s', pb_output_path)


def create_inference_graph(wanted_words, sample_rate, clip_duration_ms,
                           clip_stride_ms, window_size_ms, window_stride_ms,
                           dct_coefficient_count, model_architecture,
                           model_size_info, output_node_name):
    """Creates an audio model with the nodes needed for inference.

    Uses the supplied arguments to create a model, and inserts the input and
    output nodes that are needed to use the graph for inference (wav decoding
    and MFCC extraction are embedded in the graph).

    Args:
      wanted_words: Comma-separated list of the words we're trying to recognize.
      sample_rate: How many samples per second are in the input audio files.
      clip_duration_ms: How many samples to analyze for the audio pattern.
      clip_stride_ms: How often to run recognition. Useful for models with cache.
      window_size_ms: Time slice duration to estimate frequencies from.
      window_stride_ms: How far apart time slices should be.
      dct_coefficient_count: Number of frequency bands to analyze.
      model_architecture: Name of the kind of model to generate.
      model_size_info: Model dimensions, architecture-specific.
      output_node_name: Name given to the final softmax output node.
    """
    words_list = input_data.prepare_words_list(wanted_words.split(','))
    model_settings = models.prepare_model_settings(
        len(words_list), sample_rate, clip_duration_ms, window_size_ms,
        window_stride_ms, dct_coefficient_count)
    runtime_settings = {'clip_stride_ms': clip_stride_ms}

    wav_data_placeholder = tf.placeholder(tf.string, [], name='wav_data')
    decoded_sample_data = contrib_audio.decode_wav(
        wav_data_placeholder,
        desired_channels=1,
        desired_samples=model_settings['desired_samples'],
        name='decoded_sample_data')
    spectrogram = contrib_audio.audio_spectrogram(
        decoded_sample_data.audio,
        window_size=model_settings['window_size_samples'],
        stride=model_settings['window_stride_samples'],
        magnitude_squared=True)
    fingerprint_input = contrib_audio.mfcc(
        spectrogram,
        decoded_sample_data.sample_rate,
        dct_coefficient_count=dct_coefficient_count)
    fingerprint_frequency_size = model_settings['dct_coefficient_count']
    fingerprint_time_size = model_settings['spectrogram_length']
    reshaped_input = tf.reshape(fingerprint_input, [
        -1, fingerprint_time_size * fingerprint_frequency_size
    ])

    logits = models.create_model(
        reshaped_input, model_settings, model_architecture, model_size_info,
        is_training=False, runtime_settings=runtime_settings)

    # Create an output to use for inference.
    tf.nn.softmax(logits, name=output_node_name)


def create_inference_graph_no_mfcc(FLAGS):
    """Build the inference graph with a raw fingerprint placeholder as input
    (MFCC extraction is done outside the graph)."""
    # We want to see all the logging messages for this tutorial.
    tf.logging.set_verbosity(tf.logging.INFO)

    model_settings = models.prepare_model_settings(
        len(input_data.prepare_words_list(FLAGS.wanted_words.split(','))),
        FLAGS.sample_rate, FLAGS.clip_duration_ms, FLAGS.window_size_ms,
        FLAGS.window_stride_ms, FLAGS.dct_coefficient_count)
    # audio_processor = input_data.AudioProcessor(
    #     FLAGS.data_url, FLAGS.data_dir, FLAGS.silence_percentage,
    #     FLAGS.unknown_percentage,
    #     FLAGS.wanted_words.split(','), FLAGS.validation_percentage,
    #     FLAGS.testing_percentage, model_settings)
    fingerprint_size = model_settings['fingerprint_size']
    # label_count = model_settings['label_count']
    # time_shift_samples = int((FLAGS.time_shift_ms * FLAGS.sample_rate) / 1000)

    fingerprint_input = tf.placeholder(
        tf.float32, [None, fingerprint_size], name='fingerprint_input')
    logits = models.create_model(
        fingerprint_input,
        model_settings,
        FLAGS.model_architecture,
        FLAGS.model_size_info,
        is_training=False)
    print(logits)
    tf.nn.softmax(logits, name="prediction")


def convert(sess, model_dir, FLAGS,
            inference_type="float32",
            output_node_name="logits/activation",
            enable_dummy_quant=False,
            triplet="conv-bn-relu",
            ):
    """Build the inference graph, restore the checkpoint, freeze it, and
    (when MFCC is not embedded) convert the frozen graph to TFLite.

    :param sess: session to build/restore the graph in.
    :param model_dir: directory for all produced artifacts.
    :param FLAGS: parsed command-line flags (see argparse setup below).
    :param output_node_name: name of the softmax output node.
    :param enable_dummy_quant: forwarded to the TFLite converter.
    """
    if FLAGS.embed_mfcc:
        create_inference_graph(FLAGS.wanted_words, FLAGS.sample_rate,
                               FLAGS.clip_duration_ms, FLAGS.clip_stride_ms,
                               FLAGS.window_size_ms, FLAGS.window_stride_ms,
                               FLAGS.dct_coefficient_count,
                               FLAGS.model_architecture, FLAGS.model_size_info,
                               output_node_name)
    else:
        create_inference_graph_no_mfcc(FLAGS)
    models.load_variables_from_checkpoint(sess, FLAGS.checkpoint)

    tf.logging.info("Freezing Graph...")
    # NOTE(review): running test_pb after each freeze reports uninitialized
    # variables at varying layers -- the variables may not be saved correctly,
    # yet the produced tflite model works, which is odd.
    save_to_graphpb_and_freeze(sess, model_dir=model_dir,
                               output_node_names=output_node_name)
    # freeze(sess, FLAGS.model_dir)
    tf.logging.info("Freezing complete!")

    if not FLAGS.embed_mfcc:
        ENABLE_DUMMY_QUANT = enable_dummy_quant
        if not os.path.exists(model_dir):
            os.makedirs(model_dir)
        tf.logging.info("Converting to TFLite Models...")
        convert_frozen_graph_to_tflite(input_array='fingerprint_input',
                                       output_array=output_node_name,
                                       save_path=model_dir,
                                       quantisize_type=FLAGS.quantize_type,
                                       enable_dummy_quant=ENABLE_DUMMY_QUANT)
        tf.logging.info("Convert complete! Quantize type: " + str(FLAGS.quantize_type))
    tf.logging.info("Complete!")


def main(_):
    sess = tf.InteractiveSession()
    convert(sess, model_dir=FLAGS.model_dir, output_node_name="prediction", FLAGS=FLAGS)
    # convert2(sess, FLAGS.model_dir, 'uint8', output_node_name='prediction')


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--sample_rate',
        type=int,
        default=16000,
        help='Expected sample rate of the wavs',)
    parser.add_argument(
        '--clip_duration_ms',
        type=int,
        default=1000,
        help='Expected duration in milliseconds of the wavs',)
    parser.add_argument(
        '--clip_stride_ms',
        type=int,
        default=30,
        help='How often to run recognition. Useful for models with cache.',)
    parser.add_argument(
        '--window_size_ms',
        type=float,
        default=30.0,
        help='How long each spectrogram timeslice is',)
    parser.add_argument(
        '--window_stride_ms',
        type=float,
        default=10.0,
        help='How long the stride is between spectrogram timeslices',)
    parser.add_argument(
        '--dct_coefficient_count',
        type=int,
        default=40,
        help='How many bins to use for the MFCC fingerprint',)
    parser.add_argument(
        '--model_dir',
        type=str,
        default='',
        help='The directory where the model is saved.')
    parser.add_argument(
        '--checkpoint',
        type=str,
        default='',
        help='If specified, restore this pretrained model before any training.')
    parser.add_argument(
        '--model_architecture',
        type=str,
        default='dnn',
        help='What model architecture to use')
    parser.add_argument(
        '--model_size_info',
        type=int,
        nargs="+",
        default=[128, 128, 128],
        help='Model dimensions - different for various models')
    parser.add_argument(
        '--wanted_words',
        type=str,
        default='yes,no,up,down,left,right,on,off,stop,go',
        help='Words to use (others will be added to an unknown label)',)
    parser.add_argument(
        '--output_file',
        type=str,
        help='Where to save the frozen graph.')
    parser.add_argument(
        '--embed_mfcc',
        default=False,
        dest='embed_mfcc',
        action='store_true',
        help='Embed mfcc module into graph, and the input will change.')
    parser.add_argument(
        '--quantize_type',
        type=str,
        default=None,
        help='Quantize weight or all operations. Type: weight | all')

    FLAGS, unparsed = parser.parse_known_args()
    tf.app.run(main=main, argv=[sys.argv[0]] + unparsed)
dearbornlavern/botmetrics
app/services/post_message_to_facebook_service.rb
# frozen_string_literal: true

# Posts a single message to the Facebook Messenger Send API ("me/messages")
# on behalf of a page, authenticated with the given page access token.
class PostMessageToFacebookService
  # message - object responding to #user (recipient id) and #text (body).
  # token   - Facebook page access token used to authenticate the call.
  def initialize(message, token)
    @message = message
    @token = token
  end

  # Performs the POST request; returns whatever Facebook#call returns.
  def call
    facebook_client.call('me/messages', 'POST', options)
  end

  private

  attr_reader :message, :token

  # Memoized API client bound to the token.
  def facebook_client
    @_facebook_client ||= Facebook.new(token)
  end

  # Request payload in the shape expected by the Send API.
  def options
    {
      recipient: { id: message.user },
      message: { text: message.text }
    }
  end
end
PavlidisLab/Gemma
gemma-core/src/test/java/ubic/gemma/model/expression/experiment/ExpressionExperimentServiceTest.java
/*
 * The Gemma project
 *
 * Copyright (c) 2006 University of British Columbia
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package ubic.gemma.model.expression.experiment;

import org.apache.commons.lang3.RandomStringUtils;
import org.junit.Before;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import ubic.gemma.core.util.test.BaseSpringContextTest;
import ubic.gemma.model.common.auditAndSecurity.Contact;
import ubic.gemma.model.common.description.DatabaseEntry;
import ubic.gemma.model.common.description.ExternalDatabase;
import ubic.gemma.model.common.quantitationtype.QuantitationType;
import ubic.gemma.model.expression.arrayDesign.ArrayDesign;
import ubic.gemma.model.expression.arrayDesign.ArrayDesignValueObject;
import ubic.gemma.model.expression.bioAssayData.DesignElementDataVector;
import ubic.gemma.model.expression.bioAssayData.RawExpressionDataVector;
import ubic.gemma.model.expression.designElement.CompositeSequence;
import ubic.gemma.model.genome.Taxon;
import ubic.gemma.persistence.service.expression.bioAssayData.RawExpressionDataVectorService;
import ubic.gemma.persistence.service.expression.experiment.ExpressionExperimentService;

import java.util.Collection;
import java.util.HashSet;
import java.util.Map;

import static org.junit.Assert.*;

/**
 * Integration tests for {@link ExpressionExperimentService} lookup and
 * retrieval methods, run against a shared persistent test experiment.
 *
 * @author kkeshav
 * @author pavlidis
 */
public class ExpressionExperimentServiceTest extends BaseSpringContextTest {

    // Random experiment name so reruns don't collide with leftover data.
    private static final String EE_NAME = RandomStringUtils.randomAlphanumeric( 20 );
    @Autowired
    private ExpressionExperimentService expressionExperimentService;
    @Autowired
    private RawExpressionDataVectorService rawExpressionDataVectorService;
    private ExpressionExperiment ee = null;
    private ExternalDatabase ed;
    private String accession;
    // Guards the fixture so the experiment is created only once per test class
    // instance, even though @Before runs before every test.
    private boolean persisted = false;

    @Before
    public void setup() {
        if ( !persisted ) {
            ee = this.getTestPersistentCompleteExpressionExperiment( false );
            ee.setName( ExpressionExperimentServiceTest.EE_NAME );

            DatabaseEntry accessionEntry = this.getTestPersistentDatabaseEntry();
            accession = accessionEntry.getAccession();
            ed = accessionEntry.getExternalDatabase();
            ee.setAccession( accessionEntry );

            Contact c = this.getTestPersistentContact();
            ee.setOwner( c );

            expressionExperimentService.update( ee );
            ee = expressionExperimentService.thaw( ee );
            persisted = true;
        } else {
            log.debug( "Skipping making new ee for test" );
        }
    }

    @Test
    public final void testFindByAccession() {
        // Build a fresh entry with the same accession/database to prove lookup
        // does not rely on entity identity.
        DatabaseEntry accessionEntry = DatabaseEntry.Factory.newInstance( ed );
        accessionEntry.setAccession( accession );
        Collection<ExpressionExperiment> expressionExperiment = expressionExperimentService
                .findByAccession( accessionEntry );
        assertTrue( expressionExperiment.size() > 0 );
    }

    @Test
    public void testFindByFactor() {
        ExperimentalDesign design = ee.getExperimentalDesign();
        assertNotNull( design.getExperimentalFactors() );
        ExperimentalFactor ef = design.getExperimentalFactors().iterator().next();
        assertNotNull( ef );
        ExpressionExperiment eeFound = expressionExperimentService.findByFactor( ef );
        assertNotNull( eeFound );
        assertEquals( eeFound.getId(), ee.getId() );
    }

    @Test
    public void testFindByFactorValue() {
        ExperimentalDesign design = ee.getExperimentalDesign();
        assertNotNull( design.getExperimentalFactors() );
        ExperimentalFactor ef = design.getExperimentalFactors().iterator().next();
        FactorValue fv = ef.getFactorValues().iterator().next();
        ExpressionExperiment eeFound = expressionExperimentService.findByFactorValue( fv );
        assertNotNull( eeFound );
        assertEquals( eeFound.getId(), ee.getId() );
    }

    @Test
    public void testFindByFactorValueId() {
        ExperimentalDesign design = ee.getExperimentalDesign();
        assertNotNull( design.getExperimentalFactors() );
        ExperimentalFactor ef = design.getExperimentalFactors().iterator().next();
        FactorValue fv = ef.getFactorValues().iterator().next();
        assertNotNull( fv.getId() );
        ExpressionExperiment eeFound = expressionExperimentService.findByFactorValue( fv.getId() );
        assertNotNull( eeFound );
        assertEquals( eeFound.getId(), ee.getId() );
    }

    @Test
    public void testLoadAllValueObjects() {
        Collection<ExpressionExperimentValueObject> vos = expressionExperimentService.loadAllValueObjects();
        assertNotNull( vos );
        assertTrue(vos.size() > 0);
    }

    @Test
    public void testGetByTaxon() {
        ExpressionExperimentService eeService = this.getBean( ExpressionExperimentService.class );
        Taxon taxon = taxonService.findByCommonName( "mouse" );
        Collection<ExpressionExperiment> list = expressionExperimentService.findByTaxon( taxon );
        assertNotNull( list );
        Taxon checkTaxon = eeService.getTaxon( list.iterator().next() );
        assertEquals( taxon, checkTaxon );
    }

    @Test
    public final void testGetDesignElementDataVectorsByQt() {
        QuantitationType quantitationType = ee.getRawExpressionDataVectors().iterator().next().getQuantitationType();
        Collection<QuantitationType> quantitationTypes = new HashSet<>();
        quantitationTypes.add( quantitationType );
        Collection<RawExpressionDataVector> vectors = rawExpressionDataVectorService.find( quantitationTypes );
        // NOTE(review): 12 matches the fixture produced by
        // getTestPersistentCompleteExpressionExperiment -- confirm if fixture changes.
        assertEquals( 12, vectors.size() );
    }

    @Test
    public final void testGetPerTaxonCount() {
        Map<Taxon, Long> counts = expressionExperimentService.getPerTaxonCount();
        long oldCount = counts.get( taxonService.findByCommonName( "mouse" ) );
        assertNotNull( counts );
        // Removing the shared fixture experiment should decrement the count by one.
        // NOTE(review): this test destroys the fixture; it relies on running last
        // or on no later test needing `ee` -- fragile ordering assumption.
        expressionExperimentService.remove( ee );
        counts = expressionExperimentService.getPerTaxonCount();
        assertEquals( oldCount - 1, counts.get( taxonService.findByCommonName( "mouse" ) ).longValue() );
    }

    @Test
    public final void testGetQuantitationTypes() {
        Collection<QuantitationType> types = expressionExperimentService.getQuantitationTypes( ee );
        assertEquals( 2, types.size() );
    }

    @Test
    public final void testGetQuantitationTypesForArrayDesign() {
        ArrayDesign ad = ee.getRawExpressionDataVectors().iterator().next().getDesignElement().getArrayDesign();
        Collection<QuantitationType> types = expressionExperimentService.getQuantitationTypes( ee, ad );
        assertEquals( 2, types.size() );
    }

    @Test
    public final void testGetRawExpressionDataVectors() {
        // Uses its own experiment so it is independent of the shared fixture.
        ExpressionExperiment eel = this.getTestPersistentCompleteExpressionExperiment( false );
        Collection<CompositeSequence> designElements = new HashSet<>();
        QuantitationType quantitationType = eel.getRawExpressionDataVectors().iterator().next().getQuantitationType();
        Collection<RawExpressionDataVector> allv = eel.getRawExpressionDataVectors();
        assertNotNull( quantitationType );
        assertTrue( allv.size() > 1 );

        // Pick exactly two design elements to query by.
        for ( RawExpressionDataVector anAllv : allv ) {
            CompositeSequence designElement = anAllv.getDesignElement();
            assertNotNull( designElement );
            designElements.add( designElement );
            if ( designElements.size() == 2 )
                break;
        }

        assertEquals( 2, designElements.size() );
        Collection<? extends DesignElementDataVector> vectors = rawExpressionDataVectorService
                .find( designElements, quantitationType );
        assertEquals( 2, vectors.size() );
    }

    @Test
    public final void testLoadValueObjects() {
        Collection<Long> ids = new HashSet<>();
        Long id = ee.getId();
        ids.add( id );
        Collection<ExpressionExperimentValueObject> list = expressionExperimentService.loadValueObjects( ids, false );
        assertNotNull( list );
        assertEquals( 1, list.size() );
    }
}
BME-MIT-IET/iet-hf2021-csapat1234
server/src/main/java/io/github/basilapi/basil/server/BasilCli.java
/*
 * Copyright (c) 2021. <NAME> and <NAME>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.github.basilapi.basil.server;

import java.io.PrintStream;

import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

/**
 * Command-line front end for the BASIL server: understands -h/--help and
 * -p/--port and exposes the chosen port via {@link #getPort()}.
 */
public class BasilCli {

	private int port = 8080; // default listen port
	private String[] args;
	private static PrintStream O = System.out;
	private static PrintStream E = System.err;

	/** @return the port selected on the command line (default 8080). */
	public int getPort() {
		return port;
	}

	private Options options = new Options();

	public BasilCli(String[] args) {
		this.args = args;
		options.addOption("h", "help", false, "Show this help.");
		options.addOption("p", "port", true, "Set the port the server will listen to (defaults to 8080).");
	}

	/**
	 * Prints help.
	 */
	private void help() {
		String syntax = "java [java-opts] -jar [jarfile] ";
		new HelpFormatter().printHelp(syntax, options);
		System.exit(0);
	}

	/**
	 * Parses command line arguments and acts upon them.
	 */
	public void parse() {
		CommandLineParser parser = new BasicParser();
		CommandLine cmd = null;
		try {
			cmd = parser.parse(options, args);
			if (cmd.hasOption('h'))
				help();
			if (cmd.hasOption('p')) {
				port = Integer.parseInt(cmd.getOptionValue('p'));
				// FIX: was `port < 0 && port > 65535`, which can never be true,
				// so any out-of-range port was silently accepted.
				if (port < 0 || port > 65535) {
					O.println("Invalid port number " + port + ". Must be in the range [0,65535].");
					System.exit(100);
				}
			}
		} catch (ParseException e) {
			// FIX: corrected "comand" typo in the user-facing error message.
			E.println("Failed to parse command line properties");
			e.printStackTrace();
			help();
		}
	}
}
AarushiSingh09/libmodulemd
modulemd/v1/tests/test-modulemd-component.c
/* * This file is part of libmodulemd * Copyright (C) 2017-2018 <NAME> * * Fedora-License-Identifier: MIT * SPDX-2.0-License-Identifier: MIT * SPDX-3.0-License-Identifier: MIT * * This program is free software. * For more information on the license, see COPYING. * For more information on free software, see <https://www.gnu.org/philosophy/free-sw.en.html>. */ #define MMD_DISABLE_DEPRECATION_WARNINGS 1 #include "modulemd.h" #include "modulemd-component.h" #include <glib.h> #include <locale.h> typedef struct _ComponentFixture { ModulemdComponent *component; } ComponentFixture; static void modulemd_component_set_up (ComponentFixture *fixture, gconstpointer user_data) { fixture->component = modulemd_component_new (); } static void modulemd_component_tear_down (ComponentFixture *fixture, gconstpointer user_data) { g_object_unref (fixture->component); } static void modulemd_component_test_create (ComponentFixture *fixture, gconstpointer user_data) { g_assert_true (MODULEMD_IS_COMPONENT (fixture->component)); } int main (int argc, char *argv[]) { setlocale (LC_ALL, ""); g_test_init (&argc, &argv, NULL); g_test_bug_base ("https://bugzilla.redhat.com/show_bug.cgi?id="); // Define the tests. g_test_add ("/modulemd/component/test_create", ComponentFixture, NULL, modulemd_component_set_up, modulemd_component_test_create, modulemd_component_tear_down); return g_test_run (); }
miw-upm/betca-tpv-core
src/main/java/es/upm/miw/betca_tpv_core/domain/persistence/VoucherPersistence.java
package es.upm.miw.betca_tpv_core.domain.persistence;

import es.upm.miw.betca_tpv_core.domain.model.Voucher;
import org.springframework.stereotype.Repository;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

import java.time.LocalDateTime;

/**
 * Reactive persistence port for {@link Voucher} entities, implemented by the
 * infrastructure layer. Vouchers are addressed by their reference string.
 */
@Repository
public interface VoucherPersistence {

    /** Persists a new voucher; emits the stored entity. */
    Mono<Voucher> create(Voucher voucher);

    /** Emits every stored voucher. */
    Flux< Voucher > readAll();

    /** Emits the voucher with the given reference, or empty if none. */
    Mono<Voucher> readByReference(String reference);

    /** Marks the referenced voucher as consumed; emits the updated entity. */
    Mono<Voucher> consume(String reference);

    /** Emits vouchers not yet consumed within the [from, to] interval. */
    Flux<Voucher> getUnconsumedVouchersBetweenDates(LocalDateTime from, LocalDateTime to);
}
difosschan/bamboo
libs/protocol/protocolif.cpp
#include "bamboo/protocol/protocolif.hpp"

namespace bamboo {
namespace protocol {

// Out-of-line, intentionally empty constructor/destructor definitions for the
// protocol interface declared in protocolif.hpp. Defining them in this .cpp
// keeps the class's generated code in one translation unit.
// NOTE(review): presumably ProtocolIf is an abstract base with a virtual
// destructor -- confirm against the header.
ProtocolIf::ProtocolIf() {}

ProtocolIf::~ProtocolIf() {}

}
}
jimstack/super-csv-annotation
src/test/java/com/github/mygreen/supercsv/builder/spring/UserListener.java
package com.github.mygreen.supercsv.builder.spring;

import java.net.URL;
import java.util.Map;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import com.github.mygreen.supercsv.annotation.CsvPostRead;
import com.github.mygreen.supercsv.annotation.CsvPreWrite;
import com.github.mygreen.supercsv.validation.CsvBindingErrors;
import com.github.mygreen.supercsv.validation.CsvField;
import com.github.mygreen.supercsv.validation.CsvFieldValidator;
import com.github.mygreen.supercsv.validation.ValidationContext;

/**
 * Listener class for {@link UserCsv} records. Validates the "homepage" field
 * both before writing and after reading a record.
 *
 * @since 2.0
 * @author T.TSUCHIE
 *
 */
@Component
public class UserListener {

    // FIX: renamed from the misspelled 'userSerivce' (private field; Spring
    // autowires by type, so callers are unaffected).
    @Autowired
    private UserService userService;

    /**
     * Validates the "homepage" field: an empty value is allowed; a non-empty
     * URL whose protocol is rejected by {@link UserService} records a field
     * error under the key "fieldError.homepage.supportedProtocol".
     */
    @CsvPreWrite
    @CsvPostRead
    public void validate(final UserCsv record, final ValidationContext<UserCsv> validationContext,
            final CsvBindingErrors bindingErrors) {

        final CsvField<URL> homepageField = new CsvField<>(validationContext, record, "homepage");
        homepageField.add(new CsvFieldValidator<URL>() {

            @Override
            public void validate(final CsvBindingErrors bindingErrors, final CsvField<URL> field) {
                if(field.isEmpty()) {
                    return; // emptiness is handled by other constraints, if any
                }

                if(!userService.isValidProtocol(field.getValue())) {
                    Map<String, Object> vars = createMessageVariables(field);
                    vars.put("protocol", field.getValue().getProtocol());

                    bindingErrors.rejectValue(field.getName(), field.getType(),
                            "fieldError.homepage.supportedProtocol", vars);
                }
            }
        })
        .validate(bindingErrors);
    }
}
dsteelma-umd/avalon
app/javascript/components/collections/CollectionsFilterNoResults.js
<reponame>dsteelma-umd/avalon<gh_stars>10-100 /* * Copyright 2011-2020, The Trustees of Indiana University and Northwestern * University. Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed * under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * --- END LICENSE_HEADER BLOCK --- */ import React from 'react'; const CollectionsFilterNoResults = () => ( <p className="alert alert-info text-left"> <i className="glyphicon glyphicon-exclamation-sign"></i> No results returned for your search </p> ); export default CollectionsFilterNoResults;
Sorgas/Tearfall
core/src/stonering/generators/worldgen/generators/elevation/ValleyGenerator.java
package stonering.generators.worldgen.generators.elevation;

import com.badlogic.gdx.math.Vector2;
import stonering.generators.worldgen.WorldGenConfig;
import stonering.generators.worldgen.WorldGenContainer;
import stonering.generators.worldgen.generators.WorldGenerator;
import stonering.entity.world.Edge;
import stonering.entity.world.Mountain;
import stonering.util.geometry.Position;
import stonering.util.geometry.Vector;

import java.util.Iterator;
import java.util.List;
import java.util.Random;

/**
 * @author <NAME> on 03.03.2017.
 * <p>
 * Generates deep cavities for oceans and seas. valleys are generated where plates move away from another
 */
public class ValleyGenerator extends WorldGenerator {
    private Random random;
    private List<Edge> edges; // NOTE(review): never assigned -- the container.getEdges() line in set() is commented out, so run() will NPE; confirm intent.
    private float plateSpeedToDepthModifier; // scales plate speed into valley depth
    private float topOffsetModifier;
    private int topsDensity; // distance between generated valley tops along an edge
    private float worldBorderDepth; // base depth used for edges on the world border

    @Override
    public void set(WorldGenContainer container) {
        random = container.random;
        plateSpeedToDepthModifier = config.plateSpeedToDepthModifier;
        topOffsetModifier = config.topOffsetModifier;
        topsDensity = config.valleysTopsDensity;
        worldBorderDepth = config.worldBorderDepth;
//        edges = container.getEdges();
    }

    @Override
    public void run() {
        System.out.println("generating valleys");
        // Each edge gets its valleys rebuilt from scratch.
        for (Edge edge : edges) {
            edge.getValleys().clear();
            configureEdge(edge);
            createValleyDepths(edge);
            applyOffsetVectors(edge);
            createValley(edge);
        }
    }

    // Places valley tops (as Mountain objects with negative peaks) evenly along
    // the edge, after extending the edge by 50% beyond both endpoints.
    private void createTops(Edge edge) {
        Position pos1 = edge.getPoint1().clone();
        Position pos2 = edge.getPoint2().clone();
        int length = (int) Math.round(Math.sqrt(Math.pow(pos1.x - pos2.x, 2) + Math.pow(pos1.y - pos2.y, 2)));
        // Stretch the segment symmetrically so valleys extend past the edge ends.
        float extentX = (pos1.x - pos2.x) * 0.5f;
        float extentY = (pos1.y - pos2.y) * 0.5f;
        pos1.x = (int) (pos1.x + extentX);
        pos1.y = (int) (pos1.y + extentY);
        pos2.x = (int) (pos2.x - extentX);
        pos2.y = (int) (pos2.y - extentY);
        int num = length / topsDensity;
        if (num > 0) {
            // Spacing between consecutive tops along each axis.
            float xDensity = (pos2.x - pos1.x) / (float) num;
            float yDensity = (pos2.y - pos1.y) / (float) num;
            int xStart = pos1.x;
            int yStart = pos1.y;
            // Start half a step in so tops are centered in their slots.
            float x = xStart + xDensity / 2;
            float y = yStart + yDensity / 2;
            for (int i = 0; i < num; i++) {
                Mountain valley = new Mountain();
                valley.setTop(new Position(Math.round(x), Math.round(y), 0));
                edge.addValley(valley);
                x += xDensity;
                y += yDensity;
            }
        }
    }

    // Decides whether an edge gets valleys: always on world borders; otherwise
    // only when both plates move away from the edge (both dynamics negative).
    private void configureEdge(Edge edge) {
        if (edge.isWorldBorder()) {
            createTops(edge);
            float maxDepth = worldBorderDepth;
            maxDepth *= plateSpeedToDepthModifier;
            edge.setPikeHeight(maxDepth);
        } else {
            if (edge.getDynamics().get(0) < 0 && edge.getDynamics().get(1) < 0) { //if both plates move from the edge
                createTops(edge);
                int speed1 = Math.min(edge.getDynamics().get(0), edge.getDynamics().get(1));
                int speed2 = Math.max(edge.getDynamics().get(0), edge.getDynamics().get(1));
                // Depth grows with the combined (negative) separation speed.
                int maxDepth = speed1 + speed2;
                maxDepth *= plateSpeedToDepthModifier;
                edge.setPikeHeight(maxDepth);
                int topOffset = speed1;
                topOffset -= speed2;
                Vector edgeVector = new Vector(edge.getPoint1().x, edge.getPoint1().y, edge.getPoint2().x, edge.getPoint2().y);
                // NOTE(review): mixes an angle term with coordinate terms in the
                // Vector2 components -- looks suspicious; confirm against Vector API.
                edge.setOffsetVector(new Vector2((float) (edgeVector.getAngle() + 180 * Math.signum(speed1 - speed2) - edgeVector.getX()),
                        topOffset - edgeVector.getY()));
            }
        }
    }

    // Assigns each top a negative z following a half-sine profile along the
    // edge (deepest in the middle), with a small random perturbation.
    private void createValleyDepths(Edge edge) {
        List<Mountain> valleys = edge.getValleys();
        for (int i = 0; i < valleys.size(); i++) {
            double radians = Math.PI * i / edge.getValleys().size();
            valleys.get(i).getTop().z = (int) Math.round((edge.getPikeHeight() + 8) * Math.sin(radians) - random.nextInt(3) - 11);
        }
    }

    // Currently disabled: would shift valley tops along the edge's offset vector.
    private void applyOffsetVectors(Edge edge) {
//        Vector2 edgeOffsetVector = edge.getOffsetVector();
//        if (edgeOffsetVector != null) {
//            Position endPoint = edgeOffsetVector.getEndPoint();
//            for (int i = 0; i < edge.getValleys().size(); i++) {
//                Mountain valley = edge.getValleys().get(i);
//                int xRand = random.nextInt(2) - 1;
//                int yRand = random.nextInt(2) - 1;
//                Vector topOffsetVector = new Vector(edgeOffsetVector.getStartPoint().x, edgeOffsetVector.getStartPoint().y, endPoint.x + xRand, endPoint.y + yRand);
//                if (Double.isNaN(topOffsetVector.getAngle())) {
//                    topOffsetVector.setAngle(edgeOffsetVector.getAngle());
//                }
//                double radians = Math.PI * i / edge.getValleys().size();
//                topOffsetVector.setLength((topOffsetVector.getLength() * Math.sin(radians) * topOffsetModifier));
//                valley.setTop(valley.getTop().add(topOffsetVector));
//            }
//        }
    }

    // Gives each valley an irregular polygonal footprint: 6-7 corners at
    // roughly even angles (with random jitter) around the top, radius scaled
    // by depth and tripled on world borders.
    private void createValley(Edge edge) {
        float radiusModifier = 1;
        if (edge.isWorldBorder()) {
            radiusModifier = 3;
        }
        List<Mountain> valleys = edge.getValleys();
        for (Iterator<Mountain> iterator = valleys.iterator(); iterator.hasNext(); ) {
            Mountain valley = iterator.next();
            int slopeCount = random.nextInt(2) + 6;
            int[] slopeAngles = new int[slopeCount];
            int spinAngle = random.nextInt(360);
            for (int i = 0; i < slopeCount; i++) {
                slopeAngles[i] = random.nextInt(30) - 15 + 360 / slopeCount * i;
                slopeAngles[i] += spinAngle;
                slopeAngles[i] %= 360;
            }
            for (int i = 0; i < slopeCount; i++) {
                // NOTE(review): top.z is negative here, so radius (and the
                // vector length) is negative -- confirm Vector handles that.
                int radius = valley.getTop().z * 2;
                int offset = radius / 2 > 0 ? random.nextInt(radius / 2) : 1;
                Vector vector = new Vector(valley.getTop().x, valley.getTop().y, (float) slopeAngles[i], radius * radiusModifier + offset);
                valley.addCorner(vector.getEndPoint());
            }
        }
    }
}
yaoxuanw007/forfun
leetcode/python/populatingNextRightPointersInEachNodeII.py
# https://oj.leetcode.com/problems/populating-next-right-pointers-in-each-node-ii/

from collections import deque


# Definition for a binary tree node
class TreeNode:
    """Binary tree node carrying an extra `next` pointer to its right neighbor."""

    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None
        self.next = None


class Solution:
    # @param root, a tree node
    # @return nothing
    def connect(self, root):
        """Link every node's `next` to the node on its right in the same level
        (None for the rightmost node of a level). Works for any binary tree,
        not just perfect ones.

        Level-order traversal (BFS). FIX: uses a deque so each dequeue is O(1)
        -- the original list.pop(0) is O(n) per call, making the whole pass
        O(n^2). Overall O(n) time, O(max level width) extra space.
        """
        if root is None:
            return
        queue = deque([root])
        while queue:
            last = None  # previously dequeued node of the current level
            for _ in range(len(queue)):  # exactly one level per outer pass
                curr = queue.popleft()
                if last is not None:
                    last.next = curr
                last = curr
                if curr.left is not None:
                    queue.append(curr.left)
                if curr.right is not None:
                    queue.append(curr.right)
            # `last.next` stays None: TreeNode initializes next to None.
guidotack/gecode
gecode/third-party/boost/config/stdlib/libstdcpp3.hpp
// (C) Copyright <NAME> 2001. // (C) Copyright <NAME> 2001. // Use, modification and distribution are subject to the // Boost Software License, Version 1.0. (See accompanying file // LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) // See http://www.boost.org for most recent version. // config for libstdc++ v3 // not much to go in here: #define GECODE_BOOST_GNU_STDLIB 1 #ifdef __GLIBCXX__ #define GECODE_BOOST_STDLIB "GNU libstdc++ version " GECODE_BOOST_STRINGIZE(__GLIBCXX__) #else #define GECODE_BOOST_STDLIB "GNU libstdc++ version " GECODE_BOOST_STRINGIZE(__GLIBCPP__) #endif #if !defined(_GLIBCPP_USE_WCHAR_T) && !defined(_GLIBCXX_USE_WCHAR_T) # define GECODE_BOOST_NO_CWCHAR # define GECODE_BOOST_NO_CWCTYPE # define GECODE_BOOST_NO_STD_WSTRING # define GECODE_BOOST_NO_STD_WSTREAMBUF #endif #if defined(__osf__) && !defined(_REENTRANT) \ && ( defined(_GLIBCXX_HAVE_GTHR_DEFAULT) || defined(_GLIBCPP_HAVE_GTHR_DEFAULT) ) // GCC 3 on Tru64 forces the definition of _REENTRANT when any std lib header // file is included, therefore for consistency we define it here as well. # define _REENTRANT #endif #ifdef __GLIBCXX__ // gcc 3.4 and greater: # if defined(_GLIBCXX_HAVE_GTHR_DEFAULT) \ || defined(_GLIBCXX__PTHREADS) \ || defined(_GLIBCXX_HAS_GTHREADS) \ || defined(_WIN32) // // If the std lib has thread support turned on, then turn it on in Boost // as well. We do this because some gcc-3.4 std lib headers define _REENTANT // while others do not... 
// # define GECODE_BOOST_HAS_THREADS # else # define GECODE_BOOST_DISABLE_THREADS # endif #elif defined(__GLIBCPP__) \ && !defined(_GLIBCPP_HAVE_GTHR_DEFAULT) \ && !defined(_GLIBCPP__PTHREADS) // disable thread support if the std lib was built single threaded: # define GECODE_BOOST_DISABLE_THREADS #endif #if (defined(linux) || defined(__linux) || defined(__linux__)) && defined(__arm__) && defined(_GLIBCPP_HAVE_GTHR_DEFAULT) // linux on arm apparently doesn't define _REENTRANT // so just turn on threading support whenever the std lib is thread safe: # define GECODE_BOOST_HAS_THREADS #endif #if !defined(_GLIBCPP_USE_LONG_LONG) \ && !defined(_GLIBCXX_USE_LONG_LONG)\ && defined(GECODE_BOOST_HAS_LONG_LONG) // May have been set by compiler/*.hpp, but "long long" without library // support is useless. # undef GECODE_BOOST_HAS_LONG_LONG #endif // Apple doesn't seem to reliably defined a *unix* macro #if !defined(CYGWIN) && ( defined(__unix__) \ || defined(__unix) \ || defined(unix) \ || defined(__APPLE__) \ || defined(__APPLE) \ || defined(APPLE)) # include <unistd.h> #endif #if defined(__GLIBCXX__) || (defined(__GLIBCPP__) && __GLIBCPP__>=20020514) // GCC >= 3.1.0 # define GECODE_BOOST_STD_EXTENSION_NAMESPACE __gnu_cxx # define GECODE_BOOST_HAS_SLIST # define GECODE_BOOST_HAS_HASH # define GECODE_BOOST_SLIST_HEADER <ext/slist> # if !defined(__GNUC__) || __GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 3) # define GECODE_BOOST_HASH_SET_HEADER <ext/hash_set> # define GECODE_BOOST_HASH_MAP_HEADER <ext/hash_map> # else # define GECODE_BOOST_HASH_SET_HEADER <backward/hash_set> # define GECODE_BOOST_HASH_MAP_HEADER <backward/hash_map> # endif #endif // stdlibc++ C++0x support is detected via __GNUC__, __GNUC_MINOR__, and possibly // __GNUC_PATCHLEVEL__ at the suggestion of <NAME>, one of the stdlibc++ // developers. 
He also commented: // // "I'm not sure how useful __GLIBCXX__ is for your purposes, for instance in // GCC 4.2.4 it is set to 20080519 but in GCC 4.3.0 it is set to 20080305. // Although 4.3.0 was released earlier than 4.2.4, it has better C++0x support // than any release in the 4.2 series." // // Another resource for understanding stdlibc++ features is: // http://gcc.gnu.org/onlinedocs/libstdc++/manual/status.html#manual.intro.status.standard.200x // C++0x headers in GCC 4.3.0 and later // #if __GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 3) || !defined(__GXX_EXPERIMENTAL_CXX0X__) # define GECODE_BOOST_NO_0X_HDR_ARRAY # define GECODE_BOOST_NO_0X_HDR_REGEX # define GECODE_BOOST_NO_0X_HDR_TUPLE # define GECODE_BOOST_NO_STD_UNORDERED // deprecated; see following # define GECODE_BOOST_NO_0X_HDR_UNORDERED_MAP # define GECODE_BOOST_NO_0X_HDR_UNORDERED_SET #endif // C++0x headers in GCC 4.4.0 and later // #if __GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 4) || !defined(__GXX_EXPERIMENTAL_CXX0X__) # define GECODE_BOOST_NO_0X_HDR_CHRONO # define GECODE_BOOST_NO_0X_HDR_CONDITION_VARIABLE # define GECODE_BOOST_NO_0X_HDR_FORWARD_LIST # define GECODE_BOOST_NO_0X_HDR_INITIALIZER_LIST # define GECODE_BOOST_NO_0X_HDR_MUTEX # define GECODE_BOOST_NO_0X_HDR_RATIO # define GECODE_BOOST_NO_0X_HDR_SYSTEM_ERROR #else # define GECODE_BOOST_HAS_TR1_COMPLEX_INVERSE_TRIG # define GECODE_BOOST_HAS_TR1_COMPLEX_OVERLOADS #endif #if (!defined(_GLIBCXX_HAS_GTHREADS) || !defined(_GLIBCXX_USE_C99_STDINT_TR1)) && (!defined(GECODE_BOOST_NO_0X_HDR_CONDITION_VARIABLE) || !defined(GECODE_BOOST_NO_0X_HDR_MUTEX)) # define GECODE_BOOST_NO_0X_HDR_CONDITION_VARIABLE # define GECODE_BOOST_NO_0X_HDR_MUTEX #endif // C++0x features in GCC 4.5.0 and later // #if __GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 5) || !defined(__GXX_EXPERIMENTAL_CXX0X__) # define GECODE_BOOST_NO_NUMERIC_LIMITS_LOWEST # define GECODE_BOOST_NO_0X_HDR_FUTURE # define GECODE_BOOST_NO_0X_HDR_RANDOM #endif // C++0x features 
in GCC 4.5.0 and later // #if __GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 6) || !defined(__GXX_EXPERIMENTAL_CXX0X__) # define GECODE_BOOST_NO_0X_HDR_TYPEINDEX #endif // C++0x headers not yet (fully!) implemented // # define GECODE_BOOST_NO_0X_HDR_THREAD # define GECODE_BOOST_NO_0X_HDR_TYPE_TRAITS # define GECODE_BOOST_NO_0X_HDR_CODECVT // --- end ---
psingarakannan/home-acc-mngmt
acc-mngmt/acc-mngmt-service/src/main/java/org/pradeep/acc/mngmt/entities/Account.java
<reponame>psingarakannan/home-acc-mngmt
package org.pradeep.acc.mngmt.entities;

import lombok.Getter;
import lombok.Setter;
import org.pradeep.platform.enums.AccountCategory;
import org.pradeep.platform.enums.AccountType;
import org.pradeep.platform.enums.TxnType;
import org.pradeep.platform.hibernate.AuditedEntity;

import javax.persistence.*;

/**
 * JPA entity mapped to the {@code account} table.
 *
 * <p>Inherits auditing columns from {@link AuditedEntity}; Lombok generates
 * the accessors via {@code @Getter}/{@code @Setter} on each field.</p>
 *
 * @author psingarakannan on 28/12/18
 **/
@Entity
@Table(name="account")
public class Account extends AuditedEntity {

    private static final long serialVersionUID = 1L;

    // Surrogate primary key, generated by the database (IDENTITY strategy).
    @Getter @Setter
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @Id
    @Column(name="id")
    private Long id;

    // Free-form human-readable description of the account.
    @Getter @Setter
    @Column(name="description")
    private String description;

    // Stored as the enum constant's name in the "name" column.
    // NOTE(review): the field is called "name" but holds an AccountCategory —
    // confirm against the schema that the "name" column really carries the
    // category value.
    @Getter @Setter
    @Column(name="name")
    @Enumerated(EnumType.STRING)
    private AccountCategory name;

    // Account type, stored as the enum constant's name in the "type" column.
    @Column(name="type")
    @Enumerated(EnumType.STRING)
    @Getter @Setter
    private AccountType accountType;

    // Current account balance. Units (e.g. minor currency units) are not
    // evident from this entity — TODO confirm with callers.
    @Getter @Setter
    @Column(name="balance")
    private Long balance;
}
jasnow/mira00
app/helpers/datapackage_helper.rb
<reponame>jasnow/mira00
# Rails-style view helper for datapackage pages.
# Intentionally empty: no datapackage-specific view helpers are defined yet.
module DatapackageHelper
end
romualdo-bar/barbacoa-server-lib
src/mt_server.cpp
<reponame>romualdo-bar/barbacoa-server-lib<gh_stars>1-10 #include <server_lib/mt_server.h> #include <server_lib/logging_helper.h> #include <server_lib/asserts.h> #include <server_lib/emergency_helper.h> #include <server_lib/platform_config.h> #include <mutex> #include <condition_variable> #include <thread> #include <cstring> #include <server_clib/server.h> #if defined(SERVER_LIB_PLATFORM_LINUX) #include <sys/syscall.h> #include <signal.h> #include <pthread.h> #include <alloca.h> #endif namespace server_lib { namespace impl { using sig_callback_ext_type = std::function<void(int)>; static sig_callback_ext_type g_sig_callback = nullptr; void sig_callback_wrapper(int signo) { if (g_sig_callback) g_sig_callback(signo); } struct callback_data { using user_signal = mt_server::user_signal; union { int signo = 0; user_signal signal; } data; enum class data_type { null, exit, usersignal }; data_type type = data_type::null; static callback_data make_exit_data(const int signo); static callback_data make_config_data(const user_signal); }; callback_data callback_data::make_exit_data(const int signo) { callback_data r; r.type = data_type::exit; r.data.signo = signo; return r; } callback_data callback_data::make_config_data(const user_signal sig) { callback_data r; r.type = data_type::usersignal; r.data.signal = sig; return r; } } // namespace impl class mt_server_impl { public: static char crash_dump_file_path[PATH_MAX]; mt_server_impl() = default; void init(bool daemon) { SRV_ASSERT(event_loop::is_main_thread(), "Only for main thread allowed"); impl::g_sig_callback = [this](int signo) { this->process_signal(signo); }; server_init_default_signals_should_register(); if (daemon) { server_mt_init_daemon(nullptr, impl::sig_callback_wrapper); } else { server_mt_init(nullptr, impl::sig_callback_wrapper); } } using exit_callback_type = mt_server::exit_callback_type; using fail_callback_type = mt_server::fail_callback_type; using control_callback_type = mt_server::control_callback_type; 
int run(main_loop& e, exit_callback_type exit_callback, fail_callback_type fail_callback, control_callback_type control_callback); void start(main_loop& e, exit_callback_type exit_callback, fail_callback_type fail_callback, control_callback_type control_callback); void stop(main_loop& e) { SRV_ASSERT(e.is_main(), "Only main loop accepted"); e.stop(); } void wait_started(main_loop& e, std::function<void(void)>&& start_notify) { SRV_ASSERT(e.is_main(), "Only main loop accepted"); while (!e.is_running()) { std::unique_lock<std::mutex> lck(_wait_started_condition_lock); _wait_started_condition.wait(lck); } start_notify(); } protected: void run_impl(main_loop& e, exit_callback_type exit_callback, fail_callback_type fail_callback, control_callback_type control_callback); void process_exit() { auto process_exit_ = [this]() { if (_exit_callback && _e) { auto exit_callback = _exit_callback; _exit_callback = nullptr; if (event_loop::is_main_thread()) { exit_callback(); } else { _e->post([exit_callback]() { exit_callback(); }); } } }; if (event_loop::is_main_thread()) process_exit_(); else { std::unique_lock<std::mutex> lck(_process_exit_config_lock); process_exit_(); } } void process_signal(int signal) { #ifndef NDEBUG fprintf(stderr, "Got signal %d\n", signal); #endif if (server_is_fail_signal(signal)) { #if !defined(STACKTRACE_DISABLED) char crash_dump_file_path_[PATH_MAX] = { 0 }; if (crash_dump_file_path[0]) { // copy to alternative stack area std::strncpy(crash_dump_file_path_, crash_dump_file_path, PATH_MAX - 1); } if (crash_dump_file_path_[0]) { emergency_helper::save_dump(crash_dump_file_path_); // it maybe fail here but dump has been saved already this->process_fail(crash_dump_file_path_); } else #endif //!STACKTRACE_DISABLED this->process_fail(nullptr); } else { #if defined(SERVER_LIB_PLATFORM_LINUX) using user_signal = mt_server::user_signal; switch (signal) { case SIGUSR1: _callback_data = impl::callback_data::make_config_data(user_signal::USR1); break; case 
SIGUSR2: _callback_data = impl::callback_data::make_config_data(user_signal::USR2); break; default: #else { #endif _callback_data = impl::callback_data::make_exit_data(signal); } } } void process_fail(const char* dump_file_path) { auto process_fail_ = [this, dump_file_path]() { if (_fail_callback && _e) { auto fail_callback = _fail_callback; _fail_callback = nullptr; if (event_loop::is_main_thread()) { fail_callback(dump_file_path); } else { // pointer dump_file_path should be stay valid in main thread because was created from alternative stack area _e->post([fail_callback, dump_file_path]() { fail_callback(dump_file_path); }); } } }; if (event_loop::is_main_thread()) process_fail_(); else { std::unique_lock<std::mutex> lck(_process_fail_config_lock); process_fail_(); } } private: impl::callback_data _callback_data; std::thread _wait_synch_signal_thread; std::condition_variable _wait_started_condition; std::mutex _wait_started_condition_lock; main_loop* _e = nullptr; std::mutex _process_exit_config_lock; exit_callback_type _exit_callback = nullptr; std::mutex _process_fail_config_lock; fail_callback_type _fail_callback = nullptr; }; // namespace server_lib int mt_server_impl::run(main_loop& e, exit_callback_type exit_callback, fail_callback_type fail_callback, control_callback_type control_callback) { if (!event_loop::is_main_thread() || !e.is_main()) return exit_code_error; run_impl(e, exit_callback, fail_callback, control_callback); return exit_code_ok; } void mt_server_impl::start(main_loop& e, exit_callback_type exit_callback, fail_callback_type fail_callback, control_callback_type control_callback) { SRV_ASSERT(event_loop::is_main_thread(), "Only for main thread allowed"); SRV_ASSERT(e.is_main(), "Only main loop accepted"); run_impl(e, exit_callback, fail_callback, control_callback); } void mt_server_impl::run_impl(main_loop& e, exit_callback_type exit_callback, fail_callback_type fail_callback, control_callback_type control_callback) { _e = &e; { 
std::unique_lock<std::mutex> lck(_process_exit_config_lock); if (exit_callback) { _exit_callback = exit_callback; } else { _exit_callback = [this]() { #ifndef NDEBUG fprintf(stderr, "Got signal in default exit callback\n"); #endif stop(*_e); }; } } { std::unique_lock<std::mutex> lck(_process_fail_config_lock); if (fail_callback) { _fail_callback = fail_callback; } else { _fail_callback = [this](const char*) { #ifndef NDEBUG fprintf(stderr, "Got signal in default fail callback\n"); #endif _e->exit(1); }; } } std::thread wait_thread([this, &e, exit_callback, fail_callback, control_callback]() { #if defined(SERVER_LIB_PLATFORM_LINUX) pthread_setname_np(pthread_self(), "signal"); #endif using Callbackdata_type = impl::callback_data::data_type; bool terminated = false; while (!terminated) { server_mt_wait_sig_callback(impl::sig_callback_wrapper); #ifndef NDEBUG fprintf(stderr, "Got signal in signal thread\n"); #endif if (Callbackdata_type::exit == _callback_data.type) { terminated = true; this->process_exit(); } if (Callbackdata_type::usersignal == _callback_data.type && control_callback) { auto data = _callback_data.data.signal; e.post([control_callback, data]() { control_callback(data); }); } } SRV_LOG_TRACE("Signal thread is stopped"); }); _wait_synch_signal_thread.swap(wait_thread); e.set_exit_callback(_exit_callback); e.start([this]() { _wait_started_condition.notify_all(); }); } char mt_server_impl::crash_dump_file_path[PATH_MAX] = { 0 }; mt_server::mt_server() { SRV_ASSERT(event_loop::is_main_thread(), "Only for main thread allowed"); _impl = std::make_unique<mt_server_impl>(); } mt_server::~mt_server() { } void mt_server::init(bool daemon) { _impl->init(daemon); } void mt_server::set_crash_dump_file_name(const char* crash_dump_file_path) { std::strncpy(mt_server_impl::crash_dump_file_path, crash_dump_file_path, PATH_MAX - 1); } void mt_server::start(main_loop& e, exit_callback_type exit_callback, fail_callback_type fail_callback, control_callback_type 
control_callback) { _impl->start(e, exit_callback, fail_callback, control_callback); } int mt_server::run(main_loop& e, exit_callback_type exit_callback, fail_callback_type fail_callback, control_callback_type control_callback) { return _impl->run(e, exit_callback, fail_callback, control_callback); } void mt_server::stop(main_loop& e) { _impl->stop(e); } void mt_server::wait_started(main_loop& e, std::function<void(void)>&& start_notify) { _impl->wait_started(e, std::forward<std::function<void(void)>>(start_notify)); } } // namespace server_lib
Tavi3h/LeetCode_3rd
src/main/java/pers/tavish/leetcode/hard/RecoverBinarySearchTree.java
package pers.tavish.leetcode.hard;

/*
Exactly two nodes of a BST were swapped by mistake; restore the BST by
swapping their values back, without changing the tree's structure.

Approach: an in-order walk of a valid BST visits values in ascending order.
Any place where the previously visited value exceeds the current one marks a
violation. The first violation's *previous* node is the first swapped node;
the last violation's *current* node is the second. Swapping their values
repairs the tree.

for more information:
https://leetcode.com/problems/recover-binary-search-tree/
*/

import pers.tavish.leetcode.struct.TreeNode;

public class RecoverBinarySearchTree {

    // Previously visited node during the in-order walk.
    private TreeNode prev;
    // The two out-of-order nodes discovered during the walk.
    private TreeNode firstWrong, secondWrong;

    public void recoverTree(TreeNode root) {
        traverse(root);
        if (firstWrong == null || secondWrong == null) {
            return;
        }
        int tmp = firstWrong.val;
        firstWrong.val = secondWrong.val;
        secondWrong.val = tmp;
    }

    // Recursive in-order traversal: left subtree, node, right subtree.
    private void traverse(TreeNode node) {
        if (node == null) {
            return;
        }
        traverse(node.left);
        visit(node);
        traverse(node.right);
    }

    // Records an ordering violation if the previous in-order value is larger.
    private void visit(TreeNode node) {
        if (prev != null && prev.val > node.val) {
            if (firstWrong == null) {
                firstWrong = prev;
            }
            secondWrong = node;
        }
        prev = node;
    }
}
n-dusan/wroom
agent-monolith/src/main/java/xwsagent/wroomagent/repository/VerificationTokenRepository.java
<filename>agent-monolith/src/main/java/xwsagent/wroomagent/repository/VerificationTokenRepository.java<gh_stars>0
package xwsagent.wroomagent.repository;

import org.springframework.data.jpa.repository.JpaRepository;

import xwsagent.wroomagent.domain.auth.VerificationToken;

/**
 * Spring Data JPA repository for {@link VerificationToken} entities,
 * keyed by their {@code Long} id.
 */
public interface VerificationTokenRepository extends JpaRepository<VerificationToken, Long> {

    /**
     * Looks up a verification token by its token string.
     * The query is derived by Spring Data from the method name;
     * returns {@code null} when no matching row exists.
     *
     * @param token the token string to search for
     * @return the matching {@link VerificationToken}, or {@code null}
     */
    public VerificationToken findByToken(String token);
}
lhlawson/geopm
src/SharedMemoryImp.hpp
/*
 * Copyright (c) 2015 - 2021, Intel Corporation
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *
 *     * Neither the name of Intel Corporation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef SHAREDMEMORYIMP_HPP_INCLUDE
#define SHAREDMEMORYIMP_HPP_INCLUDE

#include "SharedMemory.hpp"

#include <pthread.h>

namespace geopm
{
    /// @brief Concrete implementation of the SharedMemory interface
    ///        backed by a POSIX inter-process shared memory region.
    class SharedMemoryImp : public SharedMemory
    {
        public:
            SharedMemoryImp();
            /// @brief Destructor destroys and unlinks the shared memory region.
            virtual ~SharedMemoryImp();
            /// @brief Retrieve a pointer to the shared memory region.
            /// @return Void pointer to the shared memory region.
            void *pointer(void) const override;
            /// @brief Retrieve the key to the shared memory region.
            /// @return Key to the shared memory region.
            std::string key(void) const override;
            /// @brief Retrieve the size of the shared memory region.
            size_t size(void) const override;
            /// @brief Unlink the shared memory region from the system.
            void unlink(void) override;
            /// @brief Acquire a scoped lock guarding access to the region.
            std::unique_ptr<SharedMemoryScopedLock> get_scoped_lock(void) override;
            /// @brief Takes a key and a size and creates
            ///        an inter-process shared memory region.
            /// @param [in] shm_key Shared memory key to create the region.
            /// @param [in] size Size of the region to create.
            void create_memory_region(const std::string &shm_key, size_t size);
            /// @brief Takes a key and attempts to attach to a
            ///        inter-process shared memory region. This version of the
            ///        constructor tries to attach multiple times until a timeout
            ///        is reached.
            /// @param [in] shm_key Shared memory key to attach to the region.
            /// @param [in] timeout Length in seconds to keep retrying the
            ///             attachment process to a shared memory region.
            void attach_memory_region(const std::string &shm_key, unsigned int timeout);
        private:
            /// @brief Shared memory key for the region.
            std::string m_shm_key;
            /// @brief Size of the region.
            size_t m_size;
            /// @brief Pointer to the region.
            void *m_ptr;
            /// @brief Indicates whether the shared memory is ready for use,
            ///        either from calling create() or attach().
            bool m_is_linked;
            /// @brief Whether to throw if unlink fails. An object created
            ///        through make_unique_owner() may be unlinked in other
            ///        objects' destructors, and should not throw.
            bool m_do_unlink_check;
    };
}

#endif
xiaohalo/LeetCode
Python/strong-password-checker.py
# Time:  O(n)
# Space: O(1)

# A password is considered strong if below conditions are all met:
#
# It has at least 6 characters and at most 20 characters.
# It must contain at least one lowercase letter, at least one uppercase letter,
# and at least one digit.
# It must NOT contain three repeating characters in a row ("...aaa..." is weak,
# but "...aa...a..." is strong, assuming other conditions are met).
# Write a function strongPasswordChecker(s), that takes a string s as input,
# and return the MINIMUM change required to make s a strong password. If s is
# already strong, return 0.
#
# Insertion, deletion or replace of any one character are all considered as
# one change.

class Solution(object):
    def strongPasswordChecker(self, s):
        """Return the minimum number of single-character insertions,
        deletions, or replacements needed to make ``s`` a strong password.

        :type s: str
        :rtype: int
        """
        # Count how many of the three required character classes are absent;
        # each missing class costs at least one insertion/replacement.
        missing_type_cnt = 3
        if any('a' <= c <= 'z' for c in s):
            missing_type_cnt -= 1
        if any('A' <= c <= 'Z' for c in s):
            missing_type_cnt -= 1
        if any(c.isdigit() for c in s):
            missing_type_cnt -= 1

        # For every run of >= 3 identical characters, a run of length L needs
        # L // 3 replacements. Runs are bucketed by L % 3 because deletions
        # (needed when len(s) > 20) can cheapen them at different rates:
        # 1 deletion saves a replacement on an L%3==0 run, 2 deletions on an
        # L%3==1 run, 3 deletions on any other run.
        total_change_cnt = 0
        one_change_cnt, two_change_cnt, three_change_cnt = 0, 0, 0
        i = 2
        while i < len(s):
            if s[i] == s[i - 1] == s[i - 2]:
                length = 2
                while i < len(s) and s[i] == s[i - 1]:
                    length += 1
                    i += 1
                # BUGFIX: use floor division. The original plain `/` was a
                # Python-2 idiom that yields floats (and wrong results via
                # later arithmetic) on Python 3.
                total_change_cnt += length // 3
                if length % 3 == 0:
                    one_change_cnt += 1
                elif length % 3 == 1:
                    two_change_cnt += 1
                else:
                    three_change_cnt += 1
            else:
                i += 1

        if len(s) < 6:
            # Too short: insertions fix both length and (at most 3) missing
            # types / short runs simultaneously.
            return max(missing_type_cnt, 6 - len(s))
        elif len(s) <= 20:
            # In-range length: replacements fix runs and missing types in one
            # shot, so the answer is whichever requirement dominates.
            return max(missing_type_cnt, total_change_cnt)
        else:
            # Too long: every excess character must be deleted. Spend those
            # forced deletions where they reduce replacement work the most.
            delete_cnt = len(s) - 20
            total_change_cnt -= min(delete_cnt, one_change_cnt * 1) // 1
            total_change_cnt -= min(max(delete_cnt - one_change_cnt, 0),
                                    two_change_cnt * 2) // 2
            total_change_cnt -= min(max(delete_cnt - one_change_cnt - 2 * two_change_cnt, 0),
                                    three_change_cnt * 3) // 3
            return delete_cnt + max(missing_type_cnt, total_change_cnt)
Robbbert/messui
src/devices/bus/psi_kbd/psi_kbd.cpp
// license: GPL-2.0+ // copyright-holders: <NAME> /*************************************************************************** Kontron PSI keyboard interface ***************************************************************************/ #include "emu.h" #include "psi_kbd.h" #include "ergoline.h" #include "hle.h" //************************************************************************** // DEVICE DEFINITIONS //************************************************************************** DEFINE_DEVICE_TYPE(PSI_KEYBOARD_INTERFACE, psi_keyboard_bus_device, "psi_kbd", "PSI Keyboard Interface") //************************************************************************** // SLOT DEVICE //************************************************************************** //------------------------------------------------- // psi_keyboard_bus_device - constructor //------------------------------------------------- psi_keyboard_bus_device::psi_keyboard_bus_device(const machine_config &mconfig, const char *tag, device_t *owner, uint32_t clock) : device_t(mconfig, PSI_KEYBOARD_INTERFACE, tag, owner, clock), device_single_card_slot_interface<device_psi_keyboard_interface>(mconfig, *this), m_kbd(nullptr), m_rx_handler(*this), m_key_strobe_handler(*this), m_key_data(0xff) { } //------------------------------------------------- // psi_keyboard_bus_device - destructor //------------------------------------------------- psi_keyboard_bus_device::~psi_keyboard_bus_device() { } //------------------------------------------------- // device_start - device-specific startup //------------------------------------------------- void psi_keyboard_bus_device::device_start() { // get connected keyboard m_kbd = get_card_device(); // resolve callbacks m_rx_handler.resolve_safe(); m_key_strobe_handler.resolve_safe(); } //------------------------------------------------- // device_reset - device-specific reset //------------------------------------------------- void psi_keyboard_bus_device::device_reset() { 
m_key_data = 0xff; // FIXME: dumb port devices shouldn't mess with data - the keyboard should push this if lines change state on reset } //------------------------------------------------- // host to module interface //------------------------------------------------- WRITE_LINE_MEMBER( psi_keyboard_bus_device::tx_w ) { if (m_kbd) m_kbd->tx_w(state); } //************************************************************************** // KEYBOARD INTERFACE //************************************************************************** //------------------------------------------------- // device_psi_keyboard_interface - constructor //------------------------------------------------- device_psi_keyboard_interface::device_psi_keyboard_interface(const machine_config &mconfig, device_t &device) : device_interface(device, "psikbd") { m_host = dynamic_cast<psi_keyboard_bus_device *>(device.owner()); } //------------------------------------------------- // ~device_psi_keyboard_interface - destructor //------------------------------------------------- device_psi_keyboard_interface::~device_psi_keyboard_interface() { } //************************************************************************** // SLOT INTERFACE //************************************************************************** void psi_keyboard_devices(device_slot_interface &device) { device.option_add("ergoline", ERGOLINE_KEYBOARD); device.option_add("hle", PSI_HLE_KEYBOARD); }
chuckcranor/deltafs
src/libdeltafs/util/mdb.h
<filename>src/libdeltafs/util/mdb.h /* * Copyright (c) 2019 <NAME> University, * Copyright (c) 2019 Triad National Security, LLC, as operator of * Los Alamos National Laboratory. * * All rights reserved. * * Use of this source code is governed by a BSD-style license that can be * found in the LICENSE file. See the AUTHORS file for names of contributors. */ #pragma once #include "pdlfs-common/env.h" #include "pdlfs-common/fsdbx.h" #include "pdlfs-common/fstypes.h" #include "pdlfs-common/leveldb/db.h" #include "pdlfs-common/leveldb/readonly.h" #include "pdlfs-common/leveldb/snapshot.h" #include "pdlfs-common/leveldb/write_batch.h" #include "pdlfs-common/status.h" namespace pdlfs { // Tablefs has its own MDB definitions, so we won't define it. #if defined(DELTAFS) || defined(INDEXFS) class DirIndex; // GIGA index struct MDBOptions { MDBOptions(); // Always set fill_cache to the following for all ReadOptions. // Default: false bool fill_cache; // Always set verify_checksums to the following for all ReadOptions. // Default: false bool verify_checksums; // Always set sync to the following for all WriteOptions. // Default: false bool sync; // The underlying KV-store. DB* db; }; struct MDBStats { MDBStats(); // Total amount of key bytes pushed to db. uint64_t putkeybytes; // Total amount of val bytes pushed to db. uint64_t putbytes; // Total number of put operations. uint64_t puts; // Total number of key bytes read out of db. uint64_t getkeybytes; // Total number of val bytes read out of db. uint64_t getbytes; // Total number of get operations. 
uint64_t gets; }; class MDB : public MXDB<> { public: explicit MDB(const MDBOptions& opts); ~MDB(); struct Tx { Tx() {} // Note that snap is initialized via Create Tx const Snapshot* snap; WriteBatch bat; }; Tx* CreateTx(bool snap = true) { // Start a new Tx return STARTTX<Tx>(snap); } Status GetNode(const DirId& id, const Slice& hash, Stat* stat, std::string* name, Tx* tx); Status SetNode(const DirId& id, const Slice& hash, const Stat& stat, const Slice& name, Tx* tx); Status DelNode(const DirId& id, const Slice& hash, Tx* tx); Status GetDirIdx(const DirId& id, DirIndex* idx, Tx* tx); Status SetDirIdx(const DirId& id, const DirIndex& idx, Tx* tx); Status DelDirIdx(const DirId& id, Tx* tx); Status GetInfo(const DirId& id, DirInfo* info, Tx* tx); Status SetInfo(const DirId& id, const DirInfo& info, Tx* tx); Status DelInfo(const DirId& id, Tx* tx); size_t List(const DirId& id, StatList* stats, NameList* names, Tx* tx, size_t limit); bool Exists(const DirId& id, const Slice& hash, Tx* tx); // Finish a Tx by submitting all its writes Status Commit(Tx* tx) { WriteOptions options; return COMMIT<Tx, WriteOptions>(&options, tx); } void Release(Tx* tx) { // Discard a Tx RELEASE<Tx>(tx); } private: MDBOptions options_; void operator=(const MDB&); // No copying allowed MDB(const MDB&); }; #endif } // namespace pdlfs
Rollczi/LiteCommands
litecommands-core/src/test/java/dev/rollczi/litecommands/scheme/TestSchemeMessage.java
<filename>litecommands-core/src/test/java/dev/rollczi/litecommands/scheme/TestSchemeMessage.java
package dev.rollczi.litecommands.scheme;

import dev.rollczi.litecommands.component.ExecutionResult;
import dev.rollczi.litecommands.valid.messages.LiteMessage;
import dev.rollczi.litecommands.valid.messages.MessageInfoContext;
import dev.rollczi.litecommands.valid.messages.UseSchemeFormatting;

/**
 * Test implementation of {@link LiteMessage} that renders a message as the
 * raw use-scheme string from the message context.
 */
public class TestSchemeMessage implements LiteMessage {

    // Formatting applied when rendering a message for an execution result.
    private final UseSchemeFormatting schemeFormatting;

    public TestSchemeMessage(UseSchemeFormatting schemeFormatting) {
        this.schemeFormatting = schemeFormatting;
    }

    /**
     * Convenience overload: delegates to the interface's two-argument
     * {@code message} using this instance's formatting.
     */
    public String message(ExecutionResult executionResult) {
        return this.message(executionResult, this.schemeFormatting);
    }

    /** Returns the raw use-scheme string carried by the context. */
    @Override
    public String message(MessageInfoContext messageInfoContext) {
        return messageInfoContext.getUseScheme();
    }
}
a10networks/terraform-provider-vThunder
thunder/resource_thunder_timezone.go
<gh_stars>1-10 package thunder //Thunder resource Timezone import ( "context" "util" go_thunder "github.com/go_thunder/thunder" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) func resourceTimezone() *schema.Resource { return &schema.Resource{ CreateContext: resourceTimezoneCreate, UpdateContext: resourceTimezoneUpdate, ReadContext: resourceTimezoneRead, DeleteContext: resourceTimezoneDelete, Schema: map[string]*schema.Schema{ "timezone_index_cfg": { Type: schema.TypeList, Optional: true, MaxItems: 1, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "timezone_index": { Type: schema.TypeString, Optional: true, Description: "", }, "nodst": { Type: schema.TypeInt, Optional: true, Description: "", }, }, }, }, "uuid": { Type: schema.TypeString, Optional: true, Description: "", }, }, } } func resourceTimezoneCreate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { logger := util.GetLoggerInstance() client := meta.(Thunder) var diags diag.Diagnostics if client.Host != "" { logger.Println("[INFO] Creating Timezone (Inside resourceTimezoneCreate) ") data := dataToTimezone(d) logger.Println("[INFO] received formatted data from method data to Timezone --") d.SetId("1") err := go_thunder.PostTimezone(client.Token, data, client.Host) if err != nil { return diag.FromErr(err) } return resourceTimezoneRead(ctx, d, meta) } return diags } func resourceTimezoneRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { logger := util.GetLoggerInstance() client := meta.(Thunder) logger.Println("[INFO] Reading Timezone (Inside resourceTimezoneRead)") var diags diag.Diagnostics if client.Host != "" { logger.Println("[INFO] Fetching service Read") data, err := go_thunder.GetTimezone(client.Token, client.Host) if err != nil { return diag.FromErr(err) } if data == nil { logger.Println("[INFO] No data found ") return nil } return diags } return diags } func 
resourceTimezoneUpdate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { logger := util.GetLoggerInstance() client := meta.(Thunder) var diags diag.Diagnostics if client.Host != "" { logger.Println("[INFO] Updating Timezone (Inside resourceTimezoneCreate) ") data := dataToTimezone(d) logger.Println("[INFO] received formatted data from method data to Timezone --") d.SetId("1") err := go_thunder.PutTimezone(client.Token, data, client.Host) if err != nil { return diag.FromErr(err) } return resourceTimezoneRead(ctx, d, meta) } return diags } func resourceTimezoneDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { logger := util.GetLoggerInstance() client := meta.(Thunder) logger.Println("[INFO] Deleting Timezone (Inside resourceTimezoneRead)") var diags diag.Diagnostics if client.Host != "" { logger.Println("[INFO] Fetching service Read") err := go_thunder.DeleteTimezone(client.Token, client.Host) if err != nil { logger.Printf("[ERROR] Unable to Delete Timezone") return diag.FromErr(err) } return nil } return diags } func dataToTimezone(d *schema.ResourceData) go_thunder.Timezone { var vc go_thunder.Timezone var c go_thunder.TimezoneInstance var obj1 go_thunder.TimezoneInstanceTimezoneIndexCfg prefix1 := "timezone_index_cfg.0." obj1.TimezoneInstanceTimezoneIndexCfgTimezoneIndex = d.Get(prefix1 + "timezone_index").(string) obj1.TimezoneInstanceTimezoneIndexCfgNodst = d.Get(prefix1 + "nodst").(int) c.TimezoneInstanceTimezoneIndexCfgTimezoneIndex = obj1 vc.TimezoneInstanceTimezoneIndexCfg = c return vc }
Pixelated-Project/aosp-android-jar
android-31/src/com/android/internal/telephony/InboundSmsTracker.java
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.internal.telephony;

import android.compat.annotation.UnsupportedAppUsage;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.os.Build;
import android.telephony.SubscriptionManager;
import android.telephony.TelephonyManager;
import android.text.TextUtils;
import android.util.Pair;

import com.android.internal.annotations.VisibleForTesting;
import com.android.internal.util.HexDump;
import com.android.telephony.Rlog;

import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.Date;

/**
 * Tracker for an incoming SMS message ready to broadcast to listeners.
 * This is similar to {@link com.android.internal.telephony.SMSDispatcher.SmsTracker} used for
 * outgoing messages.
 */
public class InboundSmsTracker {

    // Need 8 bytes to get a message id as a long.
    private static final int NUM_OF_BYTES_HASH_VALUE_FOR_MESSAGE_ID = 8;

    // Fields for single and multi-part messages
    private final byte[] mPdu;
    private final long mTimestamp;
    private final int mDestPort;
    private final boolean mIs3gpp2;
    private final boolean mIs3gpp2WapPdu;
    private final String mMessageBody;
    private final boolean mIsClass0;
    private final int mSubId;
    private final long mMessageId;
    private final @InboundSmsHandler.SmsSource int mSmsSource;

    // Fields for concatenating multi-part SMS messages
    private final String mAddress;
    private final int mReferenceNumber;
    private final int mSequenceNumber;
    private final int mMessageCount;

    // Fields for deleting this message after delivery
    private String mDeleteWhere;
    private String[] mDeleteWhereArgs;

    // BroadcastReceiver associated with this tracker (created lazily, see
    // getSmsBroadcastReceiver)
    private InboundSmsHandler.SmsBroadcastReceiver mSmsBroadcastReceiver;

    /**
     * Copied from SmsMessageBase#getDisplayOriginatingAddress used for blocking messages.
     * DisplayAddress could be email address if this message was from an email gateway, otherwise
     * same as mAddress. Email gateway might set a generic gateway address as the mAddress which
     * could not be used for blocking check and append the display email address at the beginning
     * of the message body. In that case, display email address is only available for the first SMS
     * in the Multi-part SMS.
     */
    private final String mDisplayAddress;

    // The "destination_port" column packs format flags (bits 16-19) together with
    // the real 16-bit destination port (bits 0-15); see getContentValues() /
    // getRealDestPort().
    @VisibleForTesting
    /** Destination port flag bit for no destination port. */
    public static final int DEST_PORT_FLAG_NO_PORT = (1 << 16);

    /** Destination port flag bit to indicate 3GPP format message. */
    private static final int DEST_PORT_FLAG_3GPP = (1 << 17);

    @VisibleForTesting
    /** Destination port flag bit to indicate 3GPP2 format message. */
    public static final int DEST_PORT_FLAG_3GPP2 = (1 << 18);

    @VisibleForTesting
    /** Destination port flag bit to indicate 3GPP2 format WAP message. */
    public static final int DEST_PORT_FLAG_3GPP2_WAP_PDU = (1 << 19);

    /** Destination port mask (16-bit unsigned value on GSM and CDMA). */
    private static final int DEST_PORT_MASK = 0xffff;

    @VisibleForTesting
    public static final String SELECT_BY_REFERENCE = "address=? AND reference_number=? AND "
            + "count=? AND (destination_port & " + DEST_PORT_FLAG_3GPP2_WAP_PDU
            + "=0) AND deleted=0";

    @VisibleForTesting
    public static final String SELECT_BY_REFERENCE_3GPP2WAP = "address=? AND reference_number=? "
            + "AND count=? AND (destination_port & " + DEST_PORT_FLAG_3GPP2_WAP_PDU + "="
            + DEST_PORT_FLAG_3GPP2_WAP_PDU + ") AND deleted=0";

    /**
     * Create a tracker for a single-part SMS.
     *
     * @param context the context, used to compute the message id
     * @param pdu the message PDU
     * @param timestamp the message timestamp
     * @param destPort the destination port
     * @param is3gpp2 true for 3GPP2 format; false for 3GPP format
     * @param is3gpp2WapPdu true for 3GPP2 format WAP PDU; false otherwise
     * @param address originating address
     * @param displayAddress email address if this message was from an email gateway, otherwise same
     *         as originating address
     * @param messageBody the decoded message body
     * @param isClass0 true for a class-0 (flash) message
     * @param subId the subscription id the message arrived on
     * @param smsSource the source of the SMS message
     */
    public InboundSmsTracker(Context context, byte[] pdu, long timestamp, int destPort,
            boolean is3gpp2, boolean is3gpp2WapPdu, String address, String displayAddress,
            String messageBody, boolean isClass0, int subId,
            @InboundSmsHandler.SmsSource int smsSource) {
        mPdu = pdu;
        mTimestamp = timestamp;
        mDestPort = destPort;
        mIs3gpp2 = is3gpp2;
        mIs3gpp2WapPdu = is3gpp2WapPdu;
        mMessageBody = messageBody;
        mAddress = address;
        mDisplayAddress = displayAddress;
        mIsClass0 = isClass0;
        // fields for multi-part SMS
        mReferenceNumber = -1;
        mSequenceNumber = getIndexOffset();  // 0 or 1, depending on type
        mMessageCount = 1;
        mSubId = subId;
        mMessageId = createMessageId(context, timestamp, subId);
        mSmsSource = smsSource;
    }

    /**
     * Create a tracker for a multi-part SMS. Sequence numbers start at 1 for 3GPP and regular
     * concatenated 3GPP2 messages, but CDMA WAP push sequence numbers start at 0. The caller will
     * subtract 1 if necessary so that the sequence number is always 0-based. When loading and
     * saving to the raw table, the sequence number is adjusted if necessary for backwards
     * compatibility.
     *
     * @param pdu the message PDU
     * @param timestamp the message timestamp
     * @param destPort the destination port
     * @param is3gpp2 true for 3GPP2 format; false for 3GPP format
     * @param address originating address, or email if this message was from an email gateway
     * @param displayAddress email address if this message was from an email gateway, otherwise same
     *         as originating address
     * @param referenceNumber the concatenated reference number
     * @param sequenceNumber the sequence number of this segment (0-based)
     * @param messageCount the total number of segments
     * @param is3gpp2WapPdu true for 3GPP2 format WAP PDU; false otherwise
     * @param smsSource the source of the SMS message
     */
    public InboundSmsTracker(Context context, byte[] pdu, long timestamp, int destPort,
            boolean is3gpp2, String address, String displayAddress, int referenceNumber,
            int sequenceNumber, int messageCount, boolean is3gpp2WapPdu, String messageBody,
            boolean isClass0, int subId, @InboundSmsHandler.SmsSource int smsSource) {
        mPdu = pdu;
        mTimestamp = timestamp;
        mDestPort = destPort;
        mIs3gpp2 = is3gpp2;
        mIs3gpp2WapPdu = is3gpp2WapPdu;
        mMessageBody = messageBody;
        mIsClass0 = isClass0;
        // fields used for check blocking message
        mDisplayAddress = displayAddress;
        // fields for multi-part SMS
        mAddress = address;
        mReferenceNumber = referenceNumber;
        mSequenceNumber = sequenceNumber;
        mMessageCount = messageCount;
        mSubId = subId;
        mMessageId = createMessageId(context, timestamp, subId);
        mSmsSource = smsSource;
    }

    /**
     * Create a new tracker from the row of the raw table pointed to by Cursor.
     * Since this constructor is used only for recovery during startup, the Dispatcher is null.
     *
     * @param cursor a Cursor pointing to the row to construct this SmsTracker for
     */
    public InboundSmsTracker(Context context, Cursor cursor, boolean isCurrentFormat3gpp2) {
        mPdu = HexDump.hexStringToByteArray(cursor.getString(InboundSmsHandler.PDU_COLUMN));

        // TODO: add a column to raw db to store this
        mIsClass0 = false;

        if (cursor.isNull(InboundSmsHandler.DESTINATION_PORT_COLUMN)) {
            // No stored port/flags: fall back to the device's current format.
            mDestPort = -1;
            mIs3gpp2 = isCurrentFormat3gpp2;
            mIs3gpp2WapPdu = false;
        } else {
            int destPort = cursor.getInt(InboundSmsHandler.DESTINATION_PORT_COLUMN);
            // Decode the format flags packed into the upper bits of the column.
            if ((destPort & DEST_PORT_FLAG_3GPP) != 0) {
                mIs3gpp2 = false;
            } else if ((destPort & DEST_PORT_FLAG_3GPP2) != 0) {
                mIs3gpp2 = true;
            } else {
                mIs3gpp2 = isCurrentFormat3gpp2;
            }
            mIs3gpp2WapPdu = ((destPort & DEST_PORT_FLAG_3GPP2_WAP_PDU) != 0);
            mDestPort = getRealDestPort(destPort);
        }

        mTimestamp = cursor.getLong(InboundSmsHandler.DATE_COLUMN);
        mAddress = cursor.getString(InboundSmsHandler.ADDRESS_COLUMN);
        mDisplayAddress = cursor.getString(InboundSmsHandler.DISPLAY_ADDRESS_COLUMN);
        mSubId = cursor.getInt(SmsBroadcastUndelivered.PDU_PENDING_MESSAGE_PROJECTION_INDEX_MAPPING
                .get(InboundSmsHandler.SUBID_COLUMN));

        if (cursor.getInt(InboundSmsHandler.COUNT_COLUMN) == 1) {
            // single-part message
            long rowId = cursor.getLong(InboundSmsHandler.ID_COLUMN);
            mReferenceNumber = -1;
            mSequenceNumber = getIndexOffset();  // 0 or 1, depending on type
            mMessageCount = 1;
            mDeleteWhere = InboundSmsHandler.SELECT_BY_ID;
            mDeleteWhereArgs = new String[]{Long.toString(rowId)};
        } else {
            // multi-part message
            mReferenceNumber = cursor.getInt(InboundSmsHandler.REFERENCE_NUMBER_COLUMN);
            mMessageCount = cursor.getInt(InboundSmsHandler.COUNT_COLUMN);

            // GSM sequence numbers start at 1; CDMA WDP datagram sequence numbers start at 0
            mSequenceNumber = cursor.getInt(InboundSmsHandler.SEQUENCE_COLUMN);
            int index = mSequenceNumber - getIndexOffset();

            if (index < 0 || index >= mMessageCount) {
                throw new IllegalArgumentException("invalid PDU sequence " + mSequenceNumber
                        + " of " + mMessageCount);
            }
            mDeleteWhere = getQueryForSegments();
            mDeleteWhereArgs = new String[]{mAddress, Integer.toString(mReferenceNumber),
                    Integer.toString(mMessageCount)};
        }
        mMessageBody = cursor.getString(InboundSmsHandler.MESSAGE_BODY_COLUMN);
        mMessageId = createMessageId(context, mTimestamp, mSubId);
        // TODO(b/167713264): Use the correct SMS source
        mSmsSource = InboundSmsHandler.SOURCE_NOT_INJECTED;
    }

    /** Build the raw-table row for this tracker (inverse of the Cursor constructor). */
    public ContentValues getContentValues() {
        ContentValues values = new ContentValues();
        values.put("pdu", HexDump.toHexString(mPdu));
        values.put("date", mTimestamp);
        // Always set the destination port, since it now contains message format flags.
        // Port is a 16-bit value, or -1, so clear the upper bits before setting flags.
        int destPort;
        if (mDestPort == -1) {
            destPort = DEST_PORT_FLAG_NO_PORT;
        } else {
            destPort = mDestPort & DEST_PORT_MASK;
        }
        if (mIs3gpp2) {
            destPort |= DEST_PORT_FLAG_3GPP2;
        } else {
            destPort |= DEST_PORT_FLAG_3GPP;
        }
        if (mIs3gpp2WapPdu) {
            destPort |= DEST_PORT_FLAG_3GPP2_WAP_PDU;
        }
        values.put("destination_port", destPort);
        if (mAddress != null) {
            values.put("address", mAddress);
            values.put("display_originating_addr", mDisplayAddress);
            values.put("reference_number", mReferenceNumber);
            values.put("sequence", mSequenceNumber);
        }
        values.put("count", mMessageCount);
        values.put("message_body", mMessageBody);
        values.put("sub_id", mSubId);
        return values;
    }

    /**
     * Get the port number, or -1 if there is no destination port.
     * @param destPort the destination port value, with flags
     * @return the real destination port, or -1 for no port
     */
    public static int getRealDestPort(int destPort) {
        if ((destPort & DEST_PORT_FLAG_NO_PORT) != 0) {
            return -1;
        } else {
            return destPort & DEST_PORT_MASK;
        }
    }

    /**
     * Update the values to delete all rows of the message from raw table.
     * @param deleteWhere the selection to use
     * @param deleteWhereArgs the selection args to use
     */
    public void setDeleteWhere(String deleteWhere, String[] deleteWhereArgs) {
        mDeleteWhere = deleteWhere;
        mDeleteWhereArgs = deleteWhereArgs;
    }

    // NOTE(review): overrides Object.toString but lacks an @Override annotation.
    public String toString() {
        StringBuilder builder = new StringBuilder("SmsTracker{timestamp=");
        builder.append(new Date(mTimestamp));
        builder.append(" destPort=").append(mDestPort);
        builder.append(" is3gpp2=").append(mIs3gpp2);
        if (InboundSmsHandler.VDBG) {
            // PII (address/body) is only logged in verbose debug builds.
            builder.append(" address=").append(mAddress);
            builder.append(" timestamp=").append(mTimestamp);
            builder.append(" messageBody=").append(mMessageBody);
        }
        builder.append(" display_originating_addr=").append(mDisplayAddress);
        builder.append(" refNumber=").append(mReferenceNumber);
        builder.append(" seqNumber=").append(mSequenceNumber);
        builder.append(" msgCount=").append(mMessageCount);
        if (mDeleteWhere != null) {
            builder.append(" deleteWhere(").append(mDeleteWhere);
            builder.append(") deleteArgs=(").append(Arrays.toString(mDeleteWhereArgs));
            builder.append(')');
        }
        builder.append(" ");
        builder.append(SmsController.formatCrossStackMessageId(mMessageId));
        builder.append("}");
        return builder.toString();
    }

    public byte[] getPdu() {
        return mPdu;
    }

    public long getTimestamp() {
        return mTimestamp;
    }

    public int getDestPort() {
        return mDestPort;
    }

    public boolean is3gpp2() {
        return mIs3gpp2;
    }

    public boolean isClass0() {
        return mIsClass0;
    }

    public int getSubId() {
        return mSubId;
    }

    @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
    public String getFormat() {
        return mIs3gpp2 ? SmsConstants.FORMAT_3GPP2 : SmsConstants.FORMAT_3GPP;
    }

    public String getQueryForSegments() {
        return mIs3gpp2WapPdu ? SELECT_BY_REFERENCE_3GPP2WAP : SELECT_BY_REFERENCE;
    }

    /**
     * Get the query to find the exact same message/message segment in the db.
     * @return Pair with where as Pair.first and whereArgs as Pair.second
     */
    public Pair<String, String[]> getExactMatchDupDetectQuery() {
        // convert to strings for query
        String address = getAddress();
        String refNumber = Integer.toString(getReferenceNumber());
        String count = Integer.toString(getMessageCount());
        String seqNumber = Integer.toString(getSequenceNumber());
        String date = Long.toString(getTimestamp());
        String messageBody = getMessageBody();

        String where = "address=? AND reference_number=? AND count=? AND sequence=? AND "
                + "date=? AND message_body=?";
        where = addDestPortQuery(where);
        String[] whereArgs = new String[]{address, refNumber, count, seqNumber, date, messageBody};

        return new Pair<>(where, whereArgs);
    }

    /**
     * The key differences here compared to exact match are:
     * - this is applicable only for multi-part message segments
     * - this does not match date or message_body
     * - this matches deleted=0 (undeleted segments)
     * The only difference as compared to getQueryForSegments() is that this checks for sequence as
     * well.
     * @return Pair with where as Pair.first and whereArgs as Pair.second
     */
    public Pair<String, String[]> getInexactMatchDupDetectQuery() {
        if (getMessageCount() == 1) return null;

        // convert to strings for query
        String address = getAddress();
        String refNumber = Integer.toString(getReferenceNumber());
        String count = Integer.toString(getMessageCount());
        String seqNumber = Integer.toString(getSequenceNumber());

        String where = "address=? AND reference_number=? AND count=? AND sequence=? AND "
                + "deleted=0";
        where = addDestPortQuery(where);
        String[] whereArgs = new String[]{address, refNumber, count, seqNumber};

        return new Pair<>(where, whereArgs);
    }

    // Narrow a WHERE clause to rows whose destination_port flags match this
    // tracker's 3GPP2-WAP-ness, so GSM/CDMA segments never cross-match.
    private String addDestPortQuery(String where) {
        String whereDestPort;
        if (mIs3gpp2WapPdu) {
            whereDestPort = "destination_port & " + DEST_PORT_FLAG_3GPP2_WAP_PDU + "="
                    + DEST_PORT_FLAG_3GPP2_WAP_PDU;
        } else {
            whereDestPort = "destination_port & " + DEST_PORT_FLAG_3GPP2_WAP_PDU + "=0";
        }
        return where + " AND (" + whereDestPort + ")";
    }

    // Message id = first 8 bytes of SHA-1(IMEI + timestamp), or 0 when the IMEI
    // is unavailable for the subscription's slot.
    private static long createMessageId(Context context, long timestamp, int subId) {
        int slotId = SubscriptionManager.getSlotIndex(subId);
        TelephonyManager telephonyManager =
                (TelephonyManager) context.getSystemService(Context.TELEPHONY_SERVICE);
        String deviceId = telephonyManager.getImei(slotId);
        if (TextUtils.isEmpty(deviceId)) {
            return 0L;
        }
        String messagePrint = deviceId + timestamp;
        return getShaValue(messagePrint);
    }

    private static long getShaValue(String messagePrint) {
        try {
            return ByteBuffer.wrap(getShaBytes(messagePrint,
                    NUM_OF_BYTES_HASH_VALUE_FOR_MESSAGE_ID)).getLong();
        } catch (final NoSuchAlgorithmException | UnsupportedEncodingException e) {
            Rlog.e("InboundSmsTracker", "Exception while getting SHA value for message", e);
        }
        return 0L;
    }

    // Returns at most maxNumOfBytes of the SHA-1 digest of messagePrint (UTF-8).
    private static byte[] getShaBytes(String messagePrint, int maxNumOfBytes)
            throws NoSuchAlgorithmException, UnsupportedEncodingException {
        MessageDigest messageDigest = MessageDigest.getInstance("SHA-1");
        messageDigest.reset();
        messageDigest.update(messagePrint.getBytes("UTF-8"));
        byte[] hashResult = messageDigest.digest();
        if (hashResult.length >= maxNumOfBytes) {
            byte[] truncatedHashResult = new byte[maxNumOfBytes];
            System.arraycopy(hashResult, 0, truncatedHashResult, 0, maxNumOfBytes);
            return truncatedHashResult;
        }
        return hashResult;
    }

    /**
     * Sequence numbers for concatenated messages start at 1. The exception is CDMA WAP PDU
     * messages, which use a 0-based index.
     * @return the offset to use to convert between mIndex and the sequence number
     */
    @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
    public int getIndexOffset() {
        return (mIs3gpp2 && mIs3gpp2WapPdu) ? 0 : 1;
    }

    public String getAddress() {
        return mAddress;
    }

    public String getDisplayAddress() {
        return mDisplayAddress;
    }

    public String getMessageBody() {
        return mMessageBody;
    }

    public int getReferenceNumber() {
        return mReferenceNumber;
    }

    public int getSequenceNumber() {
        return mSequenceNumber;
    }

    public int getMessageCount() {
        return mMessageCount;
    }

    public String getDeleteWhere() {
        return mDeleteWhere;
    }

    public String[] getDeleteWhereArgs() {
        return mDeleteWhereArgs;
    }

    public long getMessageId() {
        return mMessageId;
    }

    public @InboundSmsHandler.SmsSource int getSource() {
        return mSmsSource;
    }

    /**
     * Get/create the SmsBroadcastReceiver corresponding to the current tracker.
     */
    public InboundSmsHandler.SmsBroadcastReceiver getSmsBroadcastReceiver(
            InboundSmsHandler handler) {
        // lazy initialization
        if (mSmsBroadcastReceiver == null) {
            mSmsBroadcastReceiver = handler.new SmsBroadcastReceiver(this);
        }
        return mSmsBroadcastReceiver;
    }
}
KoKumagai/exercises
aoj/volume0/n0046/Main.cpp
#include <iostream>
using namespace std;

// AOJ volume 0, problem 0046: read a sequence of heights from stdin and
// print the difference between the tallest and the shortest.
int main() {
    float low, high;
    // Guard the first read: in the original, an empty input left `low`/`high`
    // uninitialized (undefined behavior). With no data there is nothing to print.
    if (!(cin >> low)) {
        return 0;
    }
    high = low;

    float height;
    while (cin >> height) {
        if (height < low) {
            low = height;
        }
        if (height > high) {
            high = height;
        }
    }

    // Range of the observed values.
    cout << high - low << endl;
    return 0;
}
lixiny/CPF
hocontact/utils/netutils.py
import torch


def rec_freeze(model):
    """Freeze *model* for fine-tuning.

    Sets ``momentum = 0`` on every BatchNorm layer (so running_mean/running_var
    stop updating: ``new = (1 - momentum) * old + momentum * batch``) and
    disables gradients on all parameters owned by sub-modules.

    Fix: the original recursed into each child *in addition to* walking
    ``model.modules()``, re-doing the same work once per ancestor. A single
    pass produces the identical end state.
    """
    for module in model.modules():
        if isinstance(module, torch.nn.modules.batchnorm._BatchNorm):
            module.momentum = 0
    # NOTE(review): as in the original, only parameters reachable through
    # children are frozen; parameters registered directly on `model` itself
    # stay trainable — confirm that is intended.
    for child in model.children():
        for param in child.parameters():
            param.requires_grad = False


def freeze_batchnorm_stats(model):
    """Stop running-stat updates on every BatchNorm layer in *model*.

    Parameters keep ``requires_grad`` unchanged; only ``momentum`` is zeroed.
    (The original's extra recursion over ``named_children`` was redundant:
    ``model.modules()`` already yields every descendant.)
    """
    for module in model.modules():
        if isinstance(module, torch.nn.modules.batchnorm._BatchNorm):
            module.momentum = 0


def kmninit(model):
    """Apply Kaiming-normal init (ReLU gain) to every Conv2d weight in *model*."""
    for m in model.modules():
        if isinstance(m, torch.nn.Conv2d):
            torch.nn.init.kaiming_normal_(m.weight, nonlinearity="relu")
crbecker1/mymntr
app/views/layouts/admin.rb
# Admin layout view: supplies the admin side navigation as the
# :app_navigation content slot and renders the shared logged-in layout.
class Views::Layouts::Admin < Views::Base
  def content
    # Only inject the default admin side nav when the rendered page has not
    # already filled the :app_navigation slot itself.
    if !content_for?(:app_navigation)
      content_for :app_navigation do
        render partial: 'shared/admin/side_nav'
      end
    end
    # Delegate the rest of the chrome to the common logged-in layout.
    render template: 'layouts/logged_in'
  end
end
justshiv/weicoder
websocket/src/main/java/com/weicoder/websocket/listener/InitWebSocketListener.java
<gh_stars>1-10
package com.weicoder.websocket.listener;

import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import javax.servlet.annotation.WebListener;

import com.weicoder.common.init.Inits;

/**
 * Initialization listener: runs the framework's initializers once when the
 * servlet context (web application) starts up.
 *
 * @author WD
 */
@WebListener
public class InitWebSocketListener implements ServletContextListener {
	/**
	 * Initialize resources on web application startup.
	 */
	public void contextInitialized(ServletContextEvent event) {
		Inits.init();
	}
	// NOTE(review): contextDestroyed is intentionally not overridden —
	// presumably no teardown is required; confirm against the Inits contract.
}
1000applis/ovh-api
ovh.api/src/main/java/com/milleapplis/ovh/api/sms/enums/ServiceStateEnum.java
package com.milleapplis.ovh.api.sms.enums;

import com.fasterxml.jackson.annotation.JsonValue;

/**
 * Lifecycle state of an OVH SMS service, serialized to/from the
 * lower-camel-case wire values used by the API
 * ("expired", "inCreation", "ok", "unPaid").
 */
public enum ServiceStateEnum {
	EXPIRED("expired"),
	IN_CREATION("inCreation"),
	OK("ok"),
	UN_PAID("unPaid");

	// Wire value sent to / received from the API.
	// (Renamed from the misleading original field name "priority".)
	private final String value;

	private ServiceStateEnum(String value) {
		this.value = value;
	}

	/** JSON wire representation of this constant. */
	@Override
	@JsonValue
	public String toString() {
		return value;
	}

	/**
	 * Parse a wire value back into its enum constant.
	 *
	 * Bug fix: this method previously also carried {@code @JsonValue}, which
	 * Jackson rejects at runtime (only one "as-value" accessor is allowed per
	 * type). It is a factory, so it is now static as well.
	 *
	 * @param value wire value such as "unPaid"; may be null
	 * @return the matching constant, or null for null/unknown input
	 */
	public static ServiceStateEnum fromString(String value) {
		if (value == null) {
			return null;
		}
		for (ServiceStateEnum state : ServiceStateEnum.values()) {
			if (value.equals(state.toString())) {
				return state;
			}
		}
		return null;
	}
}
softicar/platform
platform-common/src/main/java/com/softicar/platform/common/core/java/classpath/JavaClasspathAnalyzedClassesLoader.java
package com.softicar.platform.common.core.java.classpath;

import com.softicar.platform.common.core.java.classes.analyzer.AnalyzedJavaClass;
import com.softicar.platform.common.core.java.classes.name.JavaClassName;
import java.util.Map;
import java.util.TreeMap;

/**
 * Loads all classes on the class path as {@link AnalyzedJavaClass}.
 *
 * @author <NAME>
 */
public class JavaClasspathAnalyzedClassesLoader {

	private Map<JavaClassName, AnalyzedJavaClass> javaClasses;

	/**
	 * Walks every class-path root and collects its analyzed classes into a
	 * sorted map keyed by class name. When the same class name appears under
	 * several roots, the first occurrence wins.
	 *
	 * @return map from class name to its {@link AnalyzedJavaClass}
	 */
	public Map<JavaClassName, AnalyzedJavaClass> loadClasses() {

		this.javaClasses = new TreeMap<>();
		new JavaClasspathLoader()
			.load()
			.getAllRoots()
			.forEach(root -> root
				.getAnalyzedClasses()
				.forEach(analyzedClass -> javaClasses
					.putIfAbsent(analyzedClass.getClassName(), analyzedClass)));
		return javaClasses;
	}
}
digithun/jamplay-nap
scripts/seeds/seed-generator/recommend.js
<gh_stars>0 const { casual, loadSeedId, writeSeed, genFixArray } = require('../helpers') module.exports = async function generate () { const bookIds = loadSeedId('book') const categoryRecommends = ['N', 'M', 'D', 'G'].map(c => ({ _id: casual.objectId, type: `CATEGORY_${c}`, bookIds: genFixArray(bookIds, 10) })) const shelfRecommends = { _id: casual.objectId, type: 'shelf', bookIds: genFixArray(bookIds, 1) } const heroBanners = { _id: casual.objectId, type: 'heroBanner', bookIds: genFixArray(bookIds, 10) } const result = [...categoryRecommends, shelfRecommends, heroBanners] writeSeed('recommend', result) }
gottaegbert/penter
library/lib_study/167_pythonruntime_contextlib.py
from contextlib import contextmanager,asynccontextmanager
# Important: https://docs.python.org/zh-cn/3/library/contextlib.html
# https://www.jianshu.com/p/94bc38e65fff
# @contextmanager can be used for transaction commit/rollback handling:
"""
def auto_commit(self):
    try:
        yield
        self.session.commit()
    except Exception as e:
        self.session.rollback()
        raise e

with db.auto_commit():
    db.session.add()
    ...
"""

# @contextmanager
# def managed_resource(*args, **kwds):
#     # Code to acquire resource, e.g.:
#     resource = acquire_resource(*args, **kwds)
#     try:
#         yield resource
#     finally:
#         # Code to release resource, e.g.:
#         release_resource(resource)
#
# >>> with managed_resource(timeout=3600) as resource:
# ...     # Resource is released at the end of this block,
# ...     # even if code in the block raises an exception

# Everything before the yield plays the role of __init__ (setup);
# everything after the yield effectively plays the role of __exit__ (teardown).

# @asynccontextmanager
# async def get_connection():
#     conn = await acquire_db_connection()
#     try:
#         yield conn
#     finally:
#         await release_db_connection(conn)
#
# async def get_all_users():
#     async with get_connection() as conn:
#         return conn.query('SELECT ...')

print("_______contextlib.closing")
# contextlib.closing is equivalent to ("等效于"):
"""
等效于
from contextlib import contextmanager

@contextmanager
def closing(thing):
    try:
        yield thing
    finally:
        thing.close()
"""
# from contextlib import closing
# from urllib.request import urlopen
#
# with closing(urlopen('http://www.python.org')) as page:
#     for line in page:
#         print(line)

# contextlib.suppress ("压制") — ignore the listed exception types.
print("-------------contextlib.suppress(压制) 忽略异常")
import contextlib


class NonFatalError(Exception):
    pass


def non_idempotent_operation():
    raise NonFatalError(
        'The operation failed because of existing state'
    )


# Execution of the body stops at the first suppressed exception,
# so 'succeeded!' is never printed here.
with contextlib.suppress(NonFatalError):
    print('trying non-idempotent operation')
    non_idempotent_operation()
    print('succeeded!')
# The suppress block above is shorthand for:
"""
try:
    print('trying non-idempotent operation')
    non_idempotent_operation()
    print('succeeded!')
except NonFatalError:
    pass
"""
print('done')

# "重定向输出流" = redirecting the output streams.
print("--------------重定向输出流")
from contextlib import redirect_stdout, redirect_stderr
import io
import sys


def misbehaving_function(a):
    # Writes to BOTH stdout and stderr so the redirection of each can be observed.
    sys.stdout.write('(stdout) A: {!r}\n'.format(a))
    sys.stderr.write('(stderr) A: {!r}\n'.format(a))


# Capture both streams into a single in-memory buffer.
capture = io.StringIO()
with redirect_stdout(capture), redirect_stderr(capture):
    misbehaving_function(5)

print(capture.getvalue())
mI-PIV/app
app/src/main/java/com/onrpiv/uploadmedia/Learn/Pos1_Activity.java
<filename>app/src/main/java/com/onrpiv/uploadmedia/Learn/Pos1_Activity.java package com.onrpiv.uploadmedia.Learn; import android.os.Build; import androidx.annotation.RequiresApi; import com.google.android.material.bottomnavigation.BottomNavigationView; import android.os.Bundle; import android.text.Layout; import android.widget.ImageView; import android.widget.TextView; import com.onrpiv.uploadmedia.R; public class Pos1_Activity extends FluidGlossary { private int headerTextSize = 25; private int paraTextSize = 16; @RequiresApi(api = Build.VERSION_CODES.O) @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_pos1); TextView t0 = (TextView)findViewById(R.id.pos1TextView0); t0.setText("Boundary Layer"); t0.setTextSize(headerTextSize); TextView t1 = (TextView) findViewById(R.id.pos1TextView1); t1.setText("The boundary layer is the thin fluid layer which forms between a flowing fluid and a surface due to the no-slip condition (the flow velocity at a surface is 0). To satisfy the difference in the freestream and surface velocities, the region of fluid between the freestream and surface whose velocity ranges from 0 to 99% (typically) of the freestream velocity is referred to as the boundary layer. An example of the boundary layer (gray) along a flat plate is shown in the photo below:"); t1.setJustificationMode(Layout.JUSTIFICATION_MODE_INTER_WORD); TextView t2 = (TextView) findViewById(R.id.pos1TextView2); t2.setText("The thickness of a boundary layer is dependent on the surface geometry and Reynolds number. 
For a flat wall with laminar flow, the boundary layer thickness \uD835\uDEFF, at a distance \uD835\uDC65 down the plate in the freestream direction, may be solved by the following equation:"); t2.setJustificationMode(Layout.JUSTIFICATION_MODE_INTER_WORD); TextView textView0 = (TextView) findViewById(R.id.pos1TextView3); textView0.setText("For a flat plate with turbulent flow, the boundary layer thickness may be described by the following:"); textView0.setJustificationMode(Layout.JUSTIFICATION_MODE_INTER_WORD); TextView t3 = (TextView) findViewById(R.id.pos1TextView4); t3.setText("\nConsiderations for mI-PIV:"); t3.setJustificationMode(Layout.JUSTIFICATION_MODE_INTER_WORD); TextView t4 = (TextView) findViewById(R.id.pos1TextView5); t4.setText("Surfaces contacting the fluid will also likely be illuminated by the laser. Since particles will not be distinguishable from an illuminated boundary, boundary layers are difficult to observe in PIV. The photo below shows an example of this in pipe flow. The edge of the pipe is illuminated by the laser, making the velocity vectors null."); t4.setJustificationMode(Layout.JUSTIFICATION_MODE_INTER_WORD); TextView[] textViews = {t0, t1, t2, t3, t4}; for (int i = 0; i < textViews.length; i++) { textViews[i].setTextSize(paraTextSize); } BottomNavigationView bottomNavigationView = (BottomNavigationView) findViewById(R.id.bottom_navigation); bottomNavigationView.setOnNavigationItemSelectedListener(navListener); } }
jhyry-gcpud/jalico
old_googlecode_sources/com/ochafik/util/listenable/DefaultListenableSortedSet.java
/*
   Copyright 2008 <NAME>

   Licensed under the Apache License, Version 2.0 (the License);
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an AS IS BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

   This file comes from the Jalico project (Java Listenable Collections)

       http://jalico.googlecode.com/.
*/
package com.ochafik.util.listenable;

import java.util.Comparator;
import java.util.SortedSet;

/**
 * Default implementation of the ListenableSet and SortedSet interface.<br/>
 * This class follows both the decorator and proxy patterns : it wraps an existing java.util.SortedSet
 * and adds the listenable feature to it, delegating all sorted-set operations to the wrapped set.<br/>
 * @author <NAME>
 * @param <T> Type of the elements of the set
 */
class DefaultListenableSortedSet<T> extends DefaultListenableSet<T> implements ListenableSortedSet<T> {
	public DefaultListenableSortedSet(SortedSet<T> set, ListenableSupport<T> collectionSupport) {
		super(set, collectionSupport);
	}

	public DefaultListenableSortedSet(SortedSet<T> set) {
		super(set);
	}

	/**
	 * The wrapped collection is guaranteed to be a SortedSet (both constructors
	 * require one); this helper centralizes the downcast used by every delegate.
	 */
	private SortedSet<T> sortedSet() {
		return (SortedSet<T>) collection;
	}

	public Comparator<? super T> comparator() {
		return sortedSet().comparator();
	}

	public T first() {
		return sortedSet().first();
	}

	public SortedSet<T> headSet(T toElement) {
		return sortedSet().headSet(toElement);
	}

	public T last() {
		return sortedSet().last();
	}

	public SortedSet<T> subSet(T fromElement, T toElement) {
		return sortedSet().subSet(fromElement, toElement);
	}

	public SortedSet<T> tailSet(T fromElement) {
		return sortedSet().tailSet(fromElement);
	}
}
dymecard/j2cl
jre/javatests/com/google/gwt/emultest/java/util/TestObject.java
<filename>jre/javatests/com/google/gwt/emultest/java/util/TestObject.java // CHECKSTYLE_OFF: Copyrighted to ASF /* * Copyright 1999-2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // CHECKSTYLE_ON package com.google.gwt.emultest.java.util; /** * Tests base {@link java.lang.Object} methods and contracts. * * <p>To use, simply extend this class, and implement the {@link #makeObject()} method. * * <p>If your {@link Object} fails one of these tests by design, you may still use this base set of * cases. Simply override the test case (method) your {@link Object} fails. */ abstract class TestObject extends EmulTestBase { // current major release for Collections public static final int COLLECTIONS_MAJOR_VERSION = 2; /** * This constant makes it possible for TestMap (and other subclasses, if necessary) to * automatically check CVS for a versionX copy of a Serialized object, so we can make sure that * compatibility is maintained. See, for example, TestMap.getCanonicalFullMapName(Map map). * Subclasses can override this variable, indicating compatibility with earlier Collections * versions. Defaults to 1, the earliest Collections version. (Note: some collections did not even * exist in this version). * * @return 1 */ public int getCompatibilityVersion() { return 1; } /** Return a new, empty {@link Object} to used for testing. 
*/ public abstract Object makeObject(); public void testObjectEqualsSelf() { Object obj = makeObject(); assertEquals("A Object should equal itself", obj, obj); } public void testObjectHashCodeEqualsSelfHashCode() { Object obj = makeObject(); assertEquals("hashCode should be repeatable", obj.hashCode(), obj.hashCode()); } public void testObjectHashCodeEqualsContract() { Object obj1 = makeObject(); if (obj1.equals(obj1)) { assertEquals( "[1] When two objects are equal, their hashCodes should be also.", obj1.hashCode(), obj1.hashCode()); } Object obj2 = makeObject(); if (obj1.equals(obj2)) { assertEquals( "[2] When two objects are equal, their hashCodes should be also.", obj1.hashCode(), obj2.hashCode()); assertTrue( "When obj1.equals(obj2) is true, then obj2.equals(obj1) should also be true", obj2.equals(obj1)); } } public String getCanonicalEmptyCollectionName(Object object) { StringBuilder retval = new StringBuilder(); retval.append("data/test/"); String colName = object.getClass().getName(); colName = colName.substring(colName.lastIndexOf(".") + 1); retval.append(colName); retval.append(".emptyCollection.version"); retval.append(getCompatibilityVersion()); retval.append(".obj"); return retval.toString(); } public String getCanonicalFullCollectionName(Object object) { StringBuilder retval = new StringBuilder(); retval.append("data/test/"); String colName = object.getClass().getName(); colName = colName.substring(colName.lastIndexOf(".") + 1); retval.append(colName); retval.append(".fullCollection.version"); retval.append(getCompatibilityVersion()); retval.append(".obj"); return retval.toString(); } /** * Override this method if a subclass is testing a Collections that cannot serialize an "empty" * Collection (e.g. Comparators have no contents) * * @return true */ public boolean supportsEmptyCollections() { return true; } /** * Override this method if a subclass is testing a Collections that cannot serialize a "full" * Collection (e.g. 
Comparators have no contents) * * @return true */ public boolean supportsFullCollections() { return true; } }
dtienq/SuperSchool
frontend/src/api/profileApi.js
import axiosClient from './axiosClient';

// POST helper for the /users/* profile endpoints.
const postToUsers = (endpoint, payload) => axiosClient.post(`/users/${endpoint}`, payload);

// Thin API wrapper around the profile-related user endpoints.
const profileApi = {
  changePassword: (data) => postToUsers('changePassword', data),
  updateInfo: (data) => postToUsers('updateInfo', data),
  updateAvatar: (data) => postToUsers('updateAvatar', data),
};

export default profileApi;
afialapis/calustra
packages/router/src/index.js
import calustraRouter from './router'
import { routerCache } from './cache'

// Convenience accessors over the shared router cache.
function getConnectionFromCache(selector) {
  return routerCache.getConnection(selector)
}

function getModelFromCache(selector, tablename) {
  return routerCache.getModel(selector, tablename)
}

export { calustraRouter as default, getConnectionFromCache, getModelFromCache }
solo123/woyin-op
src/pages/Merchant/MerchantInfo.js
import React from 'react';
import { connect } from 'dva';
import { Row, Col, Table, Card } from 'antd';
import { MemberApplayInter, MerchantWall, MerchantAddRate} from '@/components/Merchant';
import {routerRedux} from 'dva/router';
import PageHeaderWrapper from '@/components/PageHeaderWrapper';
import LocalStr from '@/utils/LocalStr';
import {getMerchantPlayApi, getMerchantAccApi} from '@/services/api';
import {HeadFootButton} from '@/components/HeadForm';
import {statuesRend, hreRend} from '@/utils/renderUtils';
import styles from './MerchantInfo.less';

/**
 * Merchant detail page: shows the merchant's basic info (read from local
 * storage under "merchantInfo"), its operator list and its account list
 * (both fetched from the API in int()), plus modals for wallet, rate and
 * interface-application actions.
 */
@connect()
class MerchantInfo extends React.Component{
  constructor(props) {
    super(props);
    const merchanLogo ={};   // account table: columns + data
    const palyInfo = {};     // operator table: columns + data
    // Status rendering map: 0 -> green/正常, 1 -> red/错误.
    const STATUSITEMS = [
      {key: 0, describe: ['green', '正常']},
      {key: 1, describe: ['red', '错误']}
    ];
    // Row-level action links for the account table.
    const hreReng = [
      {onClick: this.onHangMerchantWall, label: '帐户钱包'}
    ]
    merchanLogo.columns = [
      {title: '账户编号',key: 'BalanceId',dataIndex:'BalanceId' },
      // {title: '对象编号',key: 'merchantId',dataIndex: 'MerchantId' },
      {title: '帐户类型',key: 'Currency',dataIndex: 'Currency'},
      {title: '账户积分',key: 'Amount',dataIndex: 'Amount'},
      // {title: '可用积分',key: 'Amount',dataIndex: 'Amount'},
      {title: '冻结积分',key: 'BlockAmount',dataIndex: 'BlockAmount'},
      {title: '状态',key: '',render: Status => (statuesRend(Status, STATUSITEMS))},
      {title: '操作', dataIndex: 'action', key: 'action',fixed: 'right',width: 220, render:(texts, record)=>(hreRend(hreReng, texts, record)) },
    ];
    // Header buttons above the page content.
    const buttonData = [
      {type: 'primary', hangClick: this.onHangGoPround, labe: '产品折扣管理'},
      {type: 'primary', hangClick: this.onHangRateMang, labe: '增加新费率'},
    ];
    merchanLogo.data = [];
    palyInfo.columns = [
      {title: '操作员登录账号',key: 'MerchantAccount',dataIndex: 'MerchantAccount',},
      {title: '操作员编号',key: 'UserId',dataIndex: 'UserId',},
      {title: '操作员名称',key: 'UserName',dataIndex: 'UserName',},
      {title: '创建时间 ',key: 'CreatedAt',dataIndex: 'CreatedAt',},
      {title: '状态',key: 'state',dataIndex: 'state',render: statue => (statuesRend(statue, STATUSITEMS))}
    ];
    palyInfo.data = []
    // 3x3 grid of label/value pairs for the basic-info section; values are
    // filled in by int().
    const info = [
      [ {label: '商户编号:',value: ''}, {label: '商户名称:',value: ''}, {label: '状态:',value: ''}],
      [ {label: '联系人:',value: ''}, {label: '创建时间:',value: ''}, {label: '转让费率(%):',value: ''}],
      [ {label: '商户地址:',value: ''}, {label: '手机号码:',value: ''}, {label: '固定电话:',value: ''} ]
    ]
    this.state={
      merchanLogo,
      info,
      palyInfo,
      buttonData
    }
  }

  componentDidMount(){
    this.int();
  }

  // Open the wallet modal for the clicked account row.
  onHangMerchantWall = (texts, account) =>{
    this.MerchantWall.showModal(account);
  }

  // Open the interface-application modal pre-filled with the stored merchant.
  onHangApplayInter = () => {
    const MeInfo = JSON.parse(LocalStr.get("merchantInfo"));
    this.MemberApplayInter.int(MeInfo);
    this.MemberApplayInter.showModal();
  }

  // Navigate to the product-discount page, passing the merchant id via storage.
  onHangGoPround = () =>{
    const MeInfo = JSON.parse(LocalStr.get("merchantInfo"));
    LocalStr.set("merchantId", MeInfo.key);
    this.props.dispatch(routerRedux.push({
      pathname: '/merchant/memberproduct',
    }));
  }

  // Open the add-rate modal.
  onHangRateMang = () => {
    this.MerchantAddRate.showModal();
  }

  // Initialize the page: fill basic info from local storage and load the
  // operator and account lists from the API.
  int = () => {
    const MeInfo = JSON.parse(LocalStr.get("merchantInfo"));
    const {info, palyInfo, merchanLogo} = this.state;
    info[0][0].value = MeInfo.key;
    info[0][1].value = MeInfo.MerchantName;
    info[0][2].value = MeInfo.statue === 1 ? '可用':'冻结';
    info[1][0].value = MeInfo.Contact;
    info[1][1].value = MeInfo.CreatedAt;
    info[1][2].value = MeInfo.find;
    info[2][0].value = MeInfo.MerchantAddr;
    info[2][1].value = MeInfo.Mobile;
    info[2][2].value = MeInfo.Tel;
    // Fetch all operators under this merchant.
    getMerchantPlayApi(MeInfo.key).then((res) => {
      if(res.status === 200 && res.data){
        for(let i = 0; i<res.data.length; i+=1){
          const paly = {};
          paly.key = res.data[i].UserId;
          paly.MerchantId = res.data[i].MerchantId;
          paly.UserId = res.data[i].UserId;
          paly.UserName = res.data[i].UserName;
          paly.CreatedAt = res.data[i].CreatedAt;
          paly.Status = res.data[i].Status;
          palyInfo.data.push(paly);
          // NOTE(review): setState inside the loop re-renders per row;
          // presumably intentional for incremental display -- confirm.
          this.setState({
            info,
            palyInfo,
          });
        }
      }
    })
    // Fetch all accounts under this merchant.
    getMerchantAccApi({merchantId: MeInfo.key} ).then(ress => {
      if(ress.status === 200 && ress.data){
        merchanLogo.data = [];
        for(let j = 0; j<ress.data.length; j+=1){
          const merchan = {
            ...ress.data[j],
            MerchantId: MeInfo.key,
            key: ress.data[j].BalanceId,
            // available balance = total minus frozen
            userBalance: parseInt(ress.data[j].Amount, 10) - parseInt(ress.data[j].BlockAmount, 10),
          };
          merchanLogo.data.push(merchan);
        }
      }
      this.setState({
        merchanLogo
      });
    })
  }

  render () {
    const {merchanLogo, info, palyInfo, buttonData} = this.state;
    return (
      <PageHeaderWrapper>
        <Card bordered>
          <Row>
            <Col>
              <div className={styles.addButton}>
                <HeadFootButton buttonData={buttonData} />
              </div>
            </Col>
          </Row>
        </Card>
        <div style={{background: '#fff'}}>
          <Row><Col className={styles.title}> 商户个人信息:</Col></Row>
          {
            info.map((item) =>(
              <Row className={styles.row} key={item[0].label}>
                <Col span={2} className={styles.col}>{item[0].label}</Col>
                <Col span={5} className={styles.col}>{item[0].value}</Col>
                <Col span={4} className={styles.col}>{item[1].label}</Col>
                <Col span={5} className={styles.col}>{item[1].value}</Col>
                <Col span={4} className={styles.col}>{item[2].label}</Col>
                <Col span={4} className={styles.col}>{item[2].value}</Col>
              </Row>
            ))
          }
          <Row>
            <Col style={{padding: '10px'}}>商户操作员信息:
              <Table pagination={false} columns={palyInfo.columns} dataSource={palyInfo.data} scroll={{ y: 300 }} />
            </Col>
          </Row>
          <Row>
            <Col style={{padding: '10px'}}>商户帐户信息:
              <Table columns={merchanLogo.columns} dataSource={merchanLogo.data} />
            </Col>
          </Row>
        </div>
        <MerchantAddRate ref={c => {this.MerchantAddRate = c}} />
        <MemberApplayInter ref={c => {this.MemberApplayInter = c}} />
        <MerchantWall ref={c => {this.MerchantWall = c}} Reset={this.int} />
      </PageHeaderWrapper>
    )
  }
}

export default MerchantInfo
SilverBlaze109/VAMPY2017
programs/Max_Thread.py
import random
import threading

# Populate the shared list with 1000 random integers in [1, 1001].
array = []
for _ in range(1000):
    array.append(random.randint(1, 1001))

m_a = array[0]            # running maximum found so far
next_index = 0            # next list position a worker should examine
lock = threading.Lock()   # guards m_a and next_index across worker threads


def max_array(name, delay):
    """Worker body: claim list positions one at a time (under the lock) and
    fold each value into the shared maximum until the list is fully scanned.

    ``name`` and ``delay`` are kept for signature compatibility; unused.

    Fixes over the previous revision:
    - missing ':' after the if-conditions and '=' used instead of '=='
      (syntax errors), plus the 'arracy' typo;
    - it assigned the empty-string accumulator INTO the list
      ('array[b] = m_a') instead of recording the larger value;
    - indexing ran one past the end of the list (array[1000]);
    - four threads mutated shared indices with no synchronization, and the
      main thread never waited for them to finish.
    """
    global m_a, next_index
    while True:
        with lock:
            if next_index >= len(array):
                return
            value = array[next_index]
            next_index += 1
            if value > m_a:
                m_a = value


# Start four workers and wait for all of them, so m_a holds the final
# maximum once the script finishes.
threads = [threading.Thread(target=max_array, args=("T%d" % i, 1))
           for i in range(1, 5)]
for t in threads:
    t.start()
for t in threads:
    t.join()
SahilChachra/DS-Algo-Practice
Sliding Window Problems/SlidingWinMaximumSum.java
public class SlidingWinMaximumSum {

    /**
     * Returns the maximum sum over all contiguous windows of size {@code k}
     * in {@code arr}. Slides a window across the array: extend the right
     * edge each step, and once the window reaches size {@code k}, record
     * its sum and drop the left element.
     *
     * @param arr input values
     * @param k   window size
     * @return the largest window sum, or Integer.MIN_VALUE if no window of
     *         size k fits
     */
    public int getMaxSum(int []arr, int k) {
        int best = Integer.MIN_VALUE;
        int windowSum = 0;
        int left = 0;
        for (int right = 0; right < arr.length; right++) {
            windowSum += arr[right];
            if (right - left + 1 == k) {
                best = Math.max(best, windowSum);
                windowSum -= arr[left];
                left++;
            }
        }
        return best;
    }

    /** Demo: prints 700 (the best 2-element window of {100,200,300,400}). */
    public static void main(String[] args) {
        SlidingWinMaximumSum demo = new SlidingWinMaximumSum();
        int[] values = {100, 200, 300, 400};
        System.out.println(demo.getMaxSum(values, 2));
    }
}
cgu101/Voogasalad
src/view/level/LevelType.java
<filename>src/view/level/LevelType.java
package view.level;

/**
 * The kinds of view a level can be presented as: a regular LEVEL view or a
 * SPLASH view.
 */
public enum LevelType {
    LEVEL, SPLASH;
}
kamils-iRonin/dynflow
lib/dynflow/transaction_adapters.rb
<reponame>kamils-iRonin/dynflow
# frozen_string_literal: true

module Dynflow
  # Namespace module that loads the bundled transaction adapter
  # implementations: the abstract base, a no-op adapter, and an
  # ActiveRecord-backed one.
  module TransactionAdapters
    require 'dynflow/transaction_adapters/abstract'
    require 'dynflow/transaction_adapters/none'
    require 'dynflow/transaction_adapters/active_record'
  end
end
pravinva/aws-serverless-data-lake-framework
sdlf-utils/pipeline-examples/manifests/stageA/lambda/stage-a-process-object/src/lambda_function.py
import os
import shutil

from datalake_library.commons import init_logger
from datalake_library.transforms.transform_handler import TransformHandler
from datalake_library import octagon
from datalake_library.octagon import Artifact, EventReasonEnum, peh
from datalake_library.configuration.resource_configs import DynamoConfiguration
from datalake_library.interfaces.dynamo_interface import DynamoInterface

logger = init_logger(__name__)
# Module-level clients are created once per Lambda container and reused
# across warm invocations.
dynamo_config = DynamoConfiguration()
dynamo_interface = DynamoInterface(dynamo_config)


def remove_content_tmp():
    # Remove contents of the Lambda /tmp folder (Not released by default).
    # Files are unlinked first, then whole subdirectory trees removed.
    for root, dirs, files in os.walk('/tmp'):
        for f in files:
            os.unlink(os.path.join(root, f))
        for d in dirs:
            shutil.rmtree(os.path.join(root, d))


def lambda_handler(event, context):
    """Calls custom transform developed by user

    Arguments:
        event {dict} -- Dictionary with details on previous processing step
        context {dict} -- Dictionary with details on Lambda context

    Returns:
        {dict} -- Dictionary with Processed Bucket and Key(s)
    """
    try:
        logger.info('Fetching event data from previous step')
        bucket = event['body']['bucket']
        key = event['body']['key']
        team = event['body']['team']
        stage = event['body']['pipeline_stage']
        dataset = event['body']['dataset']
        ddb_key = event['body']['manifest_ddb_key']

        logger.info('Initializing Octagon client')
        # Component name is derived from the Lambda function name
        # (second-to-last dash-separated segment, title-cased).
        component = context.function_name.split('-')[-2].title()
        octagon_client = (
            octagon.OctagonClient()
            .with_run_lambda(True)
            .with_configuration_instance(event['body']['env'])
            .build()
        )
        peh.PipelineExecutionHistoryAPI(
            octagon_client).retrieve_pipeline_execution(event['body']['peh_id'])

        # Call custom transform created by user and process the file
        logger.info('Calling user custom processing code')
        transform_handler = TransformHandler().stage_transform(team, dataset, stage)
        response = transform_handler().transform_object(
            bucket, key, team, dataset)  # custom user code called
        remove_content_tmp()
        octagon_client.update_pipeline_execution(status="{} {} Processing".format(stage, component),
                                                 component=component)
        dynamo_interface.update_manifests_control_table_stagea(
            ddb_key, "PROCESSING", response[0])
    except Exception as e:
        logger.error("Fatal error", exc_info=True)
        # NOTE(review): if the failure occurs before octagon_client /
        # component / stage are assigned above, this block raises NameError
        # instead of reporting the original error -- confirm ordering.
        octagon_client.end_pipeline_execution_failed(component=component,
                                                     issue_comment="{} {} Error: {}".format(stage, component, repr(e)))
        remove_content_tmp()
        dynamo_interface.update_manifests_control_table_stagea(
            ddb_key, "FAILED")
        raise e
    # response is only bound when the try body completed; on failure the
    # exception above re-raises before reaching this return.
    return response
dingpuyu/MLN
MLN-Android-Demo/sample/src/main/java/com/mln/demo/android/fragment/message/controller/MessageFragment.java
<filename>MLN-Android-Demo/sample/src/main/java/com/mln/demo/android/fragment/message/controller/MessageFragment.java
package com.mln.demo.android.fragment.message.controller;

import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast;

import com.mln.demo.R;
import com.mln.demo.android.fragment.message.model.MessageEntity;
import com.mln.demo.android.fragment.message.model.MessageManager;

import java.util.ArrayList;
import java.util.List;

import androidx.annotation.NonNull;
import androidx.fragment.app.Fragment;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import androidx.swiperefreshlayout.widget.SwipeRefreshLayout;

/**
 * Message-list screen: a pull-to-refresh RecyclerView fed asynchronously by
 * MessageManager via a Handler (msg.what == 1 resets the list, == 2 appends
 * more), with infinite scroll triggered when the last item becomes visible
 * and two pinned entries ("私信/客服", "官方通知") kept at the top.
 */
public class MessageFragment extends Fragment implements SwipeRefreshLayout.OnRefreshListener {

    private View mView;
    private RecyclerView mRecyclerView;
    private RecyclerView.LayoutManager mLayoutManager;
    private SwipeRefreshLayout mRefreshLayout;
    private RecyclerAdapter mAdapter;
    // Index of the last fully visible row, updated on every scroll event.
    private int mLastCompletelyVisibleItemPosition;
    private Handler mMessageHandler;
    // NOTE(review): mMessageManager is created but never used afterwards;
    // only mAsyncMessageManager drives the data -- confirm it is needed.
    private MessageManager mMessageManager;
    private MessageManager mAsyncMessageManager;
    private List<MessageEntity> mMessageList;

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        // Inflate the layout for this fragment
        mView = inflater.inflate(R.layout.fragment_message, container, false);
        setUpMessage();
        setUpRecyclerView();
        setUpSwipeRefreshLayout();
        return mView;
    }

    /** Creates the handler, the backing list, and the data managers. */
    private void setUpMessage() {
        setUpMessageHandler();
        mMessageList = new ArrayList<MessageEntity>();
        mMessageManager = new MessageManager(getActivity());
        mAsyncMessageManager = new MessageManager(getActivity(), mMessageHandler);
    }

    // NOTE(review): anonymous inner Handler implicitly references the
    // fragment and can leak it if messages outlive the view -- confirm.
    private void setUpMessageHandler() {
        mMessageHandler = new Handler() {
            @Override
            public void handleMessage(Message msg) {
                switch (msg.what) {
                    case 1:
                        // Full refresh completed.
                        resetData(msg);
                        stopRefresh();
                        break;
                    case 2:
                        // Next page arrived.
                        getMoreData(msg);
                        break;
                    default:
                        return;
                }
            }
        };
    }

    /** Appends a fetched page (msg.obj) and refreshes the adapter. */
    private void getMoreData(Message msg) {
        addMessagesToListWith((List<MessageEntity>) msg.obj);
        mAdapter.notifyMessageDataSetChangedWith(mMessageList);
    }

    /** Replaces the list contents with the freshly fetched data. */
    private void resetData(Message msg) {
        mMessageList.clear();
        addMessagesToListWith((List<MessageEntity>) msg.obj);
        mAdapter.notifyMessageDataSetChangedWith(mMessageList);
    }

    private void setUpSwipeRefreshLayout() {
        mRefreshLayout = (SwipeRefreshLayout) mView.findViewById(R.id.refresh);
        mRefreshLayout.setProgressViewOffset(true, 20, 100);
        mRefreshLayout.setSize(SwipeRefreshLayout.DEFAULT);
        mRefreshLayout.setColorSchemeResources(R.color.colorPrimary,
                R.color.colorPrimaryDark, R.color.colorAccent);
        mRefreshLayout.setEnabled(true);
        mRefreshLayout.setOnRefreshListener(this);
    }

    @Override
    public void onRefresh() {
        resetDataAsync();
    }

    /** Hides the refresh spinner if it is showing. */
    private void stopRefresh() {
        if (mRefreshLayout.isRefreshing()) {
            mRefreshLayout.setRefreshing(false);
        }
    }

    private void setUpRecyclerView() {
        setRecyclerView();
        setLayoutManager();
        setAdapter();
    }

    /** Wires the list view, kicks off the first page, installs infinite scroll. */
    private void setRecyclerView() {
        mRecyclerView = (RecyclerView) mView.findViewById(R.id.recyclerview);
        mRecyclerView.setHasFixedSize(true);
        loadMoreDataAsync();
        mRecyclerView.addOnScrollListener(new RecyclerView.OnScrollListener() {
            @Override
            public void onScrolled(@NonNull RecyclerView recyclerView, int dx, int dy){
                mLastCompletelyVisibleItemPosition = lastCompletelyVisibleItemPosition(recyclerView);
            }

            @Override
            public void onScrollStateChanged(@NonNull RecyclerView recyclerView, int newState) {
                super.onScrollStateChanged(recyclerView, newState);
                // Load the next page once scrolling stops at the bottom.
                if (canLoadMoreData(recyclerView, newState)) {
                    loadMoreDataAsync();
                }
            }

            private boolean canLoadMoreData(@NonNull RecyclerView recyclerView, int newState) {
                return didStopScroll(recyclerView, newState) && onBottom(recyclerView);
            }

            private boolean didStopScroll(@NonNull RecyclerView recyclerView, int newState) {
                return newState == RecyclerView.SCROLL_STATE_IDLE;
            }

            private boolean onBottom(@NonNull RecyclerView recyclerView) {
                return mLastCompletelyVisibleItemPosition
                        >= ((LinearLayoutManager) recyclerView.getLayoutManager()).getItemCount() - 1;
            }

            private int lastCompletelyVisibleItemPosition(@NonNull RecyclerView recyclerView) {
                RecyclerView.LayoutManager lm = recyclerView.getLayoutManager();
                if (lm instanceof LinearLayoutManager) {
                    return ((LinearLayoutManager) lm).findLastVisibleItemPosition();
                }
                return -1;
            }
        });
    }

    /** Installs the adapter and per-row click handling (detail screens + toast). */
    private void setAdapter() {
        mAdapter = new RecyclerAdapter(getActivity());
        mAdapter.setOnItemClickListener(new RecyclerAdapter.OnItemClickListener() {
            @Override
            public void onItemClick(View view, int position) {
                startActivityWith(position);
                showAlert(position);
            }

            // Row 0 opens the customer-service detail, row 1 the
            // notification detail; other rows only show the toast.
            private void startActivityWith(int position) {
                if (position == 0) {
                    startActivity(new Intent(getContext(), MessageDetailActivity.class));
                } else if (position == 1) {
                    startActivity(new Intent(getContext(), MessageDetailNotificationActivity.class));
                }
            }

            private void showAlert(int position) {
                Toast.makeText(mView.getContext(), mMessageList.get(position).getFemalename(),
                        Toast.LENGTH_SHORT).show();
            }
        });
        mRecyclerView.setAdapter(mAdapter);
    }

    private void setLayoutManager() {
        mLayoutManager = new LinearLayoutManager(mView.getContext());
        mRecyclerView.setLayoutManager(mLayoutManager);
    }

    /** Clears the list and requests a full refresh from the manager. */
    private void resetDataAsync() {
        mMessageList.clear();
        mAsyncMessageManager.fetchMessageDataAsync();
    }

    /** Requests the next page from the manager. */
    private void loadMoreDataAsync() {
        mAsyncMessageManager.fetchMoreMessageDataAsync();
    }

    /**
     * Appends fetched messages; on first fill, prepends the two pinned
     * entries (customer service and official notifications).
     */
    private void addMessagesToListWith(List<MessageEntity> list) {
        if (mMessageList == null) {
            mMessageList = new ArrayList<MessageEntity>();
        }
        if (mMessageList.size() == 0) {
            MessageEntity customerMessage = new MessageEntity();
            customerMessage.setFemalename(customer());
            customerMessage.setIcon(noImage());
            MessageEntity notificationMessage = new MessageEntity();
            notificationMessage.setFemalename(notification());
            notificationMessage.setIcon(noImage());
            mMessageList.add(customerMessage);
            mMessageList.add(notificationMessage);
            // NOTE(review): the incoming list is added here AND again
            // after this block, so the first page appears twice on an
            // initially empty list -- looks unintended, confirm.
            mMessageList.addAll(list);
        }
        mMessageList.addAll(list);
    }

    private String customer() {
        return "私信/客服";
    }

    private String notification() {
        return "官方通知";
    }

    private String noImage() {
        return "";
    }
}
prabhjyotsingh/cloudbreak
core-api/src/main/java/com/sequenceiq/cloudbreak/api/endpoint/v4/stacks/base/InstanceStatus.java
<reponame>prabhjyotsingh/cloudbreak<gh_stars>0
package com.sequenceiq.cloudbreak.api.endpoint.v4.stacks.base;

/**
 * States reported for a stack instance in the v4 API, covering its
 * lifecycle from REQUESTED through CREATED/REGISTERED to terminal states
 * such as TERMINATED, DELETED_ON_PROVIDER_SIDE, FAILED or STOPPED.
 */
public enum InstanceStatus {
    REQUESTED,
    CREATED,
    UNREGISTERED,
    REGISTERED,
    DECOMMISSIONED,
    TERMINATED,
    DELETED_ON_PROVIDER_SIDE,
    FAILED,
    STOPPED
}
TCLProject/mod-director
mod-director-core/src/main/java/net/jan/moddirector/core/manage/check/StopModRepostsEntry.java
package net.jan.moddirector.core.manage.check;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;

import java.util.regex.Pattern;

/**
 * Immutable record of one entry in the StopModReposts blocklist,
 * deserialized from JSON by Jackson. A domain/path pair plus a URL pattern
 * identify the reposting site; the integer fields carry the list's severity
 * ratings for that site.
 */
public class StopModRepostsEntry {
    private final String domain;
    private final String path;
    // Compiled directly by Jackson from the JSON string.
    private final Pattern pattern;
    private final int advertising;
    private final int redistribution;
    private final int miscellaneous;
    private final String notes;

    /** Jackson creator; all JSON properties are required. */
    @JsonCreator
    public StopModRepostsEntry(
            @JsonProperty(value = "domain", required = true) String domain,
            @JsonProperty(value = "path", required = true) String path,
            @JsonProperty(value = "pattern", required = true) Pattern pattern,
            @JsonProperty(value = "advertising", required = true) int advertising,
            @JsonProperty(value = "redistribution", required = true) int redistribution,
            @JsonProperty(value = "miscellaneous", required = true) int miscellaneous,
            @JsonProperty(value = "notes", required = true) String notes
    ) {
        this.domain = domain;
        this.path = path;
        this.pattern = pattern;
        this.advertising = advertising;
        this.redistribution = redistribution;
        this.miscellaneous = miscellaneous;
        this.notes = notes;
    }

    /** @return the site's domain name */
    public String domain() {
        return domain;
    }

    /** @return the path component of the blocked URL */
    public String path() {
        return path;
    }

    /** @return the URL-matching pattern for this entry */
    public Pattern pattern() {
        return pattern;
    }

    /** @return the advertising severity rating */
    public int advertising() {
        return advertising;
    }

    /** @return the redistribution severity rating */
    public int redistribution() {
        return redistribution;
    }

    /** @return the miscellaneous severity rating */
    public int miscellaneous() {
        return miscellaneous;
    }

    /** @return free-form notes attached to the entry */
    public String notes() {
        return notes;
    }
}
taolinqu/ds4p
DS4P/consent2share/core/src/main/java/gov/samhsa/consent2share/service/account/PasswordResetServiceImpl.java
<filename>DS4P/consent2share/core/src/main/java/gov/samhsa/consent2share/service/account/PasswordResetServiceImpl.java
/*******************************************************************************
 * Open Behavioral Health Information Technology Architecture (OBHITA.org)
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     * Neither the name of the <organization> nor the
 *       names of its contributors may be used to endorse or promote products
 *       derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 ******************************************************************************/
package gov.samhsa.consent2share.service.account;

import java.util.Date;

import javax.mail.MessagingException;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;

import gov.samhsa.consent2share.domain.account.EmailToken;
import gov.samhsa.consent2share.domain.account.EmailTokenRepository;
import gov.samhsa.consent2share.domain.account.TokenGenerator;
import gov.samhsa.consent2share.domain.account.TokenType;
import gov.samhsa.consent2share.domain.account.Users;
import gov.samhsa.consent2share.domain.account.UsersRepository;
import gov.samhsa.consent2share.domain.commondomainservices.EmailSender;
import gov.samhsa.consent2share.domain.patient.Patient;
import gov.samhsa.consent2share.domain.patient.PatientRepository;
import gov.samhsa.consent2share.infrastructure.EmailType;
import gov.samhsa.consent2share.infrastructure.security.EmailAddressNotExistException;
import gov.samhsa.consent2share.infrastructure.security.TokenExpiredException;
import gov.samhsa.consent2share.infrastructure.security.TokenNotExistException;
import gov.samhsa.consent2share.infrastructure.security.UsernameNotExistException;
import gov.samhsa.consent2share.service.dto.PasswordResetDto;
import gov.samhsa.consent2share.service.dto.PasswordChangeDto;

/**
 * Handles the password-reset flow: issuing an emailed reset token, checking
 * token expiry, applying a reset, and direct password changes.
 */
@Component
public class PasswordResetServiceImpl implements PasswordResetService {

	/** The logger. */
	private final Logger logger = LoggerFactory.getLogger(this.getClass());

	/** The users repository. */
	private UsersRepository usersRepository;

	/** The patient repository. */
	private PatientRepository patientRepository;

	/** The token generator. */
	private TokenGenerator tokenGenerator;

	/** The password reset token expire in hours. */
	private Integer passwordResetTokenExpireInHours;

	/** The password reset token repository. */
	private EmailTokenRepository passwordResetTokenRepository;

	/** The email sender. */
	private EmailSender emailSender;

	/** The password encoder. */
	private PasswordEncoder passwordEncoder;

	/**
	 * Instantiates a new password reset service impl.
	 *
	 * @param usersRepository the users repository
	 * @param patientRepository the patient repository
	 * @param tokenGenerator the token generator
	 * @param passwordResetTokenExpireInHours the password reset token expire in hours
	 * @param passwordResetTokenRepository the password reset token repository
	 * @param emailSender the email sender
	 * @param passwordEncoder the password encoder
	 */
	@Autowired
	public PasswordResetServiceImpl(
			UsersRepository usersRepository,
			PatientRepository patientRepository,
			TokenGenerator tokenGenerator,
			@Value("${passwordResetTokenExpireInHours}") Integer passwordResetTokenExpireInHours,
			EmailTokenRepository passwordResetTokenRepository,
			EmailSender emailSender, PasswordEncoder passwordEncoder) {
		this.usersRepository = usersRepository;
		this.patientRepository = patientRepository;
		this.tokenGenerator = tokenGenerator;
		this.passwordResetTokenExpireInHours = passwordResetTokenExpireInHours;
		this.passwordResetTokenRepository = passwordResetTokenRepository;
		this.emailSender = emailSender;
		this.passwordEncoder = passwordEncoder;
	}

	/**
	 * Creates a password-reset token for the user and emails the reset link.
	 * Validates that the username exists and that the given email address
	 * matches the patient record before issuing the token.
	 *
	 * @see gov.samhsa.consent2share.service.account.PasswordResetService#createPasswordResetToken(java.lang.String, java.lang.String, java.lang.String)
	 */
	@Override
	public void createPasswordResetToken(String username, String emailAddress,
			String linkUrl) throws UsernameNotExistException,
			EmailAddressNotExistException, MessagingException {
		if (!StringUtils.hasText(username)) {
			throw new IllegalArgumentException("Username is required.");
		}
		if (!StringUtils.hasText(emailAddress)) {
			throw new IllegalArgumentException("Email Address is required.");
		}
		if (!StringUtils.hasText(linkUrl)) {
			throw new IllegalArgumentException("Email link is required.");
		}
		try {
			usersRepository.loadUserByUsername(username);
		} catch (UsernameNotFoundException e) {
			logger.warn(e.getMessage(), e);
			throw new UsernameNotExistException(e.getMessage());
		}
		Patient patient = patientRepository.findByUsername(username);
		String patientEmailAddress = patient.getEmail();
		if (!patientEmailAddress.equalsIgnoreCase(emailAddress)) {
			String message = String.format(
					"Email address %s doesn't exist for username %s.",
					emailAddress, username);
			logger.warn(message);
			throw new EmailAddressNotExistException(message);
		}
		EmailToken passwordResetToken = new EmailToken();
		passwordResetToken.setExpireInHours(passwordResetTokenExpireInHours);
		passwordResetToken.setRequestDateTime(new Date());
		String token = tokenGenerator.generateToken();
		passwordResetToken.setUsername(username);
		passwordResetToken.setToken(token);
		passwordResetToken.setIsTokenUsed(false);
		passwordResetToken.setTokenType(TokenType.PASSWORD_RESET);
		passwordResetTokenRepository.save(passwordResetToken);
		emailSender.sendMessage(
				patient.getFirstName() + " " + patient.getLastName(),
				emailAddress, EmailType.PASSWORD_RESET_REQUEST, linkUrl, token);
	}

	/**
	 * Returns whether the given reset token has expired.
	 *
	 * @see gov.samhsa.consent2share.service.account.PasswordResetService#isPasswordResetTokenExpired(java.lang.String)
	 */
	@Override
	public Boolean isPasswordResetTokenExpired(String token)
			throws TokenNotExistException {
		if (!StringUtils.hasText(token)) {
			throw new IllegalArgumentException(
					"Password reset token is required.");
		}
		EmailToken passwordResetToken = findPasswordResetToken(token);
		Boolean isExpired = passwordResetToken.isTokenExpired();
		return isExpired;
	}

	/**
	 * Applies a password reset: validates and consumes the token, stores the
	 * newly encoded password, and emails a confirmation to the patient.
	 *
	 * @see gov.samhsa.consent2share.service.account.PasswordResetService#resetPassword(gov.samhsa.consent2share.service.dto.PasswordResetDto, java.lang.String)
	 */
	@Override
	public void resetPassword(PasswordResetDto passwordResetDto, String linkUrl)
			throws TokenNotExistException, TokenExpiredException,
			UsernameNotExistException, MessagingException {
		if (passwordResetDto == null) {
			throw new IllegalArgumentException(
					"Password reset dto is required.");
		}
		String token = passwordResetDto.getToken();
		EmailToken passwordResetToken = findPasswordResetToken(token);
		Boolean isExpired = passwordResetToken.isTokenExpired();
		if (isExpired) {
			throw new TokenExpiredException("Password reset token is expired.");
		}
		// Mark the token consumed so it cannot be replayed.
		passwordResetToken.setIsTokenUsed(true);
		passwordResetTokenRepository.save(passwordResetToken);
		String username = passwordResetToken.getUsername();
		Users user = null;
		try {
			user = usersRepository.loadUserByUsername(username);
		} catch (UsernameNotFoundException e) {
			logger.warn(e.getMessage(), e);
			throw new UsernameNotExistException(e.getMessage());
		}
		String encodedPassword = passwordEncoder.encode(passwordResetDto
				.getPassword());
		// FIX: the previous revision rebuilt the user with the OLD stored
		// password (user.getPassword()), so the newly chosen password was
		// never persisted. Store the freshly encoded password instead.
		Users updatedUser = new Users(user.getFailedLoginAttempts(), username,
				encodedPassword, user.isEnabled(), user.isAccountNonExpired(),
				user.isCredentialsNonExpired(), user.getAuthorities());
		usersRepository.updateUser(updatedUser);
		Patient patient = patientRepository.findByUsername(username);
		emailSender.sendMessage(
				patient.getFirstName() + " " + patient.getLastName(),
				patient.getEmail(), EmailType.PASSWORD_CONFIRMATION, linkUrl,
				null);
	}

	/**
	 * Changes a user's password after verifying the supplied old password
	 * against the stored hash.
	 *
	 * @param passwordChangeDto username plus old and new plain-text passwords
	 * @return true when the old password matched and the change was issued
	 */
	@Override
	public boolean changePassword(PasswordChangeDto passwordChangeDto)
			throws UsernameNotExistException, MessagingException {
		if (passwordChangeDto == null) {
			throw new IllegalArgumentException(
					"Password change dto is required.");
		}
		String username = passwordChangeDto.getUsername();
		Users user = null;
		try {
			user = usersRepository.loadUserByUsername(username);
		} catch (UsernameNotFoundException e) {
			logger.warn(e.getMessage(), e);
			throw new UsernameNotExistException(e.getMessage());
		}
		// NOTE(review): with a salted encoder, encode(oldPassword) produces a
		// hash different from the stored one; whether
		// usersRepository.changePassword can match on encodedOldPassword
		// depends on that repository's contract -- confirm.
		String encodedOldPassword = passwordEncoder.encode(passwordChangeDto.getOldPassword());
		String encodedNewPassword = passwordEncoder.encode(passwordChangeDto.getNewPassword());
		if (passwordEncoder.matches(passwordChangeDto.getOldPassword(), user.getPassword()) == true) {
			usersRepository.changePassword(encodedOldPassword, encodedNewPassword);
			return true;
		} else {
			return false;
		}
	}

	/**
	 * Find password reset token.
	 *
	 * @param token the token
	 * @return the email token
	 * @throws TokenNotExistException the token not exist exception
	 */
	private EmailToken findPasswordResetToken(String token)
			throws TokenNotExistException {
		EmailToken passwordResetToken = passwordResetTokenRepository
				.findByToken(token);
		if (passwordResetToken == null) {
			throw new TokenNotExistException(
					"Password reset token doesn't exist.");
		}
		return passwordResetToken;
	}
}
LazyPanda07/HTTP
docs/search/functions_2.js
<gh_stars>0
// Appears to be a generated documentation search index (docs/search/):
// maps the lower-cased key 'deleterequest' to web::HTTPBuilder::deleteRequest.
// Presumably regenerated by the doc tool -- avoid hand-editing.
var searchData=
[
  ['deleterequest_166',['deleteRequest',['../classweb_1_1_h_t_t_p_builder.html#af89af347f10292209346086aa017e05d',1,'web::HTTPBuilder']]]
];
masystech/pompidu
public/import/themes/Backend/ExtJs/backend/customer/view/customer_stream/conditions/field/attribute_value.js
/**
 * Shopware 5
 * Copyright (c) shopware AG
 *
 * According to our dual licensing model, this program can be used either
 * under the terms of the GNU Affero General Public License, version 3,
 * or under a proprietary license.
 *
 * The texts of the GNU Affero General Public License with an additional
 * permission and of our proprietary license can be found at and
 * in the LICENSE file you have received along with this program.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * "Shopware" is a registered trademark of shopware AG.
 * The licensing of the program under the AGPLv3 does not imply a
 * trademark license. Therefore any rights, title and interest in
 * our trademarks remain entirely with us.
 *
 * @category   Shopware
 * @package    Customer
 * @subpackage CustumerStream
 * @version    $Id$
 * @author shopware AG
 */

// {namespace name=backend/customer/view/main}
// {block name="backend/customer/view/customer_stream/conditions/field/attribute_value"}

/**
 * Attribute-value input for customer-stream conditions. Shows a single text
 * field normally; when the (externally supplied) operatorField switches to
 * BETWEEN, swaps in a from/to number-range pair instead.
 */
Ext.define('Shopware.apps.Customer.view.customer_stream.conditions.field.AttributeValue', {

    extend: 'Ext.form.FieldContainer',

    layout: {
        type: 'vbox',
        align: 'stretch'
    },

    mixins: {
        formField: 'Ext.form.field.Base'
    },

    initComponent: function() {
        var me = this;

        me.items = me.createItems();
        // NOTE(review): me.operatorField is not created here -- presumably
        // injected by the owning condition panel; confirm before reuse.
        me.operator = me.operatorField.getValue();
        // Toggle between the single value field and the from/to pair,
        // disabling whichever side is hidden so it is excluded from submit.
        me.operatorField.on('change', function (field, value) {
            me.operator = value;

            if (value === 'BETWEEN') {
                me.betweenContainer.show();
                me.valueField.hide();
                me.fromField.setDisabled(false);
                me.toField.setDisabled(false);
                me.valueField.setDisabled(true);
            } else {
                me.betweenContainer.hide();
                me.valueField.show();
                me.fromField.setDisabled(true);
                me.toField.setDisabled(true);
                me.valueField.setDisabled(false);
            }
        });

        me.callParent(arguments);
    },

    createItems: function () {
        var me = this;

        return [
            me.createValueField(),
            me.createBetweenContainer()
        ];
    },

    // Returns {min,max} for BETWEEN, an array for IN (comma-split), or the
    // raw string otherwise.
    getValue: function() {
        var value = this.valueField.getValue();

        if (this.operator === 'BETWEEN') {
            return {
                min: this.fromField.getValue(),
                max: this.toField.getValue()
            };
        } else if (this.operator === 'IN') {
            return value.split(',');
        }
        return value;
    },

    // Accepts either a {min,max} object (range) or a scalar value.
    setValue: function(value) {
        var me = this;

        if (Ext.isObject(value)) {
            me.fromField.setValue(value.min);
            me.toField.setValue(value.max);
            return;
        }
        me.valueField.setValue(value);
    },

    getSubmitData: function() {
        var result = {};
        result[this.name] = this.getValue();
        return result;
    },

    createFromField: function() {
        var me = this;

        me.fromField = Ext.create('Ext.form.field.Number', {
            fieldLabel: '{s name=attribute/from_text}{/s}',
            allowBlank: false,
            width: '100%',
            listeners: {
                change: function() {
                    // Keep the range consistent: "to" must exceed "from".
                    me.toField.setMinValue(this.getValue() + 1);
                }
            }
        });
        return me.fromField;
    },

    createToField: function() {
        var me = this;

        me.toField = Ext.create('Ext.form.field.Number', {
            fieldLabel: '{s name=attribute/to_text}{/s}',
            allowBlank: false,
            width: '100%',
            listeners: {
                change: function() {
                    // Keep the range consistent: "from" must stay below "to".
                    me.fromField.setMaxValue(this.getValue() - 1);
                }
            }
        });
        return me.toField;
    },

    createValueField: function () {
        var me = this;

        me.valueField = Ext.create('Ext.form.field.Text', {
            fieldLabel: '{s name=attribute/value}{/s}',
            allowBlank: false,
            name: 'value'
        });
        return me.valueField;
    },

    createBetweenContainer: function () {
        var me = this;

        me.betweenContainer = Ext.create('Ext.container.Container', {
            layout: { type: 'vbox' },
            hidden: true,
            items: [me.createFromField(), me.createToField()]
        });
        return me.betweenContainer;
    }
});
// {/block}
nextdaymedia/cmp
src/lib/cmp.test.js
/**
 * Tests for getTCData().
 *
 * Each test stubs the page-level consent API — `__tcfapi` (TCF v2) or the
 * legacy `__cmp` — with a jest mock, calls getTCData() against a fake
 * `view` (window) object, and asserts both the data delivered to the
 * callback and the exact command sequence issued against the stub.
 * CMP vendors are distinguished by `cmpId` (10 = Quantcast, 3 = LiveRamp,
 * 134 = Cookiebot, 300 = Funding Choices; -1 = unknown).
 */
import { expect } from 'chai';

import { getTCData } from "./cmp";
import log from './log';

describe('getTCData', () => {
    it('can get data from __tcfapi, if defined, with unknown cmpId, useractioncomplete', (done) => {
        log.logLevel = false; // silence library logging during the test

        const tcfapi = jest.fn((command, version, callback) => {
            switch (command) {
            case 'addEventListener':
                callback({ cmpId: -1, eventStatus: 'useractioncomplete' }, true);
                break;
            case 'removeEventListener':
                callback(true);
                break;
            default:
                throw new Error(`unknown command '${command}'`);
            }
        });
        const view = { __tcfapi: tcfapi };

        const callback = (data, success) => {
            expect(success).to.equal(true);
            expect(data.eventStatus).to.equal('useractioncomplete');
            // Listener must be registered and cleaned up again, TCF v2 style.
            expect(tcfapi.mock.calls.length).to.equal(2);
            expect(tcfapi.mock.calls[0][0]).to.equal('addEventListener');
            expect(tcfapi.mock.calls[0][1]).to.equal(2);
            expect(typeof(tcfapi.mock.calls[0][2])).to.equal('function');
            expect(tcfapi.mock.calls[1][0]).to.equal('removeEventListener');
            expect(tcfapi.mock.calls[1][1]).to.equal(2);
            expect(typeof(tcfapi.mock.calls[1][2])).to.equal('function');
            done();
        };

        getTCData(view, callback);
    });

    it('can get data from __tcfapi, if defined, with unknown cmpId, tcloaded', (done) => {
        log.logLevel = false;

        const tcfapi = jest.fn((command, version, callback) => {
            switch (command) {
            case 'addEventListener':
                callback({ cmpId: -1, eventStatus: 'tcloaded', }, true);
                break;
            case 'removeEventListener':
                callback(true);
                break;
            default:
                throw new Error(`unknown command '${command}'`);
            }
        });
        const view = { __tcfapi: tcfapi };

        const callback = (data, success) => {
            expect(success).to.equal(true);
            expect(data.eventStatus).to.equal('tcloaded');
            expect(tcfapi.mock.calls.length).to.equal(2);
            expect(tcfapi.mock.calls[0][0]).to.equal('addEventListener');
            expect(tcfapi.mock.calls[0][1]).to.equal(2);
            expect(typeof(tcfapi.mock.calls[0][2])).to.equal('function');
            expect(tcfapi.mock.calls[1][0]).to.equal('removeEventListener');
            expect(tcfapi.mock.calls[1][1]).to.equal(2);
            expect(typeof(tcfapi.mock.calls[1][2])).to.equal('function');
            done();
        };

        getTCData(view, callback);
    });

    it('can get data from __tcfapi, if defined, with quantcast cmp v5', (done) => {
        log.logLevel = false;

        const tcfapi = jest.fn((command, version, callback) => {
            switch (command) {
            case 'addEventListener':
                callback({ cmpId: 10, cmpVersion: 5, eventStatus: 'useractioncomplete', }, true);
                break;
            case 'removeEventListener':
                callback(true);
                break;
            default:
                throw new Error(`unknown command '${command}'`);
            }
        });
        const view = { __tcfapi: tcfapi };

        const callback = (data, success) => {
            expect(success).to.equal(true);
            expect(data.eventStatus).to.equal('useractioncomplete');
            expect(tcfapi.mock.calls.length).to.equal(2);
            expect(tcfapi.mock.calls[0][0]).to.equal('addEventListener');
            expect(tcfapi.mock.calls[0][1]).to.equal(2);
            expect(typeof(tcfapi.mock.calls[0][2])).to.equal('function');
            expect(tcfapi.mock.calls[1][0]).to.equal('removeEventListener');
            expect(tcfapi.mock.calls[1][1]).to.equal(2);
            expect(typeof(tcfapi.mock.calls[1][2])).to.equal('function');
            done();
        };

        getTCData(view, callback);
    });

    it('can get data from __tcfapi, if defined, with quantcast cmp >=v6, useractioncomplete', (done) => {
        log.logLevel = false;

        const tcfapi = jest.fn((command, version, callback) => {
            switch (command) {
            case 'addEventListener':
                callback({ cmpId: 10, cmpVersion: 6, eventStatus: 'useractioncomplete', }, true);
                break;
            case 'removeEventListener':
                callback(true);
                break;
            default:
                throw new Error(`unknown command '${command}'`);
            }
        });
        const view = { __tcfapi: tcfapi };

        const callback = (data, success) => {
            expect(success).to.equal(true);
            expect(data.eventStatus).to.equal('useractioncomplete');
            expect(tcfapi.mock.calls.length).to.equal(2);
            expect(tcfapi.mock.calls[0][0]).to.equal('addEventListener');
            expect(tcfapi.mock.calls[0][1]).to.equal(2);
            expect(typeof(tcfapi.mock.calls[0][2])).to.equal('function');
            expect(tcfapi.mock.calls[1][0]).to.equal('removeEventListener');
            expect(tcfapi.mock.calls[1][1]).to.equal(2);
            expect(typeof(tcfapi.mock.calls[1][2])).to.equal('function');
            done();
        };

        getTCData(view, callback);
    });

    it('can get data from __tcfapi, if defined, with quantcast cmp >=v6, tcloaded', (done) => {
        log.logLevel = false;

        const tcfapi = jest.fn((command, version, callback) => {
            switch (command) {
            case 'addEventListener':
                callback({ cmpId: 10, cmpVersion: 6, eventStatus: 'tcloaded', }, true);
                break;
            case 'removeEventListener':
                callback(true);
                break;
            default:
                throw new Error(`unknown command '${command}'`);
            }
        });
        const view = { __tcfapi: tcfapi };

        const callback = (data, success) => {
            expect(success).to.equal(true);
            expect(data.eventStatus).to.equal('tcloaded');
            expect(tcfapi.mock.calls.length).to.equal(2);
            expect(tcfapi.mock.calls[0][0]).to.equal('addEventListener');
            expect(tcfapi.mock.calls[0][1]).to.equal(2);
            expect(typeof(tcfapi.mock.calls[0][2])).to.equal('function');
            expect(tcfapi.mock.calls[1][0]).to.equal('removeEventListener');
            expect(tcfapi.mock.calls[1][1]).to.equal(2);
            expect(typeof(tcfapi.mock.calls[1][2])).to.equal('function');
            done();
        };

        getTCData(view, callback);
    });

    it('can get data from __tcfapi, if defined, with liveramp cmp, gdpr does apply', (done) => {
        log.logLevel = false;

        let consentDataExistCallback;
        const tcfapi = jest.fn((command, version, callback) => {
            switch (command) {
            case 'addEventListener':
                callback({ cmpId: 3, gdprApplies: true, }, true);
                break;
            case 'removeEventListener':
                callback(true);
                break;
            case 'consentDataExist':
                // LiveRamp specific command
                consentDataExistCallback = callback;
                callback(true, true);
                break;
            default:
                throw new Error(`unknown command '${command}'`);
            }
        });
        const view = { __tcfapi: tcfapi };

        const callback = (data, success) => {
            // extra invocations from the consentDataExist command should not matter
            consentDataExistCallback(true, true);
            consentDataExistCallback(true, true);
            consentDataExistCallback(true, true);
            consentDataExistCallback(true, true);

            expect(success).to.equal(true);
            expect(data.gdprApplies).to.equal(true);
            // LiveRamp path issues an extra consentDataExist command in the middle.
            expect(tcfapi.mock.calls.length).to.equal(3);
            expect(tcfapi.mock.calls[0][0]).to.equal('addEventListener');
            expect(tcfapi.mock.calls[0][1]).to.equal(2);
            expect(typeof(tcfapi.mock.calls[0][2])).to.equal('function');
            expect(tcfapi.mock.calls[1][0]).to.equal('consentDataExist');
            expect(tcfapi.mock.calls[1][1]).to.equal(2);
            expect(typeof(tcfapi.mock.calls[1][2])).to.equal('function');
            expect(tcfapi.mock.calls[2][0]).to.equal('removeEventListener');
            expect(tcfapi.mock.calls[2][1]).to.equal(2);
            expect(typeof(tcfapi.mock.calls[2][2])).to.equal('function');
            done();
        };

        getTCData(view, callback);
    });

    it('can get data from __tcfapi, if defined, with liveramp cmp, gdpr does not apply', (done) => {
        log.logLevel = false;

        const tcfapi = jest.fn((command, version, callback) => {
            switch (command) {
            case 'addEventListener':
                callback({ cmpId: 3, gdprApplies: false, }, true);
                break;
            case 'removeEventListener':
                callback(true);
                break;
            default:
                throw new Error(`unknown command '${command}'`);
            }
        });
        const view = { __tcfapi: tcfapi };

        const callback = (data, success) => {
            expect(success).to.equal(true);
            expect(data.gdprApplies).to.equal(false);
            expect(tcfapi.mock.calls.length).to.equal(2);
            expect(tcfapi.mock.calls[0][0]).to.equal('addEventListener');
            expect(tcfapi.mock.calls[0][1]).to.equal(2);
            expect(typeof(tcfapi.mock.calls[0][2])).to.equal('function');
            expect(tcfapi.mock.calls[1][0]).to.equal('removeEventListener');
            expect(tcfapi.mock.calls[1][1]).to.equal(2);
            expect(typeof(tcfapi.mock.calls[1][2])).to.equal('function');
            done();
        };

        getTCData(view, callback);
    });

    it('can get data from __tcfapi, if defined, with cookiebot cmp', (done) => {
        log.logLevel = false;

        const tcfapi = jest.fn((command, version, callback) => {
            switch (command) {
            case 'addEventListener':
                callback({ cmpId: 134, tcString: '1234', }, true);
                break;
            case 'removeEventListener':
                callback(true);
                break;
            default:
                throw new Error(`unknown command '${command}'`);
            }
        });
        const view = { __tcfapi: tcfapi };

        const callback = (data, success) => {
            expect(success).to.equal(true);
            expect(data.tcString).to.equal('1234');
            expect(tcfapi.mock.calls.length).to.equal(2);
            expect(tcfapi.mock.calls[0][0]).to.equal('addEventListener');
            expect(tcfapi.mock.calls[0][1]).to.equal(2);
            expect(typeof(tcfapi.mock.calls[0][2])).to.equal('function');
            expect(tcfapi.mock.calls[1][0]).to.equal('removeEventListener');
            expect(tcfapi.mock.calls[1][1]).to.equal(2);
            expect(typeof(tcfapi.mock.calls[1][2])).to.equal('function');
            done();
        };

        getTCData(view, callback);
    });

    it('can get data from __tcfapi, with Funding Choices cmp, GDPR does not apply', (done) => {
        log.logLevel = false;

        const tcfapi = jest.fn((command, version, callback) => {
            switch (command) {
            case 'addEventListener':
                callback({ cmpId: 300, gdprApplies: false, }, true);
                break;
            case 'removeEventListener':
                callback(true);
                break;
            default:
                throw new Error(`unknown command '${command}'`);
            }
        });
        const view = { __tcfapi: tcfapi };

        const callback = (data, success) => {
            expect(success).to.equal(true);
            expect(data.gdprApplies).to.equal(false);
            expect(tcfapi.mock.calls.length).to.equal(2);
            expect(tcfapi.mock.calls[0][0]).to.equal('addEventListener');
            expect(tcfapi.mock.calls[0][1]).to.equal(2);
            expect(typeof(tcfapi.mock.calls[0][2])).to.equal('function');
            expect(tcfapi.mock.calls[1][0]).to.equal('removeEventListener');
            expect(tcfapi.mock.calls[1][1]).to.equal(2);
            expect(typeof(tcfapi.mock.calls[1][2])).to.equal('function');
            done();
        };

        getTCData(view, callback);
    });

    it('can get data from __tcfapi, with Funding Choices cmp, on useractioncomplete', (done) => {
        log.logLevel = false;

        const tcfapi = jest.fn((command, version, callback) => {
            switch (command) {
            case 'addEventListener':
                callback({ cmpId: 300, eventStatus: 'useractioncomplete', }, true);
                break;
            case 'removeEventListener':
                callback(true);
                break;
            default:
                throw new Error(`unknown command '${command}'`);
            }
        });
        const view = { __tcfapi: tcfapi };

        const callback = (data, success) => {
            expect(success).to.equal(true);
            expect(data.eventStatus).to.equal('useractioncomplete');
            expect(tcfapi.mock.calls.length).to.equal(2);
            expect(tcfapi.mock.calls[0][0]).to.equal('addEventListener');
            expect(tcfapi.mock.calls[0][1]).to.equal(2);
            expect(typeof(tcfapi.mock.calls[0][2])).to.equal('function');
            expect(tcfapi.mock.calls[1][0]).to.equal('removeEventListener');
            expect(tcfapi.mock.calls[1][1]).to.equal(2);
            expect(typeof(tcfapi.mock.calls[1][2])).to.equal('function');
            done();
        };

        getTCData(view, callback);
    });

    it('can get data from __tcfapi, with Funding Choices cmp, on tcloaded', (done) => {
        log.logLevel = false;

        const tcfapi = jest.fn((command, version, callback) => {
            switch (command) {
            case 'addEventListener':
                callback({ cmpId: 300, eventStatus: 'tcloaded', }, true);
                break;
            case 'removeEventListener':
                callback(true);
                break;
            default:
                throw new Error(`unknown command '${command}'`);
            }
        });
        const view = { __tcfapi: tcfapi };

        const callback = (data, success) => {
            expect(success).to.equal(true);
            expect(data.eventStatus).to.equal('tcloaded');
            expect(tcfapi.mock.calls.length).to.equal(2);
            expect(tcfapi.mock.calls[0][0]).to.equal('addEventListener');
            expect(tcfapi.mock.calls[0][1]).to.equal(2);
            expect(typeof(tcfapi.mock.calls[0][2])).to.equal('function');
            expect(tcfapi.mock.calls[1][0]).to.equal('removeEventListener');
            expect(tcfapi.mock.calls[1][1]).to.equal(2);
            expect(typeof(tcfapi.mock.calls[1][2])).to.equal('function');
            done();
        };

        getTCData(view, callback);
    });

    it('can get data from __tcfapi, with Funding Choices cmp, wait for CMP', (done) => {
        log.logLevel = false;

        const tcfapi = jest.fn((command, version, callback) => {
            switch (command) {
            case 'addEventListener':
                callback({ cmpId: 300, gdprApplies: false, }, true);
                break;
            case 'removeEventListener':
                callback(true);
                break;
            default:
                throw new Error(`unknown command '${command}'`);
            }
        });

        // __tcfapi is installed only after a delay; getTCData must poll/wait.
        const view = {};
        setTimeout(() => { view.__tcfapi = tcfapi; }, 1000);

        const callback = (data, success) => {
            expect(success).to.equal(true);
            expect(data.gdprApplies).to.equal(false);
            expect(tcfapi.mock.calls.length).to.equal(2);
            expect(tcfapi.mock.calls[0][0]).to.equal('addEventListener');
            expect(tcfapi.mock.calls[0][1]).to.equal(2);
            expect(typeof(tcfapi.mock.calls[0][2])).to.equal('function');
            expect(tcfapi.mock.calls[1][0]).to.equal('removeEventListener');
            expect(tcfapi.mock.calls[1][1]).to.equal(2);
            expect(typeof(tcfapi.mock.calls[1][2])).to.equal('function');
            done();
        };

        getTCData(view, callback);
    });

    it('can get data from __cmp, if defined', (done) => {
        log.logLevel = false;

        // Legacy TCF v1 API: getConsentData returns { consentData },
        // which getTCData is expected to surface as tcString.
        const cmp = jest.fn((command, arg, callback) => {
            switch (command) {
            case 'getConsentData':
                callback({consentData: '1234'}, true);
                break;
            default:
                throw new Error(`unknown command '${command}'`);
            }
        });
        const view = { __cmp: cmp, };

        const callback = (data, success) => {
            expect(success).to.equal(true);
            expect(data.tcString).to.equal('1234');
            done();
        };

        getTCData(view, callback);

        // The __cmp path is synchronous, so the mock can be inspected here.
        expect(cmp.mock.calls.length).to.equal(1);
        expect(cmp.mock.calls[0][0]).to.equal('getConsentData');
        expect(cmp.mock.calls[0][1]).to.equal(null);
        expect(typeof(cmp.mock.calls[0][2])).to.equal('function');
    });
});
skiyooka/blackduck-alert
src/main/java/com/synopsys/integration/alert/web/audit/AuditEntryController.java
/**
 * blackduck-alert
 *
 * Copyright (c) 2020 Synopsys, Inc.
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package com.synopsys.integration.alert.web.audit;

import java.util.Optional;
import java.util.UUID;

import org.apache.commons.lang3.BooleanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

import com.synopsys.integration.alert.common.ContentConverter;
import com.synopsys.integration.alert.common.exception.AlertJobMissingException;
import com.synopsys.integration.alert.common.exception.AlertNotificationPurgedException;
import com.synopsys.integration.alert.common.persistence.model.AuditEntryModel;
import com.synopsys.integration.alert.common.persistence.model.AuditJobStatusModel;
import com.synopsys.integration.alert.common.rest.ResponseFactory;
import com.synopsys.integration.alert.common.rest.model.AlertPagedModel;
import com.synopsys.integration.alert.common.security.authorization.AuthorizationManager;
import com.synopsys.integration.alert.component.audit.AuditDescriptorKey;
import com.synopsys.integration.alert.web.controller.BaseController;
import com.synopsys.integration.exception.IntegrationException;

/**
 * REST controller for the audit trail: paged/filtered listing of audit
 * entries, per-entry and per-job lookups, and resending of notifications.
 * Every endpoint checks the caller's permission on the audit descriptor
 * first and answers 403 when it is missing.
 */
@RestController
@RequestMapping(AuditEntryController.AUDIT_BASE_PATH)
public class AuditEntryController extends BaseController {
    public static final String AUDIT_BASE_PATH = BaseController.BASE_PATH + "/audit";
    private final AuditEntryActions auditEntryActions;
    private final ContentConverter contentConverter;
    private final ResponseFactory responseFactory;
    private final AuthorizationManager authorizationManager;
    private final AuditDescriptorKey descriptorKey;

    @Autowired
    public AuditEntryController(AuditEntryActions auditEntryActions, ContentConverter contentConverter, ResponseFactory responseFactory, AuthorizationManager authorizationManager, AuditDescriptorKey descriptorKey) {
        this.auditEntryActions = auditEntryActions;
        this.contentConverter = contentConverter;
        this.responseFactory = responseFactory;
        this.authorizationManager = authorizationManager;
        this.descriptorKey = descriptorKey;
    }

    /**
     * Lists audit entries as a page serialized to JSON. All query parameters
     * are optional; {@code onlyShowSentNotifications} defaults to false via
     * {@link BooleanUtils#toBoolean(Boolean)} when absent.
     */
    @GetMapping
    public ResponseEntity<String> get(@RequestParam(value = "pageNumber", required = false) Integer pageNumber, @RequestParam(value = "pageSize", required = false) Integer pageSize,
        @RequestParam(value = "searchTerm", required = false) String searchTerm, @RequestParam(value = "sortField", required = false) String sortField,
        @RequestParam(value = "sortOrder", required = false) String sortOrder, @RequestParam(value = "onlyShowSentNotifications", required = false) Boolean onlyShowSentNotifications) {
        if (!hasGlobalPermission(authorizationManager::hasReadPermission, descriptorKey)) {
            return responseFactory.createForbiddenResponse();
        }
        AlertPagedModel<AuditEntryModel> auditEntries = auditEntryActions.get(pageNumber, pageSize, searchTerm, sortField, sortOrder, BooleanUtils.toBoolean(onlyShowSentNotifications));
        return responseFactory.createOkContentResponse(contentConverter.getJsonString(auditEntries));
    }

    /**
     * Returns a single audit entry by id, or 410 Gone when no entry with
     * that id exists (e.g. it has been purged).
     */
    @GetMapping(value = "/{id}")
    public ResponseEntity<String> get(@PathVariable(value = "id") Long id) {
        if (!hasGlobalPermission(authorizationManager::hasReadPermission, descriptorKey)) {
            return responseFactory.createForbiddenResponse();
        }
        Optional<AuditEntryModel> auditEntryModel = auditEntryActions.get(id);
        String stringId = contentConverter.getStringValue(id);
        if (auditEntryModel.isPresent()) {
            return responseFactory.createOkResponse(stringId, contentConverter.getJsonString(auditEntryModel.get()));
        } else {
            return responseFactory.createGoneResponse(stringId, "This Audit entry could not be found.");
        }
    }

    /**
     * Returns the audit status for a distribution job, or 410 Gone when no
     * audit information exists for that job id.
     */
    @GetMapping(value = "/job/{jobId}")
    public ResponseEntity<String> getAuditInfoForJob(@PathVariable(value = "jobId") UUID jobId) {
        if (!hasGlobalPermission(authorizationManager::hasReadPermission, descriptorKey)) {
            return responseFactory.createForbiddenResponse();
        }
        Optional<AuditJobStatusModel> jobAuditModel = auditEntryActions.getAuditInfoForJob(jobId);
        String jobIdString = jobId.toString();
        if (jobAuditModel.isPresent()) {
            return responseFactory.createOkResponse(jobIdString, contentConverter.getJsonString(jobAuditModel.get()));
        } else {
            return responseFactory.createGoneResponse(jobIdString, "The Audit information could not be found for this job.");
        }
    }

    /**
     * Resends the notification to every job that originally processed it.
     * Requires execute permission (a stronger right than the read checks above).
     */
    @PostMapping(value = "/resend/{id}/")
    public ResponseEntity<String> post(@PathVariable(value = "id") Long notificationId) {
        if (!hasGlobalPermission(authorizationManager::hasExecutePermission, descriptorKey)) {
            return responseFactory.createForbiddenResponse();
        }
        return resendNotification(notificationId, null);
    }

    /** Resends the notification through one specific distribution job. */
    @PostMapping(value = "/resend/{id}/job/{jobId}")
    public ResponseEntity<String> post(@PathVariable(value = "id") Long notificationId, @PathVariable(value = "jobId") UUID jobId) {
        if (!hasGlobalPermission(authorizationManager::hasExecutePermission, descriptorKey)) {
            return responseFactory.createForbiddenResponse();
        }
        return resendNotification(notificationId, jobId);
    }

    /**
     * Shared resend implementation. Translates domain exceptions to HTTP:
     * purged notification or missing job -> 410 Gone, any other
     * {@link IntegrationException} -> 400 Bad Request.
     *
     * @param notificationId id of the notification to resend
     * @param commonConfigId target job id, or null for all relevant jobs
     */
    private ResponseEntity<String> resendNotification(Long notificationId, UUID commonConfigId) {
        String stringNotificationId = contentConverter.getStringValue(notificationId);
        try {
            AlertPagedModel<AuditEntryModel> auditEntries = auditEntryActions.resendNotification(notificationId, commonConfigId);
            return responseFactory.createOkResponse(stringNotificationId, contentConverter.getJsonString(auditEntries));
        } catch (AlertNotificationPurgedException e) {
            return responseFactory.createGoneResponse(stringNotificationId, e.getMessage());
        } catch (AlertJobMissingException e) {
            return responseFactory.createGoneResponse(e.getMissingUUID().toString(), e.getMessage());
        } catch (IntegrationException e) {
            return responseFactory.createBadRequestResponse(stringNotificationId, e.getMessage());
        }
    }
}
elifkus/java-this-and-that
Problems/src/com/safkanyazilim/mergesort/Solution.java
package com.safkanyazilim.mergesort;

import java.util.ArrayList;
import java.util.List;

/**
 * Solution for the merge sort exercise.
 *
 * @author elif
 */
public class Solution {

    /**
     * Sorts the given list of integers into ascending order, in place,
     * using a top-down (recursive) merge sort. The original version of
     * this method was an empty stub that left the list untouched.
     *
     * A null list, or a list with fewer than two elements, is already
     * sorted and is returned unchanged.
     *
     * @param list the list to sort; modified in place
     */
    public static void mergeSort(List<Integer> list) {
        if (list == null || list.size() < 2) {
            return;
        }

        // Split into two halves (copies, so the recursion can sort them
        // independently), then merge back into the original list.
        int mid = list.size() / 2;
        List<Integer> left = new ArrayList<>(list.subList(0, mid));
        List<Integer> right = new ArrayList<>(list.subList(mid, list.size()));

        mergeSort(left);
        mergeSort(right);

        merge(list, left, right);
    }

    /**
     * Merges two sorted lists back into {@code target}. Stable: on ties the
     * element from the left half is taken first.
     */
    private static void merge(List<Integer> target, List<Integer> left, List<Integer> right) {
        int i = 0;
        int j = 0;
        int k = 0;
        while (i < left.size() && j < right.size()) {
            if (left.get(i) <= right.get(j)) {
                target.set(k++, left.get(i++));
            } else {
                target.set(k++, right.get(j++));
            }
        }
        while (i < left.size()) {
            target.set(k++, left.get(i++));
        }
        while (j < right.size()) {
            target.set(k++, right.get(j++));
        }
    }
}
qodirovshohijahon/dates-in-uzbek
aws-pentesting-with-python/ec2/stop_or_delete_ec2.py
#!/usr/bin/env python3
"""Stop a hard-coded EC2 instance, then optionally terminate it.

The script stops the instance, prints the reported state transitions, and
then asks the user whether the instance should also be terminated
(deleted).

Fixes over the original: removed the duplicate ``import os`` and the
unused imports (``os``, ``pprint``, ``sys.version_info``); moved the
stop call out of module import time (importing a module must not stop an
AWS instance) and under a ``main()`` entry point; fixed the "beeing"
typo; the invalid-answer message now ends with a newline.
"""
import json

import boto3

AWS_REGION = "us-west-1"
EC2_CLIENT = boto3.client('ec2', region_name=AWS_REGION)
INSTANCE_ID = 'i-06a2ac220369ddb08'


def stop_instances():
    """Stop the configured instance and print its state transition."""
    instances = EC2_CLIENT.stop_instances(
        InstanceIds=[
            INSTANCE_ID,
        ],
    )
    for instance in instances['StoppingInstances']:
        print(f'Stopping instance "{instance["InstanceId"]}"')
        print(f'Status of instance "{instance["CurrentState"]["Name"]}"')
    print(json.dumps(instances, indent=4, sort_keys=True))


def terminating_instances():
    """Terminate the configured instance and print its state transition."""
    instances = EC2_CLIENT.terminate_instances(
        InstanceIds=[
            INSTANCE_ID,
        ],
    )
    for instance in instances['TerminatingInstances']:
        print(f'Terminating instance "{instance["InstanceId"]}"')
        print(f'Status of instance "{instance["CurrentState"]["Name"]}"')
    print(json.dumps(instances, indent=4, sort_keys=True))


def main():
    """Stop the instance, then prompt whether to terminate it as well."""
    stop_instances()

    # An empty answer counts as "yes", mirroring the original behaviour.
    yes = {'yes', 'y', 'ye', ''}
    no = {'no', 'n'}

    print("Do you want to delete instance? (y/n)")
    choice = input().lower()
    if choice in yes:
        print('Now it is being deleted')
        terminating_instances()
    elif choice in no:
        print('Successfully stopped')
    else:
        print("Please respond with 'yes' or 'no'")


if __name__ == "__main__":
    main()
zhouguangping/pentaho-kettle
ui/src/main/java/org/pentaho/di/ui/spoon/ChangedWarningDialog.java
/*! ******************************************************************************
 *
 * Pentaho Data Integration
 *
 * Copyright (C) 2002-2017 by <NAME> : http://www.pentaho.com
 *
 *******************************************************************************
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************************/

package org.pentaho.di.ui.spoon;

import org.eclipse.swt.SWT;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.ui.xul.XulDomContainer;
import org.pentaho.ui.xul.XulException;
import org.pentaho.ui.xul.components.XulMessageBox;

/**
 * XUL-backed "unsaved changes" prompt shown by Spoon before closing or
 * replacing a file. Presents a Yes / No / Cancel message box built from the
 * main Spoon XUL container; a different implementation can be swapped in
 * through {@link #setInstance(ChangedWarningInterface)}.
 */
public class ChangedWarningDialog implements ChangedWarningInterface {

  // Shared instance; replaceable (but never nullable) via setInstance().
  private static ChangedWarningInterface instance = new ChangedWarningDialog();

  protected String result = null;
  protected XulDomContainer container = null;

  // Anchor class for resolving i18n message bundles.
  private static Class<?> PKG = Spoon.class;

  public ChangedWarningDialog() {
  }

  /** Replaces the shared instance; a null argument is ignored. */
  public static void setInstance( ChangedWarningInterface cwi ) {
    // Cannot null out the instance
    if ( cwi != null ) {
      instance = cwi;
    }
  }

  public static ChangedWarningInterface getInstance() {
    return instance;
  }

  /** Controller name under which this dialog is known to the XUL framework. */
  public String getName() {
    return "changedWarningController";
  }

  /** Shows the generic prompt (no file name in the message). */
  public int show() throws Exception {
    return show( null );
  }

  /**
   * Shows the prompt and blocks until the user answers.
   *
   * @param fileName file to mention in the message, or null for the generic text
   * @return the SWT button constant the user chose (presumably SWT.YES /
   *         SWT.NO / SWT.CANCEL, per the buttons set below — confirm against
   *         the XulMessageBox implementation)
   */
  public int show( String fileName ) throws Exception {
    return runXulChangedWarningDialog( fileName ).open();
  }

  /**
   * Builds (but does not open) the message box from Spoon's main XUL
   * container, choosing the with-filename or generic message variant.
   */
  protected XulMessageBox runXulChangedWarningDialog( String fileName ) throws IllegalArgumentException, XulException {
    container = Spoon.getInstance().getMainSpoonContainer();

    XulMessageBox messageBox = (XulMessageBox) container.getDocumentRoot().createElement( "messagebox" );

    messageBox.setTitle( BaseMessages.getString( PKG, "Spoon.Dialog.PromptSave.Title" ) );

    if ( fileName != null ) {
      messageBox.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.PromptToSave.Message", fileName ) );
    } else {
      messageBox.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.PromptSave.Message" ) );
    }

    messageBox.setButtons( new Integer[] { SWT.YES, SWT.NO, SWT.CANCEL } );

    return messageBox;
  }
}
dvuckovic/fdb
src/fdb5/database/WipeVisitor.cc
<filename>src/fdb5/database/WipeVisitor.cc
/*
 * (C) Copyright 1996- ECMWF.
 *
 * This software is licensed under the terms of the Apache Licence Version 2.0
 * which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
 * In applying this licence, ECMWF does not waive the privileges and immunities
 * granted to it by virtue of its status as an intergovernmental organisation nor
 * does it submit to any jurisdiction.
 */

#include "fdb5/database/WipeVisitor.h"

namespace fdb5 {

//----------------------------------------------------------------------------------------------------------------------

// Base visitor for wipe operations. Only stores the selection request, the
// report stream, and the behaviour flags; all actual work lives in subclasses.
//   doit          - presumably: perform the deletion rather than a dry run (confirm in subclasses)
//   porcelain     - presumably: emit machine-readable output (confirm in subclasses)
//   unsafeWipeAll - presumably: permit wiping content not matched by the request (confirm)
WipeVisitor::WipeVisitor(const metkit::mars::MarsRequest& request,
                         std::ostream& out,
                         bool doit,
                         bool porcelain,
                         bool unsafeWipeAll) :
    EntryVisitor(),
    request_(request),
    out_(out),
    doit_(doit),
    porcelain_(porcelain),
    unsafeWipeAll_(unsafeWipeAll) {}

WipeVisitor::~WipeVisitor() {}

//----------------------------------------------------------------------------------------------------------------------

} // namespace fdb5
anfark/insilico-lab
bundles/org.insilico.jsbml.core/src/org/insilico/jsbml/core/Constants.java
<reponame>anfark/insilico-lab
package org.insilico.jsbml.core;

/**
 * Placeholder for shared constants of the {@code org.insilico.jsbml.core}
 * bundle. Currently empty — no constants have been defined yet.
 */
public interface Constants {

}
LiquidEngine/legui2
core/src/main/java/com/spinyowl/spinygui/core/event/Event.java
<gh_stars>0
package com.spinyowl.spinygui.core.event;

import lombok.Data;
import lombok.NonNull;
import lombok.experimental.SuperBuilder;

/**
 * Base class for GUI events. Getters, equals/hashCode/toString and a
 * builder are generated by Lombok ({@code @Data} + {@code @SuperBuilder});
 * all fields except {@link #currentTarget} are final and set at build time.
 */
@Data
@SuperBuilder
public class Event {

  /** Element which caused event generation. */
  @NonNull private final EventTarget source;

  /** Target element to which the event was originally dispatched. */
  @NonNull private final EventTarget target;

  /** Timestamp of the event. */
  private final double timestamp;

  /**
   * Currently registered target for the event. This is the object to which the event is currently
   * slated to be sent. It's possible this has been changed along the way through retargeting.
   */
  private EventTarget currentTarget;
}
zburke/ui-inn-reach
src/components/common/filters/MultiChoiceFilter/MultiChoiceFilter.js
import React from 'react'; import { FormattedMessage } from 'react-intl'; import PropTypes from 'prop-types'; import { MultiSelectionFilter } from '@folio/stripes-smart-components'; import FilterAccordion from '../FilterAccordion'; const MultiChoiceFilter = ({ name, labelId, activeFilters, dataOptions, closedByDefault, disabled, onChange, }) => { return ( <FilterAccordion id={`${name}-filter-accordion`} label={<FormattedMessage id={labelId} />} name={name} activeFilters={activeFilters} closedByDefault={closedByDefault} disabled={disabled} onChange={onChange} > <MultiSelectionFilter id={`${name}-filter`} name={name} ariaLabelledBy={`accordion-toggle-button-transaction-filter-${name}`} dataOptions={dataOptions} selectedValues={activeFilters} disabled={disabled} onChange={onChange} /> </FilterAccordion> ); }; MultiChoiceFilter.propTypes = { dataOptions: PropTypes.arrayOf(PropTypes.object).isRequired, labelId: PropTypes.string.isRequired, name: PropTypes.string.isRequired, onChange: PropTypes.func.isRequired, activeFilters: PropTypes.arrayOf(PropTypes.string), closedByDefault: PropTypes.bool, disabled: PropTypes.bool, }; MultiChoiceFilter.defaultProps = { disabled: false, }; export default MultiChoiceFilter;
rentianhua/tsf_android
houseAd/HouseGo/src/com/dumu/housego/model/IPaySuccessModel.java
<gh_stars>0
package com.dumu.housego.model;

import com.dumu.housego.model.IModel.AsycnCallBack;

/**
 * Model-layer contract for reporting a completed payment to the backend.
 */
public interface IPaySuccessModel {

	/**
	 * Submits the payment result asynchronously.
	 *
	 * NOTE(review): parameter meanings below are inferred from the names and
	 * should be confirmed against the implementation:
	 *
	 * @param resultStatus presumably the payment gateway's result status code
	 * @param jine         presumably the paid amount ("jine" is pinyin for "amount")
	 * @param order_no     presumably the merchant-side order number
	 * @param trade_no     presumably the gateway-side transaction number
	 * @param back         callback invoked with the asynchronous result
	 */
	void PayInfo(String resultStatus,String jine,String order_no,String trade_no,AsycnCallBack back);
}
moutainhigh/ses-server
ses-app/ses-web-ros/src/main/java/com/redescooter/ses/web/ros/dao/base/OpeProductionPurchasePartsBMapper.java
<reponame>moutainhigh/ses-server
package com.redescooter.ses.web.ros.dao.base;

import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.redescooter.ses.web.ros.dm.OpeProductionPurchasePartsB;
import java.util.List;
import org.apache.ibatis.annotations.Param;

/**
 * MyBatis-Plus mapper for {@link OpeProductionPurchasePartsB} entities.
 * Single-row CRUD comes from {@link BaseMapper}; the methods below add
 * batch operations whose SQL lives in the corresponding XML mapper file
 * (not visible here).
 */
public interface OpeProductionPurchasePartsBMapper extends BaseMapper<OpeProductionPurchasePartsB> {

    /** Updates the given records as a batch; returns the affected row count. */
    int updateBatch(List<OpeProductionPurchasePartsB> list);

    /** Inserts the given records as a batch; returns the affected row count. */
    int batchInsert(@Param("list") List<OpeProductionPurchasePartsB> list);

    /** Inserts the record, or updates it if it already exists (all columns). */
    int insertOrUpdate(OpeProductionPurchasePartsB record);

    /** Like {@link #insertOrUpdate}, but only touches non-null fields. */
    int insertOrUpdateSelective(OpeProductionPurchasePartsB record);
}
definejs/packer
modules/MetaInfo.js
<filename>modules/MetaInfo.js
const File = require('@definejs/file');
const $Object = require('@definejs/object');

const IDRequires = require('./MetaInfo/IDRequires');

module.exports = {
    /**
     * Builds the packer's metadata object (module ids, name->id,
     * name->requires, name->version maps) and, when a directory is given,
     * writes each map out as a sorted JSON file.
     */
    render(opt) {
        let {
            dir,
            id$info,
            name$pkg,
            name$id,
            name$requires,
            third$version,
        } = opt;

        let ids = Object.keys(id$info);

        // Package versions, with third-party versions merged on top.
        let name$version = $Object.map(name$pkg, (name, pkg) => {
            return pkg.version;
        });

        Object.assign(name$version, third$version);

        // The metadata object that will eventually be returned.
        let info = {
            id$info,
            ids,
            name$id,
            name$requires,
            name$version,
        };

        // Write the metadata files out for later inspection and reference.
        if (dir) {
            $Object.each(info, (key, value) => {
                File.writeSortJSON(`${dir}${key}.json`, value);
            });
        }

        return info;

        // The dependency relations extracted by the code below are only at
        // package granularity, which is too coarse; the analysis needs to be
        // done at template granularity instead. TODO...

        // let id$requires = IDRequires.get(opt);
        // File.writeSortJSON(`${dir}id$requires.json`, id$requires);

        // let id$dependents = {};

        // Object.keys(id$requires).forEach((id) => {
        //     let requires = id$requires[id];

        //     requires.forEach((sid) => {
        //         let dependents = id$dependents[sid] || [];
        //         dependents.push(id);
        //         id$dependents[sid] = [...new Set(dependents)];
        //     });
        // });

        // File.writeSortJSON(`${dir}id$dependents.json`, id$dependents);
    },
};
pedrotari7/advent_of_code
py/2016/8A.py
"""Advent of Code 2016, day 8, part A.

Simulates the 50x6 pixel screen driven by the instructions in ``8.in``
and prints how many pixels are lit afterwards.

Fixes over the original: the script was Python-2-only (``xrange`` and the
``print`` statement) and read its input file at import time; it is now
Python 3 and the simulation is guarded behind ``main()``.
"""


def shift(seq, n):
    """Return *seq* rotated right by ``n`` positions.

    ``n`` may exceed ``len(seq)``; it is reduced modulo the length.
    Works for any sliceable sequence (list, str, ...).
    """
    i = n % len(seq)
    return seq[-i:] + seq[:-i]


def main():
    """Run the rect / rotate instructions from '8.in'; print the lit count."""
    width, height = 50, 6
    screen = [[0] * width for _ in range(height)]

    with open('8.in') as f:
        for line in f:
            cmd = line.split()
            if cmd[0] == 'rect':
                # "rect WxH": light the top-left W x H rectangle.
                w, h = (int(part) for part in cmd[1].split('x'))
                for row in range(h):
                    screen[row][:w] = [1] * w
            elif 'row' in cmd:
                # "rotate row y=A by B": rotate row A right by B pixels.
                y = int(cmd[2].split('=')[1])
                screen[y] = shift(screen[y], int(cmd[4]))
            elif 'column' in cmd:
                # "rotate column x=A by B": rotate column A down by B pixels.
                x = int(cmd[2].split('=')[1])
                column = shift([screen[row][x] for row in range(height)],
                               int(cmd[4]))
                for row in range(height):
                    screen[row][x] = column[row]

    print(sum(row.count(1) for row in screen))


if __name__ == '__main__':
    main()
cassianobecker/dnn
dataset/synth/dwi.py
import numpy as np import os from os.path import join import subprocess import shutil from dipy.io.image import load_nifti, save_nifti from dipy.io import read_bvals_bvecs from dipy.reconst.csdeconv import ConstrainedSphericalDeconvModel from dipy.reconst.csdeconv import auto_response from dipy.core.gradients import gradient_table from fwk.config import Config from util.path import absolute_path, copy_folder from dataset.synth.regression import FibercupRegressionDataset class SynthProcessor: def __init__(self, dry_run=False): self.dry_run = dry_run self.database_processing_path = os.path.expanduser(Config.get_option('DATABASE', 'local_processing_directory')) self.container_path = os.path.expanduser(Config.get_option('DWI', 'path_to_container')) self.container_rel_proc_path = Config.get_option('DWI', 'container_relative_processing_path') self.container_processing_path = join(self.container_path, *self.container_rel_proc_path.split(os.path.sep)) self.dwi_params_file = (Config.get_option('DWI', 'dwi_params_file')) def make_path(self, sample_id, paths, container=False): if container: path = join(self.container_processing_path, f'{sample_id}', paths) else: path = join(self.database_processing_path, f'{sample_id}', paths) if not os.path.isdir(path): os.makedirs(path) return path def process_subject(self, sample_id): self.create_tractogram_files(sample_id) self.setup_dwi_params(sample_id) self.simulate_dwi(sample_id) self.transfer_files_from_container(sample_id, delete_after=True) self.fit_dti(sample_id) self.fit_odf(sample_id) def create_tractogram_files(self, sample_id): regression_dataset = FibercupRegressionDataset() tractogram, covariate = regression_dataset.generate_tractogram_and_covariate() tract_path = self.make_path(sample_id, 'tracts', container=True) regression_dataset.save_tract_and_label(tract_path, tractogram, label=covariate) mask_path = self.make_path(sample_id, 'dwi', container=True) regression_dataset.make_mask(mask_path) def setup_dwi_params(self, 
sample_id): self._copy_dwi_params(sample_id) self._flip_evecs(sample_id, flips=(1, -1, 1)) def _copy_dwi_params(self, sample_id): src_path = join(absolute_path('dataset'), 'synth', 'dwi_params') # resource folder in codebase dest_path = self.make_path(sample_id, 'params', container=True) os.makedirs(dest_path, exist_ok=True) suffixes = ['', '.bvals', '.bvecs'] for suffix in suffixes: src = join(src_path, self.dwi_params_file + suffix) dest = join(dest_path, self.dwi_params_file + suffix) shutil.copyfile(src, dest) def _flip_evecs(self, sample_id, flips=(1, -1, 1)): # flip eigenvectors for compatibility between Mitk Fiberfox and FSL dtifit bvals_url = join(self.make_path(sample_id, 'params', container=True), self.dwi_params_file + '.bvals') bvecs_url = join(self.make_path(sample_id, 'params', container=True), self.dwi_params_file + '.bvecs') bvals, bvecs = read_bvals_bvecs(bvals_url, bvecs_url) new_bvecs = bvecs @ np.diag(flips) flipped_bvals_url = join(self.make_path(sample_id, 'params', container=True), 'flipped_' + self.dwi_params_file + '.bvals') np.savetxt(flipped_bvals_url, np.expand_dims(bvals, axis=0), fmt='%d', delimiter=' ') flipped_bvecs_url = join(self.make_path(sample_id, 'params', container=True), 'flipped_' + self.dwi_params_file + '.bvecs') np.savetxt(flipped_bvecs_url, new_bvecs.T, fmt='%2.6f', delimiter=' ') def simulate_dwi(self, sample_id): # setup paths and files for container use container = Config.get_option('DWI', 'container_type', 'docker') if container == 'docker': params_url = join(self.container_rel_proc_path, f'{sample_id}', 'params', self.dwi_params_file) tracts_url = join(self.container_rel_proc_path, f'{sample_id}', 'tracts', 'tracts.fib') target_url = join(self.container_rel_proc_path, f'{sample_id}', 'dwi', 'data') container_prefix = Config.get_option('DWI', 'docker_container_prefix') fiberfox_executable = Config.get_option('DWI', 'fiberfox_executable_within_container') elif container == 'singularity': params_url = 
join(self.make_path(sample_id, 'params', container=True), self.dwi_params_file) tracts_url = join(self.make_path(sample_id, 'tracts', container=True), 'tracts.fib') target_url = join(self.make_path(sample_id, 'dwi', container=True), 'data') container_prefix = Config.get_option('DWI', 'singularity_container_prefix') fiberfox_executable = os.path.expanduser(join( self.container_path, *Config.get_option('DWI', 'fiberfox_executable_within_container').split(os.path.sep), )) os.makedirs(self.make_path(sample_id, 'dwi', container=True), exist_ok=True) str_cmd = f'{container_prefix} ' \ f'{fiberfox_executable} ' \ f'-o {target_url} ' \ f'-i {tracts_url} ' \ f'-p {params_url} ' \ f'--verbose' subprocess.run(str_cmd, shell=True, check=True) def transfer_files_from_container(self, sample_id, delete_after=False): folders = ['tracts', 'dwi', 'params'] for folder in folders: src_folder = self.make_path(sample_id, folder, container=True) dest_folder = self.make_path(sample_id, folder, container=False) copy_folder(src_path=src_folder, dest_path=dest_folder) # delete folder for sample_id if delete_after: shutil.rmtree(join(self.container_processing_path, f'{sample_id}')) def fit_dti(self, sample_id): dti_params = { 'data': join(self.make_path(sample_id, 'dwi'), 'data.nii.gz'), 'mask': join(self.make_path(sample_id, 'dwi'), 'data_mask.nii.gz'), 'bvals': join(self.make_path(sample_id, 'params'), 'flipped_' + self.dwi_params_file + '.bvals'), 'bvecs': join(self.make_path(sample_id, 'params'), 'flipped_' + self.dwi_params_file + '.bvecs'), 'output': join(self.make_path(sample_id, 'dti'), 'dti'), } self._perform_dti_fit(dti_params, save_tensor=True) # convert file for compatibility on CBICA for older versions of FSL dti_tensor_url = join(self.make_path(sample_id, 'dti'), 'dti_tensor.*') fslconvert_command_str = f'fslchfiletype NIFTI_GZ {dti_tensor_url}' subprocess.run(fslconvert_command_str, shell=True, check=True) @staticmethod def _perform_dti_fit(dti_params, save_tensor=False): 
dti_fit_command_str = f"dtifit " \ f"-k {dti_params['data']} " \ f"-o {dti_params['output']} " \ f"-m {dti_params['mask']} " \ f"-r {dti_params['bvecs']} " \ f"-b {dti_params['bvals']} " if save_tensor is True: dti_fit_command_str += '--save_tensor' subprocess.run(dti_fit_command_str, shell=True, check=True) def fit_odf(self, sample_id): bvals_url = join(self.make_path(sample_id, 'params'), self.dwi_params_file + '.bvals') bvecs_url = join(self.make_path(sample_id, 'params'), self.dwi_params_file + '.bvecs') bvals, bvecs = read_bvals_bvecs(bvals_url, bvecs_url) gtab = gradient_table(bvals, bvecs) volumes_url = join(self.make_path(sample_id, 'dwi'), 'data.nii.gz') volumes, volumes_affine = load_nifti(volumes_url) response, ratio = auto_response(gtab, volumes, roi_center=(29, 48, 2), roi_radius=1, fa_thr=0.24) csd_model = ConstrainedSphericalDeconvModel(gtab, response) csd_fit = csd_model.fit(volumes) odf = csd_fit.shm_coeff mask, mask_affine = load_nifti(join(self.make_path(sample_id, 'dwi'), 'data_mask.nii.gz')) masked_odf = (mask[..., 0] * odf.transpose((2, 3, 1, 0))).transpose((3, 2, 0, 1)) odf_url = join(self.make_path(sample_id, 'odf'), 'odf.nii.gz') save_nifti(odf_url, masked_odf, volumes_affine)
PhenixRTS/PlatformTesting
test/models/reporters/sync-reporter.js
/** * Copyright 2020 Phenix Real Time Solutions, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import {t} from 'testcafe'; import Logger from '../../../scripts/logger.js'; import reporter from './common-reporter.js'; import moment from 'moment'; import math from '../math.js'; import config from '../../../config.js'; const logger = new Logger('Sync Test'); async function CollectMediaChanges() { logger.log('Collecting media changes...'); const streamReceivedAtTitle = '[Acceptance Testing] [Subscriber Stream received] '; const subscriberVideoTitle = '[Acceptance Testing] [Subscriber Video] '; const subscriberAudioTitle = '[Acceptance Testing] [Subscriber Audio] '; const streamIdTitle = '[Acceptance Testing] [Stream ID] '; const sessionIdTitle = '[Acceptance Testing] [Session ID] '; const logs = await t.getBrowserConsoleMessages(); const collectedStats = { streamReceivedAt: undefined, streamId: undefined, sessionId: undefined, averageSync: undefined, maxSync: undefined, framerateMin: undefined, framerateMax: undefined, video: [], audio: [] }; let allSyncs = []; let maxSync = 0; logs.info.forEach(el => { el = el.trim(); if (el.startsWith(streamReceivedAtTitle)) { collectedStats.streamReceivedAt = parseInt(el.replace(streamReceivedAtTitle, '')); return; } if (el.startsWith(streamIdTitle)) { const streamId = el.replace(streamIdTitle, ''); logger.log(`For stream [${streamId} ]`); collectedStats.streamId = streamId; return; } collectedStats.framerateMin = 
reporter.ExtractFramerate('min', collectedStats.framerateMin, el); collectedStats.framerateMax = reporter.ExtractFramerate('max', collectedStats.framerateMax, el); if (el.startsWith(sessionIdTitle)) { const sessionId = el.replace(sessionIdTitle, ''); logger.log(`For session [${sessionId}]`); collectedStats.sessionId = sessionId; } var title = ''; title = el.startsWith(subscriberVideoTitle) ? subscriberVideoTitle : title; title = el.startsWith(subscriberAudioTitle) ? subscriberAudioTitle : title; if (title === '') { return; } el = el.replace(title, ''); var stat = JSON.parse(el); stat.formattedTimestamp = moment(stat.timestamp).format('HH:mm:ss.SSS'); switch (title) { case subscriberVideoTitle: stat.syncWithAudio = undefined; stat.closestAudioTimestamp = undefined; collectedStats.video.push(stat); break; case subscriberAudioTitle: collectedStats.audio.push(stat); break; default: return; } }); collectedStats.video.forEach(videoEl => { let closest = ClosestElement(videoEl.timestamp, collectedStats.audio); if (closest !== undefined) { videoEl.syncWithAudio = Math.abs(videoEl.timestamp - closest.timestamp); videoEl.closestAudioTimestamp = moment(closest.timestamp).format('HH:mm:ss.SSS'); allSyncs.push(videoEl.syncWithAudio); maxSync = videoEl.syncWithAudio > maxSync ? 
videoEl.syncWithAudio : maxSync; } }); collectedStats.averageSync = math.average(allSyncs).toFixed(2); collectedStats.maxSync = maxSync; return collectedStats; } function ClosestElement(number, arr){ var minDiff = 1000; var closest; arr.forEach(el => { var m = Math.abs(number - el.timestamp); if (m < minDiff) { minDiff = m; closest = el; } }); return closest; } async function CreateTestReport(testController, page, channel = {}) { let header = {}; let content = {}; let additionalInfo = ''; if (config.args.reportFormat === 'json') { header = [ { name: 'subscriber_stream_received_at', valueFormatted: moment(page.stats.streamReceivedAt).format('HH:mm:ss.SSS'), value: page.stats.streamReceivedAt }, { name: 'average_sync', value: page.stats.averageSync, units: 'milliseconds' }, { name: 'max_sync', value: page.stats.maxSync, units: 'milliseconds' } ]; content = { videoStats: page.stats.video, audioStats: page.stats.audio }; if (channel && channel.channelId) { const {applicationId, channelId, streamKey, created} = channel; additionalInfo = { applicationId: applicationId, channelId: channelId, streamKey: streamKey, created: created }; } } else { header = '\nSubscriber stream received at ' + `${moment(page.stats.streamReceivedAt).format('HH:mm:ss.SSS')} (${page.stats.streamReceivedAt})` + `\n\nAverage Sync: ${page.stats.averageSync} ms` + `\nMax Sync: ${page.stats.maxSync}`; content = `\n\nVideo Stats:\n` + JSON.stringify(page.stats.video, undefined, 2) + `\n\nAudio Stats:\n` + JSON.stringify(page.stats.audio, undefined, 2); if (channel && channel.channelId) { const {applicationId, channelId, streamKey, created} = channel; additionalInfo = `\n\nApplication ID: ${applicationId}\nChannel ID: ${channelId}\nStream Key: ${streamKey}\nCreated: ${created}\n`; } } return reporter.CreateTestReport(testController, page, header, content, additionalInfo); } // eslint-disable-next-line no-unused-vars function GenerateTelemetryRecords(page) { // TODO: - Implement this return []; } export 
default { CollectMediaChanges, CreateTestReport, CreateConsoleDump: reporter.CreateConsoleDump, GenerateTelemetryRecords };
MartinMelo/DomiticTIP
public/modules/climatizacions/controllers/climatizacions.client.controller.js
'use strict'; // Climatizacions controller angular.module('climatizacions').controller('ClimatizacionsController', ['$scope', '$stateParams', '$location', 'Authentication', 'Climatizacions', function($scope, $stateParams, $location, Authentication, Climatizacions ) { $scope.urlList = 'modules/climatizacions/views/list-climatizacions.client.view.html'; $scope.urlView = 'modules/climatizacions/views/view-climatizacion.client.view.html'; $scope.urlCreate = 'modules/climatizacions/views/create-climatizacion.client.view.html'; $scope.urlEdit = 'modules/climatizacions/views/edit-climatizacion.client.view.html'; $scope.authentication = Authentication; // Remove existing Climatizacion $scope.remove = function( climatizacion ) { if ( climatizacion ) { climatizacion.$remove(); for (var i in $scope.climatizacions ) { if ($scope.climatizacions [i] === climatizacion ) { $scope.climatizacions.splice(i, 1); } } } else { $scope.climatizacion.$remove(function() { $scope.cambiarPagina($scope.urlList); }); } }; // Update existing Climatizacion $scope.update = function() { var climatizacion = $scope.climatizacion ; climatizacion.$update(function() { $scope.cambiarPagina($scope.urlList); }, function(errorResponse) { $scope.error = errorResponse.data.message; }); }; // Find a list of Climatizacions $scope.find = function() { $scope.climatizacions = Climatizacions.query(); }; // Find existing Climatizacion $scope.findOne = function() { $scope.climatizacion = Climatizacions.get({ climatizacionId: $stateParams.climatizacionId }); }; $scope.cargarUno = function() { $scope.climatizacion = Climatizacions.get({ climatizacionId: $scope.idView }); }; } ]);
msqljj/aoce
code/aoce_vulkan_extra/layer/VkAlphaShowLayer.cpp
#include "VkAlphaShowLayer.hpp" #include "aoce/layer/PipeGraph.hpp" namespace aoce { namespace vulkan { namespace layer { VkAlphaShowLayer::VkAlphaShowLayer() { // inFormats[0].imageType由上一层决定 bAutoImageType = true; } VkAlphaShowLayer::~VkAlphaShowLayer() {} void VkAlphaShowLayer::onInitLayer() { glslPath = "glsl/alphaShow.comp.spv"; if (inFormats[0].imageType == ImageType::r8) { glslPath = "glsl/alphaShowC1.comp.spv"; } else if (inFormats[0].imageType == ImageType::rgba32f) { glslPath = "glsl/alphaShowF4.comp.spv"; } else if (inFormats[0].imageType == ImageType::r32f) { glslPath = "glsl/alphaShowF1.comp.spv"; } else if (inFormats[0].imageType == ImageType::r32) { glslPath = "glsl/alphaShowSI1.comp.spv"; } else if (inFormats[0].imageType == ImageType::rgba32) { glslPath = "glsl/alphaShowSI4.comp.spv"; } // 加载shader onInitGraph(); outFormats[0].imageType = ImageType::rgba8; VkLayer::onInitLayer(); } VkAlphaShow2Layer::VkAlphaShow2Layer() { glslPath = "glsl/showRound.comp.spv"; inCount = 2; outCount = 1; } VkAlphaShow2Layer::~VkAlphaShow2Layer() {} void VkAlphaShow2Layer::onInitGraph() { VkLayer::onInitGraph(); // inFormats[0].imageType = ImageType::r8; inFormats[1].imageType = ImageType::rgba8; outFormats[0].imageType = ImageType::rgba8; } } // namespace layer } // namespace vulkan } // namespace aoce
Palem1988/nuls
tools-module/tools/src/main/java/io/nuls/core/tools/log/BlockLog.java
/* * MIT License * * Copyright (c) 2017-2019 nuls.io * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
* */ package io.nuls.core.tools.log; import ch.qos.logback.classic.Level; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.HashMap; import java.util.Map; /** * 公共日志输出工具 <br> * 该类提供了基本的日志输出。该类不可以继承。 * 依赖于slf4j * * @author Niels */ public final class BlockLog { /** * 日志对象 */ private static final Logger LOG = LoggerFactory.getLogger("blockLog"); /** * 日志级别 */ private static final Map<String, Level> LOG_LEVELS = new HashMap<>(); /** * 存放deviceId等关键信息 */ private static final ThreadLocal<String> THREAD_LOCAL = new ThreadLocal<>(); /** * 不允许实例化该类 */ private BlockLog() { } @Override public String toString() { return super.toString(); } /** * 初始化日志等级(只开放DEBUG/INFO/WARN/ERROR/FATAL 5个级别的配置) */ static { LOG_LEVELS.put("DEBUG", Level.DEBUG); LOG_LEVELS.put("INFO", Level.INFO); LOG_LEVELS.put("WARN", Level.WARN); LOG_LEVELS.put("ERROR", Level.ERROR); } /** * 提供debug级别基本的日志输出 * * @param msg 需要显示的消息 */ public static void debug(String msg) { if (LOG.isDebugEnabled()) { String logContent = isStringBlank(getId()) ? (getLogTrace() + ":" + msg) : (getLogTrace() + "[" + getId() + "]" + ":" + msg); LOG.debug(logContent); } } public static void debug(String msg, Object... objs) { if (LOG.isDebugEnabled()) { String logContent = isStringBlank(getId()) ? (getLogTrace() + ":" + msg) : (getLogTrace() + "[" + getId() + "]" + ":" + msg); LOG.debug(logContent, objs); } } /** * 提供debug级别基本的日志输出 * * @param msg 需要显示的消息 * @param throwable 异常信息 */ public static void debug(String msg, Throwable throwable) { if (LOG.isDebugEnabled()) { String logContent = isStringBlank(getId()) ? 
(getLogTrace() + ":" + msg) : (getLogTrace() + "[" + getId() + "]" + ":" + msg); // if (!(throwable instanceof NulsException) || !(throwable instanceof NulsRuntimeException)) { // throwable = new NulsException(ErrorCode.FAILED, throwable); // } LOG.debug(logContent, throwable); } } private static boolean isStringBlank(String val) { return null == val || val.trim().isEmpty(); } /** * 获取日志记录点的全路径 * * @return 日志记录点的全路径 */ private static String getLogTrace() { StringBuilder logTrace = new StringBuilder(); StackTraceElement stack[] = Thread.currentThread().getStackTrace(); if (stack.length > 1) { // index为3上一级调用的堆栈信息,index为1和2都为Log类自己调两次(可忽略),index为0为主线程触发(可忽略) StackTraceElement ste = stack[3]; if (ste != null) { // logTrace.append("[" + DateUtil.convertDate(new Date(TimeService.currentTimeMillis())) + "]"); // 获取类名、方法名、日志的代码行数 logTrace.append(ste.getClassName()); logTrace.append('.'); logTrace.append(ste.getMethodName()); logTrace.append('('); logTrace.append(ste.getFileName()); logTrace.append(':'); logTrace.append(ste.getLineNumber()); logTrace.append(')'); } } logTrace.append("\n"); return logTrace.toString(); } // /** // * 设置日志级别 // * // * @param level 日志级别 // */ // public static void setLevel(String level) { // if (LOG_LEVELS.containsKey(level.toUpperCase())) { // LOG.setLevel(LOG_LEVELS.get(level.toUpperCase())); // } // } /** * 获取当前线程16位唯一序列号 * * @return 当前线程16位唯一序列号 */ private static String getId() { return THREAD_LOCAL.get(); } /** * 设置日志流水号 * * @param id 流水号 */ public static void setId(String id) { THREAD_LOCAL.set(id); } public static void removeId() { THREAD_LOCAL.remove(); } }
guyguy2001/txircd
txircd/modules/server/fjoin.py
from twisted.plugin import IPlugin from txircd.channel import IRCChannel from txircd.module_interface import Command, ICommand, IModuleData, ModuleData from txircd.utils import ModeType from zope.interface import implements from datetime import datetime class FJoinCommand(ModuleData, Command): implements(IPlugin, IModuleData, ICommand) name = "FJoinCommand" core = True def serverCommands(self): return [ ("FJOIN", 1, self) ] def parseParams(self, server, params, prefix, tags): if len(params) < 4: return None try: time = datetime.utcfromtimestamp(int(params[1])) except ValueError: return None modes = {} currParam = 3 for mode in params[2]: if mode == "+": continue if mode not in self.ircd.channelModeTypes or self.ircd.channelModeTypes[mode] == ModeType.Status: return None modeType = self.ircd.channelModeTypes[mode] if modeType in (ModeType.ParamOnUnset, ModeType.Param): try: modes[mode] = params[currParam] except IndexError: return None currParam += 1 else: modes[mode] = None try: usersInChannel = params[currParam].split() except IndexError: return None if currParam + 1 < len(params): return None users = {} try: for userData in usersInChannel: ranks, uuid = userData.split(",") if uuid not in self.ircd.users: return None for rank in ranks: if rank not in self.ircd.channelModeTypes or self.ircd.channelModeTypes[rank] != ModeType.Status: return None users[self.ircd.users[uuid]] = ranks except ValueError: return None if params[0] in self.ircd.channels: channel = self.ircd.channels[params[0]] else: channel = IRCChannel(self.ircd, params[0]) return { "channel": channel, "time": time, "modes": modes, "users": users } def execute(self, server, data): channel = data["channel"] time = data["time"] remoteModes = data["modes"] remoteStatuses = [] for user, ranks in data["users"].iteritems(): user.joinChannel(channel, True, True) for rank in ranks: remoteStatuses.append((user.uuid, rank)) if time < channel.existedSince: modeUnsetList = [] for mode, param in 
channel.modes.iteritems(): modeType = self.ircd.channelModeTypes[mode] if modeType == ModeType.List: for paramData in param: modeUnsetList.append((False, mode, paramData[0])) else: modeUnsetList.append((False, mode, param)) for user, data in channel.users.iteritems(): for rank in data["status"]: modeUnsetList.append((False, rank, user.uuid)) if modeUnsetList: channel.setModes(modeUnsetList, self.ircd.serverID) channel.existedSince = time if time == channel.existedSince: modeSetList = [] for mode, param in remoteModes.iteritems(): modeSetList.append((True, mode, param)) for status in remoteStatuses: modeSetList.append((True, status[1], status[0])) if modeSetList: channel.setModes(modeSetList, self.ircd.serverID) return True fjoinCmd = FJoinCommand()
siahr/basex
basex-core/src/main/java/org/basex/query/expr/TypeCase.java
package org.basex.query.expr; import static org.basex.query.QueryText.*; import org.basex.query.*; import org.basex.query.func.*; import org.basex.query.iter.Iter; import org.basex.query.util.*; import org.basex.query.value.*; import org.basex.query.value.node.*; import org.basex.query.value.type.*; import org.basex.query.var.*; import org.basex.util.InputInfo; import org.basex.util.Token; import org.basex.util.TokenBuilder; import org.basex.util.hash.*; import org.basex.util.list.*; /** * Case expression for typeswitch. * * @author BaseX Team 2005-13, BSD License * @author <NAME> */ public final class TypeCase extends Single { /** Variable. */ final Var var; /** Matched sequence types. */ private final SeqType[] types; /** * Constructor. * @param ii input info * @param v variable * @param ts sequence types this case matches, the empty array means {@code default} * @param r return expression */ public TypeCase(final InputInfo ii, final Var v, final SeqType[] ts, final Expr r) { super(ii, r); var = v; types = ts; } @Override public TypeCase compile(final QueryContext ctx, final VarScope scp) throws QueryException { return compile(ctx, scp, null); } /** * Compiles the expression. 
* @param ctx query context * @param scp variable scope * @param v value to be bound * @return resulting item * @throws QueryException query exception */ TypeCase compile(final QueryContext ctx, final VarScope scp, final Value v) throws QueryException { if(var != null && v != null) ctx.set(var, v, info); try { super.compile(ctx, scp); } catch(final QueryException ex) { // replace original expression with error expr = FNInfo.error(ex, expr.type()); } type = expr.type(); return this; } @Override public Expr inline(final QueryContext ctx, final VarScope scp, final Var v, final Expr e) { try { return super.inline(ctx, scp, v, e); } catch(final QueryException qe) { expr = FNInfo.error(qe, expr.type()); return this; } } @Override public TypeCase copy(final QueryContext ctx, final VarScope scp, final IntObjMap<Var> vs) { final Var v = var == null ? null : scp.newCopyOf(ctx, var); if(var != null) vs.put(var.id, v); return new TypeCase(info, v, types.clone(), expr.copy(ctx, scp, vs)); } /** * Checks if the given value matches this case. * @param val value to be matched * @return {@code true} if it matches, {@code false} otherwise */ public boolean matches(final Value val) { if(types.length == 0) return true; for(final SeqType t : types) if(t.instance(val)) return true; return false; } /** * Evaluates the expression. 
* @param ctx query context * @param seq sequence to be checked * @return resulting item * @throws QueryException query exception */ Iter iter(final QueryContext ctx, final Value seq) throws QueryException { if(!matches(seq)) return null; if(var == null) return ctx.iter(expr); ctx.set(var, seq, info); return ctx.value(expr).iter(); } @Override public void plan(final FElem plan) { final FElem e = planElem(); if(types.length == 0) { e.add(planAttr(Token.token(DEFAULT), Token.TRUE)); } else { final byte[] or = { ' ', '|', ' ' }; final ByteList bl = new ByteList(); for(final SeqType t : types) { if(!bl.isEmpty()) bl.add(or); bl.add(Token.token(t.toString())); } e.add(planAttr(Token.token(TYPE), bl.toArray())); } if(var != null) e.add(planAttr(VAR, Token.token(var.toString()))); expr.plan(e); plan.add(e); } @Override public String toString() { final TokenBuilder tb = new TokenBuilder(types.length == 0 ? DEFAULT : CASE); if(var != null) { tb.add(' ').add(var.toString()); if(types.length != 0) tb.add(' ').add(AS); } if(types.length != 0) { for(int i = 0; i < types.length; i++) { if(i > 0) tb.add(" |"); tb.add(' ').add(types[i].toString()); } } return tb.add(' ' + RETURN + ' ' + expr).toString(); } @Override public void markTailCalls(final QueryContext ctx) { expr.markTailCalls(ctx); } @Override public boolean accept(final ASTVisitor visitor) { return super.accept(visitor) && (var == null || visitor.declared(var)); } @Override public int exprSize() { return expr.exprSize(); } }
epfl-lasa/TutorialICRA2018
vendor/ruby/2.3.0/gems/aws-sdk-core-2.10.125/lib/aws-sdk-core/plugins/s3_host_id.rb
module Aws module Plugins # Support S3 host id, more information, see: # http://docs.aws.amazon.com/AmazonS3/latest/dev/troubleshooting.html#sdk-request-ids # # This plugin adds :host_id for s3 responses when available # @api private class S3HostId < Seahorse::Client::Plugin class Handler < Seahorse::Client::Handler def call(context) response = @handler.call(context) h = context.http_response.headers context[:s3_host_id] = h['x-amz-id-2'] response end end handler(Handler, step: :sign) end end end
DevelopByTarun/TODAY-HELP
REACTJS/BOOKS-CODES/letters-social-master/src/pages/home.js
<gh_stars>0 import PropTypes from 'prop-types'; import React, { Component } from 'react'; import { connect } from 'react-redux'; import { bindActionCreators } from 'redux'; import orderBy from 'lodash/orderBy'; import { createError } from '../actions/error'; import { createNewPost, getPostsForPage } from '../actions/posts'; import { showComments } from '../actions/comments'; import Ad from '../components/ad/Ad'; import CreatePost from '../components/post/Create'; import Post from '../components/post/Post'; import Welcome from '../components/welcome/Welcome'; export class Home extends Component { componentDidMount() { this.props.actions.getPostsForPage(); } componentDidCatch(err, info) { this.props.actions.createError(err, info); } render() { return ( <div className="home"> <Welcome /> <div> <CreatePost onSubmit={this.props.actions.createNewPost} /> {this.props.posts && ( <div className="posts"> {this.props.posts.map(post => ( <Post key={post.id} post={post} openCommentsDrawer={this.props.actions.showComments} /> ))} </div> )} <button className="block" onClick={this.props.actions.getNextPageOfPosts}> Load more posts </button> </div> <div> <Ad url="https://ifelse.io/book" imageUrl="/static/assets/ads/ria.png" /> <Ad url="https://ifelse.io/book" imageUrl="/static/assets/ads/orly.jpg" /> </div> </div> ); } } Home.propTypes = { posts: PropTypes.arrayOf(PropTypes.object), actions: PropTypes.shape({ createNewPost: PropTypes.func, getPostsForPage: PropTypes.func, showComments: PropTypes.func, createError: PropTypes.func, getNextPageOfPosts: PropTypes.func }) }; export const mapStateToProps = state => { const posts = orderBy(state.postIds.map(postId => state.posts[postId]), 'date', 'desc'); return { posts }; }; export const mapDispatchToProps = dispatch => { return { actions: bindActionCreators( { createNewPost, getPostsForPage, showComments, createError, getNextPageOfPosts: getPostsForPage.bind(this, 'next') }, dispatch ) }; }; export default connect(mapStateToProps, 
mapDispatchToProps)(Home);
apexrtos/apex
sys/arch/riscv32/syscall.cpp
<filename>sys/arch/riscv32/syscall.cpp #include "locore.h" #include <debug.h> #include <errno.h> long arch_syscall(uint32_t a0, uint32_t a1, uint32_t a2, uint32_t a3, uint32_t a4, uint32_t a5, uint32_t a6, uint32_t a7) { dbg("WARNING: unimplemented syscall %u\n", a7); return DERR(-ENOSYS); }
oilegor1029/sri-front
src/containers/peeringPartner/PeeringPartnerList.js
<filename>src/containers/peeringPartner/PeeringPartnerList.js import { connect } from 'react-redux'; import PeeringPartnerList from '../../components/peeringPartner/PeeringPartnerList'; const mapStateToProps = (state, props) => { const { columns_visible, all_columns } = state.filterColumns.peeringPartner; return { columns_visible, all_columns }; }; const mapDispatchToProps = (dispatch, props) => { return {}; }; const PeeringPartnerListContainer = connect(mapStateToProps, mapDispatchToProps)(PeeringPartnerList); export default PeeringPartnerListContainer;
griggt/scala-js
javalib/src/main/scala/java/util/SortedMap.scala
<reponame>griggt/scala-js /* * Scala.js (https://www.scala-js.org/) * * Copyright EPFL. * * Licensed under Apache License 2.0 * (https://www.apache.org/licenses/LICENSE-2.0). * * See the NOTICE file distributed with this work for * additional information regarding copyright ownership. */ package java.util trait SortedMap[K, V] extends Map[K, V] { def firstKey(): K def comparator(): Comparator[_ >: K] def lastKey(): K def subMap(fromKey: K, toKey: K): SortedMap[K, V] def headMap(toKey: K): SortedMap[K, V] def tailMap(fromKey: K): SortedMap[K, V] }
CharLemAznable/char-elves
src/test/java/com/github/charlemaznable/core/spring/AnnotationElfTest.java
package com.github.charlemaznable.core.spring;

import com.github.charlemaznable.core.config.EnvConfig;
import lombok.SneakyThrows;
import org.junit.jupiter.api.Test;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.ComponentScans;
import org.springframework.stereotype.Component;

import static com.github.charlemaznable.core.spring.AnnotationElf.findAnnotation;
import static com.github.charlemaznable.core.spring.AnnotationElf.resolveContainerAnnotationType;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;

/**
 * Tests for {@code AnnotationElf}:
 * - resolveContainerAnnotationType: maps a repeatable annotation to its
 *   container (ComponentScan -> ComponentScans) and returns null otherwise.
 * - findAnnotation: finds annotations declared directly on a type/method,
 *   but — as asserted below — does NOT walk up to inherited interface
 *   declarations (SubInterface yields null).
 */
public class AnnotationElfTest {

    @SneakyThrows
    @Test
    public void testAnnotationElf() {
        // Component is not repeatable, so it has no container annotation.
        assertNull(resolveContainerAnnotationType(Component.class));
        assertEquals(ComponentScans.class, resolveContainerAnnotationType(ComponentScan.class));

        // Declared directly on SuperInterface and its method: found.
        assertNotNull(findAnnotation(SuperInterface.class, EnvConfig.class));
        assertNotNull(findAnnotation(SuperInterface.class.getMethod("config"), EnvConfig.class));
        // Not re-declared on SubInterface: lookup does not traverse the parent.
        assertNull(findAnnotation(SubInterface.class, EnvConfig.class));
        assertNull(findAnnotation(SubInterface.class.getMethod("config"), EnvConfig.class));
    }

    @EnvConfig
    public interface SuperInterface {

        @EnvConfig
        String config();
    }

    public interface SubInterface extends SuperInterface {

        @Override
        String config();
    }
}
TheScienceMuseum/web-design-system
fractal/docs/03-colour/gradients.config.js
<filename>fractal/docs/03-colour/gradients.config.js<gh_stars>0
// Fractal documentation context for the colour-gradient utility classes.
// Keys under "2col"/"3col" are the CSS utility class names; the col1..col3
// values name the brand colours the gradient blends between, in order.
module.exports = {
  context: {
    gradients: {
      "2col": {
        "u-grad-red-orange": { col1: "red", col2: "orange" },
        "u-grad-orange-red": { col1: "orange", col2: "red" },
        "u-grad-orange-yellow": { col1: "orange", col2: "yellow" },
        "u-grad-yellow-orange": { col1: "yellow", col2: "orange" },
        "u-grad-yellow-green": { col1: "yellow", col2: "green" },
        "u-grad-green-yellow": { col1: "green", col2: "yellow" },
        "u-grad-green-teal": { col1: "green", col2: "teal" },
        "u-grad-teal-green": { col1: "teal", col2: "green" },
        "u-grad-teal-blue": { col1: "teal", col2: "blue" },
        "u-grad-blue-teal": { col1: "blue", col2: "teal" },
        "u-grad-blue-purple": { col1: "blue", col2: "purple" },
        "u-grad-purple-blue": { col1: "purple", col2: "blue" },
        "u-grad-purple-red": { col1: "purple", col2: "red" },
        "u-grad-red-purple": { col1: "red", col2: "purple" }
      },
      "3col": {
        "u-grad-red-orange-yellow": { col1: "red", col2: "orange", col3: "yellow" },
        "u-grad-yellow-orange-red": { col1: "yellow", col2: "orange", col3: "red" },
        "u-grad-orange-yellow-green": { col1: "orange", col2: "yellow", col3: "green" },
        "u-grad-green-yellow-orange": { col1: "green", col2: "yellow", col3: "orange" },
        "u-grad-yellow-green-teal": { col1: "yellow", col2: "green", col3: "teal" },
        "u-grad-teal-green-yellow": { col1: "teal", col2: "green", col3: "yellow" },
        "u-grad-green-teal-blue": { col1: "green", col2: "teal", col3: "blue" },
        "u-grad-blue-teal-green": { col1: "blue", col2: "teal", col3: "green" },
        "u-grad-teal-blue-purple": { col1: "teal", col2: "blue", col3: "purple" },
        "u-grad-purple-blue-teal": { col1: "purple", col2: "blue", col3: "teal" },
        "u-grad-blue-purple-red": { col1: "blue", col2: "purple", col3: "red" },
        "u-grad-red-purple-blue": { col1: "red", col2: "purple", col3: "blue" },
        "u-grad-purple-red-orange": { col1: "purple", col2: "red", col3: "orange" },
        "u-grad-orange-red-purple": { col1: "orange", col2: "red", col3: "purple" }
      }
    }
  }
};
meng630/GMD_E3SM_SCM
externals/kokkos/core/unit_test/TestAggregate.hpp
/* //@HEADER // ************************************************************************ // // Kokkos v. 3.0 // Copyright (2020) National Technology & Engineering // Solutions of Sandia, LLC (NTESS). // // Under the terms of Contract DE-NA0003525 with NTESS, // the U.S. Government retains certain rights in this software. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // 1. Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // // 2. Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // // 3. Neither the name of the Corporation nor the names of the // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY NTESS "AS IS" AND ANY // EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL NTESS OR THE // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // // Questions? 
Contact <NAME> (<EMAIL>)
//
// ************************************************************************
//@HEADER
*/

#ifndef TEST_AGGREGATE_HPP
#define TEST_AGGREGATE_HPP

#include <gtest/gtest.h>

#include <stdexcept>
#include <sstream>
#include <iostream>

#include <impl/Kokkos_ViewArray.hpp>

namespace Test {

// Compile-time and runtime checks that Views of Kokkos::Array ("aggregate"
// element types) behave correctly: the traits machinery must specialize on
// Kokkos::Array<>, and the flattened array_type must expose the Array extent
// as one extra (static) rank. Also exercises Array brace-initialization.
template <class DeviceType>
void TestViewAggregate() {
  using value_type = Kokkos::Array<double, 32>;
  using analysis_1d =
      Kokkos::Impl::ViewDataAnalysis<value_type *, Kokkos::LayoutLeft,
                                     value_type>;

  static_assert(
      std::is_same<typename analysis_1d::specialize, Kokkos::Array<> >::value,
      "");

  using a32_traits = Kokkos::ViewTraits<value_type **, DeviceType>;
  using flat_traits =
      Kokkos::ViewTraits<typename a32_traits::scalar_array_type, DeviceType>;

  // The aggregate view keeps rank 2; its flattened counterpart gains the
  // static extent 32 as a third dimension.
  static_assert(
      std::is_same<typename a32_traits::specialize, Kokkos::Array<> >::value,
      "");
  static_assert(
      std::is_same<typename a32_traits::value_type, value_type>::value, "");
  static_assert(a32_traits::rank == 2, "");
  static_assert(a32_traits::rank_dynamic == 2, "");

  static_assert(std::is_same<typename flat_traits::specialize, void>::value,
                "");
  static_assert(flat_traits::rank == 3, "");
  static_assert(flat_traits::rank_dynamic == 2, "");
  static_assert(flat_traits::dimension::N2 == 32, "");

  using a32_type      = Kokkos::View<Kokkos::Array<double, 32> **, DeviceType>;
  using a32_flat_type = typename a32_type::array_type;

  static_assert(std::is_same<typename a32_type::value_type, value_type>::value,
                "");
  static_assert(std::is_same<typename a32_type::pointer_type, double *>::value,
                "");
  static_assert(a32_type::Rank == 2, "");
  static_assert(a32_flat_type::Rank == 3, "");

  // A flat view constructed from the aggregate view shares extents and
  // exposes the Array length as extent(2).
  a32_type x("test", 4, 5);
  a32_flat_type y(x);

  ASSERT_EQ(x.extent(0), 4);
  ASSERT_EQ(x.extent(1), 5);
  ASSERT_EQ(y.extent(0), 4);
  ASSERT_EQ(y.extent(1), 5);
  ASSERT_EQ(y.extent(2), 32);

  // Initialize arrays from brace-init-list as for std::array.
  //
  // Comment: Clang will issue the following warning if we don't use double
  // braces here (one for initializing the Kokkos::Array and one for
  // initializing the sub-aggreagate C-array data member),
  //
  // warning: suggest braces around initialization of subobject
  //
  // but single brace syntax would be valid as well.
  Kokkos::Array<float, 2> aggregate_initialization_syntax_1 = {{1.41, 3.14}};
  ASSERT_FLOAT_EQ(aggregate_initialization_syntax_1[0], 1.41);
  ASSERT_FLOAT_EQ(aggregate_initialization_syntax_1[1], 3.14);

  Kokkos::Array<int, 3> aggregate_initialization_syntax_2{
      {0, 1, 2}};  // since C++11
  for (int i = 0; i < 3; ++i) {
    ASSERT_EQ(aggregate_initialization_syntax_2[i], i);
  }

  // Note that this is a valid initialization.
  Kokkos::Array<double, 3> initialized_with_one_argument_missing = {{255, 255}};
  for (int i = 0; i < 2; ++i) {
    ASSERT_DOUBLE_EQ(initialized_with_one_argument_missing[i], 255);
  }
  // But the following line would not compile
  // Kokkos::Array< double, 3 > initialized_with_too_many{ { 1, 2, 3, 4 } };

  // The code below must compile for zero-sized arrays.
  using T          = float;
  constexpr int N  = 0;
  Kokkos::Array<T, N> a;
  for (int i = 0; i < N; ++i) {
    a[i] = T();
  }
}

TEST(TEST_CATEGORY, view_aggregate) { TestViewAggregate<TEST_EXECSPACE>(); }

}  // namespace Test

#endif /* #ifndef TEST_AGGREGATE_HPP */
phoenix-engine/phoenix
include/phx_sdl/sdl_input.hpp
<filename>include/phx_sdl/sdl_input.hpp #include "input.hpp" namespace phx_sdl { template <typename Consumer = input::Simple> class Input { public: Input(input::Input<Consumer>& with) noexcept; // poll will block the current thread until quit is signaled. void poll() noexcept; private: input::Input<Consumer>& into; }; // Specialized only for input::Simple. To enable further // specializations, add them to the implementation file. } // namespace phx_sdl
ghuntley/COVIDSafe_1.0.11.apk
src/sources/com/google/crypto/tink/subtle/RsaSsaPssVerifyJce.java
package com.google.crypto.tink.subtle;

import com.google.crypto.tink.PublicKeyVerify;
import com.google.crypto.tink.subtle.Enums;
import java.math.BigInteger;
import java.security.GeneralSecurityException;
import java.security.MessageDigest;
import java.security.interfaces.RSAPublicKey;
import java.util.Arrays;

/**
 * RSASSA-PSS signature verification (decompiled Tink class).
 *
 * verify() performs the RSAVP1 public-key operation, then emsaPssVerify()
 * checks the EMSA-PSS encoding per RFC 8017 section 9.1.2. Variable names
 * (bArr, i2, ...) are decompiler artifacts; the code itself is left
 * byte-identical and only annotated here.
 */
public final class RsaSsaPssVerifyJce implements PublicKeyVerify {
    private final Enums.HashType mgf1Hash;   // hash used inside MGF1
    private final RSAPublicKey publicKey;
    private final int saltLength;            // PSS salt length in bytes
    private final Enums.HashType sigHash;    // hash applied to the message

    public RsaSsaPssVerifyJce(RSAPublicKey rSAPublicKey, Enums.HashType hashType, Enums.HashType hashType2, int i) throws GeneralSecurityException {
        // Reject weak hashes and short moduli before accepting the key.
        Validators.validateSignatureHash(hashType);
        Validators.validateRsaModulusSize(rSAPublicKey.getModulus().bitLength());
        this.publicKey = rSAPublicKey;
        this.sigHash = hashType;
        this.mgf1Hash = hashType2;
        this.saltLength = i;
    }

    /**
     * Verifies signature bArr over message bArr2; throws
     * GeneralSecurityException on any mismatch (never returns a boolean).
     */
    public void verify(byte[] bArr, byte[] bArr2) throws GeneralSecurityException {
        BigInteger publicExponent = this.publicKey.getPublicExponent();
        BigInteger modulus = this.publicKey.getModulus();
        int bitLength = (modulus.bitLength() + 7) / 8;       // k: modulus length in bytes
        int bitLength2 = ((modulus.bitLength() - 1) + 7) / 8; // emLen for modBits-1
        if (bitLength == bArr.length) {
            BigInteger bytes2Integer = SubtleUtil.bytes2Integer(bArr);
            if (bytes2Integer.compareTo(modulus) < 0) {
                // RSAVP1: em = s^e mod n, then check the PSS encoding.
                emsaPssVerify(bArr2, SubtleUtil.integer2Bytes(bytes2Integer.modPow(publicExponent, modulus), bitLength2), modulus.bitLength() - 1);
                return;
            }
            throw new GeneralSecurityException("signature out of range");
        }
        throw new GeneralSecurityException("invalid signature's length");
    }

    /**
     * EMSA-PSS-VERIFY (RFC 8017 9.1.2): bArr is the message, bArr2 the
     * encoded message EM, i the intended bit length (modBits - 1).
     * All failure paths throw the same "inconsistent" message, as the
     * RFC requires a single failure result.
     */
    private void emsaPssVerify(byte[] bArr, byte[] bArr2, int i) throws GeneralSecurityException {
        byte[] bArr3 = bArr2;
        Validators.validateSignatureHash(this.sigHash);
        MessageDigest instance = EngineFactory.MESSAGE_DIGEST.getInstance(SubtleUtil.toDigestAlgo(this.sigHash));
        byte[] digest = instance.digest(bArr);  // mHash = Hash(M)
        int digestLength = instance.getDigestLength();
        int length = bArr3.length;              // emLen
        // Step 3: emLen must hold hash + salt + 2 bytes.
        if (length < this.saltLength + digestLength + 2) {
            throw new GeneralSecurityException("inconsistent");
        } else if (bArr3[bArr3.length - 1] == -68) {  // step 4: trailer 0xbc
            int i2 = length - digestLength;
            int i3 = i2 - 1;
            // Step 5: split EM into maskedDB (copyOf) and H (copyOfRange).
            byte[] copyOf = Arrays.copyOf(bArr3, i3);
            byte[] copyOfRange = Arrays.copyOfRange(bArr3, copyOf.length, copyOf.length + digestLength);
            int i4 = 0;
            while (true) {
                int i5 = i3;
                MessageDigest messageDigest = instance;
                byte[] bArr4 = digest;
                long j = (((long) length) * 8) - ((long) i);
                if (((long) i4) < j) {
                    // Step 6: the top 8*emLen - emBits bits of maskedDB
                    // must be zero.
                    if (((copyOf[i4 / 8] >> (7 - (i4 % 8))) & 1) == 0) {
                        i4++;
                        i3 = i5;
                        instance = messageDigest;
                        digest = bArr4;
                    } else {
                        throw new GeneralSecurityException("inconsistent");
                    }
                } else {
                    // Steps 7-8: dbMask = MGF1(H), DB = maskedDB XOR dbMask.
                    byte[] mgf1 = SubtleUtil.mgf1(copyOfRange, i5, this.mgf1Hash);
                    int length2 = mgf1.length;
                    byte[] bArr5 = new byte[length2];
                    for (int i6 = 0; i6 < length2; i6++) {
                        bArr5[i6] = (byte) (mgf1[i6] ^ copyOf[i6]);
                    }
                    // Step 9: clear the leftmost 8*emLen - emBits bits of DB.
                    for (int i7 = 0; ((long) i7) <= j; i7++) {
                        int i8 = i7 / 8;
                        bArr5[i8] = (byte) ((~(1 << (7 - (i7 % 8)))) & bArr5[i8]);
                    }
                    int i9 = 0;
                    while (true) {
                        int i10 = this.saltLength;
                        if (i9 < (i2 - i10) - 2) {
                            // Step 10: DB must start with zero padding...
                            if (bArr5[i9] == 0) {
                                i9++;
                            } else {
                                throw new GeneralSecurityException("inconsistent");
                            }
                        } else if (bArr5[(i2 - i10) - 2] == 1) {
                            // ...followed by a 0x01 separator before the salt.
                            byte[] copyOfRange2 = Arrays.copyOfRange(bArr5, length2 - i10, length2);
                            // Steps 12-13: H' = Hash(0x00*8 || mHash || salt);
                            // compare against H from the encoded message.
                            int i11 = digestLength + 8;
                            byte[] bArr6 = new byte[(this.saltLength + i11)];
                            byte[] bArr7 = bArr4;
                            System.arraycopy(bArr7, 0, bArr6, 8, bArr7.length);
                            System.arraycopy(copyOfRange2, 0, bArr6, i11, copyOfRange2.length);
                            if (!Bytes.equal(messageDigest.digest(bArr6), copyOfRange)) {
                                throw new GeneralSecurityException("inconsistent");
                            }
                            return;
                        } else {
                            throw new GeneralSecurityException("inconsistent");
                        }
                    }
                }
            }
        } else {
            throw new GeneralSecurityException("inconsistent");
        }
    }
}
onmyway133/Runtime-Headers
iOS/10.0.2/Frameworks/CoreData.framework/_PFWeakReference.h
<gh_stars>10-100
/* Generated by RuntimeBrowser
   Image: /System/Library/Frameworks/CoreData.framework/CoreData
 */

// Runtime-dumped private CoreData class: a weak-reference wrapper that
// also records the referent's address so it can be identified after the
// object is gone. Header only — implementation is inside CoreData.
@interface _PFWeakReference : NSObject {
    id  _object;            // the (weakly held) referent
    long long  _objectAddress; // referent's address, kept past deallocation
}

@property (readonly) long long address;
@property (readonly) id object;

+ (id)weakReferenceWithObject:(id)arg1;

- (long long)address;
- (void)dealloc;
- (unsigned long long)hash;
- (id)initWithObject:(id)arg1;
- (bool)isEqual:(id)arg1;
- (id)object;
- (id)retainedObject;

@end
dh-linghaibin/xboot
src/arch/arm32/mach-v3ss/driver/fb-sandbox.c
<gh_stars>0 /* * driver/fb-sandbox.c * * Copyright(c) 2007-2019 <NAME> <<EMAIL>> * Official site: http://xboot.org * Mobile phone: +86-18665388956 * QQ: 8192542 * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
 * */

#include <xboot.h>
#include <sandbox.h>
#include <framebuffer/framebuffer.h>

/*
 * Sandbox (SDL-hosted) framebuffer driver for xboot. All actual pixel
 * work is delegated to the sandbox_sdl_fb_* host functions; this file
 * adapts them to the xboot framebuffer_t driver interface.
 */

struct fb_sandbox_pdata_t {
	int width;       /* pixels */
	int height;
	int pwidth;      /* physical size in mm */
	int pheight;
	int bpp;
	int fullscreen;
	void * priv;     /* opaque handle from sandbox_sdl_fb_init() */
};

/* Forward backlight control to the SDL host window. */
static void fb_setbl(struct framebuffer_t * fb, int brightness)
{
	struct fb_sandbox_pdata_t * pdat = (struct fb_sandbox_pdata_t *)fb->priv;
	sandbox_sdl_fb_set_backlight(pdat->priv, brightness);
}

static int fb_getbl(struct framebuffer_t * fb)
{
	struct fb_sandbox_pdata_t * pdat = (struct fb_sandbox_pdata_t *)fb->priv;
	return sandbox_sdl_fb_get_backlight(pdat->priv);
}

/*
 * Allocate a render surface backed by an SDL surface. Returns NULL on
 * any failure; each allocation is unwound before returning.
 */
static struct render_t * fb_create(struct framebuffer_t * fb)
{
	struct fb_sandbox_pdata_t * pdat = (struct fb_sandbox_pdata_t *)fb->priv;
	struct sandbox_fb_surface_t * surface;
	struct render_t * render;

	surface = malloc(sizeof(struct sandbox_fb_surface_t));
	if(!surface)
		return NULL;

	if(sandbox_sdl_fb_surface_create(pdat->priv, surface) != 0)
	{
		free(surface);
		return NULL;
	}

	render = malloc(sizeof(struct render_t));
	if(!render)
	{
		sandbox_sdl_fb_surface_destroy(pdat->priv, surface);
		free(surface);
		return NULL;
	}

	render->width = surface->width;
	render->height = surface->height;
	render->pitch = surface->pitch;
	render->format = PIXEL_FORMAT_ARGB32;
	render->pixels = surface->pixels;
	render->pixlen = surface->height * surface->pitch;
	render->priv = surface;

	return render;
}

/* Release a surface created by fb_create(); tolerates render == NULL. */
static void fb_destroy(struct framebuffer_t * fb, struct render_t * render)
{
	struct fb_sandbox_pdata_t * pdat = (struct fb_sandbox_pdata_t *)fb->priv;

	if(render)
	{
		sandbox_sdl_fb_surface_destroy(pdat->priv, render->priv);
		free(render->priv);
		free(render);
	}
}

/* Blit the whole surface; the dirty-rect list is ignored by this backend. */
static void fb_present(struct framebuffer_t * fb, struct render_t * render, struct dirty_rect_t * rect, int nrect)
{
	struct fb_sandbox_pdata_t * pdat = (struct fb_sandbox_pdata_t *)fb->priv;
	sandbox_sdl_fb_surface_present(pdat->priv, render->priv);
}

/*
 * Probe: read geometry from the device tree (with defaults), open the
 * SDL window, then re-read the size actually granted by the host before
 * registering the framebuffer device.
 */
static struct device_t * fb_sandbox_probe(struct driver_t * drv, struct dtnode_t * n)
{
	struct fb_sandbox_pdata_t * pdat;
	struct framebuffer_t * fb;
	struct device_t * dev;
	char title[64];

	pdat = malloc(sizeof(struct fb_sandbox_pdata_t));
	if(!pdat)
		return NULL;

	fb = malloc(sizeof(struct framebuffer_t));
	if(!fb)
	{
		free(pdat);
		return NULL;
	}

	sprintf(title, "Xboot Runtime Environment - V%s", xboot_version_string());
	pdat->width = dt_read_int(n, "width", 800);
	pdat->height = dt_read_int(n, "height", 480);
	pdat->pwidth = dt_read_int(n, "physical-width", 216);
	pdat->pheight = dt_read_int(n, "physical-height", 135);
	pdat->bpp = dt_read_int(n, "bits-per-pixel", 32);
	pdat->fullscreen = dt_read_bool(n, "fullscreen", 0);
	pdat->priv = sandbox_sdl_fb_init(title, pdat->width, pdat->height, pdat->fullscreen);
	/* The host may grant a different size (e.g. fullscreen). */
	pdat->width = sandbox_sdl_fb_get_width(pdat->priv);
	pdat->height = sandbox_sdl_fb_get_height(pdat->priv);

	fb->name = alloc_device_name(dt_read_name(n), dt_read_id(n));
	fb->width = pdat->width;
	fb->height = pdat->height;
	fb->pwidth = pdat->pwidth;
	fb->pheight = pdat->pheight;
	fb->bpp = pdat->bpp;
	fb->setbl = fb_setbl;
	fb->getbl = fb_getbl;
	fb->create = fb_create;
	fb->destroy = fb_destroy;
	fb->present = fb_present;
	fb->priv = pdat;

	if(!register_framebuffer(&dev, fb))
	{
		sandbox_sdl_fb_exit(pdat->priv);

		free_device_name(fb->name);
		free(fb->priv);
		free(fb);
		return NULL;
	}
	dev->driver = drv;

	return dev;
}

static void fb_sandbox_remove(struct device_t * dev)
{
	struct framebuffer_t * fb = (struct framebuffer_t *)dev->priv;
	struct fb_sandbox_pdata_t * pdat = (struct fb_sandbox_pdata_t *)fb->priv;

	if(fb && unregister_framebuffer(fb))
	{
		sandbox_sdl_fb_exit(pdat->priv);

		free_device_name(fb->name);
		free(fb->priv);
		free(fb);
	}
}

/* Nothing to do: the host window keeps its own state across suspend. */
static void fb_sandbox_suspend(struct device_t * dev)
{
}

static void fb_sandbox_resume(struct device_t * dev)
{
}

static struct driver_t fb_sandbox = {
	.name		= "fb-sandbox",
	.probe		= fb_sandbox_probe,
	.remove		= fb_sandbox_remove,
	.suspend	= fb_sandbox_suspend,
	.resume		= fb_sandbox_resume,
};

static __init void fb_sandbox_driver_init(void)
{
	register_driver(&fb_sandbox);
}

static __exit void fb_sandbox_driver_exit(void)
{
	unregister_driver(&fb_sandbox);
}

driver_initcall(fb_sandbox_driver_init);
driver_exitcall(fb_sandbox_driver_exit);
DweebsUnited/CodeMonkey
resources/hemesh/ref/html/classwblut_1_1hemesh_1_1_h_e_c___geodesic.js
// Doxygen-generated navigation data for class wblut.hemesh.HEC_Geodesic:
// each entry is [member name, documentation anchor URL, template args].
// Do not edit by hand — regenerated by the documentation build.
var classwblut_1_1hemesh_1_1_h_e_c___geodesic =
[
    [ "HEC_Geodesic", "classwblut_1_1hemesh_1_1_h_e_c___geodesic.html#a8012c1b966e78ac6e5e271dd14bb8831", null ],
    [ "HEC_Geodesic", "classwblut_1_1hemesh_1_1_h_e_c___geodesic.html#a684038be79f0617c5856ee0687d53366", null ],
    [ "createBase", "classwblut_1_1hemesh_1_1_h_e_c___geodesic.html#a20fe5bd1cc242b55af1583a78a18cb14", null ],
    [ "setB", "classwblut_1_1hemesh_1_1_h_e_c___geodesic.html#a7038c9d71853bfc39663b6e5a4fbe674", null ],
    [ "setC", "classwblut_1_1hemesh_1_1_h_e_c___geodesic.html#a3bc47fa814cec313d930267c13fca601", null ],
    [ "setRadius", "classwblut_1_1hemesh_1_1_h_e_c___geodesic.html#ab995b7e495da3e7858e04bdf5158aa2e", null ],
    [ "setRadius", "classwblut_1_1hemesh_1_1_h_e_c___geodesic.html#ae69f3a56a9049d3e86e6f5537bb25bff", null ],
    [ "setSphere", "classwblut_1_1hemesh_1_1_h_e_c___geodesic.html#a06912c09824b40da24b8c4d573b45e98", null ],
    [ "setType", "classwblut_1_1hemesh_1_1_h_e_c___geodesic.html#a5a9b72f8459739863883f35276248acb", null ],
    [ "b", "classwblut_1_1hemesh_1_1_h_e_c___geodesic.html#a9b75d781c97ea96ff78418131a892897", null ],
    [ "c", "classwblut_1_1hemesh_1_1_h_e_c___geodesic.html#abadaf33850c75c9980ae2a4cb3aa890b", null ],
    [ "rx", "classwblut_1_1hemesh_1_1_h_e_c___geodesic.html#abbe3c3c6117f2461569b1acc17617f1e", null ],
    [ "type", "classwblut_1_1hemesh_1_1_h_e_c___geodesic.html#a06b78c03df79b79100244d2868eb95e5", null ]
];
ajozwik/akka-smtp-server
akka-smtp/src/main/scala/pl/jozwik/smtp/server/command/RcptCommand.scala
/* * Copyright (c) 2017 <NAME> * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/
package pl.jozwik.smtp
package server
package command

import pl.jozwik.smtp.server.Errors._
import pl.jozwik.smtp.util.Constants._
import pl.jozwik.smtp.util.Response._
import pl.jozwik.smtp.util.Utils._

/**
 * Handling of the SMTP `RCPT TO:<address>` command.
 *
 * Validates the command syntax, checks that `MAIL FROM` was issued first,
 * parses the recipient address and asks the [[AddressHandler]] whether it
 * is accepted, accumulating accepted recipients in [[MailAccumulator]].
 */
object RcptCommand {

  def handleRcpt(
      iterator: Iterator[String],
      argument: String,
      accumulator: MailAccumulator,
      addressHandler: AddressHandler
  ): (MailAccumulator, ResponseMessage) = {
    // Exactly one token, and it must be the "TO" keyword; anything else
    // (missing or trailing tokens) is a syntax error.
    val (acc, message) =
      if (iterator.hasNext && iterator.next() == TO && !iterator.hasNext) {
        responseForRcptAndValidation(accumulator, argument, addressHandler)
      } else {
        (accumulator, syntaxError(TO))
      }
    response(acc, message)
  }

  // Ordering of checks: MAIL FROM must have been seen, then the RCPT
  // argument must be non-empty, before the address itself is parsed.
  // NOTE(review): EMPTY/NOT_EMPTY appear to be Boolean constants from
  // Constants used as match patterns — confirm against their definition.
  private def responseForRcptAndValidation(accumulator: MailAccumulator, argument: String, addressHandler: AddressHandler): (MailAccumulator, String) =
    (accumulator.from.isEmpty, argument.isEmpty) match {
      case (EMPTY, _) =>
        (accumulator, MAIL_MISSING)
      case (NOT_EMPTY, EMPTY) =>
        (accumulator, syntaxError(s"$TO"))
      case _ =>
        responseForRcptTo(accumulator, argument, addressHandler)
    }

  // Parse the address; on success prepend it to the accumulator's
  // recipient list only if the AddressHandler accepts it.
  private def responseForRcptTo(accumulator: MailAccumulator, argument: String, addressHandler: AddressHandler): (MailAccumulator, String) =
    toMailAddress(argument) match {
      case Right(mailAddress) if addressHandler.acceptTo(mailAddress) =>
        val acc = accumulator.copy(to = mailAddress +: accumulator.to)
        (acc, recipientOk(mailAddress))
      case Right(mailAddress) =>
        (accumulator, userUnknown(mailAddress))
      case Left(error) =>
        (accumulator, error)
    }
}
zhongxinghong/Botzone-Tank2
core/utils.py
# -*- coding: utf-8 -*-
# @Author: Administrator
# @Date:   2019-04-26 22:07:11
# @Last Modified by:   Administrator
# @Last Modified time: 2019-05-29 00:01:20
"""
Utility helpers: conditional debug printing, loop-label control flow,
caching/memoization primitives, metaclasses and data serialization.
"""

__all__ = [

    "debug_print",
    "debug_pprint",
    "simulator_print",
    "simulator_pprint",

    "outer_label",

    "memorize",
    "CachedProperty",

    "SingletonMeta",
    "UniqueIntEnumMeta",

    "DataSerializer",

    ]

from .const import DEBUG_MODE, SIMULATOR_ENV, SIMULATOR_PRINT
from .global_ import pprint, pickle, base64, gzip, contextmanager, hashlib, functools, types

#{ BEGIN }#

# No-op stand-in used when printing is disabled for the current mode.
_null_func = lambda *args, **kwargs: None

if DEBUG_MODE:
    debug_print  = print
    debug_pprint = pprint
else:
    debug_print  = _null_func
    debug_pprint = _null_func

if SIMULATOR_ENV and SIMULATOR_PRINT:
    simulator_print  = print
    simulator_pprint = pprint
else:
    simulator_print  = _null_func
    simulator_pprint = _null_func


@contextmanager
def outer_label():
    """
    Break out of — or continue — an outer loop directly.

    Raise the yielded exception outside the loop body to get the effect of
    ``break outer``; raise it inside the loop body to get ``continue outer``.
    """
    class _GotoOuterException(Exception):
        pass
    try:
        yield _GotoOuterException() # a fresh class per call; nested usages must not share the name
    except _GotoOuterException: # ensures a nested inner context cannot accidentally jump to the outermost level
        pass


class _Missing(object):
    """ from werkzeug._internal """
    # Sentinel distinguishing "no cached value" from a cached None.

    def __repr__(self):
        return 'no value'

    def __reduce__(self):
        return '_missing'

_MISSING = _Missing()


class CachedProperty(property):
    """ from werkzeug.utils

    Property whose value is computed once per instance and then stored in
    the instance ``__dict__`` under the property's name.
    """
    def __init__(self, func, name=None, doc=None):
        self.__name__ = name or func.__name__
        self.__module__ = func.__module__
        self.__doc__ = doc or func.__doc__
        self.func = func

    def __set__(self, obj, value):
        obj.__dict__[self.__name__] = value

    def __get__(self, obj, type=None):
        if obj is None:
            return self
        value = obj.__dict__.get(self.__name__, _MISSING)
        if value is _MISSING:
            value = self.func(obj)
            obj.__dict__[self.__name__] = value
        return value

    @staticmethod
    def clean(obj, key):
        """
        Drop the cached value for ``key`` on ``obj`` (recompute on next access).
        """
        obj.__dict__.pop(key, None)


def memorize(func):
    """
    Decorator that caches a function's return value keyed by its argument list.
    ------------------------------------
    1. Results are cached on the function itself under ``__memory__``.
    2. The wrapper gains a ``make_key`` method returning the cache key for a
       given argument list.
    3. The wrapper gains a ``clear_memory`` method that clears all cached results.
    4. If the return value is a generator, it is exhausted immediately and the
       complete result is cached (as a list — see the code below).

    Mainly used to cache computed search paths.
    """
    def _make_key(func, *args, **kwargs):
        _key = (
            func.__module__,
            func.__name__,
            args,
            sorted(kwargs.items()) # kwargs sorted for a stable key
            )
        return hashlib.md5(pickle.dumps(_key)).hexdigest()

    def _clear_memory(func):
        if hasattr(func, "__memory__"):
            func.__memory__.clear()

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        if not hasattr(func, "__memory__"):
            func.__memory__ = {}
        key = _make_key(func, *args, **kwargs)
        res = func.__memory__.get(key, _MISSING)
        if res is _MISSING:
            res = func(*args, **kwargs)
            if isinstance(res, types.GeneratorType):
                res = list(res) # exhaust generators immediately so the full result can be cached
            func.__memory__[key] = res
        return res

    wrapper.make_key = functools.partial(_make_key, func)
    wrapper.clear_memory = functools.partial(_clear_memory, func)

    return wrapper


class SingletonMeta(type):
    """
    Singleton Metaclass
    @link https://github.com/jhao104/proxy_pool/blob/428359c8dada998481f038dbdc8d3923e5850c0e/Util/utilClass.py
    """
    _instance = {}

    def __call__(cls, *args, **kwargs):
        if cls not in cls._instance:
            cls._instance[cls] = super(SingletonMeta, cls).__call__(*args, **kwargs)
        return cls._instance[cls]


class UniqueIntEnumMeta(type):
    """
    Metaclass adding an ``__offset__`` to every int attribute of the class.

    This lets different enum-like classes declare cases with the same int
    literals while keeping the actual attribute values distinct between
    classes. Declare the offset via an ``__offset__`` class attribute.
    """
    def __new__(cls, name, bases, attrs):
        offset = attrs.get("__offset__", 0) # defaults to 0
        for k, v in attrs.items():
            if isinstance(v, int):
                attrs[k] += offset
        return super(UniqueIntEnumMeta, cls).__new__(cls, name, bases, attrs)


class DataSerializer(object):
    # Round-trips arbitrary picklable objects through
    # pickle -> gzip -> base64 (with '=' padding stripped for compactness).
    # ``__class__`` works inside these staticmethods because referencing it
    # in a method body creates the implicit class cell (Python 3).

    @staticmethod
    def _unpad(s):
        return s.rstrip("=")

    @staticmethod
    def _pad(s):
        # NOTE(review): when len(s) % 4 == 0 this appends four '='.
        # base64.b64decode tolerates the surplus padding in non-strict mode,
        # but "=" * ( -len(s) % 4 ) would avoid it — confirm before changing.
        return s + "=" * ( 4 - len(s) % 4 )

    @staticmethod
    def serialize(obj):
        return __class__._unpad(
                    base64.b64encode(
                        gzip.compress(
                            pickle.dumps(obj))).decode("utf-8"))

    @staticmethod
    def deserialize(s):
        return pickle.loads(
                    gzip.decompress(
                        base64.b64decode(
                            __class__._pad(s).encode("utf-8"))))

#{ END }#
nguyentruongngoclan/MalmoAI
Minecraft/build/tmp/recompileMc/sources/net/minecraft/client/renderer/vertex/VertexFormat.java
<filename>Minecraft/build/tmp/recompileMc/sources/net/minecraft/client/renderer/vertex/VertexFormat.java
package net.minecraft.client.renderer.vertex;

import com.google.common.collect.Lists;
import java.util.Iterator;
import java.util.List;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

/**
 * Decompiled Minecraft class: describes the layout of a vertex as an
 * ordered list of VertexFormatElement entries plus their byte offsets.
 * Tracks shortcut offsets for the normal, color and per-index UV elements.
 * Code is left byte-identical; comments only annotate the decompiled form.
 */
@SideOnly(Side.CLIENT)
public class VertexFormat
{
    private static final Logger LOGGER = LogManager.getLogger();
    private final List elements;          // VertexFormatElement, in declaration order
    private final List offsets;           // Integer byte offset of each element
    /** The next available offset in this vertex format */
    private int nextOffset;
    private int colorElementOffset;       // -1 while no color element is present
    private List elementOffsetsById;      // UV offsets, indexed by element index
    private int normalElementOffset;      // -1 while no normal element is present
    private static final String __OBFID = "CL_00002401";

    /** Copy constructor: re-adds every element so offsets are recomputed. */
    public VertexFormat(VertexFormat p_i46097_1_)
    {
        this();

        for (int i = 0; i < p_i46097_1_.getElementCount(); ++i)
        {
            this.setElement(p_i46097_1_.getElement(i));
        }

        this.nextOffset = p_i46097_1_.getNextOffset();
    }

    public VertexFormat()
    {
        this.elements = Lists.newArrayList();
        this.offsets = Lists.newArrayList();
        this.nextOffset = 0;
        this.colorElementOffset = -1;
        this.elementOffsetsById = Lists.newArrayList();
        this.normalElementOffset = -1;
    }

    public void clear()
    {
        this.elements.clear();
        this.offsets.clear();
        this.colorElementOffset = -1;
        this.elementOffsetsById.clear();
        this.normalElementOffset = -1;
        this.nextOffset = 0;
    }

    /**
     * Appends an element, assigning it the next byte offset. A second
     * position element is rejected (logged and ignored). The switch updates
     * the normal/color/UV shortcut offsets (see SwitchEnumUsage below).
     */
    public void setElement(VertexFormatElement p_177349_1_)
    {
        if (p_177349_1_.isPositionElement() && this.hasPosition())
        {
            LOGGER.warn("VertexFormat error: Trying to add a position VertexFormatElement when one already exists, ignoring.");
        }
        else
        {
            this.elements.add(p_177349_1_);
            this.offsets.add(Integer.valueOf(this.nextOffset));
            p_177349_1_.setOffset(this.nextOffset);
            this.nextOffset += p_177349_1_.getSize();

            switch (VertexFormat.SwitchEnumUsage.field_177382_a[p_177349_1_.getUsage().ordinal()])
            {
                case 1: // NORMAL
                    this.normalElementOffset = p_177349_1_.getOffset();
                    break;
                case 2: // COLOR
                    this.colorElementOffset = p_177349_1_.getOffset();
                    break;
                case 3: // UV
                    this.elementOffsetsById.add(p_177349_1_.getIndex(), Integer.valueOf(p_177349_1_.getOffset()));
            }
        }
    }

    public boolean hasNormal()
    {
        return this.normalElementOffset >= 0;
    }

    public int getNormalOffset()
    {
        return this.normalElementOffset;
    }

    public boolean hasColor()
    {
        return this.colorElementOffset >= 0;
    }

    public int getColorOffset()
    {
        return this.colorElementOffset;
    }

    public boolean hasElementOffset(int id)
    {
        return this.elementOffsetsById.size() - 1 >= id;
    }

    public int getElementOffsetById(int id)
    {
        return ((Integer)this.elementOffsetsById.get(id)).intValue();
    }

    public String toString()
    {
        String s = "format: " + this.elements.size() + " elements: ";

        for (int i = 0; i < this.elements.size(); ++i)
        {
            s = s + ((VertexFormatElement)this.elements.get(i)).toString();

            if (i != this.elements.size() - 1)
            {
                s = s + " ";
            }
        }

        return s;
    }

    // Decompiler artifact: do/while form of "any element is a position element".
    private boolean hasPosition()
    {
        Iterator iterator = this.elements.iterator();
        VertexFormatElement vertexformatelement;

        do
        {
            if (!iterator.hasNext())
            {
                return false;
            }

            vertexformatelement = (VertexFormatElement)iterator.next();
        }
        while (!vertexformatelement.isPositionElement());

        return true;
    }

    public int getNextOffset()
    {
        return this.nextOffset;
    }

    public List getElements()
    {
        return this.elements;
    }

    public int getElementCount()
    {
        return this.elements.size();
    }

    public VertexFormatElement getElement(int p_177348_1_)
    {
        return (VertexFormatElement)this.elements.get(p_177348_1_);
    }

    public boolean equals(Object p_equals_1_)
    {
        if (this == p_equals_1_)
        {
            return true;
        }
        else if (p_equals_1_ != null && this.getClass() == p_equals_1_.getClass())
        {
            VertexFormat vertexformat = (VertexFormat)p_equals_1_;
            return this.nextOffset != vertexformat.nextOffset ? false : (!this.elements.equals(vertexformat.elements) ? false : this.offsets.equals(vertexformat.offsets));
        }
        else
        {
            return false;
        }
    }

    public int hashCode()
    {
        int i = this.elements.hashCode();
        i = 31 * i + this.offsets.hashCode();
        i = 31 * i + this.nextOffset;
        return i;
    }

    /**
     * Decompiled enum-switch lookup table mapping EnumUsage ordinals to the
     * constants used in setElement(): NORMAL -> 1, COLOR -> 2, UV -> 3.
     * The try/catch blocks tolerate enum constants missing at runtime.
     */
    @SideOnly(Side.CLIENT)

    static final class SwitchEnumUsage
        {
            static final int[] field_177382_a = new int[VertexFormatElement.EnumUsage.values().length];
            private static final String __OBFID = "CL_00002400";

            static
            {
                try
                {
                    field_177382_a[VertexFormatElement.EnumUsage.NORMAL.ordinal()] = 1;
                }
                catch (NoSuchFieldError var3)
                {
                    ;
                }

                try
                {
                    field_177382_a[VertexFormatElement.EnumUsage.COLOR.ordinal()] = 2;
                }
                catch (NoSuchFieldError var2)
                {
                    ;
                }

                try
                {
                    field_177382_a[VertexFormatElement.EnumUsage.UV.ordinal()] = 3;
                }
                catch (NoSuchFieldError var1)
                {
                    ;
                }
            }
        }
}
bTest2018/griffon
subprojects/griffon-pivot/src/main/java/griffon/pivot/support/adapters/WindowAdapter.java
<filename>subprojects/griffon-pivot/src/main/java/griffon/pivot/support/adapters/WindowAdapter.java
/*
 * SPDX-License-Identifier: Apache-2.0
 *
 * Copyright 2008-2018 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package griffon.pivot.support.adapters;

import griffon.core.CallableWithArgs;
import org.apache.pivot.wtk.media.Image;

/**
 * Adapter bridging Pivot's {@code WindowListener} to optional
 * {@code CallableWithArgs} callbacks: each listener method delegates to the
 * corresponding callback field if (and only if) one has been set.
 *
 * @author <NAME>
 * @since 2.0.0
 */
public class WindowAdapter implements GriffonPivotAdapter, org.apache.pivot.wtk.WindowListener {
    // One optional callback per WindowListener event; null means "ignore".
    private CallableWithArgs<Void> iconAdded;
    private CallableWithArgs<Void> iconInserted;
    private CallableWithArgs<Void> iconsRemoved;
    private CallableWithArgs<Void> titleChanged;
    private CallableWithArgs<Void> contentChanged;
    private CallableWithArgs<Void> activeChanged;
    private CallableWithArgs<Void> maximizedChanged;

    public CallableWithArgs<Void> getIconAdded() {
        return this.iconAdded;
    }

    public CallableWithArgs<Void> getIconInserted() {
        return this.iconInserted;
    }

    public CallableWithArgs<Void> getIconsRemoved() {
        return this.iconsRemoved;
    }

    public CallableWithArgs<Void> getTitleChanged() {
        return this.titleChanged;
    }

    public CallableWithArgs<Void> getContentChanged() {
        return this.contentChanged;
    }

    public CallableWithArgs<Void> getActiveChanged() {
        return this.activeChanged;
    }

    public CallableWithArgs<Void> getMaximizedChanged() {
        return this.maximizedChanged;
    }

    public void setIconAdded(CallableWithArgs<Void> iconAdded) {
        this.iconAdded = iconAdded;
    }

    public void setIconInserted(CallableWithArgs<Void> iconInserted) {
        this.iconInserted = iconInserted;
    }

    public void setIconsRemoved(CallableWithArgs<Void> iconsRemoved) {
        this.iconsRemoved = iconsRemoved;
    }

    public void setTitleChanged(CallableWithArgs<Void> titleChanged) {
        this.titleChanged = titleChanged;
    }

    public void setContentChanged(CallableWithArgs<Void> contentChanged) {
        this.contentChanged = contentChanged;
    }

    public void setActiveChanged(CallableWithArgs<Void> activeChanged) {
        this.activeChanged = activeChanged;
    }

    public void setMaximizedChanged(CallableWithArgs<Void> maximizedChanged) {
        this.maximizedChanged = maximizedChanged;
    }

    // --- WindowListener delegation: forward the event only when a callback is set.

    public void iconAdded(org.apache.pivot.wtk.Window arg0, org.apache.pivot.wtk.media.Image arg1) {
        if (iconAdded != null) {
            iconAdded.call(arg0, arg1);
        }
    }

    public void iconInserted(org.apache.pivot.wtk.Window arg0, org.apache.pivot.wtk.media.Image arg1, int arg2) {
        if (iconInserted != null) {
            iconInserted.call(arg0, arg1, arg2);
        }
    }

    public void iconsRemoved(org.apache.pivot.wtk.Window arg0, int arg1, org.apache.pivot.collections.Sequence<Image> arg2) {
        if (iconsRemoved != null) {
            iconsRemoved.call(arg0, arg1, arg2);
        }
    }

    public void titleChanged(org.apache.pivot.wtk.Window arg0, java.lang.String arg1) {
        if (titleChanged != null) {
            titleChanged.call(arg0, arg1);
        }
    }

    public void contentChanged(org.apache.pivot.wtk.Window arg0, org.apache.pivot.wtk.Component arg1) {
        if (contentChanged != null) {
            contentChanged.call(arg0, arg1);
        }
    }

    public void activeChanged(org.apache.pivot.wtk.Window arg0, org.apache.pivot.wtk.Window arg1) {
        if (activeChanged != null) {
            activeChanged.call(arg0, arg1);
        }
    }

    public void maximizedChanged(org.apache.pivot.wtk.Window arg0) {
        if (maximizedChanged != null) {
            maximizedChanged.call(arg0);
        }
    }
}
Sakshi14-code/Facebook-Clone
Facebook-Clone-master/Facebook-Clone-master/backend/routes/postRoute.js
const express=require('express'); const router=express.Router(); const Post=require('../models/post'); const {isLoggedIn}=require('./logininfo'); router.get('/logincheck',isLoggedIn,(req,res)=>{ res.send("Yes you are logged in"); }); router.get('/allposts',isLoggedIn,async(req,res)=>{ const posts=await Post.find({}); res.send(posts); }); router.post('/posts',async(req,res)=>{ const post = await Post.create(req.body); res.send(post); }) router.get('/posts/edit/:id',async(req,res)=>{ const post= await Post.findById(req.params.id).populate('comments'); res.send(post); }) router.patch('/posts/edit/:id',async(req,res)=>{ const post=await Post.findByIdAndUpdate(req.params.id,req.body); res.send(post); }) router.delete('/posts/delete/:id',async(req,res)=>{ const post=await Post.findByIdAndDelete(req.params.id); res.send(post); }) module.exports=router;
carnei-ro/openipc-2.1
br-ext-chip-xiongmai/package/xiongmai-osdrv-xm530/files/include/mpi_vo.h
<gh_stars>1-10
/* Public MPI video-output (VO) API for the XM530 SDK: device, channel and
 * image-layer control. Declarations only; commented-out prototypes document
 * API surface that is not exposed by this SDK build. */
#ifndef __MPI_VO_H__
#define __MPI_VO_H__

#include "xm_comm_vo.h"

#ifdef __cplusplus
#if __cplusplus
extern "C" {
#endif
#endif /* __cplusplus */

/* Device Settings */
XM_S32 XM_MPI_VO_Init(void);
XM_S32 XM_MPI_VO_SetPubAttr(VO_DEV VoDev, const VO_PUB_ATTR_S *pstPubAttr);
XM_S32 XM_MPI_VO_GetPubAttr(VO_DEV VoDev, VO_PUB_ATTR_S *pstPubAttr);
XM_S32 XM_MPI_VO_Enable (VO_DEV VoDev);
XM_S32 XM_MPI_VO_Disable(VO_DEV VoDev);
/*XM_S32 XM_MPI_VO_CloseFd(XM_VOID);*/

/* General Operation of Channel */
XM_S32 XM_MPI_VO_EnableChn (VO_LAYER VoLayer, VO_CHN VoChn);
XM_S32 XM_MPI_VO_DisableChn(VO_LAYER VoLayer, VO_CHN VoChn);
XM_S32 XM_MPI_VO_SetChnAttr(VO_LAYER VoLayer, VO_CHN VoChn, const VO_CHN_ATTR_S *pstChnAttr);
XM_S32 XM_MPI_VO_GetChnAttr(VO_LAYER VoLayer, VO_CHN VoChn, VO_CHN_ATTR_S *pstChnAttr);
/* Frames obtained with GetChnFrame must be returned via ReleaseChnFrame. */
XM_S32 XM_MPI_VO_GetChnFrame(VO_LAYER VoLayer, VO_CHN VoChn, VIDEO_FRAME_INFO_S *pstFrame, XM_S32 s32MilliSec);
XM_S32 XM_MPI_VO_ReleaseChnFrame(VO_LAYER VoLayer, VO_CHN VoChn, const VIDEO_FRAME_INFO_S *pstFrame);
/*XM_S32 XM_MPI_VO_PauseChn (VO_LAYER VoLayer, VO_CHN VoChn);*/
/*XM_S32 XM_MPI_VO_ResumeChn(VO_LAYER VoLayer, VO_CHN VoChn);*/
/*XM_S32 XM_MPI_VO_StepChn(VO_LAYER VoLayer, VO_CHN VoChn);*/
/*XM_S32 XM_MPI_VO_RefreshChn( VO_LAYER VoLayer, VO_CHN VoChn);*/
/*XM_S32 XM_MPI_VO_ShowChn(VO_LAYER VoLayer, VO_CHN VoChn);*/
/*XM_S32 XM_MPI_VO_HideChn(VO_LAYER VoLayer, VO_CHN VoChn);*/
/*XM_S32 XM_MPI_VO_SendFrame(VO_LAYER VoLayer, VO_CHN VoChn, VIDEO_FRAME_INFO_S *pstVFrame, XM_S32 s32MilliSec);*/
/*XM_S32 XM_MPI_VO_ClearChnBuffer(VO_LAYER VoLayer, VO_CHN VoChn, XM_BOOL bClrAll);*/
/*XM_S32 XM_MPI_VO_GetChnRegionLuma(VO_LAYER VoLayer, VO_CHN VoChn, VO_REGION_INFO_S *pstRegionInfo,*/
/*XM_U32 *pu32LumaData, XM_S32 s32MilliSec);*/

/* Cascade setting */
/*XM_S32 XM_MPI_VO_SetCascadeAttr(const VO_CAS_ATTR_S *pstCasAttr);*/
/*XM_S32 XM_MPI_VO_GetCascadeAttr(VO_CAS_ATTR_S *pstCasAttr);*/
/*XM_S32 XM_MPI_VO_EnableCascadeDev (VO_DEV VoCasDev);*/
/*XM_S32 XM_MPI_VO_DisableCascadeDev(VO_DEV VoCasDev);*/
/*XM_S32 XM_MPI_VO_SetCascadePattern(VO_DEV VoCasDev, XM_U32 u32Pattern);*/
/*XM_S32 XM_MPI_VO_GetCascadePattern(VO_DEV VoCasDev, XM_U32 *pu32Pattern);*/
/*XM_S32 XM_MPI_VO_CascadePosBindChn(XM_U32 u32Pos, VO_DEV VoCasDev, VO_CHN VoChn);*/
/*XM_S32 XM_MPI_VO_CascadePosUnBindChn(XM_U32 u32Pos, VO_DEV VoCasDev, VO_CHN VoChn);*/
/*XM_S32 XM_MPI_VO_EnableCascade (XM_VOID);*/
/*XM_S32 XM_MPI_VO_DisableCascade(XM_VOID);*/

/* VGA setting */
/*XM_S32 XM_MPI_VO_GetVgaParam(VO_DEV VoDev, VO_VGA_PARAM_S *pstVgaParam);*/
/*XM_S32 XM_MPI_VO_SetVgaParam(VO_DEV VoDev, VO_VGA_PARAM_S *pstVgaParam);*/
/*XM_S32 XM_MPI_VO_SetDevFrameRate(VO_DEV VoDev, XM_U32 u32FrameRate);*/
/*XM_S32 XM_MPI_VO_GetDevFrameRate(VO_DEV VoDev, XM_U32 *pu32FrameRate);*/
/*XM_S32 XM_MPI_VO_EnableRecvFrameRateMatch (VO_LAYER VoLayer, VO_CHN VoChn);*/
/*XM_S32 XM_MPI_VO_DisableRecvFrameRateMatch (VO_LAYER VoLayer, VO_CHN VoChn);*/

/* HDMI setting */
/*XM_S32 XM_MPI_VO_GetHdmiParam(VO_DEV VoDev, VO_HDMI_PARAM_S *pstHdmiParam);*/
/*XM_S32 XM_MPI_VO_SetHdmiParam(VO_DEV VoDev, VO_HDMI_PARAM_S *pstHdmiParam);*/
/*XM_S32 XM_MPI_VO_SetVtth(VO_DEV VoDev, XM_U32 u32Vtth);*/
/*XM_S32 XM_MPI_VO_GetVtth(VO_DEV VoDev, XM_U32* pu32Vtth);*/

XM_S32 XM_MPI_VO_SetImageLayerAttr(VO_LAYER VoLayer, const VO_IMAGE_LAYER_ATTR_S *pstLayerAttr);
/* NOTE(review): this Get takes a const out-parameter, unlike GetChnAttr /
 * GetPubAttr above — presumably a typo in the vendor header, but the
 * prototype must match the shipped binary library, so it is left unchanged. */
XM_S32 XM_MPI_VO_GetImageLayerAttr(VO_LAYER VoLayer, const VO_IMAGE_LAYER_ATTR_S *pstLayerAttr);
XM_S32 XM_MPI_VO_EnableImageLayer(VO_LAYER VoLayer);
XM_S32 XM_MPI_VO_DisableImageLayer(VO_LAYER VoLayer);

#ifdef __cplusplus
#if __cplusplus
}
#endif
#endif /* __cplusplus */

#endif /*__MPI_VO_H__ */
lesaint/experimenting-annotation-processing
experimenting-rounds/massive-count-of-annotated-classes/src/main/java/fr/javatronic/blog/massive/annotation1/sub1/Class_8831.java
package fr.javatronic.blog.massive.annotation1.sub1; import fr.javatronic.blog.processor.Annotation_001; @Annotation_001 public class Class_8831 { }
deweixu/ulcdemo
ipc/selectdemo.c
<reponame>deweixu/ulcdemo #include <stdio.h> #include <stdlib.h> #include <sys/time.h> #include <sys/types.h> #include <sys/select.h> #include <unistd.h> #include <fcntl.h> #define oops(m,x) {perror(m); exit(x);} void showdata(char *, int); int main(int ac, char *av[]) { int fd1, fd2; struct timeval timeout; fd_set readfds; int maxfd; int retval; if (ac != 4) { fprintf(stderr, "useage: %s file file timeout", *av); exit(1); } if ((fd1 = open(av[1], O_RDONLY)) == -1) oops(av[1], 2); if ((fd2 = open(av[2], O_RDONLY)) == -1) oops(av[2], 3); maxfd = 1 + (fd1 > fd2 ? fd1 : fd2); while (1) { FD_ZERO(&readfds); FD_SET(fd1, &readfds); FD_SET(fd2, &readfds); timeout.tv_sec = atoi(av[3]); timeout.tv_usec = 0; retval = select(maxfd, &readfds, NULL, NULL, &timeout); if (retval == -1) oops("select", 4); if ( retval > 0) { if (FD_ISSET(fd1, &readfds)) showdata(av[1], fd1); if (FD_ISSET(fd2, &readfds)) showdata(av[2], fd2); } else { printf("no input after %d seconds\n", atoi(av[3])); } } } void showdata(char *fname, int fd) { char buf[BUFSIZ]; int n; printf("%s: %d", fname, n); fflush(stdout); n = read(fd, buf, BUFSIZ); if (n == -1) oops(fname, 5); write(1, buf, n); write(1, "\n", 1); }
bocke/amissl
libcmt/error.c
<reponame>bocke/amissl /* * Portable ISO 'C' (1994) runtime library for the Amiga computer * Copyright (c) 2002-2005 by <NAME> <<EMAIL>> * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * - Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * - Neither the name of Olaf Barthel nor the names of contributors * may be used to endorse or promote products derived from this * software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/ #include <errno.h> #include <string.h> #include <dos/dos.h> #include <dos/dosasl.h> int __io2errno(int io_err) { static const struct { LONG io_err; LONG errno; } map_table[] = { { ERROR_NO_FREE_STORE, ENOMEM }, { ERROR_TASK_TABLE_FULL, ENOMEM }, { ERROR_BAD_TEMPLATE, EINVAL }, { ERROR_BAD_NUMBER, EINVAL }, { ERROR_REQUIRED_ARG_MISSING, EINVAL }, { ERROR_KEY_NEEDS_ARG, EINVAL }, { ERROR_TOO_MANY_ARGS, EINVAL }, { ERROR_UNMATCHED_QUOTES, EINVAL }, { ERROR_LINE_TOO_LONG, ENAMETOOLONG }, { ERROR_FILE_NOT_OBJECT, ENOEXEC }, { ERROR_OBJECT_IN_USE, EBUSY }, { ERROR_OBJECT_EXISTS, EEXIST }, { ERROR_DIR_NOT_FOUND, ENOENT }, { ERROR_OBJECT_NOT_FOUND, ENOENT }, { ERROR_BAD_STREAM_NAME, EINVAL }, { ERROR_OBJECT_TOO_LARGE, EFBIG }, { ERROR_ACTION_NOT_KNOWN, ENOSYS }, { ERROR_INVALID_COMPONENT_NAME, EINVAL }, { ERROR_INVALID_LOCK, EBADF }, { ERROR_OBJECT_WRONG_TYPE, EFTYPE }, { ERROR_DISK_NOT_VALIDATED, EROFS }, { ERROR_DISK_WRITE_PROTECTED, EROFS }, { ERROR_RENAME_ACROSS_DEVICES, EXDEV }, { ERROR_DIRECTORY_NOT_EMPTY, ENOTEMPTY }, { ERROR_TOO_MANY_LEVELS, ENAMETOOLONG }, { ERROR_DEVICE_NOT_MOUNTED, ENXIO }, { ERROR_COMMENT_TOO_BIG, ENAMETOOLONG }, { ERROR_DISK_FULL, ENOSPC }, { ERROR_DELETE_PROTECTED, EACCES }, { ERROR_WRITE_PROTECTED, EACCES }, { ERROR_READ_PROTECTED, EACCES }, { ERROR_NOT_A_DOS_DISK, EFTYPE }, { ERROR_NO_DISK, EACCES }, { ERROR_IS_SOFT_LINK, EFTYPE }, { ERROR_BAD_HUNK, ENOEXEC }, { ERROR_NOT_IMPLEMENTED, ENOSYS }, { ERROR_LOCK_COLLISION, EACCES }, { ERROR_BREAK, EINTR }, { ERROR_NOT_EXECUTABLE, ENOEXEC } }; unsigned int i; int result; result = EIO; for(i = 0 ; i < sizeof(map_table) / sizeof(map_table[0]) ; i++) { if(map_table[i].io_err == io_err) { result = map_table[i].errno; break; } } return(result); } /****************************************************************************/ char * strerror(int error_number) { const char *result; switch(error_number) { #ifdef EPERM case EPERM: result = "Operation not permitted"; break; #endif /* EPERM */ #ifdef 
ENOENT case ENOENT: result = "No such file or directory"; break; #endif /* ENOENT */ #ifdef ESRCH case ESRCH: result = "No such process"; break; #endif /* ESRCH */ #ifdef EINTR case EINTR: result = "Interrupted system call"; break; #endif /* EINTR */ #ifdef EIO case EIO: result = "Input/output error"; break; #endif /* EIO */ #ifdef ENXIO case ENXIO: result = "Device not configured"; break; #endif /* ENXIO */ #ifdef ENOEXEC case ENOEXEC: result = "Exec format error"; break; #endif /* ENOEXEC */ #ifdef EBADF case EBADF: result = "Bad file descriptor"; break; #endif /* EBADF */ #ifdef ECHILD case ECHILD: result = "No child processes"; break; #endif /* ECHILD */ #ifdef EDEADLK case EDEADLK: result = "Resource deadlock avoided"; break; #endif /* EDEADLK */ #ifdef ENOMEM case ENOMEM: result = "Cannot allocate memory"; break; #endif /* ENOMEM */ #ifdef EACCES case EACCES: result = "Permission denied"; break; #endif /* EACCES */ #ifdef EFAULT case EFAULT: result = "Bad address"; break; #endif /* EFAULT */ #ifdef ENOTBLK case ENOTBLK: result = "Block device required"; break; #endif /* ENOTBLK */ #ifdef EBUSY case EBUSY: result = "Device busy"; break; #endif /* EBUSY */ #ifdef EEXIST case EEXIST: result = "File exists"; break; #endif /* EEXIST */ #ifdef EXDEV case EXDEV: result = "Cross-device link"; break; #endif /* EXDEV */ #ifdef ENODEV case ENODEV: result = "Operation not supported by device"; break; #endif /* ENODEV */ #ifdef ENOTDIR case ENOTDIR: result = "Not a directory"; break; #endif /* ENOTDIR */ #ifdef EISDIR case EISDIR: result = "Is a directory"; break; #endif /* EISDIR */ #ifdef EINVAL case EINVAL: result = "Invalid argument"; break; #endif /* EINVAL */ #ifdef ENFILE case ENFILE: result = "Too many open files in system"; break; #endif /* ENFILE */ #ifdef EMFILE case EMFILE: result = "Too many open files"; break; #endif /* EMFILE */ #ifdef ENOTTY case ENOTTY: result = "Inappropriate ioctl for device"; break; #endif /* ENOTTY */ #ifdef ETXTBSY case ETXTBSY: 
result = "Text file busy"; break; #endif /* ETXTBSY */ #ifdef EFBIG case EFBIG: result = "File too large"; break; #endif /* EFBIG */ #ifdef ENOSPC case ENOSPC: result = "No space left on device"; break; #endif /* ENOSPC */ #ifdef ESPIPE case ESPIPE: result = "Illegal seek"; break; #endif /* ESPIPE */ #ifdef EROFS case EROFS: result = "Read-only file system"; break; #endif /* EROFS */ #ifdef EMLINK case EMLINK: result = "Too many links"; break; #endif /* EMLINK */ #ifdef EPIPE case EPIPE: result = "Broken pipe"; break; #endif /* EPIPE */ #ifdef EDOM case EDOM: result = "Numerical argument out of domain"; break; #endif /* EDOM */ #ifdef ERANGE case ERANGE: result = "Result too large"; break; #endif /* ERANGE */ #ifdef EAGAIN case EAGAIN: result = "Resource temporarily unavailable"; break; #endif /* EAGAIN */ #if defined(EWOULDBLOCK) && (EWOULDBLOCK != EAGAIN) case EWOULDBLOCK: result = "Operation would block"; break; #endif /* EWOULDBLOCK */ #ifdef EINPROGRESS case EINPROGRESS: result = "Operation now in progress"; break; #endif /* EINPROGRESS */ #ifdef EALREADY case EALREADY: result = "Operation already in progress"; break; #endif /* EALREADY */ #ifdef ENOTSOCK case ENOTSOCK: result = "Socket operation on non-socket"; break; #endif /* ENOTSOCK */ #ifdef EDESTADDRREQ case EDESTADDRREQ: result = "Destination address required"; break; #endif /* EDESTADDRREQ */ #ifdef EMSGSIZE case EMSGSIZE: result = "Message too long"; break; #endif /* EMSGSIZE */ #ifdef EPROTOTYPE case EPROTOTYPE: result = "Protocol wrong type for socket"; break; #endif /* EPROTOTYPE */ #ifdef ENOPROTOOPT case ENOPROTOOPT: result = "Protocol not available"; break; #endif /* ENOPROTOOPT */ #ifdef EPROTONOSUPPORT case EPROTONOSUPPORT: result = "Protocol not supported"; break; #endif /* EPROTONOSUPPORT */ #ifdef ESOCKTNOSUPPORT case ESOCKTNOSUPPORT: result = "Socket type not supported"; break; #endif /* ESOCKTNOSUPPORT */ #ifdef EOPNOTSUPP case EOPNOTSUPP: result = "Operation not supported on socket"; 
break; #endif /* EOPNOTSUPP */ #ifdef EPFNOSUPPORT case EPFNOSUPPORT: result = "Protocol family not supported"; break; #endif /* EPFNOSUPPORT */ #ifdef EAFNOSUPPORT case EAFNOSUPPORT: result = "Address family not supported by protocol family"; break; #endif /* EAFNOSUPPORT */ #ifdef EADDRINUSE case EADDRINUSE: result = "Address already in use"; break; #endif /* EADDRINUSE */ #ifdef EADDRNOTAVAIL case EADDRNOTAVAIL: result = "Can't assign requested address"; break; #endif /* EADDRNOTAVAIL */ #ifdef ENETDOWN case ENETDOWN: result = "Network is down"; break; #endif /* ENETDOWN */ #ifdef ENETUNREACH case ENETUNREACH: result = "Network is unreachable"; break; #endif /* ENETUNREACH */ #ifdef ENETRESET case ENETRESET: result = "Network dropped connection on reset"; break; #endif /* ENETRESET */ #ifdef ECONNABORTED case ECONNABORTED: result = "Software caused connection abort"; break; #endif /* ECONNABORTED */ #ifdef ECONNRESET case ECONNRESET: result = "Connection reset by peer"; break; #endif /* ECONNRESET */ #ifdef ENOBUFS case ENOBUFS: result = "No buffer space available"; break; #endif /* ENOBUFS */ #ifdef EISCONN case EISCONN: result = "Socket is already connected"; break; #endif /* EISCONN */ #ifdef ENOTCONN case ENOTCONN: result = "Socket is not connected"; break; #endif /* ENOTCONN */ #ifdef ESHUTDOWN case ESHUTDOWN: result = "Can't send after socket shutdown"; break; #endif /* ESHUTDOWN */ #ifdef ETOOMANYREFS case ETOOMANYREFS: result = "Too many references: can't splice"; break; #endif /* ETOOMANYREFS */ #ifdef ETIMEDOUT case ETIMEDOUT: result = "Connection timed out"; break; #endif /* ETIMEDOUT */ #ifdef ECONNREFUSED case ECONNREFUSED: result = "Connection refused"; break; #endif /* ECONNREFUSED */ #ifdef ELOOP case ELOOP: result = "Too many levels of symbolic links"; break; #endif /* ELOOP */ #ifdef ENAMETOOLONG case ENAMETOOLONG: result = "File name too long"; break; #endif /* ENAMETOOLONG */ #ifdef EHOSTDOWN case EHOSTDOWN: result = "Host is down"; break; 
#endif /* EHOSTDOWN */ #ifdef EHOSTUNREACH case EHOSTUNREACH: result = "No route to host"; break; #endif /* EHOSTUNREACH */ #ifdef ENOTEMPTY case ENOTEMPTY: result = "Directory not empty"; break; #endif /* ENOTEMPTY */ #ifdef EPROCLIM case EPROCLIM: result = "Too many processes"; break; #endif /* EPROCLIM */ #ifdef EUSERS case EUSERS: result = "Too many users"; break; #endif /* EUSERS */ #ifdef EDQUOT case EDQUOT: result = "Disc quota exceeded"; break; #endif /* EDQUOT */ #ifdef ESTALE case ESTALE: result = "Stale NFS file handle"; break; #endif /* ESTALE */ #ifdef EREMOTE case EREMOTE: result = "Too many levels of remote in path"; break; #endif /* EREMOTE */ #ifdef EBADRPC case EBADRPC: result = "RPC struct is bad"; break; #endif /* EBADRPC */ #ifdef ERPCMISMATCH case ERPCMISMATCH: result = "RPC version wrong"; break; #endif /* ERPCMISMATCH */ #ifdef EPROGUNAVAIL case EPROGUNAVAIL: result = "RPC prog. not avail"; break; #endif /* EPROGUNAVAIL */ #ifdef EPROGMISMATCH case EPROGMISMATCH: result = "Program version wrong"; break; #endif /* EPROGMISMATCH */ #ifdef EPROCUNAVAIL case EPROCUNAVAIL: result = "Bad procedure for program"; break; #endif /* EPROCUNAVAIL */ #ifdef ENOLCK case ENOLCK: result = "No locks available"; break; #endif /* ENOLCK */ #ifdef ENOSYS case ENOSYS: result = "Function not implemented"; break; #endif /* ENOSYS */ #ifdef EFTYPE case EFTYPE: result = "Inappropriate file type or format"; break; #endif /* EFTYPE */ #ifdef EAUTH case EAUTH: result = "Authentication error."; break; #endif /* EAUTH */ #ifdef ENEEDAUTH case ENEEDAUTH: result = "Need authenticator."; break; #endif /* ENEEDAUTH */ #ifdef EIDRM case EIDRM: result = "Identifier removed."; break; #endif /* EIDRM */ #ifdef ENOMSG case ENOMSG: result = "No message of the desired type."; break; #endif /* ENOMSG */ #ifdef EOVERFLOW case EOVERFLOW: result = "Value too large to be stored in data type."; break; #endif /* EOVERFLOW */ #ifdef EILSEQ case EILSEQ: result = "Encoding error detected"; 
break; #endif /* EILSEQ */ default: { static char error_buffer[80]; char number[30]; char *s = number; int is_negative = 0; unsigned int n; int i,len,c; /* We convert the error number into in an unsigned integer, so that numbers such as 0x80000000 can come out of the conversion. */ if(error_number < 0) { is_negative = 1; n = (-error_number); } else { n = error_number; } /* Convert the error number into a string of digits. */ len = 0; do { (*s++) = '0' + (n % 10); n /= 10; len++; } while(n > 0 && len < (int)sizeof(number)-1); /* Add the sign, if necessary. */ if(is_negative && len < (int)sizeof(number)-1) { (*s++) = '-'; len++; } (*s) = '\0'; /* Reverse the string in place. */ for(i = 0 ; i < len / 2 ; i++) { c = number[len-1-i]; number[len-1-i] = number[i]; number[i] = c; } strcpy(error_buffer,"Unknown error "); strcat(error_buffer,number); result = error_buffer; break; } } return((char *)result); }
faserg1/adb
tests/tests/test-channel.cpp
#include "test-guild.hpp" #include <libadb/api/api.hpp> #include <libadb/api/channel/channel-api.hpp> using namespace adb::api; void testPins(adb::api::DiscordApi &api) { auto channelId = adb::types::SFID{"964458591102853120"}; auto messageId = adb::types::SFID{"964826080009650226"}; auto channelApi = api.CreateChannelApi(); channelApi->pinMessage(channelId, messageId, std::string{"Pin this!"}); auto pinnedMessages = channelApi->getPinnedMessages(channelId); channelApi->unpinMessage(channelId, messageId, std::string{"Unpin this!"}); }
bgoonz/DS-n-Algos-Mega-Archive
JAVASCRIPT/javascript.datastructures.algorithms.master/Graph/Graph.js
<gh_stars>0 var Dictionary = require('./../Dictionary/Dictionary'); var Queue = require('./../Queue/Queue'); function Graph() { var vertices = []; var adjList = new Dictionary(); this.addVertex = function (v) { vertices.push(v); adjList.set(v, []); }; this.addEdge = function (v, w) { adjList.get(v).push(w); adjList.get(w).push(v); }; this.getAdjList = function () { return adjList.getItems(); }; var initializeColor = function () { var color = {}; for (var i = 0; i < vertices.length; i++) { color[vertices[i]] = 'white'; } return color; }; this.bfs = function (v, callback) { var color = initializeColor(), queue = new Queue(); queue.enqueue(v); color[v] = 'black'; while (!queue.isEmpty()) { var u = queue.dequeue(), neighbors = adjList.get(u); for (var i = 0; i < neighbors.length; i++) { var w = neighbors[i]; if (color[w] === 'white') { queue.enqueue(w); color[w] = 'black'; } } if (callback) { callback(u); } } }; this.BFS = function (v) { var color = initializeColor(), queue = new Queue(), d = {}, pred = {}; queue.enqueue(v); for (var i = 0; i < vertices.length; i++) { d[vertices[i]] = 0; pred[vertices[i]] = null; } while (!queue.isEmpty()) { var u = queue.dequeue(), neighbors = adjList.get(u); color[v] = 'black'; for (i = 0; i < neighbors.length; i++) { var w = neighbors[i]; if (color[w] === 'white') { queue.enqueue(w); color[w] = 'black'; d[w] = d[u] + 1; pred[w] = u; } } color[u] = 'black'; } return { distances: d, predecessors: pred }; }; this.dfs = function (v, callback) { var color = initializeColor(); dfsVisit(v, color, callback); }; var dfsVisit = function (u, color, callback) { if (callback) { callback(u); } var neighbors = adjList.get(u); color[u] = 'black'; for (var i = 0; i < neighbors.length; i++) { var w = neighbors[i]; if (color[w] === 'white') { dfsVisit(w, color, callback); } } }; var time = 0; this.DFS = function () { var color = initializeColor(), d = {}, f = {}, p = {}; time = 0; for (var i = 0; i < vertices.length; i++) { d[vertices[i]] = 0; 
f[vertices[i]] = 0; p[vertices[i]] = null; } for (i = 0; i < vertices.length; i++) { if (color[vertices[i]] === 'white') { DFSVisit(vertices[i], color, d, f, p); } } return { discovery: d, finished: f, predecessors: p } }; var DFSVisit = function (u, color, d, f, p) { color[u] = 'black'; d[u] = ++time; var neighbors = adjList.get(u); for (var i = 0; i < neighbors.length; i++) { var w = neighbors[i]; if (color[w] === 'white') { p[w] = u; DFSVisit(w, color, d, f, p); } } f[u] = ++time; }; } module.exports = Graph;
syin2/openthread
third_party/silabs/gecko_sdk_suite/v1.0/platform/base/hal/micro/cortexm3/efm32/cstartup-common.c
<filename>third_party/silabs/gecko_sdk_suite/v1.0/platform/base/hal/micro/cortexm3/efm32/cstartup-common.c
//=============================================================================
// FILE
//   cstartup.c - Startup and low-level utility code for Ember's Cortex based
//   SOCs when using the IAR toolchain.
//
// DESCRIPTION
//   This file defines the basic information needed to go from reset up to
//   the main() found in C code.
//
//   Copyright 2013 Silicon Laboratories, Inc. *80*
//=============================================================================
#include PLATFORM_HEADER
#include "hal/micro/cortexm3/diagnostic.h"
#include "hal/micro/cortexm3/efm32/mpu.h"
#include "hal/micro/micro.h"
#include "hal/micro/cortexm3/memmap.h"
#include "hal/micro/cortexm3/cstartup-common.h"
#include "hal/micro/cortexm3/internal-storage.h"
#include "stack/include/ember-types.h"
#include "hal/micro/bootloader-interface.h"
#include "em_device.h"
#include "em_rmu.h"

// TODO: comments in this file relate to the em3xx instead of efr32. Should
// be cleaned up.

// Pull in the SOFTWARE_VERSION and EMBER_BUILD_NUMBER from the stack
#include "stack/config/config.h"

// Define the CUSTOMER_APPLICATION_VERSION if it wasn't set
#ifndef CUSTOMER_APPLICATION_VERSION
  #define CUSTOMER_APPLICATION_VERSION 0
#endif

// Define the CUSTOMER_BOOTLOADER_VERSION if it wasn't set
#ifndef CUSTOMER_BOOTLOADER_VERSION
  #define CUSTOMER_BOOTLOADER_VERSION 0
#endif

// Verify the various bootloader options that may be specified. Use of some
// options is now deprecated and will be removed in a future release.
// On the 35x platform, the use of these options is only important to specify
// the size of the bootloader, rather than the bootloader type.
// By default, the lack of any option will indicate an 8k bootloader size
// The NULL_BTL option indicates no bootloader is used.
#ifdef APP_BTL
  #pragma message("The APP_BTL build option is deprecated. Removing this option will build for any 16k bootloader type.")
#endif
#ifdef SERIAL_UART_BTL
  #pragma message("The SERIAL_UART_BTL build option is deprecated. Removing this option will build for any 16k bootloader type.")
#endif
#ifdef SERIAL_OTA_BTL
  #pragma message("The SERIAL_UART_OTA build option is deprecated. Removing this option will build for any 16k bootloader type.")
#endif
#ifdef NULL_BTL
  // Fully supported, no error
#endif
#ifdef SMALL_BTL
  #error SMALL_BTL is not supported
#endif

// Any of the Gecko info-page bootloader flavors means no Bootloader Address
// Table is compiled into this image (see the BAT definitions below).
#if defined GECKO_INFO_PAGE_BTL \
  || defined APP_GECKO_INFO_PAGE_BTL \
  || defined STA_GECKO_INFO_PAGE_BTL \
  || defined LOCAL_STORAGE_GECKO_INFO_PAGE_BTL
  #define NO_BAT
#endif

//=============================================================================
// Define the size of the call stack and define a block of memory for it.
//
// Place the cstack area in a segment named CSTACK.  This segment is
// defined soley for the purpose of placing the stack.  Refer to reset handler
// for the initialization code and iar-cfg-common.icf for segment placement
// in memory.
//
// halResetInfo, used to store crash information and bootloader parameters, is
// overlayed on top of the base of this segment so it can be overwritten by the
// call stack.
// This assumes that the stack will not go that deep between reset and
// use of the crash or the bootloader data.
//=============================================================================
#ifndef CSTACK_SIZE
  #ifdef RTOS
    // The RTOS will handle the actual CSTACK sizing per-task, but we must
    // still allocate some space for startup and exceptions.
    #define CSTACK_SIZE (128)  // *4 = 512 bytes
  #else
    #if (! defined(EMBER_STACK_IP))
      // Pro Stack
      // Right now we define the stack size to be for the worst case scenario,
      // ECC.  The ECC 163k1 library and the ECC 283k1 Library both use the stack
      // for calculations. Empirically I have seen it use as much as 1900 bytes
      // for the 'key bit generate' operation.
      // So we add a 25% buffer: 1900 * 1.25 = 2375
      #define CSTACK_SIZE (600)  // *4 = 2400 bytes
    #else
      // IP Stack
      #define CSTACK_SIZE (950)  // *4 = 3800 bytes
    #endif // !EMBER_STACK_IP
  #endif
#endif
VAR_AT_SEGMENT(NO_STRIPPING uint32_t cstackMemory[CSTACK_SIZE], __CSTACK__);

#ifndef HTOL_EM3XX
// Create an array to hold space for the guard region. Do not actually use this
// array in code as we will move the guard region around programatically. This
// is only here so that the linker takes into account the size of the guard
// region when configuring the RAM.
ALIGNMENT(HEAP_GUARD_REGION_SIZE_BYTES)
VAR_AT_SEGMENT(NO_STRIPPING uint8_t guardRegionPlaceHolder[HEAP_GUARD_REGION_SIZE_BYTES], __GUARD_REGION__);
#endif

// Reset cause and crash info live in a special RAM segment that is
// not modified during startup.  This segment is overlayed on top of the
// bottom of the cstack.
VAR_AT_SEGMENT(NO_STRIPPING HalResetInfoType halResetInfo, __RESETINFO__);

// If space is needed in the flash for data storage like for the local storage
// bootloader then create an array here to hold a place for this data.
#if INTERNAL_STORAGE_SIZE_B > 0
// Define the storage region as an uninitialized array in the
// __INTERNAL_STORAGE__ region which the linker knows how to place.
VAR_AT_SEGMENT(NO_STRIPPING uint8_t internalStorage[INTERNAL_STORAGE_SIZE_B], __INTERNAL_STORAGE__);
#endif

//=============================================================================
// Declare the address tables which will always live at well known addresses
//=============================================================================
#ifdef NULL_BTL
// In the case of a NULL_BTL application, we define a dummy BAT
VAR_AT_SEGMENT(NO_STRIPPING const HalBootloaderAddressTableType halBootloaderAddressTable, __BAT_INIT__) =
{
  { _CSTACK_SEGMENT_END,
    halEntryPoint,
    halNmiIsr,
    halHardFaultIsr,
    BOOTLOADER_ADDRESS_TABLE_TYPE,
    BAT_NULL_VERSION,
    NULL                    // No other vector table.
  },
  BL_EXT_TYPE_NULL,         //uint16_t bootloaderType;
  BOOTLOADER_INVALID_VERSION, //uint16_t bootloaderVersion;
  &halAppAddressTable,
  PLAT,                     //uint8_t platInfo;   // type of platform, defined in micro.h
  MICRO,                    //uint8_t microInfo;  // type of micro, defined in micro.h
  PHY,                      //uint8_t phyInfo;    // type of phy, defined in micro.h
  0,                        //uint8_t reserved;   // reserved for future use
  NULL,                     // eblProcessInit
  NULL,                     // eblProcess
  NULL,                     // eblDataFuncs
  NULL,                     // eepromInit
  NULL,                     // eepromRead
  NULL,                     // eepromWrite
  NULL,                     // eepromShutdown
  NULL,                     // eepromInfo
  NULL,                     // eepromErase
  NULL,                     // eepromBusy
  EMBER_BUILD_NUMBER,       // uint16_t softwareBuild;
  0,                        // uint16_t reserved2;
  CUSTOMER_BOOTLOADER_VERSION // uint32_t customerBootloaderVersion;
};
#elif !defined NO_BAT
// otherwise we just define a variable that maps to the real bootloader BAT
VAR_AT_SEGMENT(NO_STRIPPING __no_init const HalBootloaderAddressTableType halBootloaderAddressTable, __BAT__);
#endif

// Application Address Table: linker-segment constants here are consumed by
// the bootloader and by em3xx_convert/ebl generation tools; field order and
// placement must not change.
VAR_AT_SEGMENT(NO_STRIPPING const HalAppAddressTableType halAppAddressTable, __AAT__) =
{
  { _CSTACK_SEGMENT_END,
    halEntryPoint,
    halNmiIsr,
    halHardFaultIsr,
    APP_ADDRESS_TABLE_TYPE,
    AAT_VERSION,
    __vector_table
  },
  PLAT,                     //uint8_t platInfo;   // type of platform, defined in micro.h
  MICRO,                    //uint8_t microInfo;  // type of micro, defined in micro.h
  PHY,                      //uint8_t phyInfo;    // type of phy, defined in micro.h
  sizeof(HalAppAddressTableType),  // size of aat itself
  SOFTWARE_VERSION,         // uint16_t softwareVersion
  EMBER_BUILD_NUMBER,       // uint16_t softwareBuild
  0,                        //uint32_t timestamp; // Unix epoch time of .ebl file, filled in by ebl gen
  "",                       //uint8_t imageInfo[IMAGE_INFO_SZ];  // string, filled in by ebl generation
  0,                        //uint32_t imageCrc;  // CRC over following pageRanges, filled in by ebl gen
  { {UNUSED_AAT_PAGE_NUMBER, UNUSED_AAT_PAGE_NUMBER},  //pageRange_t pageRanges[NUM_AAT_PAGE_RANGES];
    {UNUSED_AAT_PAGE_NUMBER, UNUSED_AAT_PAGE_NUMBER},  // Flash pages used by app, filled in by ebl gen
    {UNUSED_AAT_PAGE_NUMBER, UNUSED_AAT_PAGE_NUMBER},
    {UNUSED_AAT_PAGE_NUMBER, UNUSED_AAT_PAGE_NUMBER},
    {UNUSED_AAT_PAGE_NUMBER, UNUSED_AAT_PAGE_NUMBER},
    {UNUSED_AAT_PAGE_NUMBER, UNUSED_AAT_PAGE_NUMBER}
  },
  _SIMEE_SEGMENT_BEGIN,     //void *simeeBottom;
  CUSTOMER_APPLICATION_VERSION, //uint32_t customerApplicationVersion;
  _INTERNAL_STORAGE_SEGMENT_BEGIN, //void *internalStorageBottom;
  { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0XFF }, // image stamp (filled in by em3xx_convert)
  FAMILY,                   //uint8_t familyInfo; (defined in micro.h)
  { 0 },                    //uint8_t bootloaderReserved[] (zero fill, previously was 0xFF fill)
  _DEBUG_CHANNEL_SEGMENT_BEGIN, //void *debugChannelBottom;
  _NO_INIT_SEGMENT_BEGIN,   //void *noInitBottom;
  _BSS_SEGMENT_END,         //void *appRamTop; NO LONGER USED! (set to __BSS__ for 3xx convert)
  _BSS_SEGMENT_END,         //void *globalTop;
  _CSTACK_SEGMENT_END,      //void *cstackTop;
  _DATA_INIT_SEGMENT_END,   //void *initcTop;
  _TEXT_SEGMENT_END,        //void *codeTop;
  _CSTACK_SEGMENT_BEGIN,    //void *cstackBottom;
  _EMHEAP_OVERLAY_SEGMENT_END, //void *heapTop;
  _SIMEE_SEGMENT_END,       //void *simeeTop;
  _DEBUG_CHANNEL_SEGMENT_END //void *debugChannelTop;
};

//=============================================================================
// Define the vector table as a HalVectorTableType.  NO_STRIPPING ensures the
// compiler will not strip the table.  const ensures the table is placed into
// flash.  The VAR_AT_SEGMENT() macro tells the compiler/linker to place the
// vector table in the INTVEC segment which holds the reset/interrupt vectors
// at address 0x00000000.
//
// All Handlers point to a corresponding ISR.  The ISRs are prototyped above.
// The file isr-stubs.s79 provides a weak definition for all ISRs.  To
// "register" its own ISR, an application simply has to define the function
// and the weak stub will be overridden.
//
// The list of handlers are extracted from the NVIC configuration file.  The
// order of the handlers in the NVIC configuration file is critical since it
// translates to the order they are placed into the vector table here.
//=============================================================================
VAR_AT_SEGMENT(NO_STRIPPING const HalVectorTableType __vector_table[], __INTVEC__) =
{
  { .topOfStack = _CSTACK_SEGMENT_END },
  #ifndef INTERRUPT_DEBUGGING
    #define EXCEPTION(vectorNumber, functionName, priorityLevel, subpriority) \
      functionName,
  #else //INTERRUPT_DEBUGGING
    // The interrupt debug behavior inserts a special shim handler before
    // the actual interrupt.  The shim handler then redirects to the
    // actual table, defined below
    #define EXCEPTION(vectorNumber, functionName, priorityLevel, subpriority) \
      halInternalIntDebuggingIsr,
    // PERM_EXCEPTION is used for any vectors that cannot be redirected
    // throught the shim handler. (such as the reset vector)
    #define PERM_EXCEPTION(vectorNumber, functionName, priorityLevel) \
      functionName,
  #endif //INTERRUPT_DEBUGGING
  #include NVIC_CONFIG
  #undef EXCEPTION
  #undef PERM_EXCEPTION
};

// halInternalClassifyReset() records the cause of the last reset and any
// assert information here.  If the last reset was not due to an assert,
// the saved assert filename and line number will be NULL and 0 respectively.
// Extended reset cause captured once at boot by halInternalClassifyReset()
// and reported via halGetResetInfo()/halGetExtendedResetInfo().
static uint16_t savedResetCause;
// Assert file/line captured across an assert-triggered reset; fields stay
// NULL/0 when the last reset was not caused by an assert.
static HalAssertInfoType savedAssertInfo;

// Decode the hardware reset event register into an extended RESET_* cause,
// optionally recovering a more specific software reset reason preserved in
// RAM, and latch the result into savedResetCause/savedAssertInfo.
void halInternalClassifyReset(void)
{
  // Table used to convert from RESET_EVENT register bits to reset types
  static const uint16_t resetEventTable[] = {
#ifdef _EZR_DEVICE
    RESET_POWERON_HV,                    // bit  0: PORST
    RESET_BROWNOUT_UNREGPOWER,           // bit  1: BODUNREGRST
    RESET_BROWNOUT_REGPOWER,             // bit  2: BODREGRST
    RESET_EXTERNAL_PIN,                  // bit  3: EXTRST
    RESET_WATCHDOG_EXPIRED,              // bit  4: WDOGRST
    RESET_FATAL_LOCKUP,                  // bit  5: LOCKUPRST
    RESET_SOFTWARE,                      // bit  6: SYSREQRST
    RESET_SOFTWARE_EM4,                  // bit  7: EM4RST
    RESET_EXTERNAL_EM4PIN,               // bit  8: EM4WURST
    RESET_BROWNOUT_AVDD0,                // bit  9: BODAVDD0
    RESET_BROWNOUT_AVDD1,                // bit 10: BODAVDD1
#elif defined _EFR_DEVICE
    RESET_POWERON_HV,                    // bit  0: PORST
    RESET_UNKNOWN_UNKNOWN,               // bit  1: RESERVED
    RESET_BROWNOUT_AVDD,                 // bit  2: AVDDBOD
    RESET_BROWNOUT_DVDD,                 // bit  3: DVDDBOD
    RESET_BROWNOUT_DEC,                  // bit  4: DECBOD
    RESET_UNKNOWN_UNKNOWN,               // bit  5: RESERVED
    RESET_UNKNOWN_UNKNOWN,               // bit  6: RESERVED
    RESET_UNKNOWN_UNKNOWN,               // bit  7: RESERVED
    RESET_EXTERNAL_PIN,                  // bit  8: EXTRST
    RESET_FATAL_LOCKUP,                  // bit  9: LOCKUPRST
    RESET_SOFTWARE,                      // bit 10: SYSREQRST
    RESET_WATCHDOG_EXPIRED,              // bit 11: WDOGRST
    RESET_UNKNOWN_UNKNOWN,               // bit 12: RESERVED
    RESET_UNKNOWN_UNKNOWN,               // bit 13: RESERVED
    RESET_UNKNOWN_UNKNOWN,               // bit 14: RESERVED
    RESET_UNKNOWN_UNKNOWN,               // bit 15: RESERVED
    RESET_SOFTWARE_EM4,                  // bit 16: EM4RST
#endif
  };

  uint32_t resetEvent = RMU_ResetCauseGet();
  // Clear the latched causes so the next boot reports fresh information.
  RMU_ResetCauseClear();
  uint16_t cause = RESET_UNKNOWN;
  uint16_t i;

  // Scan from the LSB up; the first (lowest-numbered) event bit set wins.
  for (i = 0; i < sizeof(resetEventTable)/sizeof(resetEventTable[0]); i++) {
    if (resetEvent & (1 << i)) {
      cause = resetEventTable[i];
      break;
    }
  }

  if (cause == RESET_SOFTWARE) {
    if((halResetInfo.crash.resetSignature == RESET_VALID_SIGNATURE)
       && (RESET_BASE_TYPE(halResetInfo.crash.resetReason) < NUM_RESET_BASE_TYPES)) {
      // The extended reset cause is recovered from RAM
      // This can be trusted because the hardware reset event was software
      // and additionally because the signature is valid
      savedResetCause = halResetInfo.crash.resetReason;
    } else {
      savedResetCause = RESET_SOFTWARE_UNKNOWN;
    }
    // mark the signature as invalid
    halResetInfo.crash.resetSignature = RESET_INVALID_SIGNATURE;
  } else if ( (cause == RESET_BOOTLOADER_DEEPSLEEP)
             // NOTE(review): resetEventTable above never yields
             // RESET_BOOTLOADER_DEEPSLEEP, so this branch looks unreachable
             // on this family — confirm before relying on it.
             && (halResetInfo.crash.resetSignature == RESET_VALID_SIGNATURE)
             && (halResetInfo.crash.resetReason == RESET_BOOTLOADER_DEEPSLEEP)) {
    // Save the crash info for bootloader deep sleep (even though it's not used
    // yet) and invalidate the resetSignature.
    halResetInfo.crash.resetSignature = RESET_INVALID_SIGNATURE;
    savedResetCause = halResetInfo.crash.resetReason;
  } else {
    savedResetCause = cause;
  }

  // If the last reset was due to an assert, save the assert info.
  if (savedResetCause == RESET_CRASH_ASSERT) {
    savedAssertInfo = halResetInfo.crash.data.assertInfo;
  }
}

// Return only the base reset type of the recorded cause.
uint8_t halGetResetInfo(void)
{
  return RESET_BASE_TYPE(savedResetCause);
}

// Return the full extended reset cause recorded at boot.
uint16_t halGetExtendedResetInfo(void)
{
  return savedResetCause;
}

// Return the assert info captured by halInternalClassifyReset(); fields are
// NULL/0 when the last reset was not an assert.
const HalAssertInfoType *halGetAssertInfo(void)
{
  return &savedAssertInfo;
}

#ifdef INTERRUPT_DEBUGGING
//=============================================================================
// If interrupt debugging is enabled, the actual ISRs are listed in this
// secondary interrupt table. The halInternalIntDebuggingIsr will use this
// table to jump to the appropriate handler
//=============================================================================
NO_STRIPPING const HalVectorTableType __real_vector_table[] = {
  { .topOfStack = _CSTACK_SEGMENT_END },
#define EXCEPTION(vectorNumber, functionName, priorityLevel, subpriority) \
  functionName,
#include NVIC_CONFIG
#undef EXCEPTION
};
#error INTERRUPT_DEBUGGING not yet supported on efm32!
#endif //INTERRUPT_DEBUGGING
mhufflep/msh
libft/src/ft_itoa.c
#include "libft.h"

/*
** Count the characters needed to print n in base 10, including one
** extra slot for the leading '-' when n is negative.
*/
static int	count_digits(int n)
{
	int	digits;

	digits = 1;
	if (n < 0)
		digits++;
	while (n / 10 != 0)
	{
		n /= 10;
		digits++;
	}
	return (digits);
}

/*
** Return a freshly allocated decimal string for n, or NULL on
** allocation failure.  Handles the full int range (including INT_MIN)
** because digits of a negative n are extracted as -(n % 10), which
** never overflows — n itself is never negated.
*/
char	*ft_itoa(int n)
{
	int		digits;
	char	*str;
	int		sign;

	sign = (n < 0);
	digits = count_digits(n);
	str = (char *)malloc(digits + 1);
	if (!str)
		return (NULL);
	if (sign)
		str[0] = '-';
	str[digits] = '\0';
	while (digits > sign)
	{
		if (n < 0)
			str[--digits] = (n % 10) * (-1) + '0';
		else
			str[--digits] = (n % 10) + '0';
		n /= 10;
	}
	return (str);
}
raphaelchang/quadthingy-software
base_station/ChibiOS_16.1.4/test/lib/templates/test_root.h
<reponame>raphaelchang/quadthingy-software<filename>base_station/ChibiOS_16.1.4/test/lib/templates/test_root.h /* ChibiOS - Copyright (C) 2006..2015 <NAME> Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ /** * @file test_root.h * @brief Test Suite root structures header. * * @addtogroup CH_TEST_ROOT * @{ */ #ifndef _TEST_ROOT_H_ #define _TEST_ROOT_H_ #include "test_sequence_000.h" /*===========================================================================*/ /* Default definitions. */ /*===========================================================================*/ /* Global test suite name, it is printed on top of the test report header.*/ #define TEST_SUITE_NAME "Test Suite" /*===========================================================================*/ /* External declarations. */ /*===========================================================================*/ extern const testcase_t * const *test_suite[]; #ifdef __cplusplus extern "C" { #endif #ifdef __cplusplus } #endif /*===========================================================================*/ /* Shared definitions. */ /*===========================================================================*/ #endif /* _TEST_ROOT_H_ */ /** @} */
datchung/blog
src/Components/Posts/PostPage.js
import React, { useEffect, useState } from "react"; import { getPost } from "../../Api/PostApi"; import T from '../../Localization/i18n'; import Back from "../Common/Back"; import PropTypes from "prop-types"; // import Spinner from "../Common/Spinner"; import { toast } from "react-toastify"; import marked from "marked"; import {TwitterTweetEmbed} from "react-twitter-embed"; export function PostPage({ history, ...props }) { const [post, setPost] = useState({}); useEffect(() => { var promise = new Promise(function(resolve, reject) { var id = decodeURIComponent(props.match.params.link); try { resolve(getPost(id)); } catch(error) { reject(error); } }); promise .then(response => { setPost(response); }) .catch(error => { toast.error( String.format(T.t("loadingPostFailed"), error)); }); }, []); return ( <> <Back history={history} /> <div dangerouslySetInnerHTML={{ __html: post.content && marked(post.content) }}></div> <TwitterTweetEmbed tweetId={'933354946111705097'} /> </> ); } PostPage.propTypes = { match: PropTypes.shape({ params: PropTypes.shape({ link: PropTypes.string.isRequired }) }), history: PropTypes.object.isRequired }; export default PostPage;
josephmancuso/masonite
tests/core/exceptions/test_exception_handler.py
<reponame>josephmancuso/masonite<gh_stars>10-100
from tests import TestCase
from src.masonite.routes import Route
from src.masonite.controllers import Controller
from src.masonite.exceptions import RouteNotFoundException


class TestController(Controller):
    """Minimal controller exposing routes that fail in two distinct ways."""

    def simple(self):
        # Unhandled Python exception -> exercises the generic 500 path.
        1 / 0

    def http(self):
        # Framework HTTP exception -> exercises the 404/not-found path.
        raise RouteNotFoundException()


class TestExceptionHandlerInDebug(TestCase):
    """Exercise the exception handler with debug mode ON.

    In debug mode errors should surface the Exceptionite debug page (HTML or
    JSON) and dump the stack trace to the console.
    """

    def setUp(self):
        super().setUp()
        # Resolve the handler under test from the application container.
        self.handler = self.application.make("exception_handler")
        self.setRoutes(
            Route.get("/simple", TestController.simple),
            Route.get("/http", TestController.http),
        )
        # enable exceptions handling during all the tests of this class
        # because it is what is tested here
        self.withExceptionsHandling()

    def test_that_exception_event_is_fired(self):
        # TODO: assert the exception event is emitted once event testing
        # support is available.
        # with self.debugMode():
        #     self.get("/simple")
        pass

    def test_raising_simple_exception_renders_debug_error_page(self):
        """An unhandled exception renders the Exceptionite debug page."""
        with self.debugMode():
            self.get("/simple").assertError().assertContains(
                "ZeroDivisionError"
            ).assertContains("Exceptionite")

    def test_raising_http_exception_renders_debug_error_page(self):
        """An HTTP exception also renders the debug page when debug is on."""
        with self.debugMode():
            self.get("/http").assertError().assertContains(
                "RouteNotFoundException"
            ).assertContains("Exceptionite")

    def test_raising_exception_output_stack_trace_to_console(self):
        """The stack trace is echoed to the console in debug mode."""
        with self.debugMode():
            self.get("/simple")
        self.assertConsoleOutputContains(
            "ZeroDivisionError: division by zero"
        ).assertConsoleOutputContains("Stack Trace")

    def test_accepting_json_returns_debug_error_payload(self):
        """JSON-accepting clients get a structured debug error payload."""
        with self.debugMode():
            self.withHeaders({"Accept": "application/json"}).get(
                "/simple"
            ).assertError().assertJsonPath("exception.type", "ZeroDivisionError")


class TestExceptionHandler(TestCase):
    """Test error handling in production mode, debug is False."""

    def setUp(self):
        super().setUp()
        # Resolve the handler under test from the application container.
        self.handler = self.application.make("exception_handler")
        self.setRoutes(Route.get("/simple", TestController.simple))
        # enable exceptions handling during all the tests of this class
        # because it is what is tested here
        self.withExceptionsHandling()

    def test_that_exception_event_is_fired(self):
        # TODO: assert the exception event is emitted once event testing
        # support is available.
        pass

    def test_raising_simple_exception_renders_500_error_template(self):
        """Production mode hides details behind a generic 500 template."""
        with self.debugMode(False):
            self.get("/simple").assertError().assertContains("Error 500")

    def test_raising_http_exception_renders_404_error_page(self):
        """HTTP exceptions map to a friendly 404 page in production mode."""
        with self.debugMode(False):
            self.get("/http").assertNotFound().assertContains("Page Not Found")

    def test_raising_exception_does_not_output_stack_trace_to_console(self):
        """No stack trace should leak to the console when debug is off."""
        with self.debugMode(False):
            self.get("/simple")
        self.assertConsoleEmpty()

    def test_accepting_json_returns_500_error_payload(self):
        """JSON-accepting clients get a minimal 500 payload in production."""
        with self.debugMode(False):
            self.withHeaders({"Accept": "application/json"}).get(
                "/simple"
            ).assertError().assertJson({"status": 500, "message": "division by zero"})
KPTechnologyLab/spring-data-crate
src/main/java/org/springframework/data/crate/config/CrateNamespaceHandler.java
/* * Copyright 2014 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.data.crate.config; import org.springframework.beans.factory.xml.NamespaceHandlerSupport; import org.springframework.data.crate.repository.config.CrateRepositoryConfigExtension; import org.springframework.data.repository.config.RepositoryBeanDefinitionParser; import org.springframework.data.repository.config.RepositoryConfigurationExtension; /** * CrateNamespaceHandler * * @author <NAME> * @author <NAME> */ public class CrateNamespaceHandler extends NamespaceHandlerSupport { @Override public void init() { RepositoryConfigurationExtension extension = new CrateRepositoryConfigExtension(); RepositoryBeanDefinitionParser parser = new RepositoryBeanDefinitionParser(extension); registerBeanDefinitionParser("repositories", parser); registerBeanDefinitionParser("client", new CrateClientBeanDefinitionParser()); registerBeanDefinitionParser("schema-export", new CratePersistentEntitySchemaManagerBeanDefinitionParser()); } }