repo_name stringlengths 6 101 | path stringlengths 4 300 | text stringlengths 7 1.31M |
|---|---|---|
slusek/OG-Platform | projects/OG-Util/src/main/java/com/opengamma/util/fudgemsg/timeseries/DoubleTimeSeriesFudgeBuilder.java | <reponame>slusek/OG-Platform<gh_stars>1-10
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.util.fudgemsg.timeseries;
import org.fudgemsg.FudgeMsg;
import org.fudgemsg.MutableFudgeMsg;
import org.fudgemsg.mapping.FudgeBuilder;
import org.fudgemsg.mapping.FudgeBuilderFor;
import org.fudgemsg.mapping.FudgeDeserializer;
import org.fudgemsg.mapping.FudgeSerializer;
import org.fudgemsg.wire.types.FudgeWireType;
import org.threeten.bp.LocalDate;
import org.threeten.bp.ZoneId;
import org.threeten.bp.ZoneOffset;
import com.opengamma.timeseries.DoubleTimeSeries;
import com.opengamma.timeseries.date.DateDoubleTimeSeries;
import com.opengamma.timeseries.date.localdate.ImmutableLocalDateDoubleTimeSeries;
import com.opengamma.timeseries.date.localdate.LocalDateToIntConverter;
import com.opengamma.timeseries.precise.PreciseDoubleTimeSeries;
import com.opengamma.timeseries.precise.instant.ImmutableInstantDoubleTimeSeries;
import com.opengamma.timeseries.precise.zdt.ImmutableZonedDateTimeDoubleTimeSeries;
import com.opengamma.timeseries.precise.zdt.ZonedDateTimeDoubleTimeSeries;
/**
 * Fudge message encoder/decoder (builder) for {@code DoubleTimeSeries}.
 *
 * <p>Encodes the three concrete series families (date-based, precise/instant-based,
 * zoned) into a common wire format, and decodes both that format and two legacy
 * message layouts produced by older versions of the library.
 */
@FudgeBuilderFor(DoubleTimeSeries.class)
public class DoubleTimeSeriesFudgeBuilder implements FudgeBuilder<DoubleTimeSeries<?>> {

  /** Field name for the int-encoded date array of date-based series. */
  public static final String DATES = "dates";
  /** Field name for the long-encoded instant array of precise series. */
  public static final String INSTANTS = "instants";
  /** Field name for the double value array. */
  public static final String VALUES = "values";
  /** Field name for the time-zone id of zoned series. */
  public static final String ZONE = "zone";
  /**
   * Singleton instance.
   */
  static final DoubleTimeSeriesFudgeBuilder INSTANCE = new DoubleTimeSeriesFudgeBuilder();

  @Override
  public MutableFudgeMsg buildMessage(FudgeSerializer serializer, DoubleTimeSeries<?> object) {
    final MutableFudgeMsg message = serializer.newMessage();
    message.add(null, 0, FudgeWireType.STRING, DoubleTimeSeries.class.getName()); // we need to stick the class name in so receiver knows.
    // Dispatch on the concrete type. ZonedDateTimeDoubleTimeSeries is tested
    // before the more general PreciseDoubleTimeSeries so the zone id is kept.
    if (object instanceof DateDoubleTimeSeries) {
      buildMessage(message, (DateDoubleTimeSeries<?>) object);
    } else if (object instanceof ZonedDateTimeDoubleTimeSeries) {
      buildMessage(message, (ZonedDateTimeDoubleTimeSeries) object);
    } else if (object instanceof PreciseDoubleTimeSeries) {
      buildMessage(message, (PreciseDoubleTimeSeries<?>) object);
    } else {
      throw new IllegalArgumentException("Unknown time-series type");
    }
    return message;
  }

  // Encodes a date-based series as parallel DATES/VALUES arrays.
  void buildMessage(final MutableFudgeMsg message, DateDoubleTimeSeries<?> series) {
    message.add(DATES, null, FudgeWireType.INT_ARRAY, series.timesArrayFast());
    message.add(VALUES, null, FudgeWireType.DOUBLE_ARRAY, series.valuesArrayFast());
  }

  // Encodes a precise series as parallel INSTANTS/VALUES arrays.
  void buildMessage(final MutableFudgeMsg message, PreciseDoubleTimeSeries<?> series) {
    message.add(INSTANTS, null, FudgeWireType.LONG_ARRAY, series.timesArrayFast());
    message.add(VALUES, null, FudgeWireType.DOUBLE_ARRAY, series.valuesArrayFast());
  }

  // Encodes a zoned series like a precise one, plus the zone id.
  void buildMessage(final MutableFudgeMsg message, ZonedDateTimeDoubleTimeSeries series) {
    message.add(INSTANTS, null, FudgeWireType.LONG_ARRAY, series.timesArrayFast());
    message.add(VALUES, null, FudgeWireType.DOUBLE_ARRAY, series.valuesArrayFast());
    message.add(ZONE, null, FudgeWireType.STRING, series.getZone().getId());
  }

  @Override
  public DoubleTimeSeries<?> buildObject(FudgeDeserializer deserializer, FudgeMsg message) {
    // read old LocalDateDoubleTimeSeries, see OpenGammaFudgeContext
    // Legacy layout (as read below): sub-message 2 holds the "fast" series;
    // its sub-message 1, field 1 names the encoding; fields 2/3 are times/values.
    // NOTE(review): getByOrdinal(0) would NPE on a message lacking ordinal 0 -
    // presumably every producer writes it (buildMessage above does); confirm.
    if (message.getByOrdinal(0).toString().contains("ArrayLocalDateDoubleTimeSeries") ||
        message.getByOrdinal(0).toString().contains("ListLocalDateDoubleTimeSeries") ||
        message.getByOrdinal(0).toString().contains("MapLocalDateDoubleTimeSeries")) {
      FudgeMsg fastSeries = message.getMessage(2);
      String encoding = fastSeries.getMessage(1).getString(1);
      int[] dates = (int[]) fastSeries.getValue(2);
      double[] values = (double[]) fastSeries.getValue(3);
      if (encoding.equals("DATE_DDMMYYYY")) { // CSIGNORE
        // correct encoding
      } else if (encoding.equals("DATE_EPOCH_DAYS")) {
        // convert epoch-day ints to the packed date-int encoding, in place
        for (int i = 0; i < dates.length; i++) {
          LocalDate d = LocalDate.ofEpochDay(dates[i]);
          dates[i] = LocalDateToIntConverter.convertToInt(d);
        }
      } else {
        throw new IllegalArgumentException("Unknown time-series encoding");
      }
      return ImmutableLocalDateDoubleTimeSeries.of(dates, values);
    }
    // read old ZonedDateTimeDoubleTimeSeries, see OpenGammaFudgeContext
    if (message.getByOrdinal(0).toString().contains("ZonedDateTimeDoubleTimeSeries")) {
      // zone falls back to UTC when the legacy converter sub-message is missing or unreadable
      ZoneId zone = ZoneOffset.UTC;
      try {
        FudgeMsg converter = message.getMessage(1);
        zone = ZoneId.of(converter.getString(1));
      } catch (RuntimeException ex) {
        // ignore
      }
      FudgeMsg fastSeries = message.getMessage(2);
      String encoding = fastSeries.getMessage(1).getString(1);
      long[] instants = (long[]) fastSeries.getValue(2);
      double[] values = (double[]) fastSeries.getValue(3);
      if (encoding.equals("TIME_EPOCH_NANOS")) { // CSIGNORE
        // correct encoding
      } else if (encoding.equals("TIME_EPOCH_MILLIS")) {
        // millis -> nanos, in place
        for (int i = 0; i < instants.length; i++) {
          instants[i] = instants[i] * 1_000_000;
        }
      } else if (encoding.equals("TIME_EPOCH_SECONDS")) {
        // seconds -> nanos, in place
        for (int i = 0; i < instants.length; i++) {
          instants[i] = instants[i] * 1_000_000_000;
        }
      } else {
        throw new IllegalArgumentException("Unknown time-series encoding");
      }
      return ImmutableZonedDateTimeDoubleTimeSeries.of(instants, values, zone);
    }
    // read new format: exactly one of DATES or INSTANTS is expected to be present
    int[] dates = (int[]) message.getValue(DATES);
    long[] instants = (long[]) message.getValue(INSTANTS);
    double[] values = (double[]) message.getValue(VALUES);
    String zoneId = message.getString(ZONE);
    if (dates != null) {
      return ImmutableLocalDateDoubleTimeSeries.of(dates, values);
    }
    if (instants != null) {
      // the presence of a zone id distinguishes zoned from plain instant series
      if (zoneId != null) {
        ZoneId zone = ZoneId.of(zoneId);
        return ImmutableZonedDateTimeDoubleTimeSeries.of(instants, values, zone);
      } else {
        return ImmutableInstantDoubleTimeSeries.of(instants, values);
      }
    }
    throw new IllegalArgumentException("Unrecognized Fudge message: " + dates + " " + instants + " " + zoneId);
  }
}
|
pengfei99/JavaBasic | LearningJava/src/main/java/org/pengfei/Lesson00_Java_Basics/S10_MultiThread/source/SumArrayWithoutSynchronizedMethod.java | <filename>LearningJava/src/main/java/org/pengfei/Lesson00_Java_Basics/S10_MultiThread/source/SumArrayWithoutSynchronizedMethod.java
package org.pengfei.Lesson00_Java_Basics.S10_MultiThread.source;
/**
 * Lesson demo of a data race: {@code sumArray} is deliberately left
 * unsynchronized, so two threads sharing one instance interleave their
 * updates to the shared {@code sum} field and corrupt each other's totals.
 */
public class SumArrayWithoutSynchronizedMethod {

    // Shared running total - unprotected on purpose for the demonstration.
    private int sum;

    // this method is not synchronized
    int sumArray(int[] nums) {
        sum = 0;
        for (int value : nums) {
            sum += value;
            // Report progress tagged with the calling thread's name.
            System.out.println("Running total for " + Thread.currentThread().getName() + " is " + sum);
            // Sleep briefly so the scheduler gets a chance to switch threads,
            // making the race easy to observe.
            try {
                Thread.sleep(10);
            } catch (InterruptedException e) {
                System.out.println("Thread interrupted.");
            }
        }
        return sum;
    }
}
|
dzhemriza/goffi | org.goffi.fx.core/src/main/java/org/goffi/fx/core/AlertUtils.java | <reponame>dzhemriza/goffi
/*
* org.goffi.fx.core
*
* File Name: AlertUtils.java
*
* Copyright 2018 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.goffi.fx.core;
import javafx.scene.control.Alert;
import javafx.scene.control.ButtonType;
import javafx.scene.control.TextArea;
import javafx.scene.layout.GridPane;
import javafx.scene.layout.Priority;
import javafx.stage.Window;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.Optional;
public class AlertUtils {
public static void error(Window owner, String error) {
Alert alert = new Alert(Alert.AlertType.ERROR);
alert.setContentText(error);
alert.initOwner(owner);
alert.showAndWait();
}
public static void error(Window owner, String error, Throwable throwable) {
Alert alert = new Alert(Alert.AlertType.ERROR);
alert.setContentText(error);
alert.initOwner(owner);
StringWriter stringWriter = new StringWriter();
PrintWriter printWriter = new PrintWriter(stringWriter);
throwable.printStackTrace(printWriter);
String exceptionText = stringWriter.toString();
TextArea textArea = new TextArea(exceptionText);
textArea.setEditable(false);
textArea.setWrapText(true);
textArea.setMaxWidth(Double.MAX_VALUE);
textArea.setMaxHeight(Double.MAX_VALUE);
GridPane.setVgrow(textArea, Priority.ALWAYS);
GridPane.setHgrow(textArea, Priority.ALWAYS);
GridPane expContent = new GridPane();
expContent.setMaxWidth(Double.MAX_VALUE);
expContent.add(textArea, 0, 0);
// Set expandable Exception into the dialog pane.
alert.getDialogPane().setExpandableContent(expContent);
alert.showAndWait();
}
public static boolean confirm(Window owner, String msg) {
Alert alert = new Alert(Alert.AlertType.CONFIRMATION);
alert.setContentText(msg);
alert.initOwner(owner);
Optional<ButtonType> result = alert.showAndWait();
return result.get() == ButtonType.OK;
}
public static boolean question(Window owner, String msg) {
Alert alert = new Alert(Alert.AlertType.INFORMATION, msg,
ButtonType.YES, ButtonType.NO);
alert.initOwner(owner);
Optional<ButtonType> result = alert.showAndWait();
return result.get() == ButtonType.YES;
}
public static void info(Window owner, String msg) {
Alert alert = new Alert(Alert.AlertType.INFORMATION, msg,
ButtonType.CLOSE);
alert.initOwner(owner);
alert.showAndWait();
}
}
|
plasmo-foss/pkg | test/test-79-npm/ws/ws.js | 'use strict';
// Smoke test for the bundled `ws` module: connecting to a local endpoint
// with nothing listening must surface an error event, which we report as 'ok'.
const WebSocket = require('ws');

const socket = new WebSocket('ws://127.0.0.1/');
socket.on('error', () => {
  console.log('ok');
});
|
asac/tpm2-tools | test/unit/test_tpm2_util.c | <gh_stars>10-100
/* SPDX-License-Identifier: BSD-3-Clause */

#include <stdarg.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>
#include <setjmp.h>
#include <cmocka.h>

#include "tpm2_hierarchy.h"
#include "tpm2_util.h"
/* A NULL argument string must be rejected. */
static void test_tpm2_util_handle_from_optarg_NULL(void **state) {
    UNUSED(state);

    TPMI_RH_PROVISION h;
    bool result = tpm2_util_handle_from_optarg(NULL, &h,
            TPM2_HANDLE_FLAGS_ALL_HIERACHIES);
    assert_false(result);
}

/* An empty argument string must be rejected. */
static void test_tpm2_util_handle_from_optarg_empty(void **state) {
    UNUSED(state);

    TPMI_RH_PROVISION h;
    bool result = tpm2_util_handle_from_optarg("", &h,
            TPM2_HANDLE_FLAGS_ALL_HIERACHIES);
    assert_false(result);
}

/* A single character that is not a known hierarchy id must be rejected. */
static void test_tpm2_util_handle_from_optarg_invalid_id(void **state) {
    UNUSED(state);

    TPMI_RH_PROVISION h;
    bool result = tpm2_util_handle_from_optarg("q", &h,
            TPM2_HANDLE_FLAGS_ALL_HIERACHIES);
    assert_false(result);
}

/* A longer string that is neither a hierarchy id nor a number must be rejected. */
static void test_tpm2_util_handle_from_optarg_invalid_str(void **state) {
    UNUSED(state);

    TPMI_RH_PROVISION h;
    bool result = tpm2_util_handle_from_optarg("nope", &h,
            TPM2_HANDLE_FLAGS_ALL_HIERACHIES);
    assert_false(result);
}
/*
 * With all hierarchies enabled, the shorthand ids o/p/e/n must map to the
 * owner/platform/endorsement/null hierarchy handles, and a raw persistent
 * handle must parse when the NV-wide flag set is used.
 */
static void test_tpm2_util_handle_from_optarg_valid_ids(void **state) {
    UNUSED(state);

    TPMI_RH_PROVISION h;
    bool result = tpm2_util_handle_from_optarg("o", &h,
            TPM2_HANDLE_FLAGS_ALL_HIERACHIES);
    assert_true(result);
    assert_int_equal(h, TPM2_RH_OWNER);

    result = tpm2_util_handle_from_optarg("p", &h,
            TPM2_HANDLE_FLAGS_ALL_HIERACHIES);
    assert_true(result);
    assert_int_equal(h, TPM2_RH_PLATFORM);

    result = tpm2_util_handle_from_optarg("e", &h,
            TPM2_HANDLE_FLAGS_ALL_HIERACHIES);
    assert_true(result);
    assert_int_equal(h, TPM2_RH_ENDORSEMENT);

    result = tpm2_util_handle_from_optarg("n", &h,
            TPM2_HANDLE_FLAGS_ALL_HIERACHIES);
    assert_true(result);
    assert_int_equal(h, TPM2_RH_NULL);

    result = tpm2_util_handle_from_optarg("0x81010009", &h,
            TPM2_HANDLE_ALL_W_NV);
    assert_true(result);
    assert_int_equal(h, 0x81010009);
}

/*
 * Each shorthand id must be rejected when its own flag is NOT in the allowed
 * set; a raw handle must be rejected when only hierarchy flags are allowed.
 */
static void test_tpm2_util_handle_from_optarg_valid_ids_disabled(void **state) {
    UNUSED(state);

    TPMI_RH_PROVISION h;
    bool result = tpm2_util_handle_from_optarg("o", &h, TPM2_HANDLE_FLAGS_N);
    assert_false(result);

    result = tpm2_util_handle_from_optarg("p", &h, TPM2_HANDLE_FLAGS_O);
    assert_false(result);

    result = tpm2_util_handle_from_optarg("e", &h, TPM2_HANDLE_FLAGS_P);
    assert_false(result);

    result = tpm2_util_handle_from_optarg("n", &h, TPM2_HANDLE_FLAGS_E);
    assert_false(result);

    result = tpm2_util_handle_from_optarg("0x81010009", &h,
            TPM2_HANDLE_FLAGS_ALL_HIERACHIES);
    assert_false(result);
}

/* Each shorthand id must be accepted when exactly its own flag is allowed. */
static void test_tpm2_util_handle_from_optarg_valid_ids_enabled(void **state) {
    UNUSED(state);

    TPMI_RH_PROVISION h;
    bool result = tpm2_util_handle_from_optarg("o", &h, TPM2_HANDLE_FLAGS_O);
    assert_true(result);
    assert_int_equal(h, TPM2_RH_OWNER);

    result = tpm2_util_handle_from_optarg("p", &h, TPM2_HANDLE_FLAGS_P);
    assert_true(result);
    assert_int_equal(h, TPM2_RH_PLATFORM);

    result = tpm2_util_handle_from_optarg("e", &h, TPM2_HANDLE_FLAGS_E);
    assert_true(result);
    assert_int_equal(h, TPM2_RH_ENDORSEMENT);

    result = tpm2_util_handle_from_optarg("n", &h, TPM2_HANDLE_FLAGS_N);
    assert_true(result);
    assert_int_equal(h, TPM2_RH_NULL);
}
/* NV indices must parse both as a bare offset and as a full raw handle. */
static void test_tpm2_util_handle_from_optarg_nv_valid_range(void **state) {
    UNUSED(state);

    TPMI_RH_PROVISION h;
    /*
     * NV index specified as NV:offset
     * (offset "1" becomes handle 0x01000001, i.e. the NV handle range base
     * plus the offset)
     */
    bool result = tpm2_util_handle_from_optarg("1", &h, TPM2_HANDLE_FLAGS_NV);
    assert_true(result);
    assert_int_equal(h, 0x01000001);

    /*
     * NV index specified as full raw handle
     */
    result = tpm2_util_handle_from_optarg("0x01000002", &h,
            TPM2_HANDLE_FLAGS_NV);
    assert_true(result);
    assert_int_equal(h, 0x01000002);
}

/* Malformed or out-of-range NV specifications must all be rejected. */
static void test_tpm2_util_handle_from_optarg_nv_invalid_offset(void **state) {
    UNUSED(state);

    TPMI_RH_PROVISION h;
    /*
     * No offset specified
     */
    bool result = tpm2_util_handle_from_optarg("", &h, TPM2_HANDLE_FLAGS_NV);
    assert_false(result);

    /*
     * Offset is non hex string
     */
    result = tpm2_util_handle_from_optarg("random", &h, TPM2_HANDLE_FLAGS_NV);
    assert_false(result);

    /*
     * Offset is larger than TPM2_HR_HANDLE_MASK
     */
    result = tpm2_util_handle_from_optarg("0x12345678", &h,
            TPM2_HANDLE_FLAGS_NV);
    assert_false(result);

    /*
     * Wrongly specify NV index as raw handle and disable NV in flags
     */
    result = tpm2_util_handle_from_optarg("0x01000001", &h,
            TPM2_HANDLE_FLAGS_O);
    assert_false(result);
}
/* link required symbol, but tpm2_tool.c declares it AND main, which
 * we have a main below for cmocka tests.
 */
bool output_enabled = true;

/* Registers every handle-parsing test above and runs them as one cmocka group. */
int main(int argc, char* argv[]) {
    (void) argc;
    (void) argv;

    const struct CMUnitTest tests[] = {
        cmocka_unit_test(test_tpm2_util_handle_from_optarg_NULL),
        cmocka_unit_test(test_tpm2_util_handle_from_optarg_empty),
        cmocka_unit_test(test_tpm2_util_handle_from_optarg_invalid_id),
        cmocka_unit_test(test_tpm2_util_handle_from_optarg_invalid_str),
        cmocka_unit_test(test_tpm2_util_handle_from_optarg_valid_ids),
        cmocka_unit_test(test_tpm2_util_handle_from_optarg_valid_ids_disabled),
        cmocka_unit_test(test_tpm2_util_handle_from_optarg_valid_ids_enabled),
        cmocka_unit_test(test_tpm2_util_handle_from_optarg_nv_valid_range),
        cmocka_unit_test(test_tpm2_util_handle_from_optarg_nv_invalid_offset),
    };

    return cmocka_run_group_tests(tests, NULL, NULL);
}
|
miw-upm/apaw-practice | src/main/java/es/upm/miw/apaw_practice/domain/services/zoo/CaretakerService.java | <filename>src/main/java/es/upm/miw/apaw_practice/domain/services/zoo/CaretakerService.java<gh_stars>1-10
package es.upm.miw.apaw_practice.domain.services.zoo;
import es.upm.miw.apaw_practice.domain.models.zoo.Caretaker;
import es.upm.miw.apaw_practice.domain.persistence_ports.zoo.CaretakerPersistence;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
 * Domain service exposing read access to {@code Caretaker} entities through
 * the persistence port.
 */
@Service
public class CaretakerService {

    private final CaretakerPersistence persistence;

    @Autowired
    public CaretakerService(CaretakerPersistence caretakerPersistence) {
        this.persistence = caretakerPersistence;
    }

    /** Returns the caretaker identified by the given DNI. */
    public Caretaker findByDni(String dni) {
        return persistence.findByDni(dni);
    }
}
|
kryo4096/NPDECODES | homeworks/SimpleLinearFiniteElements/templates/test/simplelinearfiniteelements_test.cc | <reponame>kryo4096/NPDECODES<gh_stars>10-100
#include "../simplelinearfiniteelements.h"
#include <gtest/gtest.h>
#include <Eigen/Core>
#include <Eigen/SparseLU>
#include <cmath>
#include <tuple>
#include "../tria_mesh_2D.h"
namespace SimpleLinearFiniteElements::test {
// Pi to full double precision; the previous literal (3.1415926535897) was
// truncated and accurate only to ~1e-13, well inside every test tolerance
// here but needlessly imprecise.
constexpr double pi = 3.141592653589793238462643383279502884;
constexpr char meshfile1[] = CURRENT_SOURCE_DIR "/../../meshes/Square1.txt";
constexpr char meshfile3[] = CURRENT_SOURCE_DIR "/../../meshes/Square3.txt";
/**
 * @brief test ElementMatrix_Mass_LFE implementation on the unit triangle
 */
TEST(SimpleLinearFiniteElements, ElementMatrix_Mass_LFE) {
  // check the produced matrix for a fairly standard triangle:
  // columns are the vertices (0,0), (1,0), (0,1)
  Eigen::Matrix<double, 2, 3> test;
  test << 0, 1, 0, 0, 0, 1;
  Eigen::Matrix3d M;
  M = ElementMatrix_Mass_LFE(test);
  // expected entries to six digits: 1/12 on the diagonal, 1/24 off it
  Eigen::Matrix3d ref_M;
  ref_M << 0.0833333, 0.0416667, 0.0416667, 0.0416667, 0.0833333, 0.0416667,
      0.0416667, 0.0416667, 0.0833333;
  double tol = 1e-8;
  // NOTE(review): this only compares Frobenius norms, so any matrix sharing
  // the reference norm would pass; (ref_M - M).norm() against ~1e-6 would be
  // a stricter check.
  ASSERT_NEAR(ref_M.norm(), M.norm(), tol);
}
/**
 * @brief test L2Error implementation on the coarsest mesh (Square1)
 */
TEST(SimpleLinearFiniteElements, L2Error) {
  // read coarsest mesh
  TriaMesh2D square_mesh(meshfile1);
  // exact solution u(x) = cos(2*pi*x0) * cos(2*pi*x1)
  auto uExact = [](const Eigen::Vector2d &x) {
    return std::cos(2 * pi * x(0)) * std::cos(2 * pi * x(1));
  };
  // source function f = (8*pi^2 + 1) * u, matching the stiffness+mass
  // operator assembled below (presumably -Laplace u + u = f; confirm against
  // the problem statement)
  auto f = [](const Eigen::Vector2d &x) {
    return (8.0 * pi * pi + 1) * std::cos(2 * pi * x(0)) *
        std::cos(2 * pi * x(1));
  };
  // assemble galerkin matrix and load vector
  Eigen::SparseMatrix<double> A =
      GalerkinAssembly(square_mesh, ElementMatrix_LaplMass_LFE);
  Eigen::VectorXd L = assemLoad_LFE(square_mesh, f);
  // solve linear system of equations with a sparse direct solver
  Eigen::SparseLU<Eigen::SparseMatrix<double>, Eigen::COLAMDOrdering<int>>
      solver;
  solver.analyzePattern(A);
  solver.factorize(A);
  Eigen::VectorXd U = solver.solve(L);
  // compare to the expected discretization error on this coarse mesh
  double error = L2Error(square_mesh, U, uExact);
  ASSERT_NEAR(error, 0.232547, 0.1);
}
/**
 * @brief test H1Serror implementation on the Square3 mesh
 */
TEST(SimpleLinearFiniteElements, H1Serror) {
  // read mesh (note: Square3, not the coarsest mesh used by the L2 test)
  TriaMesh2D square_mesh(meshfile3);
  // exact gradient of u(x) = cos(2*pi*x0) * cos(2*pi*x1)
  auto gradUExact = [](const Eigen::Vector2d &x) {
    Eigen::Vector2d gradient;
    gradient << -2 * pi * std::sin(2 * pi * x(0)) * std::cos(2 * pi * x(1)),
        -2 * pi * std::cos(2 * pi * x(0)) * std::sin(2 * pi * x(1));
    return gradient;
  };
  // source function, same manufactured right-hand side as the L2 test
  auto f = [](const Eigen::Vector2d &x) {
    return (8.0 * pi * pi + 1) * std::cos(2 * pi * x(0)) *
        std::cos(2 * pi * x(1));
  };
  // compute galerkin matrix and load vector
  Eigen::SparseMatrix<double> A =
      GalerkinAssembly(square_mesh, ElementMatrix_LaplMass_LFE);
  Eigen::VectorXd L = assemLoad_LFE(square_mesh, f);
  // solve linear system of equations with a sparse direct solver
  Eigen::SparseLU<Eigen::SparseMatrix<double>, Eigen::COLAMDOrdering<int>>
      solver;
  solver.analyzePattern(A);
  solver.factorize(A);
  Eigen::VectorXd U = solver.solve(L);
  // compare to expected error
  // high tolerance as the procedure is affected by many rounding errors
  double error = H1Serror(square_mesh, U, gradUExact);
  ASSERT_NEAR(error, 1.32457, 0.1);
}
/**
 * @brief test Solve implementation against precomputed reference values
 */
TEST(SimpleLinearFiniteElements, Solve) {
  TriaMesh2D square_mesh(meshfile1);
  // Solve is expected to return (nodal solution vector, L2 error, H1 seminorm
  // error), checked in that order below
  std::tuple<Eigen::VectorXd, double, double> solution = Solve(square_mesh);
  // reference nodal values for the 25-node Square1 mesh
  Eigen::VectorXd solution_ref(25);
  solution_ref << 0.625091201454706, 1.82631947928602, 0.625091201454444,
      1.82631947928602, -1.25162595870465, -1.25162595870465, 1.22710538961478,
      -1.25162595870452, -1.25162595870452, -0.205607688673624,
      -0.205607688673623, 0.0791862852828167, 0.180931017968087,
      0.18093101796835, -0.205607688673888, -0.205607688673888,
      0.180931017968087, 0.079186285282817, 0.180931017968351,
      -0.104406878994863, -0.0124922491782952, -0.012492249178295,
      -0.0124922491780322, -0.0124922491780322, -0.104406878994864;
  double L2Error_ref = 0.232547;
  double H1Serror_ref = 4.95432;
  ASSERT_EQ(std::get<0>(solution).size(), solution_ref.size());
  double tol = 1.0e-5;
  // component-wise comparison of the nodal solution via the max norm
  ASSERT_NEAR(0.0,
      (std::get<0>(solution) - solution_ref).lpNorm<Eigen::Infinity>(),
      tol);
  ASSERT_NEAR(0.0, std::get<1>(solution) - L2Error_ref, tol);
  ASSERT_NEAR(0.0, std::get<2>(solution) - H1Serror_ref, tol);
}
} // namespace SimpleLinearFiniteElements::test
|
Sinsstranger/ReactJS_Learning | lesson_3/src/layouts/Posts.js | <reponame>Sinsstranger/ReactJS_Learning<filename>lesson_3/src/layouts/Posts.js
import React from 'react';
import Post1 from "../components/Post1";
import Post2 from "../components/Post2";
export default class Posts extends React.Component{
render(){
return <div className={this.props.postsClass}>
<Post1 class={this.props.children}/>
<Post2 class={this.props.children}/>
</div>;
}
} |
chinanala/echo | echo-proxy-lib/src/main/java/com/virjar/echo/nat/log/ILogger.java | package com.virjar.echo.nat.log;
/**
 * Minimal logging abstraction decoupling this module from any concrete
 * logging backend. Each level offers a message-only overload and one that
 * carries an associated {@link Throwable}.
 */
public interface ILogger {

    /** Logs a message at INFO level. */
    void info(String msg);

    /** Logs a message at INFO level with an associated throwable. */
    void info(String msg, Throwable throwable);

    /** Logs a message at WARN level. */
    void warn(String msg);

    /** Logs a message at WARN level with an associated throwable. */
    void warn(String msg, Throwable throwable);

    /** Logs a message at ERROR level. */
    void error(String msg);

    /** Logs a message at ERROR level with an associated throwable. */
    void error(String msg, Throwable throwable);

    /** Logs a message at DEBUG level. */
    void debug(String msg);

    /** Logs a message at DEBUG level with an associated throwable. */
    void debug(String msg, Throwable throwable);
}
|
seakers/ExtUtils | dakota-6.3.0.Windows.x86/include/interfaces/NPSOL.h | <reponame>seakers/ExtUtils
/* _________________________________________________________________________
*
* Acro: A Common Repository for Optimizers
* Copyright (c) 2008 Sandia Corporation.
* This software is distributed under the BSD License.
* Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
* the U.S. Government retains certain rights in this software.
* For more information, see the README.txt file in the top Acro directory.
* _________________________________________________________________________
*/
#include <acro_config.h>
#ifdef ACRO_USING_NPSOL
#ifndef interfaces_NPSOL_h
#define interfaces_NPSOL_h
#include <colin/Solver.h>
#include <colin/ColinUtilib.h>
namespace interfaces {
///
/// An interface to the NPSOL optimization code, exposed through the COLIN
/// Solver API over real-valued points.
///
class NPSOL : public colin::Solver<utilib::BasicArray<double> >
{
public:

  /// Constructor
  NPSOL();

  /// Destructor
  virtual ~NPSOL();

  /// Setup NPSOL for optimization. Call immediately before \ref{minimize}
  void reset();

  /// Perform minimization
  void minimize();

  /// Write out NPSOL parameter information
  void write(std::ostream& os) const;

protected:

  /// Resize the cached best point to the problem dimension, rejecting a
  /// user-supplied best point whose length does not match the problem.
  void initialize_best_point()
  {
    if ((best().point.size() > 0) &&
        (best().point.size() != problem.num_real_vars()))
      EXCEPTION_MNGR(std::runtime_error, "initialize_best_point - user-provided best point has length " << best().point.size() << " but the problem size is " << problem.num_real_vars() << std::endl);
    best().point.resize(problem.num_real_vars());
  }

  /// Constraint function callback handed to NPSOL; dispatches to the
  /// active instance through the static THIS pointer
  static int confun(int *MODE, int *NCNLN, int *N, int *NROWJ, int *NEEDC,
      double *X, double *C, double *CJAC, int *NSTATE);

  /// Objective function callback handed to NPSOL; dispatches through THIS
  static int objfun(int *MODE, int *N, double *X, double *F, double *G, int *NSTATE);

  /// For use by the above functions.
  /// NOTE(review): a single static instance pointer means only one NPSOL
  /// may run at a time - not thread-safe; confirm this is acceptable.
  static NPSOL *THIS;

  /// Evaluation request codes (presumably an "active set vector" - confirm)
  std::vector<int> asv;

  /// The point currently being evaluated
  utilib::BasicArray<double> point;

  /// Gradient storage for the current evaluation
  utilib::BasicArray<colin::real> gradient;

  /// Response object used for evaluation results
  colin::AppResponse_Utilib response;

  // Disabled legacy parameter block; the names suggest it came from a DOT
  // solver interface - confirm before resurrecting any of it.
#if 0
  ///
  double ftol;
  ///
  double fdss;
  ///
  int numConstraints;
  ///
  int dotInfo;
  ///
  int dotFDSinfo;
  ///
  int dotMethod;
  ///
  int printControl;
  ///
  int optimizationType;
  ///
  std::vector<double> realWorkSpace;
  ///
  vector<int> intWorkSpace;
  ///
  vector<double> constraintArray;
  ///
  vector<double> realCntlParmArray;
  ///
  vector<int> intCntlParmArray;
  ///
  vector<double> X;
  ///
  vector<double> bestX;
  ///
  vector<double> lowerBoundsArray;
  ///
  vector<double> upperBoundsArray;
#endif

};
} // namespace interfaces
#endif
#endif
|
radoslav-tomov/k8s-config-connector | pkg/apis/pubsub/v1beta1/zz_generated.deepcopy.go | //go:build !ignore_autogenerated
// +build !ignore_autogenerated
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// *** DISCLAIMER ***
// Config Connector's go-client for CRDs is currently in ALPHA, which means
// that future versions of the go-client may include breaking changes.
// Please try it out and give us feedback!
// Code generated by main. DO NOT EDIT.
package v1beta1
import (
v1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/apis/k8s/v1alpha1"
runtime "k8s.io/apimachinery/pkg/runtime"
)
// NOTE(review): this file is generated (see the "DO NOT EDIT" header); make
// behavioural changes in the API type definitions and regenerate instead of
// editing these functions by hand.

// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *PubSubSubscription) DeepCopyInto(out *PubSubSubscription) {
	*out = *in
	out.TypeMeta = in.TypeMeta
	in.ObjectMeta.DeepCopyInto(&out.ObjectMeta)
	in.Spec.DeepCopyInto(&out.Spec)
	in.Status.DeepCopyInto(&out.Status)
	return
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PubSubSubscription.
func (in *PubSubSubscription) DeepCopy() *PubSubSubscription {
	if in == nil {
		return nil
	}
	out := new(PubSubSubscription)
	in.DeepCopyInto(out)
	return out
}

// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (in *PubSubSubscription) DeepCopyObject() runtime.Object {
	if c := in.DeepCopy(); c != nil {
		return c
	}
	return nil
}

// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *PubSubSubscriptionList) DeepCopyInto(out *PubSubSubscriptionList) {
	*out = *in
	out.TypeMeta = in.TypeMeta
	in.ListMeta.DeepCopyInto(&out.ListMeta)
	if in.Items != nil {
		// each list item is deep-copied individually
		in, out := &in.Items, &out.Items
		*out = make([]PubSubSubscription, len(*in))
		for i := range *in {
			(*in)[i].DeepCopyInto(&(*out)[i])
		}
	}
	return
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PubSubSubscriptionList.
func (in *PubSubSubscriptionList) DeepCopy() *PubSubSubscriptionList {
	if in == nil {
		return nil
	}
	out := new(PubSubSubscriptionList)
	in.DeepCopyInto(out)
	return out
}

// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (in *PubSubSubscriptionList) DeepCopyObject() runtime.Object {
	if c := in.DeepCopy(); c != nil {
		return c
	}
	return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *PubSubSubscriptionSpec) DeepCopyInto(out *PubSubSubscriptionSpec) {
	*out = *in
	// every pointer field below is re-allocated so the copy shares no
	// memory with the receiver
	if in.AckDeadlineSeconds != nil {
		in, out := &in.AckDeadlineSeconds, &out.AckDeadlineSeconds
		*out = new(int)
		**out = **in
	}
	if in.DeadLetterPolicy != nil {
		in, out := &in.DeadLetterPolicy, &out.DeadLetterPolicy
		*out = new(SubscriptionDeadLetterPolicy)
		(*in).DeepCopyInto(*out)
	}
	if in.EnableMessageOrdering != nil {
		in, out := &in.EnableMessageOrdering, &out.EnableMessageOrdering
		*out = new(bool)
		**out = **in
	}
	if in.ExpirationPolicy != nil {
		in, out := &in.ExpirationPolicy, &out.ExpirationPolicy
		*out = new(SubscriptionExpirationPolicy)
		**out = **in
	}
	if in.Filter != nil {
		in, out := &in.Filter, &out.Filter
		*out = new(string)
		**out = **in
	}
	if in.MessageRetentionDuration != nil {
		in, out := &in.MessageRetentionDuration, &out.MessageRetentionDuration
		*out = new(string)
		**out = **in
	}
	if in.PushConfig != nil {
		in, out := &in.PushConfig, &out.PushConfig
		*out = new(SubscriptionPushConfig)
		(*in).DeepCopyInto(*out)
	}
	if in.ResourceID != nil {
		in, out := &in.ResourceID, &out.ResourceID
		*out = new(string)
		**out = **in
	}
	if in.RetainAckedMessages != nil {
		in, out := &in.RetainAckedMessages, &out.RetainAckedMessages
		*out = new(bool)
		**out = **in
	}
	if in.RetryPolicy != nil {
		in, out := &in.RetryPolicy, &out.RetryPolicy
		*out = new(SubscriptionRetryPolicy)
		(*in).DeepCopyInto(*out)
	}
	out.TopicRef = in.TopicRef
	return
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PubSubSubscriptionSpec.
func (in *PubSubSubscriptionSpec) DeepCopy() *PubSubSubscriptionSpec {
	if in == nil {
		return nil
	}
	out := new(PubSubSubscriptionSpec)
	in.DeepCopyInto(out)
	return out
}

// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *PubSubSubscriptionStatus) DeepCopyInto(out *PubSubSubscriptionStatus) {
	*out = *in
	if in.Conditions != nil {
		// element-wise value copy; assumes v1alpha1.Condition holds no
		// reference fields (generator-determined)
		in, out := &in.Conditions, &out.Conditions
		*out = make([]v1alpha1.Condition, len(*in))
		copy(*out, *in)
	}
	return
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PubSubSubscriptionStatus.
func (in *PubSubSubscriptionStatus) DeepCopy() *PubSubSubscriptionStatus {
	if in == nil {
		return nil
	}
	out := new(PubSubSubscriptionStatus)
	in.DeepCopyInto(out)
	return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *PubSubTopic) DeepCopyInto(out *PubSubTopic) {
	*out = *in
	out.TypeMeta = in.TypeMeta
	in.ObjectMeta.DeepCopyInto(&out.ObjectMeta)
	in.Spec.DeepCopyInto(&out.Spec)
	in.Status.DeepCopyInto(&out.Status)
	return
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PubSubTopic.
func (in *PubSubTopic) DeepCopy() *PubSubTopic {
	if in == nil {
		return nil
	}
	out := new(PubSubTopic)
	in.DeepCopyInto(out)
	return out
}

// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (in *PubSubTopic) DeepCopyObject() runtime.Object {
	if c := in.DeepCopy(); c != nil {
		return c
	}
	return nil
}

// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *PubSubTopicList) DeepCopyInto(out *PubSubTopicList) {
	*out = *in
	out.TypeMeta = in.TypeMeta
	in.ListMeta.DeepCopyInto(&out.ListMeta)
	if in.Items != nil {
		// each list item is deep-copied individually
		in, out := &in.Items, &out.Items
		*out = make([]PubSubTopic, len(*in))
		for i := range *in {
			(*in)[i].DeepCopyInto(&(*out)[i])
		}
	}
	return
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PubSubTopicList.
func (in *PubSubTopicList) DeepCopy() *PubSubTopicList {
	if in == nil {
		return nil
	}
	out := new(PubSubTopicList)
	in.DeepCopyInto(out)
	return out
}

// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (in *PubSubTopicList) DeepCopyObject() runtime.Object {
	if c := in.DeepCopy(); c != nil {
		return c
	}
	return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *PubSubTopicSpec) DeepCopyInto(out *PubSubTopicSpec) {
	*out = *in
	// every pointer field below is re-allocated so the copy shares no
	// memory with the receiver
	if in.KmsKeyRef != nil {
		in, out := &in.KmsKeyRef, &out.KmsKeyRef
		*out = new(v1alpha1.ResourceRef)
		**out = **in
	}
	if in.MessageRetentionDuration != nil {
		in, out := &in.MessageRetentionDuration, &out.MessageRetentionDuration
		*out = new(string)
		**out = **in
	}
	if in.MessageStoragePolicy != nil {
		in, out := &in.MessageStoragePolicy, &out.MessageStoragePolicy
		*out = new(TopicMessageStoragePolicy)
		(*in).DeepCopyInto(*out)
	}
	if in.ResourceID != nil {
		in, out := &in.ResourceID, &out.ResourceID
		*out = new(string)
		**out = **in
	}
	return
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PubSubTopicSpec.
func (in *PubSubTopicSpec) DeepCopy() *PubSubTopicSpec {
	if in == nil {
		return nil
	}
	out := new(PubSubTopicSpec)
	in.DeepCopyInto(out)
	return out
}

// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *PubSubTopicStatus) DeepCopyInto(out *PubSubTopicStatus) {
	*out = *in
	if in.Conditions != nil {
		// element-wise value copy; assumes v1alpha1.Condition holds no
		// reference fields (generator-determined)
		in, out := &in.Conditions, &out.Conditions
		*out = make([]v1alpha1.Condition, len(*in))
		copy(*out, *in)
	}
	return
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PubSubTopicStatus.
func (in *PubSubTopicStatus) DeepCopy() *PubSubTopicStatus {
	if in == nil {
		return nil
	}
	out := new(PubSubTopicStatus)
	in.DeepCopyInto(out)
	return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *SubscriptionDeadLetterPolicy) DeepCopyInto(out *SubscriptionDeadLetterPolicy) {
	*out = *in
	// Pointer fields get fresh allocations so the copy is independent.
	if in.DeadLetterTopicRef != nil {
		in, out := &in.DeadLetterTopicRef, &out.DeadLetterTopicRef
		*out = new(v1alpha1.ResourceRef)
		**out = **in
	}
	if in.MaxDeliveryAttempts != nil {
		in, out := &in.MaxDeliveryAttempts, &out.MaxDeliveryAttempts
		*out = new(int)
		**out = **in
	}
	return
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SubscriptionDeadLetterPolicy.
func (in *SubscriptionDeadLetterPolicy) DeepCopy() *SubscriptionDeadLetterPolicy {
	// nil-safe: copying a nil receiver yields nil.
	if in == nil {
		return nil
	}
	out := new(SubscriptionDeadLetterPolicy)
	in.DeepCopyInto(out)
	return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *SubscriptionExpirationPolicy) DeepCopyInto(out *SubscriptionExpirationPolicy) {
	// The generator emitted a plain assignment, i.e. it determined this
	// struct contains no fields that need per-field deep copying.
	*out = *in
	return
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SubscriptionExpirationPolicy.
func (in *SubscriptionExpirationPolicy) DeepCopy() *SubscriptionExpirationPolicy {
	// nil-safe: copying a nil receiver yields nil.
	if in == nil {
		return nil
	}
	out := new(SubscriptionExpirationPolicy)
	in.DeepCopyInto(out)
	return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *SubscriptionOidcToken) DeepCopyInto(out *SubscriptionOidcToken) {
	*out = *in
	// Optional Audience string gets its own allocation.
	if in.Audience != nil {
		in, out := &in.Audience, &out.Audience
		*out = new(string)
		**out = **in
	}
	return
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SubscriptionOidcToken.
func (in *SubscriptionOidcToken) DeepCopy() *SubscriptionOidcToken {
	// nil-safe: copying a nil receiver yields nil.
	if in == nil {
		return nil
	}
	out := new(SubscriptionOidcToken)
	in.DeepCopyInto(out)
	return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *SubscriptionPushConfig) DeepCopyInto(out *SubscriptionPushConfig) {
	*out = *in
	if in.Attributes != nil {
		// Allocate a new map; copying the key/value pairs is sufficient
		// because both are plain strings.
		in, out := &in.Attributes, &out.Attributes
		*out = make(map[string]string, len(*in))
		for key, val := range *in {
			(*out)[key] = val
		}
	}
	if in.OidcToken != nil {
		// Nested struct has pointer fields, so delegate to its deep copy.
		in, out := &in.OidcToken, &out.OidcToken
		*out = new(SubscriptionOidcToken)
		(*in).DeepCopyInto(*out)
	}
	return
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SubscriptionPushConfig.
func (in *SubscriptionPushConfig) DeepCopy() *SubscriptionPushConfig {
	// nil-safe: copying a nil receiver yields nil.
	if in == nil {
		return nil
	}
	out := new(SubscriptionPushConfig)
	in.DeepCopyInto(out)
	return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *SubscriptionRetryPolicy) DeepCopyInto(out *SubscriptionRetryPolicy) {
	*out = *in
	// Optional backoff strings each get their own allocation.
	if in.MaximumBackoff != nil {
		in, out := &in.MaximumBackoff, &out.MaximumBackoff
		*out = new(string)
		**out = **in
	}
	if in.MinimumBackoff != nil {
		in, out := &in.MinimumBackoff, &out.MinimumBackoff
		*out = new(string)
		**out = **in
	}
	return
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SubscriptionRetryPolicy.
func (in *SubscriptionRetryPolicy) DeepCopy() *SubscriptionRetryPolicy {
	// nil-safe: copying a nil receiver yields nil.
	if in == nil {
		return nil
	}
	out := new(SubscriptionRetryPolicy)
	in.DeepCopyInto(out)
	return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *TopicMessageStoragePolicy) DeepCopyInto(out *TopicMessageStoragePolicy) {
	*out = *in
	if in.AllowedPersistenceRegions != nil {
		// Fresh backing array; string elements can be copied by value.
		in, out := &in.AllowedPersistenceRegions, &out.AllowedPersistenceRegions
		*out = make([]string, len(*in))
		copy(*out, *in)
	}
	return
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TopicMessageStoragePolicy.
func (in *TopicMessageStoragePolicy) DeepCopy() *TopicMessageStoragePolicy {
	// nil-safe: copying a nil receiver yields nil.
	if in == nil {
		return nil
	}
	out := new(TopicMessageStoragePolicy)
	in.DeepCopyInto(out)
	return out
}
|
goodmind/FlowDefinitelyTyped | flow-types/types/iferr_vx.x.x/flow_v0.25.x-/iferr.js | declare module "iferr" {
  // Shape of a Node-style callback: first argument is an optional error,
  // the remaining arguments are the results.
  declare type nodeCallback<T> = (err: Error | void, ...a: T[]) => any;

  // iferr(fail, succ): builds a callback that routes an error to `fail`
  // and the success results to `succ`.
  declare function iferr<T>(
    fail: (err: Error) => void,
    succ: (...result: T[]) => void
  ): nodeCallback<T>;

  // Namespace view of the module's named exports.
  declare var npm$namespace$iferr: {
    iferr: typeof iferr$iferr,
    tiferr: typeof iferr$tiferr,
    throwerr: typeof iferr$throwerr,
    printerr: typeof iferr$printerr
  };

  declare function iferr$iferr<T>(
    fail: (err: Error) => void,
    succ: (...result: T[]) => void
  ): nodeCallback<T>;

  // Like iferr, but exceptions thrown by `succ` are also routed to `fail`.
  declare function iferr$tiferr<T>(
    fail: (err: Error) => void,
    succ: (...result: T[]) => void
  ): nodeCallback<T>;

  // Callback that throws the error instead of passing it to a handler.
  declare function iferr$throwerr<T>(
    succ: (...result: T[]) => void
  ): nodeCallback<T>;

  // Callback that prints the error.
  declare function iferr$printerr(): nodeCallback<any>;

  declare export default typeof iferr;
}
|
tizenorg/profile.ivi.smartdevicelink | SDL_Android/SmartDeviceLinkProxyAndroid/src/com/smartdevicelink/proxy/rpc/PerformInteractionResponse.java | //
// Copyright (c) 2013 Ford Motor Company
//
package com.smartdevicelink.proxy.rpc;
import java.util.Hashtable;
import com.smartdevicelink.proxy.RPCResponse;
import com.smartdevicelink.proxy.constants.Names;
import com.smartdevicelink.proxy.rpc.enums.TriggerSource;
import com.smartdevicelink.util.DebugTool;
public class PerformInteractionResponse extends RPCResponse {

    /** Constructs an empty PerformInteraction response. */
    public PerformInteractionResponse() {
        super("PerformInteraction");
    }

    /** Constructs a response backed by an existing (deserialized) parameter table. */
    public PerformInteractionResponse(Hashtable hash) {
        super(hash);
    }

    /** Returns the ID of the choice the user selected, or null if absent. */
    public Integer getChoiceID() {
        return (Integer) parameters.get( Names.choiceID );
    }

    /**
     * Stores the selected choice ID.
     * NOTE(review): a null argument is silently ignored, so a previously
     * stored value cannot be cleared through this setter — confirm intended.
     */
    public void setChoiceID( Integer choiceID ) {
        if (choiceID != null) {
            parameters.put(Names.choiceID, choiceID );
        }
    }

    /**
     * Returns how the interaction was triggered. The stored value may be
     * either the enum itself or its raw wire String; a String is parsed,
     * and an unrecognized value logs the failure and yields null.
     */
    public TriggerSource getTriggerSource() {
        Object obj = parameters.get(Names.triggerSource);
        if (obj instanceof TriggerSource) {
            return (TriggerSource) obj;
        } else if (obj instanceof String) {
            TriggerSource theCode = null;
            try {
                theCode = TriggerSource.valueForString((String) obj);
            } catch (Exception e) {
                DebugTool.logError("Failed to parse " + getClass().getSimpleName() + "." + Names.triggerSource, e);
            }
            return theCode;
        }
        return null;
    }

    /**
     * Stores the trigger source.
     * NOTE(review): like setChoiceID, null is silently ignored.
     */
    public void setTriggerSource( TriggerSource triggerSource ) {
        if (triggerSource != null) {
            parameters.put(Names.triggerSource, triggerSource );
        }
    }
}
|
androidworx/andworx | android-core/plugins/org.eclipse.andworx.build/src/org/eclipse/andworx/build/task/RenderscriptCompileTask.java | /*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.eclipse.andworx.build.task;
import java.io.File;
import java.io.IOException;
import java.util.Collection;
import java.util.Set;
import java.util.concurrent.Future;
import org.eclipse.andworx.api.attributes.AndroidArtifacts.ArtifactType;
import org.eclipse.andworx.context.VariantContext;
import org.eclipse.andworx.core.AndworxVariantConfiguration;
import org.eclipse.andworx.helper.BuildHelper;
import org.eclipse.andworx.log.SdkLogger;
import org.eclipse.andworx.task.StandardBuildTask;
import org.eclipse.andworx.task.TaskFactory;
import com.android.annotations.NonNull;
import com.android.builder.core.AndroidBuilder;
import com.android.builder.internal.compiler.DirectoryWalker;
import com.android.ide.common.process.LoggedProcessOutputHandler;
import com.android.ide.common.process.ProcessException;
import com.android.utils.ILogger;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.SettableFuture;
/**
* Compile Renderscript files
*/
public class RenderscriptCompileTask extends StandardBuildTask {
    /** Name this task is registered under. */
    public static final String TASK_NAME = "renderscript compile";

    private static ILogger logger = SdkLogger.getLogger(RenderscriptCompileTask.class.getName());

    /** Variant being built; supplies source sets, options and output directories. */
    private final VariantContext variantScope;
    /** Helper used to (re)create output directories. */
    private final BuildHelper buildHelper;
    /** Builder that wraps the llvm-rs-cc invocation. */
    private final AndroidBuilder androidBuilder;

    public RenderscriptCompileTask(VariantContext variantScope, BuildHelper buildHelper, AndroidBuilder androidBuilder, TaskFactory taskFactory) {
        super(taskFactory);
        this.variantScope = variantScope;
        this.buildHelper = buildHelper;
        this.androidBuilder = androidBuilder;
    }

    @Override
    public String getTaskName() {
        return TASK_NAME;
    }

    /**
     * Runs the compilation synchronously and reports the outcome through the
     * returned future: set to null on success, or to the thrown exception.
     */
    @Override
    public Future<Void> doFullTaskAction() {
        final SettableFuture<Void> actualResult = SettableFuture.create();
        try {
            AndworxVariantConfiguration variantConfig = variantScope.getVariantConfiguration();
            compileAllRenderscriptFiles(variantConfig);
            actualResult.set(null);
        } catch (Exception e) {
            actualResult.setException(e);
        }
        return actualResult;
    }

    /**
     * Prepares the four Renderscript output directories (generated Java
     * sources, resources, object files, libraries) and invokes the compiler
     * over all of the variant's Renderscript source directories.
     */
    private void compileAllRenderscriptFiles(AndworxVariantConfiguration variantConfig) throws IOException, InterruptedException, ProcessException {
        Collection<File> sourceDirectories = variantConfig.getRenderscriptSourceList();
        File sourceDestDir = variantScope.getRenderscriptSourceOutputDir();
        buildHelper.prepareDir(sourceDestDir);
        File resDestDir = variantScope.getRenderscriptResOutputDir();
        buildHelper.prepareDir(resDestDir);
        File objDestDir = variantScope.getRenderscriptObjOutputDir();
        buildHelper.prepareDir(objDestDir);
        File libDestDir = variantScope.getRenderscriptLibOutputDir();
        buildHelper.prepareDir(libDestDir);
        androidBuilder.compileAllRenderscriptFiles(
             sourceDirectories,
             getImportFolders(sourceDirectories),
             sourceDestDir,
             resDestDir,
             objDestDir,
             libDestDir,
             variantConfig.getRenderscriptTarget(),
             variantConfig.getBuildType().isRenderscriptDebuggable(),
             variantConfig.getBuildType().getRenderscriptOptimLevel(),
             variantConfig.getRenderscriptNdkModeEnabled(),
             variantConfig.getRenderscriptSupportModeEnabled(),
             // TODO - NDK config
             null, //getNdkConfig() == null ? null : getNdkConfig().getAbiFilters(),
             new LoggedProcessOutputHandler(logger));
    }

    // Returns the import folders. If the .rsh files are not directly under the import folders,
    // we need to get the leaf folders, as this is what llvm-rs-cc expects.
    @NonNull
    private Collection<File> getImportFolders(Collection<File> sourceDirectories) throws IOException {
        Set<File> results = Sets.newHashSet();

        // Candidate roots: dependency-provided renderscript artifacts plus
        // the variant's own source directories.
        Collection<File> dirs = Lists.newArrayList();
        Collection<File> importDirs = variantScope.getArtifactFileCollection(ArtifactType.RENDERSCRIPT);
        dirs.addAll(importDirs);
        dirs.addAll(sourceDirectories);

        for (File dir : dirs) {
            // TODO(samwho): should "rsh" be a constant somewhere?
            // Collect the parent directory of every .rsh header found.
            DirectoryWalker.builder()
                    .root(dir.toPath())
                    .extensions("rsh")
                    .action((start, path) -> results.add(path.getParent().toFile()))
                    .build()
                    .walk();
        }

        return results;
    }
}
|
ArrogantWombatics/openbsd-src | sys/miscfs/fuse/fuse_vfsops.c | <reponame>ArrogantWombatics/openbsd-src<gh_stars>1-10
/* $OpenBSD: fuse_vfsops.c,v 1.23 2016/06/19 11:54:33 natano Exp $ */
/*
* Copyright (c) 2012-2013 <NAME> <<EMAIL>>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#include <sys/param.h>
#include <sys/systm.h>
#include <sys/file.h>
#include <sys/filedesc.h>
#include <sys/malloc.h>
#include <sys/mount.h>
#include <sys/pool.h>
#include <sys/proc.h>
#include <sys/specdev.h>
#include <sys/statvfs.h>
#include <sys/sysctl.h>
#include <sys/vnode.h>
#include <sys/fusebuf.h>
#include "fusefs_node.h"
#include "fusefs.h"
/* VFS operation implementations for fusefs (prototypes). */
int fusefs_mount(struct mount *, const char *, void *, struct nameidata *,
    struct proc *);
int fusefs_start(struct mount *, int, struct proc *);
int fusefs_unmount(struct mount *, int, struct proc *);
int fusefs_root(struct mount *, struct vnode **);
int fusefs_quotactl(struct mount *, int, uid_t, caddr_t, struct proc *);
int fusefs_statfs(struct mount *, struct statfs *, struct proc *);
int fusefs_sync(struct mount *, int, struct ucred *, struct proc *);
int fusefs_vget(struct mount *, ino_t, struct vnode **);
int fusefs_fhtovp(struct mount *, struct fid *, struct vnode **);
int fusefs_vptofh(struct vnode *, struct fid *);
int fusefs_init(struct vfsconf *);
int fusefs_sysctl(int *, u_int, void *, size_t *, void *, size_t,
    struct proc *);
int fusefs_checkexp(struct mount *, struct mbuf *, int *,
    struct ucred **);

/*
 * VFS dispatch table.  Positional initializer: entries must stay in the
 * order the struct vfsops members are declared.
 */
const struct vfsops fusefs_vfsops = {
	fusefs_mount,
	fusefs_start,
	fusefs_unmount,
	fusefs_root,
	fusefs_quotactl,
	fusefs_statfs,
	fusefs_sync,
	fusefs_vget,
	fusefs_fhtovp,
	fusefs_vptofh,
	fusefs_init,
	fusefs_sysctl,
	fusefs_checkexp
};

/* Backing pool for fusebuf messages; initialized in fusefs_init(). */
struct pool fusefs_fbuf_pool;
/*
 * Mount a fusefs instance.  The userland file server passes (via
 * fusefs_args) the descriptor of the /dev/fuse character device it has
 * open; the mount is bound to that device.
 */
int
fusefs_mount(struct mount *mp, const char *path, void *data,
    struct nameidata *ndp, struct proc *p)
{
	struct fusefs_mnt *fmp;
	struct fusebuf *fbuf;
	struct fusefs_args args;
	struct vnode *vp;
	struct file *fp;
	int error;

	/* Remount/update of an existing fusefs mount is not supported. */
	if (mp->mnt_flag & MNT_UPDATE)
		return (EOPNOTSUPP);

	/* Copy the mount arguments in from userland. */
	error = copyin(data, &args, sizeof(struct fusefs_args));
	if (error)
		return (error);

	/*
	 * args.fd must be an open vnode-backed descriptor referring to a
	 * character device (the fuse device).
	 * NOTE(review): the error paths below return without releasing fp;
	 * confirm whether a file reference must be dropped here.
	 */
	if ((fp = fd_getfile(p->p_fd, args.fd)) == NULL)
		return (EBADF);

	if (fp->f_type != DTYPE_VNODE)
		return (EINVAL);

	vp = fp->f_data;
	if (vp->v_type != VCHR)
		return (EBADF);

	fmp = malloc(sizeof(*fmp), M_FUSEFS, M_WAITOK | M_ZERO);
	fmp->mp = mp;
	fmp->sess_init = 0;
	fmp->dev = vp->v_rdev;
	/* Clamp the server-requested read size to what a fusebuf can carry. */
	if (args.max_read > 0)
		fmp->max_read = MIN(args.max_read, FUSEBUFMAXSIZE);
	else
		fmp->max_read = FUSEBUFMAXSIZE;

	mp->mnt_data = fmp;
	mp->mnt_flag |= MNT_LOCAL;
	vfs_getnewfsid(mp);

	bzero(mp->mnt_stat.f_mntonname, MNAMELEN);
	strlcpy(mp->mnt_stat.f_mntonname, path, MNAMELEN);
	bzero(mp->mnt_stat.f_mntfromname, MNAMELEN);
	strlcpy(mp->mnt_stat.f_mntfromname, "fusefs", MNAMELEN);
	bzero(mp->mnt_stat.f_mntfromspec, MNAMELEN);
	strlcpy(mp->mnt_stat.f_mntfromspec, "fusefs", MNAMELEN);

	/* Bind the mount to the device and kick off the FUSE handshake. */
	fuse_device_set_fmp(fmp, 1);
	fbuf = fb_setup(0, 0, FBT_INIT, p);

	/* cannot tsleep on mount */
	fuse_device_queue_fbuf(fmp->dev, fbuf);

	return (0);
}
/* Nothing to do once the mount is established. */
int
fusefs_start(struct mount *mp, int flags, struct proc *p)
{
	return (0);
}
/*
 * Unmount: flush all vnodes, notify the file server that the session is
 * over (FBT_DESTROY), then tear down the per-mount state.
 */
int
fusefs_unmount(struct mount *mp, int mntflags, struct proc *p)
{
	struct fusefs_mnt *fmp;
	struct fusebuf *fbuf;
	int flags = 0;
	int error;

	fmp = VFSTOFUSEFS(mp);

	if (mntflags & MNT_FORCE)
		flags |= FORCECLOSE;

	if ((error = vflush(mp, NULLVP, flags)))
		return (error);

	if (fmp->sess_init) {
		fmp->sess_init = 0;
		fbuf = fb_setup(0, 0, FBT_DESTROY, p);

		error = fb_queue(fmp->dev, fbuf);

		/* Non-fatal: the mount is going away regardless. */
		if (error)
			printf("fusefs: error %d on destroy\n", error);

		fb_delete(fbuf);
	}

	fuse_device_cleanup(fmp->dev, NULL);
	fuse_device_set_fmp(fmp, 0);
	free(fmp, M_FUSEFS, 0);
	mp->mnt_data = NULL;

	return (0);
}
/*
 * Return the root vnode of the mount.  The root inode number is fixed
 * (FUSE_ROOTINO) and the root is always treated as a directory.
 */
int
fusefs_root(struct mount *mp, struct vnode **vpp)
{
	struct vnode *nvp;
	struct fusefs_node *ip;
	int error;

	if ((error = VFS_VGET(mp, (ino_t)FUSE_ROOTINO, &nvp)) != 0)
		return (error);

	ip = VTOI(nvp);
	nvp->v_type = VDIR;
	ip->vtype = VDIR;

	*vpp = nvp;
	return (0);
}
/* Quotas are not supported on fusefs. */
int
fusefs_quotactl(struct mount *mp, int cmds, uid_t uid, caddr_t arg,
    struct proc *p)
{
	return (EOPNOTSUPP);
}
/*
 * Fill in file system statistics.  When the FUSE session is up, the
 * numbers are fetched from the userland server via FBT_STATFS; before
 * the session handshake completes, all values are reported as zero.
 */
int
fusefs_statfs(struct mount *mp, struct statfs *sbp, struct proc *p)
{
	struct fusefs_mnt *fmp;
	struct fusebuf *fbuf;
	int error;

	fmp = VFSTOFUSEFS(mp);

	copy_statfs_info(sbp, mp);

	if (fmp->sess_init) {
		fbuf = fb_setup(0, FUSE_ROOT_ID, FBT_STATFS, p);

		error = fb_queue(fmp->dev, fbuf);

		if (error) {
			fb_delete(fbuf);
			return (error);
		}

		sbp->f_bavail = fbuf->fb_stat.f_bavail;
		sbp->f_bfree = fbuf->fb_stat.f_bfree;
		sbp->f_blocks = fbuf->fb_stat.f_blocks;
		sbp->f_files = fbuf->fb_stat.f_files;
		sbp->f_ffree = fbuf->fb_stat.f_ffree;
		sbp->f_favail = fbuf->fb_stat.f_favail;
		/*
		 * statvfs -> statfs translation: the server's fragment size
		 * becomes f_bsize, its block size becomes f_iosize.
		 */
		sbp->f_bsize = fbuf->fb_stat.f_frsize;
		sbp->f_iosize = fbuf->fb_stat.f_bsize;
		sbp->f_namemax = fbuf->fb_stat.f_namemax;
		fb_delete(fbuf);
	} else {
		sbp->f_bavail = 0;
		sbp->f_bfree = 0;
		sbp->f_blocks = 0;
		sbp->f_ffree = 0;
		sbp->f_favail = 0;
		sbp->f_files = 0;
		sbp->f_bsize = 0;
		sbp->f_iosize = 0;
		sbp->f_namemax = 0;
	}

	return (0);
}
/* No dirty kernel-side state to flush; writes go through the server. */
int
fusefs_sync(struct mount *mp, int waitfor, struct ucred *cred,
    struct proc *p)
{
	return (0);
}
/*
 * Look up (or create) the vnode for inode number `ino`.  Vnodes are
 * cached in the UFS inode hash, keyed by device and inode number.
 */
int
fusefs_vget(struct mount *mp, ino_t ino, struct vnode **vpp)
{
	struct fusefs_mnt *fmp;
	struct fusefs_node *ip;
	struct vnode *nvp;
	int i;
	int error;
retry:
	fmp = VFSTOFUSEFS(mp);
	/*
	 * check if vnode is in hash.
	 */
	if ((*vpp = ufs_ihashget(fmp->dev, ino)) != NULLVP)
		return (0);

	/*
	 * if not create it
	 */
	if ((error = getnewvnode(VT_FUSEFS, mp, &fusefs_vops, &nvp)) != 0) {
		printf("fusefs: getnewvnode error\n");
		*vpp = NULLVP;
		return (error);
	}

	ip = malloc(sizeof(*ip), M_FUSEFS, M_WAITOK | M_ZERO);
	rrw_init(&ip->ufs_ino.i_lock, "fuseinode");
	nvp->v_data = ip;
	ip->ufs_ino.i_vnode = nvp;
	ip->ufs_ino.i_dev = fmp->dev;
	ip->ufs_ino.i_number = ino;
	ip->parent = 0;

	/* No FUSE file handles are open yet for this node. */
	for (i = 0; i < FUFH_MAXTYPE; i++)
		ip->fufh[i].fh_type = FUFH_INVALID;

	error = ufs_ihashins(&ip->ufs_ino);

	if (error) {
		vrele(nvp);

		/* Lost a race: someone inserted this inode first; retry. */
		if (error == EEXIST)
			goto retry;

		return (error);
	}

	ip->ufs_ino.i_ump = (struct ufsmount *)fmp;

	if (ino == FUSE_ROOTINO)
		nvp->v_flag |= VROOT;

	*vpp = nvp;

	return (0);
}
/*
 * NFS file handle -> vnode.  Validates the handle's size and inode
 * number before delegating to VFS_VGET().
 */
int
fusefs_fhtovp(struct mount *mp, struct fid *fhp, struct vnode **vpp)
{
	struct ufid *ufhp;

	ufhp = (struct ufid *)fhp;
	if (ufhp->ufid_len != sizeof(struct ufid) ||
	    ufhp->ufid_ino < FUSE_ROOTINO)
		return (ESTALE);

	return (VFS_VGET(mp, ufhp->ufid_ino, vpp));
}
/* Vnode -> NFS file handle: the handle carries only the inode number. */
int
fusefs_vptofh(struct vnode *vp, struct fid *fhp)
{
	struct fusefs_node *ip;
	struct ufid *ufhp;

	ip = VTOI(vp);
	ufhp = (struct ufid *)fhp;
	ufhp->ufid_len = sizeof(struct ufid);
	ufhp->ufid_ino = ip->ufs_ino.i_number;

	return (0);
}
/* One-time initialization: create the fusebuf message pool. */
int
fusefs_init(struct vfsconf *vfc)
{
	pool_init(&fusefs_fbuf_pool, sizeof(struct fusebuf), 0, 0, PR_WAITOK,
	    "fmsg", NULL);

	return (0);
}
/* Read-only sysctl interface exposing fusefs statistics counters. */
int
fusefs_sysctl(int *name, u_int namelen, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen, struct proc *p)
{
	extern int stat_fbufs_in, stat_fbufs_wait, stat_opened_fusedev;

	/* all sysctl names at this level are terminal */
	if (namelen != 1)
		return (ENOTDIR); /* overloaded */

	switch (name[0]) {
	case FUSEFS_OPENDEVS:
		return (sysctl_rdint(oldp, oldlenp, newp,
		    stat_opened_fusedev));
	case FUSEFS_INFBUFS:
		return (sysctl_rdint(oldp, oldlenp, newp, stat_fbufs_in));
	case FUSEFS_WAITFBUFS:
		return (sysctl_rdint(oldp, oldlenp, newp, stat_fbufs_wait));
	case FUSEFS_POOL_NBPAGES:
		return (sysctl_rdint(oldp, oldlenp, newp,
		    fusefs_fbuf_pool.pr_npages));
	default:
		return (EOPNOTSUPP);
	}
}
/* NFS exporting of fusefs mounts is not supported. */
int
fusefs_checkexp(struct mount *mp, struct mbuf *nam, int *extflagsp,
    struct ucred **credanonp)
{
	return (EOPNOTSUPP);
}
|
tototoshi/scalaflavor4j | src/main/java/com/m3/scalaflavor4j/Function4.java | /*
* Copyright 2012 M3, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package com.m3.scalaflavor4j;
/**
* A function of 4 parameters.
*
* @see "http://www.scala-lang.org/api/2.9.1/index.html#scala.Function4"
*/
public abstract class Function4<T1, T2, T3, T4, R> {

    /** Applies this function to the arguments; delegates to {@link #_}. */
    public R apply(T1 v1, T2 v2, T3 v3, T4 v4) throws Exception {
        return _(v1, v2, v3, v4);
    }

    /**
     * Apply the body of this function to the arguments.
     */
    public abstract R _(T1 v1, T2 v2, T3 v3, T4 v4) throws Exception;

    /**
     * Creates a tupled version of this function: instead of 4 arguments, it
     * accepts a single Tuple4 argument.
     */
    public F1<Tuple4<T1, T2, T3, T4>, R> tupled() {
        // Capture the receiver so the anonymous F1 can delegate to it.
        final Function4<T1, T2, T3, T4, R> _this = this;
        return new F1<Tuple4<T1, T2, T3, T4>, R>() {
            public R _(Tuple4<T1, T2, T3, T4> tuple) throws Exception {
                return _this.apply(tuple._1(), tuple._2(), tuple._3(), tuple._4());
            }
        };
    }

    /**
     * Creates a curried version of this function.
     */
    public F1<T1, Function1<T2, Function1<T3, Function1<T4, R>>>> curried() {
        final Function4<T1, T2, T3, T4, R> _this = this;
        // One nesting level per parameter: each anonymous function captures
        // the arguments bound so far and returns a function of the next one;
        // the innermost level finally invokes the original 4-ary function.
        return new F1<T1, Function1<T2, Function1<T3, Function1<T4, R>>>>() {
            public Function1<T2, Function1<T3, Function1<T4, R>>> _(final T1 v1) {
                return new F1<T2, Function1<T3, Function1<T4, R>>>() {
                    public Function1<T3, Function1<T4, R>> _(final T2 v2) {
                        return new F1<T3, Function1<T4, R>>() {
                            public Function1<T4, R> _(final T3 v3) {
                                return new F1<T4, R>() {
                                    public R _(T4 v4) throws Exception {
                                        return _this.apply(v1, v2, v3, v4);
                                    }
                                };
                            }
                        };
                    }
                };
            }
        };
    }

    /**
     * Creates a String representation of this object.
     */
    @Override
    public String toString() {
        return "<function4>";
    }
}
|
MFazio23/ComicReader | ComicReader/src/com/blogspot/applications4android/comicreader/comics/GoComics/BiffnRiley.java | package com.blogspot.applications4android.comicreader.comics.GoComics;
import java.util.Calendar;
import com.blogspot.applications4android.comicreader.comictypes.DailyGoComicsCom;
public class BiffnRiley extends DailyGoComicsCom {

    /** Registers the "Biff & Riley" strip hosted on gocomics.com. */
    public BiffnRiley() {
        super();
        // URL slug used by gocomics.com for this strip.
        mComicName = "biff-and-riley";
        mFirstCal = Calendar.getInstance();
        // Calendar months are 0-based: (2010, 0, 12) is January 12, 2010 —
        // presumably the strip's first publication date; confirm if changed.
        mFirstCal.set(2010, 0, 12);
    }
}
|
minskylab/framework-core | ent/deathrecord.go | // Code generated by entc, DO NOT EDIT.
package ent
import (
"fmt"
"opencensus/core/ent/deathrecord"
"strings"
"time"
"entgo.io/ent/dialect/sql"
)
// DeathRecord is the model entity for the DeathRecord schema.
// DeathRecord is the model entity for the DeathRecord schema.
type DeathRecord struct {
	config `json:"-"`
	// ID of the ent.
	ID int `json:"id,omitempty"`
	// ReportedDate holds the value of the "reportedDate" field.
	ReportedDate time.Time `json:"reportedDate,omitempty"`
	// CollectedDate holds the value of the "collectedDate" field.
	CollectedDate time.Time `json:"collectedDate,omitempty"`
	// SinadefRegisters holds the value of the "sinadefRegisters" field.
	SinadefRegisters int `json:"sinadefRegisters,omitempty"`
	// MinsaRegisters holds the value of the "minsaRegisters" field.
	MinsaRegisters int `json:"minsaRegisters,omitempty"`
	// Edges holds the relations/edges for other nodes in the graph.
	// The values are being populated by the DeathRecordQuery when eager-loading is set.
	Edges DeathRecordEdges `json:"edges"`
}

// DeathRecordEdges holds the relations/edges for other nodes in the graph.
type DeathRecordEdges struct {
	// Places holds the value of the places edge.
	Places []*Place `json:"places,omitempty"`
	// loadedTypes holds the information for reporting if a
	// type was loaded (or requested) in eager-loading or not.
	// Index 0 corresponds to the "places" edge.
	loadedTypes [1]bool
}

// PlacesOrErr returns the Places value or an error if the edge
// was not loaded in eager-loading.
func (e DeathRecordEdges) PlacesOrErr() ([]*Place, error) {
	if e.loadedTypes[0] {
		return e.Places, nil
	}
	return nil, &NotLoadedError{edge: "places"}
}
// scanValues returns the types for scanning values from sql.Rows.
// Each column is paired with a nullable scanner matching its Go type.
func (*DeathRecord) scanValues(columns []string) ([]interface{}, error) {
	values := make([]interface{}, len(columns))
	for i := range columns {
		switch columns[i] {
		case deathrecord.FieldID, deathrecord.FieldSinadefRegisters, deathrecord.FieldMinsaRegisters:
			values[i] = &sql.NullInt64{}
		case deathrecord.FieldReportedDate, deathrecord.FieldCollectedDate:
			values[i] = &sql.NullTime{}
		default:
			return nil, fmt.Errorf("unexpected column %q for type DeathRecord", columns[i])
		}
	}
	return values, nil
}

// assignValues assigns the values that were returned from sql.Rows (after scanning)
// to the DeathRecord fields.  NULL columns (Valid == false) leave the
// corresponding field at its zero value.
func (dr *DeathRecord) assignValues(columns []string, values []interface{}) error {
	if m, n := len(values), len(columns); m < n {
		return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
	}
	for i := range columns {
		switch columns[i] {
		case deathrecord.FieldID:
			value, ok := values[i].(*sql.NullInt64)
			if !ok {
				return fmt.Errorf("unexpected type %T for field id", value)
			}
			dr.ID = int(value.Int64)
		case deathrecord.FieldReportedDate:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field reportedDate", values[i])
			} else if value.Valid {
				dr.ReportedDate = value.Time
			}
		case deathrecord.FieldCollectedDate:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field collectedDate", values[i])
			} else if value.Valid {
				dr.CollectedDate = value.Time
			}
		case deathrecord.FieldSinadefRegisters:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field sinadefRegisters", values[i])
			} else if value.Valid {
				dr.SinadefRegisters = int(value.Int64)
			}
		case deathrecord.FieldMinsaRegisters:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field minsaRegisters", values[i])
			} else if value.Valid {
				dr.MinsaRegisters = int(value.Int64)
			}
		}
	}
	return nil
}

// QueryPlaces queries the "places" edge of the DeathRecord entity.
func (dr *DeathRecord) QueryPlaces() *PlaceQuery {
	return (&DeathRecordClient{config: dr.config}).QueryPlaces(dr)
}

// Update returns a builder for updating this DeathRecord.
// Note that you need to call DeathRecord.Unwrap() before calling this method if this DeathRecord
// was returned from a transaction, and the transaction was committed or rolled back.
func (dr *DeathRecord) Update() *DeathRecordUpdateOne {
	return (&DeathRecordClient{config: dr.config}).UpdateOne(dr)
}

// Unwrap unwraps the DeathRecord entity that was returned from a transaction after it was closed,
// so that all future queries will be executed through the driver which created the transaction.
func (dr *DeathRecord) Unwrap() *DeathRecord {
	tx, ok := dr.config.driver.(*txDriver)
	if !ok {
		panic("ent: DeathRecord is not a transactional entity")
	}
	// Replace the transaction-bound driver with the underlying one.
	dr.config.driver = tx.drv
	return dr
}

// String implements the fmt.Stringer.
func (dr *DeathRecord) String() string {
	var builder strings.Builder
	builder.WriteString("DeathRecord(")
	builder.WriteString(fmt.Sprintf("id=%v", dr.ID))
	builder.WriteString(", reportedDate=")
	builder.WriteString(dr.ReportedDate.Format(time.ANSIC))
	builder.WriteString(", collectedDate=")
	builder.WriteString(dr.CollectedDate.Format(time.ANSIC))
	builder.WriteString(", sinadefRegisters=")
	builder.WriteString(fmt.Sprintf("%v", dr.SinadefRegisters))
	builder.WriteString(", minsaRegisters=")
	builder.WriteString(fmt.Sprintf("%v", dr.MinsaRegisters))
	builder.WriteByte(')')
	return builder.String()
}
// DeathRecords is a parsable slice of DeathRecord.
type DeathRecords []*DeathRecord

// config propagates the client configuration to every element so that
// follow-up queries (edges, updates) on the loaded entities work.
func (dr DeathRecords) config(cfg config) {
	for _i := range dr {
		dr[_i].config = cfg
	}
}
|
ehealth-ua/ehealth.web | packages/auth/jest.config.js | <reponame>ehealth-ua/ehealth.web
// Jest project configuration for the auth package's unit tests.
// All real settings come from the shared @ehealth/test-preset;
// displayName labels this project's output in multi-project runs.
module.exports = {
  displayName: "auth:unit",
  preset: "@ehealth/test-preset/unit"
};
|
vborisoff/reladomo | reladomo/src/test/java/com/gs/fw/common/mithra/test/TestThreadExecutor.java | /*
Copyright 2016 <NAME>.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
// Portions copyright <NAME>. Licensed under Apache 2.0 license
package com.gs.fw.common.mithra.test;
import com.gs.fw.common.mithra.util.AutoShutdownThreadExecutor;
import com.gs.fw.common.mithra.util.CpuBoundTask;
import com.gs.fw.common.mithra.util.FixedCountTaskFactory;
import junit.framework.TestCase;
import org.eclipse.collections.impl.set.mutable.primitive.LongHashSet;
public class TestThreadExecutor extends TestCase
{
public void testOneThread()
{
runExecutor(1);
}
public void testTwoThreads()
{
runExecutor(2);
}
public void testFourThreads()
{
runExecutor(4);
}
public void testSlowQueuing()
{
AutoShutdownThreadExecutor executor = new AutoShutdownThreadExecutor(1, "TestThreadExecutor");
executor.setTimeoutInMilliseconds(10);
TestRunnable[] runnables = new TestRunnable[100];
for(int i=0;i<runnables.length;i++)
{
runnables[i] = new TestRunnable();
}
for(int i=0;i<runnables.length;i++)
{
executor.submit(runnables[i]);
sleep(20);
}
executor.shutdownAndWaitUntilDone();
LongHashSet set = new LongHashSet(100);
for(int i=0;i<runnables.length;i++)
{
assertTrue(runnables[i].isDone());
set.add(runnables[i].getThreadId());
}
assertTrue(set.size() > 10);
}
public void runExecutor(int threads)
{
AutoShutdownThreadExecutor executor = new AutoShutdownThreadExecutor(threads, "TestThreadExecutor"+threads);
TestRunnable[] runnables = new TestRunnable[10000];
for(int i=0;i<runnables.length;i++)
{
runnables[i] = new TestRunnable();
}
for(int i=0;i<runnables.length;i++)
{
executor.submit(runnables[i]);
}
executor.shutdownAndWaitUntilDone();
LongHashSet set = new LongHashSet(threads);
for(int i=0;i<runnables.length;i++)
{
assertTrue(runnables[i].isDone());
set.add(runnables[i].getThreadId());
}
assertEquals(threads, set.size());
}
public void testExceptionHandling()
{
int threads = 4;
AutoShutdownThreadExecutor executor = new AutoShutdownThreadExecutor(threads, "TestThreadExecutor"+threads);
Runnable[] runnables = new Runnable[3];
for(int i=0;i<2;i++)
{
runnables[i] = new TestRunnableWithWait();
}
runnables[2] = new TestRunnableWithException();
for(int i=0;i<runnables.length;i++)
{
executor.submit(runnables[i]);
}
executor.shutdownAndWaitUntilDone();
assertTrue(((TestRunnableWithWait)runnables[0]).isDone());
assertTrue(executor.isAborted());
}
public static void sleep(long millis)
{
long now = System.currentTimeMillis();
long target = now + millis;
while(now < target)
{
try
{
Thread.sleep(target-now);
}
catch (InterruptedException e)
{
fail("why were we interrupted?");
}
now = System.currentTimeMillis();
}
}
private static class TestRunnable implements Runnable
{
private long threadId = -12345678;
public void run()
{
threadId = Thread.currentThread().getId();
}
public long getThreadId()
{
return threadId;
}
public boolean isDone()
{
return threadId != -12345678;
}
}
private static class TestRunnableWithWait implements Runnable
{
private long threadId = -12345678;
public void run()
{
sleep(1000);
threadId = Thread.currentThread().getId();
}
public long getThreadId()
{
return threadId;
}
public boolean isDone()
{
return threadId != -12345678;
}
}
private static class TestRunnableWithException implements Runnable
{
private long threadId = -12345678;
public void run()
{
throw new RuntimeException("for testing only");
}
public long getThreadId()
{
return threadId;
}
public boolean isDone()
{
return threadId != -12345678;
}
}
public void testCpuThreadPool()
{
    // Builds 60 CPU-bound tasks that each just tick a flag, runs them all
    // through the fixed-count task factory, and verifies every task executed.
    CpuBoundTask[] tasks = new CpuBoundTask[60];
    final boolean[] todo = new boolean[60];
    for(int i=0;i<tasks.length;i++)
    {
        final int count = i; // effectively-final copy for the anonymous class
        tasks[i] = new CpuBoundTask()
        {
            @Override
            public void execute()
            {
                todo[count] = true;
            }
        };
    }
    new FixedCountTaskFactory(tasks).startAndWorkUntilFinished();
    // Every slot must have been visited by some worker.
    for(int i=0;i<todo.length;i++) assertTrue(todo[i]);
}
}
|
sriram18981/core-java-learnings | src/learnings/sriram/sort/BubbleSort.java | <filename>src/learnings/sriram/sort/BubbleSort.java
package learnings.sriram.sort;
public class BubbleSort {

    public static void main(String[] args) {
        int[] intArray = { 1, 23, 12, 34, 56, 65, 43, 56, 78, 43, 54, 765, 5433, 234 };
        int[] sortedArr = bubbleSort(intArray);
        printArray(sortedArr);
    }

    /**
     * Sorts the given array in ascending order, in place, using bubble sort.
     * After each pass the largest remaining element has bubbled to the end,
     * so the scanned range shrinks by one each pass (the original always
     * re-scanned the full array). Widened from private to public so the
     * utility is reusable and testable; existing callers are unaffected.
     *
     * @param array array to sort; may be empty
     * @return the same array instance, sorted
     */
    public static int[] bubbleSort(int[] array) {
        boolean swapped;
        int unsortedLength = array.length;
        do {
            swapped = false;
            for (int i = 0; i < unsortedLength - 1; i++) {
                if (array[i] > array[i + 1]) {
                    int temp = array[i];
                    array[i] = array[i + 1];
                    array[i + 1] = temp;
                    swapped = true;
                }
            }
            unsortedLength--; // tail beyond this index is now sorted
        } while (swapped);
        return array;
    }

    /**
     * Prints the array as comma-separated values on one line.
     * Fixed: the original emitted a trailing comma after the last element.
     *
     * @param array array to print
     */
    public static void printArray(int[] array) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < array.length; i++) {
            if (i > 0) {
                sb.append(',');
            }
            sb.append(array[i]);
        }
        System.out.println(sb);
    }
}
|
KonduitAI/konduit-serving | konduit-serving-build/src/main/java/ai/konduit/serving/build/dependencies/AllRequirement.java | /*
* ******************************************************************************
* * Copyright (c) 2020 <NAME>.
* *
* * This program and the accompanying materials are made available under the
* * terms of the Apache License, Version 2.0 which is available at
* * https://www.apache.org/licenses/LICENSE-2.0.
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* * License for the specific language governing permissions and limitations
* * under the License.
* *
* * SPDX-License-Identifier: Apache-2.0
* *****************************************************************************
*/
package ai.konduit.serving.build.dependencies;
import ai.konduit.serving.build.config.Target;
import ai.konduit.serving.build.dependencies.nativedep.NativeDependency;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.experimental.Accessors;
import java.util.*;
@AllArgsConstructor
@Data
@Accessors(fluent = true)
public class AllRequirement implements DependencyRequirement {

    private final String name;
    private Set<Dependency> set;

    public AllRequirement(String name, List<Dependency> dependencies) {
        this(name, new HashSet<>(dependencies));
    }

    public AllRequirement(String name, Dependency... dependencies){
        this.name = name;
        this.set = new HashSet<>(Arrays.asList(dependencies));
    }

    @Override
    public String name() {
        return name;
    }

    /**
     * A requirement of this type is satisfied only when EVERY dependency in
     * {@link #set} is present in {@code currDeps} (native dependencies must
     * additionally support the given target).
     */
    @Override
    public boolean satisfiedBy(Target target, Collection<Dependency> currDeps) {
        //We need ALL of the requirements to be satisfied (considering native code + target)
        for (Dependency need : set) {
            if (!isSatisfied(need, target, currDeps)) {
                return false;
            }
        }
        return true;
    }

    /**
     * Suggests the missing dependencies as a single {@link AllAddition}, or
     * {@code null} when nothing useful can be added (already satisfied, or the
     * only missing pieces are native dependencies unusable on this target).
     */
    @Override
    public List<DependencyAddition> suggestDependencies(Target target, Collection<Dependency> currDeps) {
        if(satisfiedBy(target, currDeps))
            return null;

        Set<Dependency> notFound = new HashSet<>();
        for (Dependency need : set) {
            if (isSatisfied(need, target, currDeps)) {
                continue;
            }
            if (need.isNativeDependency()) {
                //Don't suggest a native dependency that can't be run on this target, even if it's a requirement for
                // other targets that it _does_ run on
                if (need.getNativeDependency().supports(target)) {
                    notFound.add(need);
                }
            } else {
                notFound.add(need);
            }
        }

        if(notFound.isEmpty())
            return null;

        return Collections.singletonList(new AllAddition(new ArrayList<>(notFound), this));
    }

    /**
     * True when {@code need} has a GAV(C) match in {@code currDeps} that is
     * usable: a pure Java dependency matches directly, a native dependency
     * must also support the target. (Extracted: this loop was previously
     * duplicated verbatim in satisfiedBy and suggestDependencies.)
     */
    private boolean isSatisfied(Dependency need, Target target, Collection<Dependency> currDeps) {
        for (Dependency d : currDeps) {
            if (!need.equals(d)) {
                continue;
            }
            if (!need.isNativeDependency()) {
                //Pure Java dependency
                return true;
            }
            NativeDependency nd = need.getNativeDependency();
            if (nd.supports(target)) {
                return true;
            }
        }
        return false;
    }
}
|
shartte/Mekanism | src/minecraft/ic2/api/energy/EnergyNet.java | <gh_stars>0
package ic2.api.energy;
import java.lang.reflect.Method;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.world.World;
import ic2.api.energy.tile.IEnergySource;
/**
* Provides access to the energy network.
*/
public final class EnergyNet {
	/**
	 * Gets the EnergyNet instance for the specified world.
	 *
	 * @param world world
	 * @return EnergyNet instance for the world
	 */
	public static EnergyNet getForWorld(World world) {
		try {
			// Resolve ic2.core.EnergyNet#getForWorld reflectively on first use and
			// cache the Method, so this API package has no compile-time link to
			// the ic2 core implementation.
			// NOTE(review): the null-check/assign on the static Method cache is
			// not synchronized; presumably a duplicate lookup under contention is
			// considered benign -- confirm this is only called from one thread.
			if (EnergyNet_getForWorld == null) EnergyNet_getForWorld = Class.forName(getPackage() + ".core.EnergyNet").getMethod("getForWorld", World.class);

			return new EnergyNet(EnergyNet_getForWorld.invoke(null, world));
		} catch (Exception e) {
			// Wrap any reflection failure (missing core, signature change) as unchecked.
			throw new RuntimeException(e);
		}
	}

	// Private: instances are only obtained through getForWorld().
	private EnergyNet(Object energyNetInstance) {
		this.energyNetInstance = energyNetInstance;
	}

	/**
	 * Add a tile entity to the energy network.
	 * The tile entity has to be valid and initialized.
	 *
	 * @param addedTileEntity tile entity to add
	 *
	 * @deprecated use EnergyTileLoadEvent instead
	 */
	@Deprecated
	public void addTileEntity(TileEntity addedTileEntity) {
		try {
			if (EnergyNet_addTileEntity == null) EnergyNet_addTileEntity = Class.forName(getPackage() + ".core.EnergyNet").getMethod("addTileEntity", TileEntity.class);

			EnergyNet_addTileEntity.invoke(energyNetInstance, addedTileEntity);
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	}

	/**
	 * Removes a tile entity from the energy network.
	 * The tile entity has to be still valid.
	 *
	 * @param removedTileEntity tile entity to remove
	 *
	 * @deprecated use EnergyTileUnloadEvent instead
	 */
	@Deprecated
	public void removeTileEntity(TileEntity removedTileEntity) {
		try {
			if (EnergyNet_removeTileEntity == null) EnergyNet_removeTileEntity = Class.forName(getPackage() + ".core.EnergyNet").getMethod("removeTileEntity", TileEntity.class);

			EnergyNet_removeTileEntity.invoke(energyNetInstance, removedTileEntity);
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	}

	/**
	 * Emit energy from an energy source to the energy network.
	 *
	 * @param energySource energy source to emit energy from
	 * @param amount amount of energy to emit in EU
	 * @return Leftover (unused) power
	 *
	 * @deprecated use EnergyTileSourceEvent instead
	 */
	@Deprecated
	public int emitEnergyFrom(IEnergySource energySource, int amount) {
		try {
			if (EnergyNet_emitEnergyFrom == null) EnergyNet_emitEnergyFrom = Class.forName(getPackage() + ".core.EnergyNet").getMethod("emitEnergyFrom", IEnergySource.class, Integer.TYPE);

			return ((Integer) EnergyNet_emitEnergyFrom.invoke(energyNetInstance, energySource, amount)).intValue();
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	}

	/**
	 * Get the amount of energy currently being conducted by a conductor.
	 * Call this twice with a delay to get the average conducted power by doing (call2 - call1) / 2.
	 *
	 * @param tileEntity conductor
	 *
	 * @deprecated use getTotalEnergyEmitted and getTotalEnergySunken instead
	 */
	@Deprecated
	public long getTotalEnergyConducted(TileEntity tileEntity) {
		try {
			if (EnergyNet_getTotalEnergyConducted == null) EnergyNet_getTotalEnergyConducted = Class.forName(getPackage() + ".core.EnergyNet").getMethod("getTotalEnergyConducted", TileEntity.class);

			return ((Long) EnergyNet_getTotalEnergyConducted.invoke(energyNetInstance, tileEntity)).longValue();
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	}

	/**
	 * determine how much energy has been emitted by the EnergyEmitter specified
	 *
	 * @note call this twice with x ticks delay to get the avg. emitted power p = (call2 - call1) / x EU/tick
	 *
	 * @param tileEntity energy emitter
	 */
	public long getTotalEnergyEmitted(TileEntity tileEntity) {
		try {
			if (EnergyNet_getTotalEnergyEmitted == null) EnergyNet_getTotalEnergyEmitted = Class.forName(getPackage() + ".core.EnergyNet").getMethod("getTotalEnergyEmitted", TileEntity.class);

			return ((Long) EnergyNet_getTotalEnergyEmitted.invoke(energyNetInstance, tileEntity)).longValue();
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	}

	/**
	 * determine how much energy has been sunken by the EnergySink specified
	 *
	 * @note call this twice with x ticks delay to get the avg. sunken power p = (call2 - call1) / x EU/tick
	 *
	 * @param tileEntity energy emitter
	 */
	public long getTotalEnergySunken(TileEntity tileEntity) {
		try {
			if (EnergyNet_getTotalEnergySunken == null) EnergyNet_getTotalEnergySunken = Class.forName(getPackage() + ".core.EnergyNet").getMethod("getTotalEnergySunken", TileEntity.class);

			return ((Long) EnergyNet_getTotalEnergySunken.invoke(energyNetInstance, tileEntity)).longValue();
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	}

	/**
	 * Get the base IC2 package name, used internally.
	 *
	 * @return IC2 package name, if unable to be determined defaults to ic2
	 */
	private static String getPackage() {
		Package pkg = EnergyNet.class.getPackage();

		if (pkg != null) {
			String packageName = pkg.getName();

			// Strip the trailing ".api.energy" to recover the root package name.
			return packageName.substring(0, packageName.length() - ".api.energy".length());
		}

		return "ic2";
	}

	/**
	 * Instance of the energy network.
	 */
	// Held as Object (the real ic2.core.EnergyNet type is not visible here).
	Object energyNetInstance;

	// Lazily-populated reflective Method caches, one per bridged core method.
	private static Method EnergyNet_getForWorld;
	private static Method EnergyNet_addTileEntity;
	private static Method EnergyNet_removeTileEntity;
	private static Method EnergyNet_emitEnergyFrom;
	private static Method EnergyNet_getTotalEnergyConducted;
	private static Method EnergyNet_getTotalEnergyEmitted;
	private static Method EnergyNet_getTotalEnergySunken;
}
|
kokizzu/terminusdb | tests/lib/triples.js | const assert = require('assert')
const api = require('./api')
const { Params } = require('./params.js')
const util = require('./util.js')
/**
 * GET the triples of a graph.
 *
 * Exactly one of `params.descriptor` (a graph descriptor, resolved to a
 * triples path) or `params.path` (an explicit request path) must be given.
 *
 * Returns a thenable wrapper: awaiting it verifies a success response;
 * `.fails(error)` verifies an expected error response; `.unverified()`
 * returns the raw request.
 */
function get (agent, params) {
  params = new Params(params)
  const descriptor = params.string('descriptor')
  // Explicit 'path' wins; otherwise derive it from the descriptor.
  const path = params.string(
    'path',
    util.isDefined(descriptor) ? api.path.triples(descriptor) : undefined,
  )
  params.assertEmpty()

  util.assertDefined('descriptor or path', path)

  const request = agent.get(path)

  return {
    then (resolve) {
      resolve(request.then(api.response.verify(api.response.triples.getSuccess)))
    },

    fails (error) {
      return request.then(api.response.verify(api.response.triples.failure(error)))
    },

    unverified () {
      return request
    },
  }
}
// Fetch triples from a branch: resolve the branch-specific path, then defer
// to the generic getter. An explicit `params.path` still takes precedence.
function getFromBranch (agent, params) {
  const branchPath = api.path.triplesBranch(agent, params)
  return get(agent, Object.assign({ path: branchPath }, params))
}
// Fetch triples from the system graph, which lives at a fixed path.
// An explicit `params.path` still takes precedence.
function getFromSystem (agent, params) {
  const systemPath = api.path.triplesSystem()
  return get(agent, Object.assign({ path: systemPath }, params))
}
/**
 * PUT a turtle document to `path`, replacing the graph's triples.
 *
 * `params.author` / `params.message` become the commit info (with defaults).
 * Returns the same thenable wrapper shape as `get`: awaiting verifies
 * success, `.fails(error)` verifies an expected error, `.unverified()`
 * returns the raw request.
 */
function insert (agent, path, turtle, params) {
  params = new Params(params)
  const author = params.string('author', 'default_author')
  const message = params.string('message', 'default_message')
  params.assertEmpty()

  // Guard against accidentally passing a parsed object instead of raw turtle text.
  assert(
    util.isString(turtle),
    `Unexpected type for 'turtle'. Expected string, got: ${util.typeString(turtle)}`,
  )

  const body = { commit_info: { author, message }, turtle }
  const request = agent.put(path).send(body)

  return {
    then (resolve) {
      resolve(request.then(api.response.verify(api.response.triples.insertSuccess)))
    },

    fails (error) {
      return request.then(api.response.verify(api.response.triples.failure(error)))
    },

    unverified () {
      return request
    },
  }
}
// Insert turtle into a branch: resolve the branch path, then reuse `insert`.
function insertIntoBranch (agent, turtle, params) {
  const branchPath = api.path.triplesBranch(agent, params)
  return insert(agent, branchPath, turtle, params)
}
// Public helper surface for triples-endpoint tests.
module.exports = {
  get,
  getFromBranch,
  getFromSystem,
  insert,
  insertIntoBranch,
}
|
kbingman/bkr | vendor/radiant/vendor/plugins/haml/test/profile.rb | <reponame>kbingman/bkr<filename>vendor/radiant/vendor/plugins/haml/test/profile.rb
require 'rubygems'
require 'active_support'
require 'action_controller'
require 'action_view'
require File.dirname(__FILE__) + '/../lib/haml'
require 'haml/template'
require 'profiler'
require 'stringio'
module Haml
  # Used by both Haml::Profiler and Sass::Profiler.
  # Encapsulates profiling behavior.
  module AbstractProfiler
    # Runs the given block +times+ times under the stdlib Profiler__ and
    # returns the formatted profile report as a String.
    def self.profile(times, &block)
      # Runs the profiler, collects information
      Profiler__::start_profile
      times.times &block
      Profiler__::stop_profile

      # Outputs information to a StringIO, returns result
      io = StringIO.new
      Profiler__::print_profile(io)
      io.pos = 0
      result = io.read
      io.close
      result
    end
  end

  # A profiler for Haml, mostly for development use. This simply implements
  # the Ruby profiler for profiling haml code.
  class Profiler

    # Creates a new profiler that looks for templates in the base
    # directory.
    def initialize(base = File.join(File.dirname(__FILE__), 'haml', 'templates'))
      # Accept either a template directory path or an already-built ActionView::Base.
      unless base.class == ActionView::Base
        @base = ActionView::Base.new(base)
      else
        @base = base
      end
    end

    # Profiles haml on the given template with the given number of runs.
    # The template name shouldn't have a file extension; this will
    # automatically look for a haml template.
    #
    # Returns the results of the profiling as a string.
    def profile(runs = 100, template_name = 'standard')
      AbstractProfiler.profile(runs) { @base.render template_name }
    end
  end
end
module Sass
  # Development-use profiler for Sass: renders a template repeatedly under
  # the shared Haml::AbstractProfiler and returns the report string.
  class Profiler
    def profile(runs = 100, template_name = 'complex')
      template_path = File.join(File.dirname(__FILE__), 'sass', 'templates', "#{template_name}.sass")
      Haml::AbstractProfiler.profile(runs) do
        Sass::Engine.new(template_path).render
      end
    end
  end
end
|
Krzyciu/A3CS | addons/nametags/ranks/usaf/ranks.hpp | <filename>addons/nametags/ranks/usaf/ranks.hpp
class USAF_lotnik {
    name = "Lotnik";
    icon = QPATHTOF(ranks\usaf\Lotnik.paa);
    importance = 0;
};
class USAF_st_lotnik {
    // FIX: this entry contained a literal "<NAME>" placeholder instead of a
    // rank name. Restored from the icon file (St_Lotnik.paa) and the pattern
    // of the sibling entry above -- TODO confirm the intended display string
    // (consider a CSTRING localisation key like the NCO ranks below).
    name = "St. Lotnik";
    icon = QPATHTOF(ranks\usaf\St_Lotnik.paa);
    importance = 1;
};
class USAF_ml_sierzant {
    name = CSTRING(Ranks_Ml_Sierzant);
    icon = QPATHTOF(ranks\usaf\Mlodszy_sierzant.paa);
    importance = 2;
};
class USAF_sierzant {
    name = CSTRING(Ranks_Sierzant);
    icon = QPATHTOF(ranks\usaf\Sierzant.paa);
    importance = 3;
};
class USAF_st_sierzant {
    name = CSTRING(Ranks_St_Sierzant);
    icon = QPATHTOF(ranks\usaf\St_sierzant.paa);
    importance = 4;
};
class USAF_podporucznik {
    name = "Podporucznik";
    icon = QPATHTOF(ranks\usaf\podporucznik.paa);
    importance = 5;
};
class USAF_kapitan {
    name = "Kapitan";
    icon = QPATHTOF(ranks\usaf\kapitan.paa);
    importance = 6;
};
|
trilisen/Examensarbete | server/models/poll.js | <gh_stars>0
import mongoose from "mongoose"
const Schema = mongoose.Schema
// Mongoose schema for a poll: a titled question owned by a creating user.
// `timestamps: true` adds managed createdAt/updatedAt fields.
const pollSchema = new Schema(
  {
    title: {
      type: String,
      required: true,
      trim: true,
    },
    description: {
      type: String,
      trim: true,
    },
    // Reference to the User document that created this poll.
    creator: {
      type: Schema.Types.ObjectId,
      required: true,
      ref: "User",
    },
  },
  { timestamps: true }
)

export default mongoose.model("Poll", pollSchema)
|
mrcao20/McQuickBoot | wizards/mcwizards/classes/cpp/file.h | <gh_stars>1-10
%{Cpp:LicenseTemplate}\
@if '%{Cpp:PragmaOnce}'
#pragma once
@else
#ifndef %{GUARD}
#define %{GUARD}
@endif
%{JS: QtSupport.qtIncludes([ ( '%{IncludeQObject}' ) ? 'QtCore/%{IncludeQObject}' : ''],
[ ( '%{IncludeQObject}' ) ? 'QtCore/%{IncludeQObject}' : ''])}\
@if '%{IncludeIocGlobal}'
#include <%{IncludeIocGlobal}>
@endif
%{JS: Cpp.openNamespaces('%{Class}')}
MC_FORWARD_DECL_PRIVATE_DATA(%{CN});
@if '%{Base}'
class %{CN} : public %{Base}
@else
class %{CN}
@endif
{
Q_OBJECT
MC_DECL_INIT(%{CN})
MC_INTERFACES()
public:
@if '%{Base}' === 'QObject'
explicit %{CN}(QObject *parent = nullptr) noexcept;
~%{CN}() override;
@else
%{CN}() noexcept;
~%{CN}();
@endif
private:
MC_DECL_PRIVATE(%{CN})
};
MC_DECL_METATYPE(%{CN})
%{JS: Cpp.closeNamespaces('%{Class}')}
@if ! '%{Cpp:PragmaOnce}'
#endif // %{GUARD}
@endif
|
lqt93/simple-chat-app | src/components/pages/NotFound/index.js | <filename>src/components/pages/NotFound/index.js
import React from "react";
import { Link } from "react-router-dom";
import { makeStyles } from "@material-ui/core/styles";
// Material-UI style hook: flexbox-center the 404 message container.
const useStyles = makeStyles({
  root: {
    display: "flex",
    justifyContent: "center",
    alignItems: "center"
  },
  msgContainer: {
    textAlign: "center"
  }
});
function NotFoundPage() {
const classes = useStyles();
return (
<div className={classes.root}>
<div className={classes.msgContainer}>
<h1>
<strong>404</strong>
</h1>
<p>
<strong>Page not found</strong>
</p>
<p>
Back to <Link to="/">Home page</Link>
</p>
</div>
</div>
);
}
export default NotFoundPage;
|
lihongwu19921215/mz-g | src/main/java/band/wukong/mz/g/sku/bean/Goods.java | <filename>src/main/java/band/wukong/mz/g/sku/bean/Goods.java<gh_stars>10-100
package band.wukong.mz.g.sku.bean;
import org.nutz.dao.entity.annotation.Column;
import org.nutz.dao.entity.annotation.Id;
import org.nutz.dao.entity.annotation.Name;
import org.nutz.dao.entity.annotation.Table;
import java.io.File;
import java.util.Date;
/**
* As you see...
*
* @author wukong(<EMAIL>)
*/
@Table("t_goods")
public class Goods {

    /**
     * Goods state - deleted.
     */
    public static final String STATE_RM = "0";

    /**
     * Goods state - active.
     */
    public static final String STATE_OK = "1";

    /**
     * Default image file name.
     */
    public static final String IMG_DFT = "goods_default.png";

    @Id
    private long id;

    @Name
    private String gname;

    @Column("cate_code")
    private String cateCode;

    @Column
    private String img; // image file name, e.g. "flower.png"

    @Column
    private String words;

    @Column
    private Date ctime;

    @Column
    private Date utime;

    @Column
    private String state;

    // Not a persisted column: used in the web module to receive the uploaded
    // file carried by the HTTP request.
    private File gimg;

    public long getId() {
        return id;
    }

    public void setId(long id) {
        this.id = id;
    }

    public String getCateCode() {
        return cateCode;
    }

    public void setCateCode(String cateCode) {
        this.cateCode = cateCode;
    }

    public String getGname() {
        return gname;
    }

    public void setGname(String gname) {
        this.gname = gname;
    }

    public String getImg() {
        return img;
    }

    public void setImg(String img) {
        this.img = img;
    }

    public String getWords() {
        return words;
    }

    public void setWords(String words) {
        this.words = words;
    }

    public Date getUtime() {
        return utime;
    }

    public void setUtime(Date utime) {
        this.utime = utime;
    }

    public Date getCtime() {
        return ctime;
    }

    public void setCtime(Date ctime) {
        this.ctime = ctime;
    }

    public String getState() {
        return state;
    }

    public void setState(String state) {
        this.state = state;
    }

    public File getGimg() {
        return gimg;
    }

    public void setGimg(File gimg) {
        this.gimg = gimg;
    }

    @Override
    public String toString() {
        return "Goods{" +
                "id=" + id +
                ", cateCode='" + cateCode + '\'' +
                ", gname='" + gname + '\'' +
                ", img='" + img + '\'' +
                ", words='" + words + '\'' +
                ", ctime=" + ctime +
                ", utime=" + utime +
                ", state='" + state + '\'' +
                ", gimg=" + gimg +
                '}';
    }
}
|
simtopy/ore | test/migration/models/v4/reference.js | <reponame>simtopy/ore<gh_stars>0
// Migration test fixture (models v4): a "reference" model whose fields are
// three real-valued coordinates.
module.exports = {
  type: "reference",
  fields: {
    x: "real",
    y: "real",
    z: "real",
  }
}
nirvanarsc/CompetitiveProgramming | Java/leetcode/weekly_contests/weekly_257/P_3.java | <filename>Java/leetcode/weekly_contests/weekly_257/P_3.java
package leetcode.weekly_contests.weekly_257;
public class P_3 {

    private static final int MOD = (int) (1e9 + 7);

    /**
     * LeetCode weekly 257, problem 3 ("First Day Where You Have Been in All
     * the Rooms"). firstDay[i] is the first day (mod 1e9+7) room i is
     * reached, using the recurrence
     * firstDay[i] = 2 + 2*firstDay[i-1] - firstDay[nextVisit[i-1]] (mod M).
     */
    public int firstDayBeenInAllRooms(int[] nextVisit) {
        final int rooms = nextVisit.length;
        final long[] firstDay = new long[rooms];
        for (int room = 1; room < rooms; room++) {
            // Work mod M throughout; add M before reducing so the value stays non-negative.
            final long doubled = (2 * firstDay[room - 1]) % MOD;
            firstDay[room] = ((doubled - firstDay[nextVisit[room - 1]] + MOD) % MOD + 2) % MOD;
        }
        return (int) firstDay[rooms - 1];
    }
}
|
pablobacho/XBee-PSoC | PSoC4200 Tx Example.cydsn/Generated_Source/PSoC4/XBEE_RESET_aliases.h | /*******************************************************************************
* File Name: XBEE_RESET.h
* Version 2.20
*
* Description:
* This file contains the Alias definitions for Per-Pin APIs in cypins.h.
* Information on using these APIs can be found in the System Reference Guide.
*
* Note:
*
********************************************************************************
* Copyright 2008-2015, Cypress Semiconductor Corporation. All rights reserved.
* You may use this file only in accordance with the license, terms, conditions,
* disclaimers, and limitations in the end user license agreement accompanying
* the software package with which this file was provided.
*******************************************************************************/
#if !defined(CY_PINS_XBEE_RESET_ALIASES_H) /* Pins XBEE_RESET_ALIASES_H */
#define CY_PINS_XBEE_RESET_ALIASES_H
#include "cytypes.h"
#include "cyfitter.h"
#include "cypins.h"
/***************************************
* Constants
***************************************/
#define XBEE_RESET_0 (XBEE_RESET__0__PC)
#define XBEE_RESET_0_PS (XBEE_RESET__0__PS)
#define XBEE_RESET_0_PC (XBEE_RESET__0__PC)
#define XBEE_RESET_0_DR (XBEE_RESET__0__DR)
#define XBEE_RESET_0_SHIFT (XBEE_RESET__0__SHIFT)
#define XBEE_RESET_0_INTR ((uint16)((uint16)0x0003u << (XBEE_RESET__0__SHIFT*2u)))
#define XBEE_RESET_INTR_ALL ((uint16)(XBEE_RESET_0_INTR))
#endif /* End Pins XBEE_RESET_ALIASES_H */
/* [] END OF FILE */
|
mmghasemi/Vega | platform/com.subgraph.vega.ui.web/src/com/subgraph/vega/ui/web/views/ExcludeScopeAction.java | package com.subgraph.vega.ui.web.views;
import java.net.URI;
import java.util.List;
import org.eclipse.jface.viewers.StructuredViewer;
import com.subgraph.vega.api.model.IModel;
import com.subgraph.vega.api.model.scope.ITargetScope;
public class ExcludeScopeAction extends AbstractScopeAction {

	private final static String EXCLUDE_TEXT = "Exclude from current scope";

	ExcludeScopeAction(StructuredViewer viewer, IModel model) {
		super(viewer, model, EXCLUDE_TEXT);
	}

	@Override
	protected void handleSelectedURI(ITargetScope scope, URI uri) {
		// Only URIs currently inside the scope can be excluded from it.
		if (!scope.isInsideScope(uri)) {
			return;
		}
		scope.addExclusionURI(uri);
	}

	@Override
	protected boolean isEnabledForURIs(ITargetScope scope, List<URI> uris) {
		// Enabled as soon as any selected URI falls inside the current scope.
		boolean enabled = false;
		for (URI candidate : uris) {
			if (scope.isInsideScope(candidate)) {
				enabled = true;
				break;
			}
		}
		return enabled;
	}
}
|
puer99miss/Xapiand | src/xapian/net/remoteserver.cc | <reponame>puer99miss/Xapiand
/** @file remoteserver.cc
* @brief Xapian remote backend server base class
*/
/* Copyright (C) 2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018 <NAME>
* Copyright (C) 2006,2007,2009,2010 Lemur Consulting Ltd
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "config.h"
#include "xapian/net/remoteserver.h"
#include "xapian/constants.h"
#include "xapian/database.h"
#include "xapian/enquire.h"
#include "xapian/error.h"
#include "xapian/matchspy.h"
#include "xapian/query.h"
#include "xapian/rset.h"
#include "xapian/mset.h"
#include "xapian/valueiterator.h"
#include <signal.h>
#include <cerrno>
#include <cstdlib>
#include <memory>
#include "xapian/net/length.h"
#include "xapian/common/omassert.h"
#include "xapian/common/realtime.h"
#include "xapian/net/serialise.h"
#include "xapian/common/serialise-double.h"
#include "xapian/net/serialise-error.h"
#include "xapian/common/str.h"
#include "xapian/common/stringutils.h"
#include "xapian/api/enquireinternal.h"
using namespace std;
// Raised when a write operation arrives while the server is read-only.
[[noreturn]]
static void
throw_read_only()
{
    throw Xapian::InvalidOperationError("Server is read-only");
}

// Raised when a message requires a database but none is currently open.
[[noreturn]]
static void
throw_no_db()
{
    throw Xapian::InvalidOperationError("Server has no open database");
}

/// Class to throw when we receive the connection closing message.
struct ConnectionClosed { };
// Constructor: opens the initial database set (if any), arranges SIGPIPE
// handling, and sends the initial greeting (an UPDATE message) to the client.
RemoteServer::RemoteServer(const vector<string>& dbpaths_,
			   int fdin_, int fdout_,
			   double active_timeout_, double idle_timeout_,
			   bool writable_)
    : RemoteConnection(fdin_, fdout_, string()),
      db(NULL), wdb(NULL), writable(writable_),
      active_timeout(active_timeout_), idle_timeout(idle_timeout_)
{
    // Catch errors opening the database and propagate them to the client.
    try {
	if (!dbpaths_.empty()) {
	    // Empty dbpaths_ means the client will select a database later.
	    select_db(dbpaths_, false, Xapian::DB_OPEN);
	}
    } catch (const Xapian::Error &err) {
	// Propagate the exception to the client.
	send_message(REPLY_EXCEPTION, serialise_error(err));
	// And rethrow it so our caller can log it and close the connection.
	throw;
    }

#ifndef __WIN32__
    // It's simplest to just ignore SIGPIPE.  We'll still know if the
    // connection dies because we'll get EPIPE back from write().
    if (signal(SIGPIPE, SIG_IGN) == SIG_ERR)
	throw Xapian::NetworkError("Couldn't set SIGPIPE to SIG_IGN", errno);
#endif

    // Send greeting message.
    msg_update(string());
}
RemoteServer::~RemoteServer()
{
    // db owns the open database; wdb (when writable) aliases the same object.
    delete db;
    // wdb is either NULL or equal to db, so we shouldn't delete it too!
}
// Reads the next protocol message into `result` and returns its type.
// `required_type` of MSG_MAX means "accept any type"; otherwise a mismatch is
// a protocol error. Throws ConnectionClosed on a clean shutdown.
message_type
RemoteServer::get_message(double timeout, string & result,
			  message_type required_type)
{
    double end_time = RealTime::end_time(timeout);
    int type = RemoteConnection::get_message(result, end_time);

    // Handle "shutdown connection" message here. Treat EOF here for a read-only
    // database the same way since a read-only client just closes the
    // connection when done.
    if (type == MSG_SHUTDOWN || (type < 0 && wdb == NULL))
	throw ConnectionClosed();
    if (type < 0)
	throw Xapian::NetworkError("Connection closed unexpectedly");
    if (type >= MSG_MAX) {
	string errmsg("Invalid message type ");
	errmsg += str(type);
	throw Xapian::NetworkError(errmsg);
    }
    if (required_type != MSG_MAX && type != int(required_type)) {
	string errmsg("Expecting message type ");
	errmsg += str(int(required_type));
	errmsg += ", got ";
	errmsg += str(type);
	throw Xapian::NetworkError(errmsg);
    }
    return static_cast<message_type>(type);
}
// Sends one reply to the client: the reply code serialised as a single byte,
// followed by the payload, subject to the active-operation timeout.
void
RemoteServer::send_message(reply_type type, const string &message)
{
    const double deadline = RealTime::end_time(active_timeout);
    RemoteConnection::send_message(static_cast<unsigned char>(type), message, deadline);
}
// Pointer-to-member type for message handlers.
// NOTE(review): not referenced in this translation unit's visible code --
// presumably kept for a table-driven dispatch variant; confirm before removal.
typedef void (RemoteServer::* dispatch_func)(const string &);

// Main server loop: read one message at a time and dispatch to the matching
// msg_*() handler until the client shuts the connection down. Xapian::Error
// exceptions are serialised back to the client and the loop continues;
// network errors and unknown exceptions are rethrown to the caller.
void
RemoteServer::run()
{
    while (true) {
	try {
	    string message;
	    size_t type = get_message(idle_timeout, message);
	    switch (type) {
		case MSG_ALLTERMS:
		    msg_allterms(message);
		    continue;
		case MSG_COLLFREQ:
		    msg_collfreq(message);
		    continue;
		case MSG_DOCUMENT:
		    msg_document(message);
		    continue;
		case MSG_TERMEXISTS:
		    msg_termexists(message);
		    continue;
		case MSG_TERMFREQ:
		    msg_termfreq(message);
		    continue;
		case MSG_VALUESTATS:
		    msg_valuestats(message);
		    continue;
		case MSG_KEEPALIVE:
		    msg_keepalive(message);
		    continue;
		case MSG_DOCLENGTH:
		    msg_doclength(message);
		    continue;
		case MSG_QUERY:
		    msg_query(message);
		    continue;
		case MSG_TERMLIST:
		    msg_termlist(message);
		    continue;
		case MSG_POSITIONLIST:
		    msg_positionlist(message);
		    continue;
		case MSG_POSTLIST:
		    msg_postlist(message);
		    continue;
		case MSG_REOPEN:
		    msg_reopen(message);
		    continue;
		case MSG_UPDATE:
		    msg_update(message);
		    continue;
		case MSG_ADDDOCUMENT:
		    msg_adddocument(message);
		    continue;
		case MSG_CANCEL:
		    msg_cancel(message);
		    continue;
		case MSG_DELETEDOCUMENTTERM:
		    msg_deletedocumentterm(message);
		    continue;
		case MSG_COMMIT:
		    msg_commit(message);
		    continue;
		case MSG_REPLACEDOCUMENT:
		    msg_replacedocument(message);
		    continue;
		case MSG_REPLACEDOCUMENTTERM:
		    msg_replacedocumentterm(message);
		    continue;
		case MSG_DELETEDOCUMENT:
		    msg_deletedocument(message);
		    continue;
		case MSG_WRITEACCESS:
		    msg_writeaccess(message);
		    continue;
		case MSG_GETMETADATA:
		    msg_getmetadata(message);
		    continue;
		case MSG_SETMETADATA:
		    msg_setmetadata(message);
		    continue;
		case MSG_ADDSPELLING:
		    msg_addspelling(message);
		    continue;
		case MSG_REMOVESPELLING:
		    msg_removespelling(message);
		    continue;
		case MSG_METADATAKEYLIST:
		    msg_metadatakeylist(message);
		    continue;
		case MSG_FREQS:
		    msg_freqs(message);
		    continue;
		case MSG_UNIQUETERMS:
		    msg_uniqueterms(message);
		    continue;
		case MSG_POSITIONLISTCOUNT:
		    msg_positionlistcount(message);
		    continue;
		case MSG_READACCESS:
		    msg_readaccess(message);
		    continue;
		default: {
		    // MSG_GETMSET - used during a conversation.
		    // MSG_SHUTDOWN - handled by get_message().
		    string errmsg("Unexpected message type ");
		    errmsg += str(type);
		    throw Xapian::InvalidArgumentError(errmsg);
		}
	    }
	} catch (const Xapian::NetworkTimeoutError & e) {
	    try {
		// We've had a timeout, so the client may not be listening, so
		// set the end_time to 1 and if we can't send the message right
		// away, just exit and the client will cope.
		send_message(REPLY_EXCEPTION, serialise_error(e), 1.0);
	    } catch (...) {
	    }
	    // And rethrow it so our caller can log it and close the
	    // connection.
	    throw;
	} catch (const Xapian::NetworkError &) {
	    // All other network errors mean we are fatally confused and are
	    // unlikely to be able to communicate further across this
	    // connection.  So we don't try to propagate the error to the
	    // client, but instead just rethrow the exception so our caller can
	    // log it and close the connection.
	    throw;
	} catch (const Xapian::Error &e) {
	    // Propagate the exception to the client, then return to the main
	    // message handling loop.
	    send_message(REPLY_EXCEPTION, serialise_error(e));
	} catch (ConnectionClosed &) {
	    return;
	} catch (...) {
	    // Propagate an unknown exception to the client.
	    send_message(REPLY_EXCEPTION, string());
	    // And rethrow it so our caller can log it and close the
	    // connection.
	    throw;
	}
    }
}
// Streams the full term list (optionally restricted to terms starting with
// `message` as a prefix). Each entry is encoded as: termfreq, a one-byte
// count of bytes shared with the previous term (capped at 255), the suffix
// length, then the suffix bytes -- i.e. front-coded prefix compression.
void
RemoteServer::msg_allterms(const string& message)
{
    if (!db)
	throw_no_db();

    string reply;
    string prev = message;
    const string& prefix = message;
    for (Xapian::TermIterator t = db->allterms_begin(prefix);
	 t != db->allterms_end(prefix);
	 ++t) {
	// The reuse count must fit in one byte.
	if (rare(prev.size() > 255))
	    prev.resize(255);
	const string& term = *t;
	size_t reuse = common_prefix_length(prev, term);
	reply += encode_length(t.get_termfreq());
	reply.append(1, char(reuse));
	reply += encode_length(term.size() - reuse);
	reply.append(term, reuse, string::npos);
	prev = term;
    }

    send_message(REPLY_ALLTERMS, reply);
}
// Sends the termlist for the document id encoded in `message`: first a
// header reply (document length and approximate term count), then the terms
// front-coded like msg_allterms, each with wdf and termfreq.
void
RemoteServer::msg_termlist(const string &message)
{
    if (!db)
	throw_no_db();

    const char *p = message.data();
    const char *p_end = p + message.size();
    Xapian::docid did;
    decode_length(&p, p_end, did);

    Xapian::TermIterator t = db->termlist_begin(did);
    Xapian::termcount num_terms = t.get_approx_size();

    send_message(REPLY_TERMLIST0, encode_length(db->get_doclength(did)) +
				  encode_length(num_terms));

    string reply;
    string prev;

    while (t != db->termlist_end(did)) {
	// The reuse count must fit in one byte.
	if (rare(prev.size() > 255))
	    prev.resize(255);
	const string& term = *t;
	size_t reuse = common_prefix_length(prev, term);
	reply += encode_length(t.get_wdf());
	reply += encode_length(t.get_termfreq());
	reply.append(1, char(reuse));
	reply += encode_length(term.size() - reuse);
	reply.append(term, reuse, string::npos);
	prev = term;
	++t;
    }

    send_message(REPLY_TERMLIST, reply);
}
// Streams the position list for (docid, term) as delta-encoded gaps:
// each reply carries (pos - lastpos - 1), with lastpos starting at the
// all-ones sentinel so the first delta decodes to the absolute position.
void
RemoteServer::msg_positionlist(const string &message)
{
    if (!db)
	throw_no_db();

    const char *p = message.data();
    const char *p_end = p + message.size();
    Xapian::docid did;
    decode_length(&p, p_end, did);
    string term(p, p_end - p);

    Xapian::termpos lastpos = static_cast<Xapian::termpos>(-1);
    const Xapian::PositionIterator end = db->positionlist_end(did, term);
    for (Xapian::PositionIterator i = db->positionlist_begin(did, term);
	 i != end; ++i) {
	Xapian::termpos pos = *i;
	send_message(REPLY_POSITIONLIST, encode_length(pos - lastpos - 1));
	lastpos = pos;
    }

    send_message(REPLY_DONE, string());
}
// Replies with the number of positions stored for (docid, term). A term not
// present in the document yields 0.
void
RemoteServer::msg_positionlistcount(const string &message)
{
    if (!db)
	throw_no_db();

    const char *p = message.data();
    const char *p_end = p + message.size();
    Xapian::docid did;
    decode_length(&p, p_end, did);

    // This is kind of clumsy, but what the public API requires: skip the
    // document's termlist to the term and read the count from the iterator.
    Xapian::termcount result = 0;
    Xapian::TermIterator termit = db->termlist_begin(did);
    if (termit != db->termlist_end(did)) {
	string term(p, p_end - p);
	termit.skip_to(term);
	if (termit != db->termlist_end(did)) {
	    result = termit.positionlist_count();
	}
    }
    send_message(REPLY_POSITIONLISTCOUNT, encode_length(result));
}
// Streams the posting list for the term in `message`: a header with the
// term's termfreq and collection frequency, then one reply per posting with
// the docid delta-encoded (newdocid - lastdocid - 1) followed by the wdf.
void
RemoteServer::msg_postlist(const string &message)
{
    if (!db)
	throw_no_db();

    const string & term = message;

    Xapian::doccount termfreq = db->get_termfreq(term);
    Xapian::termcount collfreq = db->get_collection_freq(term);
    send_message(REPLY_POSTLISTSTART, encode_length(termfreq) + encode_length(collfreq));

    Xapian::docid lastdocid = 0;
    const Xapian::PostingIterator end = db->postlist_end(term);
    for (Xapian::PostingIterator i = db->postlist_begin(term);
	 i != end; ++i) {
	Xapian::docid newdocid = *i;
	string reply = encode_length(newdocid - lastdocid - 1);
	reply += encode_length(i.get_wdf());
	send_message(REPLY_POSTLISTITEM, reply);
	lastdocid = newdocid;
    }

    send_message(REPLY_DONE, string());
}
// Handle MSG_READACCESS: optionally update the open flags and the set of
// database paths, (re)open the database read-only, then reply with a
// REPLY_UPDATE (via msg_update()).
void
RemoteServer::msg_readaccess(const string & msg)
{
    int flags = Xapian::DB_OPEN;
    const char *p = msg.c_str();
    const char *p_end = p + msg.size();
    if (p != p_end) {
	unsigned flag_bits;
	decode_length(&p, p_end, flag_bits);
	// Strip the action bits - the client can't change how we open.
	flags |= flag_bits &~ Xapian::DB_ACTION_MASK_;
    }

    if (p != p_end) {
	// The client supplied explicit database paths to open.
	std::vector<string> dbpaths_;
	while (p != p_end) {
	    size_t len;
	    decode_length_and_check(&p, p_end, len);
	    dbpaths_.emplace_back(p, len);
	    p += len;
	}
	select_db(dbpaths_, false, flags);
    } else {
	// No paths supplied - reopen the paths configured at start-up.
	select_db(dbpaths, false, flags);
    }

    msg_update(msg);
}
// Handle MSG_WRITEACCESS: like msg_readaccess() but opens for writing.
//
// Only a single database path is permitted for a writable database.
void
RemoteServer::msg_writeaccess(const string & msg)
{
    if (!writable)
	throw_read_only();

    int flags = Xapian::DB_OPEN;
    const char *p = msg.c_str();
    const char *p_end = p + msg.size();
    if (p != p_end) {
	unsigned flag_bits;
	decode_length(&p, p_end, flag_bits);
	// Strip the action bits - the client can't change how we open.
	flags |= flag_bits &~ Xapian::DB_ACTION_MASK_;
    }

    if (p != p_end) {
	std::vector<string> dbpaths_;
	size_t len;
	decode_length_and_check(&p, p_end, len);
	dbpaths_.emplace_back(p, len);
	p += len;
	if (p != p_end) {
	    throw Xapian::NetworkError("only one database directory allowed on writable databases");
	}
	select_db(dbpaths_, false, flags);
    } else {
	select_db(dbpaths, false, flags);
    }

    msg_update(msg);
}
// Handle MSG_REOPEN: reopen the database if it has changed.
//
// When reopening changed nothing we just acknowledge with REPLY_DONE;
// otherwise we send a full REPLY_UPDATE with the fresh statistics.
void
RemoteServer::msg_reopen(const string & msg)
{
    if (!db)
	throw_no_db();

    if (db->reopen()) {
	// The database changed - report the new statistics.
	msg_update(msg);
    } else {
	send_message(REPLY_DONE, string());
    }
}
// Handle MSG_UPDATE (also used to finish *ACCESS handling): send a
// REPLY_UPDATE with the protocol version and, if a database is open,
// its statistics.
void
RemoteServer::msg_update(const string &)
{
    static const char protocol[2] = {
	char(XAPIAN_REMOTE_PROTOCOL_MAJOR_VERSION),
	char(XAPIAN_REMOTE_PROTOCOL_MINOR_VERSION)
    };
    string message(protocol, 2);
    if (db) {
	Xapian::doccount num_docs = db->get_doccount();
	message += encode_length(num_docs);
	// lastdocid is sent as a delta from num_docs to keep it small.
	message += encode_length(db->get_lastdocid() - num_docs);
	Xapian::termcount doclen_lb = db->get_doclength_lower_bound();
	message += encode_length(doclen_lb);
	// The upper bound is sent as a delta from the lower bound.
	message += encode_length(db->get_doclength_upper_bound() - doclen_lb);
	message += (db->has_positions() ? '1' : '0');
	message += encode_length(db->get_total_length());
	string uuid = db->get_uuid();
	message += uuid;
    }
    send_message(REPLY_UPDATE, message);
}
// Handle MSG_QUERY: build an Enquire from the serialised query and settings,
// exchange match statistics with the client, then run the match and reply
// with any matchspy results followed by the serialised MSet.
//
// Wire layout of the request, in order: serialised Query; query length;
// collapse settings; single-digit docid_order / sort_by /
// sort_value_forward flags; time limit; percent and weight cutoffs;
// serialised Weight; serialised RSet; zero or more serialised MatchSpys.
void
RemoteServer::msg_query(const string &message_in)
{
    if (!db)
	throw_no_db();

    const char *p = message_in.c_str();
    const char *p_end = p + message_in.size();

    Xapian::Enquire enquire(*db);

    // Unserialise the Query.
    size_t len;
    decode_length_and_check(&p, p_end, len);
    Xapian::Query query(Xapian::Query::unserialise(string(p, len), reg));
    p += len;

    // Unserialise assorted Enquire settings.
    Xapian::termcount qlen;
    decode_length(&p, p_end, qlen);

    enquire.set_query(query, qlen);

    // collapse_max == 0 means "no collapsing", in which case no key is sent.
    Xapian::valueno collapse_max;
    decode_length(&p, p_end, collapse_max);

    Xapian::valueno collapse_key = Xapian::BAD_VALUENO;
    if (collapse_max)
	decode_length(&p, p_end, collapse_key);

    enquire.set_collapse_key(collapse_key, collapse_max);

    // The next settings are single ASCII digits; make sure enough bytes
    // remain before dereferencing.
    if (p_end - p < 4 || *p < '0' || *p > '2') {
	throw Xapian::NetworkError("bad message (docid_order)");
    }
    Xapian::Enquire::docid_order order;
    order = static_cast<Xapian::Enquire::docid_order>(*p++ - '0');

    enquire.set_docid_order(order);

    Xapian::valueno sort_key;
    decode_length(&p, p_end, sort_key);

    if (*p < '0' || *p > '4') {
	throw Xapian::NetworkError("bad message (sort_by)");
    }
    Xapian::Enquire::Internal::sort_setting sort_by;
    sort_by = static_cast<Xapian::Enquire::Internal::sort_setting>(*p++ - '0');

    if (*p < '0' || *p > '1') {
	throw Xapian::NetworkError("bad message (sort_value_forward)");
    }
    bool sort_value_forward(*p++ != '0');

    // Apply the decoded sort setting.
    switch (sort_by) {
	case Xapian::Enquire::Internal::REL:
	    enquire.set_sort_by_relevance();
	    break;
	case Xapian::Enquire::Internal::VAL:
	    enquire.set_sort_by_value(sort_key, sort_value_forward);
	    break;
	case Xapian::Enquire::Internal::VAL_REL:
	    enquire.set_sort_by_value_then_relevance(sort_key, sort_value_forward);
	    break;
	case Xapian::Enquire::Internal::REL_VAL:
	    enquire.set_sort_by_relevance_then_value(sort_key, sort_value_forward);
	    break;
	case Xapian::Enquire::Internal::DOCID:
	    enquire.set_weighting_scheme(Xapian::BoolWeight());
	    break;
    }

    double time_limit = unserialise_double(&p, p_end);
    enquire.set_time_limit(time_limit);

    int percent_threshold = *p++;
    if (percent_threshold < 0 || percent_threshold > 100) {
	throw Xapian::NetworkError("bad message (percent_threshold)");
    }

    double weight_threshold = unserialise_double(&p, p_end);
    if (weight_threshold < 0) {
	throw Xapian::NetworkError("bad message (weight_threshold)");
    }
    enquire.set_cutoff(percent_threshold, weight_threshold);

    // Unserialise the Weight object.
    decode_length_and_check(&p, p_end, len);
    string wtname(p, len);
    p += len;

    const Xapian::Weight * wttype = reg.get_weighting_scheme(wtname);
    if (wttype == NULL) {
	// Note: user weighting schemes should be registered by adding them to
	// a Registry, and setting the context using
	// RemoteServer::set_registry().
	throw Xapian::InvalidArgumentError("Weighting scheme " +
					   wtname + " not registered");
    }

    decode_length_and_check(&p, p_end, len);
    unique_ptr<Xapian::Weight> wt(wttype->unserialise(string(p, len)));
    p += len;
    enquire.set_weighting_scheme(*wt);

    // Unserialise the RSet object.
    decode_length_and_check(&p, p_end, len);
    Xapian::RSet rset = unserialise_rset(string(p, len));
    p += len;

    // Unserialise any MatchSpy objects.
    vector<Xapian::MatchSpy*> matchspies;
    while (p != p_end) {
	decode_length_and_check(&p, p_end, len);
	string spytype(p, len);
	const Xapian::MatchSpy * spyclass = reg.get_match_spy(spytype);
	if (spyclass == NULL) {
	    throw Xapian::InvalidArgumentError("Match spy " + spytype +
					       " not registered");
	}
	p += len;

	decode_length_and_check(&p, p_end, len);
	Xapian::MatchSpy *spy = spyclass->unserialise(string(p, len), reg);
	matchspies.push_back(spy);
	enquire.add_matchspy(spy->release());
	p += len;
    }

    // Send our local statistics to the client, then wait for the merged
    // global statistics plus the requested MSet window.
    enquire.prepare_mset(&rset, nullptr);

    send_message(REPLY_STATS, enquire.serialise_stats());

    string message;
    get_message(active_timeout, message, MSG_GETMSET);
    p = message.c_str();
    p_end = p + message.size();

    Xapian::termcount first;
    decode_length(&p, p_end, first);
    Xapian::termcount maxitems;
    decode_length(&p, p_end, maxitems);

    Xapian::termcount check_at_least;
    decode_length(&p, p_end, check_at_least);

    enquire.unserialise_stats(std::string(p, p_end));

    Xapian::MSet mset = enquire.get_mset(first, maxitems, check_at_least);

    // Reply with each matchspy's serialised results (length-prefixed),
    // then the serialised MSet.
    message.resize(0);
    for (auto i : matchspies) {
	string spy_results = i->serialise_results();
	message += encode_length(spy_results.size());
	message += spy_results;
    }
    message += mset.serialise();
    send_message(REPLY_RESULTS, message);
}
// Handle MSG_DOCUMENT: send a document's data (REPLY_DOCDATA) followed by
// one REPLY_VALUE per value (slot number, then the value bytes), then
// REPLY_DONE.
void
RemoteServer::msg_document(const string &message)
{
    if (!db)
	throw_no_db();

    const char *p = message.data();
    const char *p_end = p + message.size();
    Xapian::docid did;
    decode_length(&p, p_end, did);

    Xapian::Document doc = db->get_document(did);

    send_message(REPLY_DOCDATA, doc.get_data());

    Xapian::ValueIterator i;
    for (i = doc.values_begin(); i != doc.values_end(); ++i) {
	string item = encode_length(i.get_valueno());
	item += *i;
	send_message(REPLY_VALUE, item);
    }
    send_message(REPLY_DONE, string());
}
// Handle MSG_KEEPALIVE: ping the database and acknowledge with REPLY_DONE.
void
RemoteServer::msg_keepalive(const string &)
{
    if (!db)
	throw_no_db();

    // Ensure *our* database stays alive, as it may contain remote databases!
    db->keep_alive();
    send_message(REPLY_DONE, string());
}
// Handle MSG_TERMEXISTS: reply REPLY_TERMEXISTS or REPLY_TERMDOESNTEXIST
// depending on whether the term (the whole payload) is in the database.
void
RemoteServer::msg_termexists(const string &term)
{
    if (!db)
	throw_no_db();
    auto reply = db->term_exists(term) ? REPLY_TERMEXISTS
					: REPLY_TERMDOESNTEXIST;
    send_message(reply, string());
}
void
RemoteServer::msg_collfreq(const string &term)
{
if (!db)
throw_no_db();
send_message(REPLY_COLLFREQ, encode_length(db->get_collection_freq(term)));
}
void
RemoteServer::msg_termfreq(const string &term)
{
if (!db)
throw_no_db();
send_message(REPLY_TERMFREQ, encode_length(db->get_termfreq(term)));
}
// Handle MSG_FREQS: reply with the term frequency followed by the
// collection frequency of the term in a single REPLY_FREQS message.
void
RemoteServer::msg_freqs(const string &term)
{
    if (!db)
	throw_no_db();
    send_message(REPLY_FREQS,
		 encode_length(db->get_termfreq(term)) +
		 encode_length(db->get_collection_freq(term)));
}
// Handle MSG_VALUESTATS: for each value slot listed in the message, reply
// with a REPLY_VALUESTATS carrying the slot's frequency and its
// length-prefixed lower and upper value bounds.
void
RemoteServer::msg_valuestats(const string & message)
{
    if (!db)
	throw_no_db();

    const char *p = message.data();
    const char *p_end = p + message.size();
    while (p != p_end) {
	Xapian::valueno slot;
	decode_length(&p, p_end, slot);
	string message_out;
	message_out += encode_length(db->get_value_freq(slot));
	string bound = db->get_value_lower_bound(slot);
	message_out += encode_length(bound.size());
	message_out += bound;
	bound = db->get_value_upper_bound(slot);
	message_out += encode_length(bound.size());
	message_out += bound;

	send_message(REPLY_VALUESTATS, message_out);
    }
}
void
RemoteServer::msg_doclength(const string &message)
{
if (!db)
throw_no_db();
const char *p = message.data();
const char *p_end = p + message.size();
Xapian::docid did;
decode_length(&p, p_end, did);
send_message(REPLY_DOCLENGTH, encode_length(db->get_doclength(did)));
}
void
RemoteServer::msg_uniqueterms(const string &message)
{
if (!db)
throw_no_db();
const char *p = message.data();
const char *p_end = p + message.size();
Xapian::docid did;
decode_length(&p, p_end, did);
send_message(REPLY_UNIQUETERMS, encode_length(db->get_unique_terms(did)));
}
// Handle MSG_COMMIT: flush pending modifications, then acknowledge with
// REPLY_DONE (requires a writable database).
void
RemoteServer::msg_commit(const string &)
{
    if (!wdb)
	throw_read_only();

    wdb->commit();
    send_message(REPLY_DONE, string());
}
// Handle MSG_CANCEL: discard any uncommitted modifications.
//
// No reply is sent - the client treats this as fire-and-forget.
void
RemoteServer::msg_cancel(const string &)
{
    if (!wdb)
	throw_read_only();

    // We can't call cancel since that's an internal method, but this
    // has the same effect with minimal additional overhead.
    wdb->begin_transaction(false);
    wdb->cancel_transaction();
}
// Handle MSG_ADDDOCUMENT: add the serialised document and reply with the
// docid it was assigned.
void
RemoteServer::msg_adddocument(const string & message)
{
    if (!wdb)
	throw_read_only();

    Xapian::Document doc = unserialise_document(message);
    Xapian::docid did = wdb->add_document(doc);
    send_message(REPLY_ADDDOCUMENT, encode_length(did));
}
// Handle MSG_DELETEDOCUMENT: delete the document with the given docid and
// acknowledge with REPLY_DONE.
void
RemoteServer::msg_deletedocument(const string & message)
{
    if (!wdb)
	throw_read_only();

    const char *ptr = message.data();
    const char *end = ptr + message.size();
    Xapian::docid did;
    decode_length(&ptr, end, did);

    wdb->delete_document(did);
    send_message(REPLY_DONE, string());
}
// Handle MSG_DELETEDOCUMENTTERM: delete all documents indexed by the term
// (the whole message payload is the term).
//
// NOTE(review): unlike MSG_DELETEDOCUMENT no REPLY_DONE is sent here -
// presumably the protocol treats this as fire-and-forget; confirm against
// the matching client code.
void
RemoteServer::msg_deletedocumentterm(const string & message)
{
    if (!wdb)
	throw_read_only();

    wdb->delete_document(message);
}
// Handle MSG_REPLACEDOCUMENT: replace the document with the given docid
// by the serialised document which follows it.  No reply is sent.
void
RemoteServer::msg_replacedocument(const string & message)
{
    if (!wdb)
	throw_read_only();

    const char *p = message.data();
    const char *p_end = p + message.size();
    Xapian::docid did;
    decode_length(&p, p_end, did);

    wdb->replace_document(did, unserialise_document(string(p, p_end)));
}
// Handle MSG_REPLACEDOCUMENTTERM: replace any documents indexed by the
// length-prefixed unique term with the serialised document which follows,
// and reply with the docid used.
void
RemoteServer::msg_replacedocumentterm(const string & message)
{
    if (!wdb)
	throw_read_only();

    const char *p = message.data();
    const char *p_end = p + message.size();
    size_t len;
    decode_length_and_check(&p, p_end, len);
    string unique_term(p, len);
    p += len;

    Xapian::docid did = wdb->replace_document(unique_term, unserialise_document(string(p, p_end)));

    send_message(REPLY_ADDDOCUMENT, encode_length(did));
}
void
RemoteServer::msg_getmetadata(const string & message)
{
if (!db)
throw_no_db();
send_message(REPLY_METADATA, db->get_metadata(message));
}
// Handle MSG_METADATAKEYLIST: send all metadata keys with the given prefix.
//
// Keys are prefix-compressed: each entry is the number of leading bytes
// shared with the previous key (capped at 255), then the length-prefixed
// remainder.  prev starts as the prefix itself so the first key can reuse it.
void
RemoteServer::msg_metadatakeylist(const string& message)
{
    if (!db)
	throw_no_db();

    string reply;
    string prev = message;
    const string& prefix = message;
    for (Xapian::TermIterator t = db->metadata_keys_begin(prefix);
	 t != db->metadata_keys_end(prefix);
	 ++t) {
	// The reuse count is a single byte, so cap it at 255.
	if (rare(prev.size() > 255))
	    prev.resize(255);
	const string& term = *t;
	size_t reuse = common_prefix_length(prev, term);
	reply.append(1, char(reuse));
	reply += encode_length(term.size() - reuse);
	reply.append(term, reuse, string::npos);
	prev = term;
    }

    send_message(REPLY_METADATAKEYLIST, reply);
}
// Handle MSG_SETMETADATA: set the metadata value for the length-prefixed
// key; everything after the key is the value.  No reply is sent.
void
RemoteServer::msg_setmetadata(const string & message)
{
    if (!wdb)
	throw_read_only();
    const char *ptr = message.data();
    const char *end = ptr + message.size();
    size_t keylen;
    decode_length_and_check(&ptr, end, keylen);
    string key(ptr, keylen);
    ptr += keylen;
    wdb->set_metadata(key, string(ptr, end - ptr));
}
// Handle MSG_ADDSPELLING: increase the frequency of a spelling-correction
// word; the message is the increment followed by the word.  No reply.
void
RemoteServer::msg_addspelling(const string & message)
{
    if (!wdb)
	throw_read_only();
    const char *ptr = message.data();
    const char *end = ptr + message.size();
    Xapian::termcount freqinc;
    decode_length(&ptr, end, freqinc);
    wdb->add_spelling(string(ptr, end - ptr), freqinc);
}
// Handle MSG_REMOVESPELLING: decrease the frequency of a spelling-correction
// word (decrement, then the word) and reply with the result.
void
RemoteServer::msg_removespelling(const string & message)
{
    if (!wdb)
	throw_read_only();
    const char *ptr = message.data();
    const char *end = ptr + message.size();
    Xapian::termcount freqdec;
    decode_length(&ptr, end, freqdec);
    auto result = wdb->remove_spelling(string(ptr, end - ptr), freqdec);
    send_message(REPLY_REMOVESPELLING, encode_length(result));
}
/** (Re)open the server's database.
 *
 *  @param dbpaths_  Paths of the database(s) to open - exactly one when
 *                   @a writable_ is true, at least one otherwise.
 *  @param writable_ Open for writing?
 *  @param flags     Flags to pass to the Database constructor(s).
 */
void
RemoteServer::select_db(const std::vector<std::string> &dbpaths_, bool writable_, int flags) {
    if (writable_) {
	AssertEq(dbpaths_.size(), 1); // Expecting exactly one database.
	Xapian::WritableDatabase * wdb_ = new Xapian::WritableDatabase(dbpaths_[0], flags);
	context = dbpaths_[0];
	delete db;
	db = wdb_;
	wdb = wdb_;
    } else {
	Assert(!dbpaths_.empty()); // Expecting at least one database.
	Xapian::Database * db_;
	if (dbpaths_.size() == 1) {
	    db_ = new Xapian::Database(dbpaths_[0], flags);
	    context = dbpaths_[0];
	} else {
	    db_ = new Xapian::Database();
	    // Build a better description than Database::get_description() gives
	    // in the variable context. FIXME: improve Database::get_description()
	    // and then just use that instead.
	    context.clear();
	    for (auto& path : dbpaths_) {
		// FIX: add each shard to the newly created db_, not to the old
		// member db (which may be null, and is about to be deleted).
		db_->add_database(Xapian::Database(path, flags));
		context += ' ';
		context += path;
	    }
	}
	delete db;
	db = db_;
	wdb = nullptr;
    }
    dbpaths = dbpaths_;
}
|
ShafranEugene/irida-telegrambot | src/main/java/com/github/iridatelegrambot/service/buttons/InlineAdminButtonServiceImpl.java | package com.github.iridatelegrambot.service.buttons;
import com.github.iridatelegrambot.entity.UserTelegram;
import com.github.iridatelegrambot.service.UserTelegramService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.telegram.telegrambots.meta.api.objects.replykeyboard.InlineKeyboardMarkup;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import static com.github.iridatelegrambot.command.CallbackCommand.CallbackCommandName.ADD_STATUS_USER;
import static com.github.iridatelegrambot.command.CallbackCommand.CallbackCommandName.ADMIN_MENU_SET_STATUS;
@Service
public class InlineAdminButtonServiceImpl implements InlineAdminButtonService {

    private final InlineKeyboardService inlineKeyboardService;
    private final UserTelegramService userTelegramService;

    @Autowired
    public InlineAdminButtonServiceImpl(InlineKeyboardService inlineKeyboardService, UserTelegramService userTelegramService) {
        this.inlineKeyboardService = inlineKeyboardService;
        this.userTelegramService = userTelegramService;
    }

    /**
     * Builds the two-button menu shown to an admin when a new user requests
     * access: one button grants access, the other rejects the request.
     *
     * @param chatIdUser chat id of the user asking for access
     * @return an inline keyboard with grant/reject callbacks
     */
    @Override
    public InlineKeyboardMarkup inviteForAdmin(Long chatIdUser) {
        Map<String, String> buttonsMap = new HashMap<>();
        buttonsMap.put("Дать доступ", ADD_STATUS_USER.getNameForService() + "chatId:" + chatIdUser + ":true");
        buttonsMap.put("Отклонить", ADD_STATUS_USER.getNameForService() + "chatId:" + chatIdUser + ":false");
        return inlineKeyboardService.createMenu(buttonsMap);
    }

    /**
     * Lists every user whose active flag differs from {@code status}, so the
     * admin can toggle each of them to {@code status}.
     *
     * @param status the target active state
     * @return a menu of matching users, or empty when nobody needs changing
     */
    @Override
    public Optional<InlineKeyboardMarkup> showAllUsersForSetStatus(boolean status) {
        Map<String, String> buttonsMap = new HashMap<>();
        for (UserTelegram userTelegram : userTelegramService.getAllUser()) {
            // The two original mirrored branches reduce to one check: only
            // offer users whose current state differs from the target state.
            if (userTelegram.isActive() != status) {
                buttonsMap.put(userTelegram.getUserName(), ADMIN_MENU_SET_STATUS.getNameForService() + "setActive:" +
                        userTelegram.getChatId() + ":" + status);
            }
        }
        if (buttonsMap.isEmpty()) {
            return Optional.empty();
        }
        return Optional.of(inlineKeyboardService.createMenu(buttonsMap));
    }

    /** Lists every known user as a candidate for promotion to admin. */
    @Override
    public InlineKeyboardMarkup showAllUsersForSetAdmin() {
        Map<String, String> buttonsMap = new HashMap<>();
        for (UserTelegram userTelegram : userTelegramService.getAllUser()) {
            buttonsMap.put(userTelegram.getUserName(), ADMIN_MENU_SET_STATUS.getNameForService() + "setAdmin:" +
                    userTelegram.getChatId() + ":true");
        }
        return inlineKeyboardService.createMenu(buttonsMap);
    }
}
|
SUNET/eduid-automated-tests | src/main/java/se/sunet/eduid/dashboard/PhoneNumber.java | <reponame>SUNET/eduid-automated-tests
package se.sunet.eduid.dashboard;
import org.testng.Assert;
import se.sunet.eduid.utils.Common;
import se.sunet.eduid.utils.TestData;
/**
 * Page-object style helper for the eduID dashboard "phone numbers" settings:
 * verifies the Swedish/English labels and drives the add/confirm flows.
 */
public class PhoneNumber {
    private final Common common;
    private final TestData testData;

    public PhoneNumber(Common common, TestData testData){
        this.common = common;
        this.testData = testData;
    }

    /** Runs the label checks in both languages. */
    public void runPhoneNumber(){
        verifyLabelsSwedish();
        verifyLabelsEnglish();
    }

    private void verifyLabelsSwedish() {
        //Heading
        common.verifyStringOnPage("Telefonnummer");

        //Text
        common.verifyStringOnPage( "Du kan koppla ett " +
                "eller flera av dina mobiltelefonnummer till ditt eduID-konto och därefter välja vilket av dem som ska vara primär.");

        //Add more phone numbers
        common.verifyStringOnPage( "+ lägg till fler");
    }

    private void verifyLabelsEnglish() {
        //Click on English
        common.findWebElementByXpath("//*[@id=\"language-selector\"]/p[1]/a").click();

        //Heading
        common.verifyStringOnPage("Mobile phone numbers");

        //Text
        common.verifyStringOnPage("You can connect " +
                "one or more mobile phone numbers to your eduID, but one has to be set as the primary one.");

        //Add more phone numbers
        common.verifyStringOnPage("+ add more");

        //Click on Swedish - restore the language for subsequent steps
        common.findWebElementByXpath("//*[@id=\"language-selector\"]/p[2]/a").click();
    }

    /** Adds the test-data phone number via the settings page UI. */
    public void addPhoneNumber(){
        //Press settings
        common.findWebElementByXpath("//*[@id=\"dashboard-nav\"]/ul/a[3]/li/span").click();

        //TODO temp fix to get swedish language
        if(common.findWebElementByXpath("//div/footer/nav/ul/li[2]").getText().contains("Svenska"))
            common.findWebElementByLinkText("Svenska").click();

        //Click add phone number button
        common.findWebElementByXpath("//div/section[2]/div[2]/div/div/div[2]/div[2]/button").click();

        //Check message when phone number is not valid
        checkMessageFaultyNumber();

        //Enter phone number
        common.findWebElementById("number").clear();
        common.findWebElementById("number").sendKeys(testData.getPhoneNumber());

        //Click Add
        common.findWebElementById("mobile-button").click();

        //Check status info
        common.verifyStatusMessage("Telefonnummer sparades");
    }

    /**
     * Confirms the newly added number by fetching the confirmation code via
     * the test back door and entering it in the confirm dialog.
     */
    public void confirmNewPhoneNumber(){
        //Add cookie for back doors
        common.addMagicCookie();

        //Back door can not handle phone number with +, replacing it.
        String phoneNumber = testData.getPhoneNumber();
        Common.log.info("Adding phone number: " +phoneNumber);
        if(phoneNumber.contains("+")) {
            phoneNumber = phoneNumber.replace("+", "%2b");
        }
        else if(phoneNumber.startsWith("070")) {
            phoneNumber = phoneNumber.replace("070", "%2b4670");
        }

        //Fetch the eppn - click on advanced settings
        common.findWebElementByXpath("//*[@id=\"dashboard-nav\"]/ul/a[4]/li/span").click();

        //Store eppen
        String eppen = common.findWebElementByXpath("//*[@id=\"uniqueId-container\"]/div[2]/p[1]").getText();

        //Fetch the code from the back-door endpoint
        common.navigateToUrl("https://dashboard.dev.eduid.se/services/phone/get-code?eppn=" +eppen +"&phone=" +phoneNumber);
        Common.log.info("Fetching phone code: " +"https://dashboard.dev.eduid.se/services/phone/get-code?eppn=" +eppen +"&phone=" +phoneNumber);
        String phoneCode = common.findWebElementByXpath("/html/body").getText();

        if(phoneCode.contains("Bad Request"))
            Assert.fail("Got Bad request instead of a phone code");
        else
            Common.log.info("Phone code: " +phoneCode);

        //Navigate back to settings page
        common.navigateToUrl("https://dashboard.dev.eduid.se/profile/settings/personaldata");

        //Press confirm phone number link
        common.timeoutSeconds(2);
        common.findWebElementByXpathContainingText("Bekräfta").click();

        //Enter the code
        common.findWebElementById("phoneConfirmDialogControl").clear();
        common.findWebElementById("phoneConfirmDialogControl").sendKeys(phoneCode);

        //Press OK button
        common.findWebElementByXpath("//*[@id=\"confirm-user-data-modal\"]/div/div[3]/button[1]/span").click();

        //Check status info
        common.explicitWaitVisibilityElement("//*[@id=\"panel\"]/div[1]/div/span");

        //TODO temp fix to get swedish language
        if(common.findWebElementByXpath("//div/footer/nav/ul/li[2]").getText().contains("Svenska"))
            common.findWebElementByLinkText("Svenska").click();

        //Verify status message
        common.verifyStatusMessage("Telefonnummer har bekräftats");

        //Hide the info message
        common.findWebElementByXpath("//*[@id=\"panel\"]/div[1]/div/button/span").click();
    }

    private void checkMessageFaultyNumber(){
        //Enter a phone number on incorrect format to check message
        common.findWebElementById("number").clear();
        common.findWebElementById("number").sendKeys("1223456789");

        //Click outside to trigger validation of phone number
        common.findWebElementByXpath("//div/section[2]/div[2]/div/div").click();

        common.verifyStringByXpath("//div/section[2]/div[2]/div/div/div[2]/div[2]/div[2]/form/fieldset/div/small/span/span",
                "Ogiltigt telefonnummer. Skriv ett svensk nummer eller ett internationellt nummer " +
                        "som börjar med '+' följt av 6-20 siffror.");
    }
}
opengauss-mirror/Yat | openGaussBase/testcase/SECURITY/MASKING/Opengauss_Function_Security_Masking_Case0062.py | <reponame>opengauss-mirror/Yat
"""
Copyright (c) 2022 Huawei Technologies Co.,Ltd.
openGauss is licensed under Mulan PSL v2.
You can use this software according to the terms and conditions of the Mulan PSL v2.
You may obtain a copy of Mulan PSL v2 at:
http://license.coscl.org.cn/MulanPSL2
THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
See the Mulan PSL v2 for more details.
"""
"""
Case Type : security_masking
Case Name : 最多支持创建98个动态数据脱敏策略
Description :
1.poladmin用户创建表,并赋予表的所有操作权限给用户user001
2.poladmin用户将敏感字段加到资源标签
3.poladmin用户创建脱敏策略99个
4.清理资源:删除资源标签,删除脱敏策略,删除表,删除用户,关闭安全策略开关
Expect :
1.创表成功,赋权成功
2.资源标签创建成功:CREATE RESOURCE LABEL
3.第98个脱敏策略创建成功,第99个创建失败
4.清理资源成功
History :
"""
import unittest
from yat.test import Node
from yat.test import macro
from testcase.utils.Common import Common
from testcase.utils.CommonSH import CommonSH
from testcase.utils.Constant import Constant
from testcase.utils.Logger import Logger
class Security(unittest.TestCase):
    """Masking case 0062: at most 98 dynamic data-masking policies.

    Creates 99 tables/labels/policies as a policy admin and checks that the
    99th policy creation is rejected, then restores all state in tearDown.
    """

    def setUp(self):
        # Fixtures: loggers, DB access helpers and the object names reused
        # throughout the case.
        self.logger = Logger()
        text = '---Opengauss_Function_Security_Masking_Case0062 start---'
        self.logger.info(text)
        self.userNode = Node('PrimaryDbUser')
        self.sh_primy = CommonSH('PrimaryDbUser')
        self.common = Common()
        self.constant = Constant()
        self.poladmin = 'poladmin_security_masking_0062'
        self.com_user = 'com_security_masking_0062'
        self.resource_label = 'rl_security_masking_0062'
        self.masking_policy = 'mp_security_masking_0062'
        self.table = 'table_security_masking_0062'
        self.logger.info(
            '-----检查参数,修改配置:enable_security_policy=on-----')
        self.config_item = 'enable_security_policy'
        self.sql_cmd = f'show {self.config_item};'
        check_res = self.sh_primy.execut_db_sql(self.sql_cmd)
        self.logger.info(check_res)
        # Remember the original setting so tearDown can restore it.
        self.check_default = check_res.splitlines()[-2].strip()
        if 'on' != self.check_default:
            result = self.sh_primy.execute_gsguc(
                'reload',
                self.constant.GSGUC_SUCCESS_MSG,
                f'{self.config_item}=on')
            self.assertTrue(result, '参数修改失败')
            msg = self.sh_primy.execut_db_sql(self.sql_cmd)
            self.logger.info(msg)
            self.common.equal_sql_mdg(msg, f'{self.config_item}', 'on',
                                      '(1 row)', flag='1')

    def test_masking(self):
        """Create 99 policies; the first 98 succeed, the 99th is rejected."""
        text = '---step1.1:创建poladmin及普通用户 expect:创建成功,权限赋予成功---'
        self.logger.info(text)
        # FIX: the password expression here was a redacted placeholder
        # (macro.<PASSWORD>, not valid Python); every other credential in
        # this case uses macro.COMMON_PASSWD, so restore that constant.
        sql_cmd1 = f'drop owned by {self.poladmin} cascade;' \
                   f'drop user if exists {self.poladmin};' \
                   f'drop user if exists {self.com_user};' \
                   f'create user {self.poladmin} with POLADMIN ' \
                   f'password \'{macro.COMMON_PASSWD}\';' \
                   f'create user {self.com_user} with password ' \
                   f'\'{macro.COMMON_PASSWD}\';' \
                   f'grant all privileges to {self.com_user};'
        msg1 = self.sh_primy.execut_db_sql(sql_cmd1)
        self.logger.info(msg1)
        assert_1 = msg1.count(self.constant.CREATE_ROLE_SUCCESS_MSG) == 2
        assert_2 = self.constant.ALTER_ROLE_SUCCESS_MSG in msg1
        self.assertTrue(assert_1 and assert_2, '执行失败:' + text)

        for i in range(99):
            text = '---step1.2:poladmin用户创建表 expect:创表成功---'
            self.logger.info(text)
            table = '_'.join([self.table, str(i)])
            sql_cmd4 = f'''drop table if exists {table};
                create table {table}(id int,name char(10),
                creditcard varchar(25),address varchar(60))
                with (orientation =column);
                insert into {table} values
                (1,'张三','3214-5260-0070-1456-226',
                'Shanxi,Xian,yuhuazhai'),
                (2,'李四','5677-5260-7655-1456-776',
                'Shanxi,hanzhong,yangxian');'''
            excute_cmd4 = f'source {macro.DB_ENV_PATH};' \
                          f'gsql -d {self.userNode.db_name} ' \
                          f'-p {self.userNode.db_port} ' \
                          f'-U {self.poladmin} ' \
                          f'-W {macro.COMMON_PASSWD} ' \
                          f'-c "{sql_cmd4}"'
            self.logger.info(excute_cmd4)
            msg4 = self.userNode.sh(excute_cmd4).result()
            self.logger.info(msg4)
            assert_3 = self.constant.CREATE_TABLE_SUCCESS in msg4
            assert_4 = self.constant.INSERT_SUCCESS_MSG in msg4
            self.assertTrue(assert_3 and assert_4, '执行失败:' + text)

            text = '---step2: poladmin用户将敏感字段加到资源标签,' \
                   'except: 资源标签创建成功---'
            self.logger.info(text)
            resource_label = '_'.join([self.resource_label, str(i)])
            sql_cmd5 = f'drop resource label if exists {resource_label};' \
                       f'create resource label {resource_label} ' \
                       f'add column({table}.creditcard);'
            excute_cmd5 = f'source {macro.DB_ENV_PATH};' \
                          f'gsql -d {self.userNode.db_name} ' \
                          f'-p {self.userNode.db_port} ' \
                          f'-U {self.poladmin} ' \
                          f'-W {macro.COMMON_PASSWD} ' \
                          f'-c "{sql_cmd5}"'
            self.logger.info(excute_cmd5)
            msg5 = self.userNode.sh(excute_cmd5).result()
            self.logger.info(msg5)
            assert_5 = self.constant.resource_label_create_success_msg in msg5
            self.assertTrue(assert_5, '执行失败:' + text)

            text = '---step3:poladmin用户创建脱敏策略99个,' \
                   'except: 前98个脱敏策略创建成功,第99个创建失败---'
            self.logger.info(text)
            masking_policy = '_'.join([self.masking_policy, str(i)])
            sql_cmd6 = f'drop masking policy if exists {masking_policy};' \
                       f'create masking policy {masking_policy} ' \
                       f'maskall on label({resource_label}) ' \
                       f'filter on roles({self.com_user});'
            excute_cmd6 = f'source {macro.DB_ENV_PATH};' \
                          f'gsql -d {self.userNode.db_name} ' \
                          f'-p {self.userNode.db_port} ' \
                          f'-U {self.poladmin} ' \
                          f'-W {macro.COMMON_PASSWD} ' \
                          f'-c "{sql_cmd6}"'
            self.logger.info(excute_cmd6)
            msg6 = self.userNode.sh(excute_cmd6).result()
            self.logger.info(msg6)
            # Policies 0..97 succeed; policy 98 (the 99th) must be refused.
            if i < 98:
                assert_6 = self.constant.masking_policy_create_success_msg \
                           in msg6
            else:
                assert_6 = 'Too many policies, adding new policiy is ' \
                           'restricted' in msg6
            self.assertTrue(assert_6, '执行失败:' + text)

    def tearDown(self):
        """Drop all policies/labels/users and restore the GUC setting."""
        self.logger.info('-------step4: 清理资源 expect:清理资源成功-------')
        for i in range(99):
            resource_label = '_'.join([self.resource_label, str(i)])
            masking_policy = '_'.join([self.masking_policy, str(i)])
            sql_cmd1 = f'drop masking policy {masking_policy};' \
                       f'drop resource label {resource_label};'
            excute_cmd1 = f'source {macro.DB_ENV_PATH};' \
                          f'gsql -d {self.userNode.db_name} ' \
                          f'-p {self.userNode.db_port} ' \
                          f'-U {self.poladmin} ' \
                          f'-W {macro.COMMON_PASSWD} ' \
                          f'-c "{sql_cmd1}"'
            self.logger.info(excute_cmd1)
            msg1 = self.userNode.sh(excute_cmd1).result()
            self.logger.info(msg1)
        # Verify no policy actions or labels for this case remain.
        sql_cmd2 = f'select * from GS_MASKING_POLICY_ACTIONS ' \
                   f'where actlabelname like \'{self.resource_label}%\';' \
                   f'select * from gs_policy_label where labelname ' \
                   f'like \'{self.resource_label}%\';'
        excute_cmd2 = f'source {macro.DB_ENV_PATH};' \
                      f'gsql -d {self.userNode.db_name} ' \
                      f'-p {self.userNode.db_port} ' \
                      f'-U {self.poladmin} ' \
                      f'-W {macro.COMMON_PASSWD} ' \
                      f'-c "{sql_cmd2}"'
        self.logger.info(excute_cmd2)
        msg2 = self.userNode.sh(excute_cmd2).result()
        self.logger.info(msg2)
        sql_cmd3 = f'drop owned by {self.poladmin} cascade;' \
                   f'drop user {self.com_user};' \
                   f'drop user {self.poladmin};'
        self.logger.info(sql_cmd3)
        msg3 = self.sh_primy.execut_db_sql(sql_cmd3)
        self.logger.info(msg3)
        self.logger.info(
            '----检查参数,恢复默认配置:enable_security_policy----')
        check_res = self.sh_primy.execut_db_sql(self.sql_cmd)
        self.logger.info(check_res)
        current = check_res.splitlines()[-2].strip()
        if self.check_default != current:
            result = self.sh_primy.execute_gsguc(
                'reload',
                self.constant.GSGUC_SUCCESS_MSG,
                f'{self.config_item}={self.check_default}')
            self.assertTrue(result, '参数恢复失败')
        msg4 = self.sh_primy.execut_db_sql(self.sql_cmd)
        self.logger.info(msg4)
        self.assertTrue(msg2.count('(0 rows)') == 2)
        self.assertIn(self.constant.DROP_OWNED_SUCCESS,
                      msg3.splitlines()[0].strip())
        self.assertTrue(msg3.count(self.constant.DROP_ROLE_SUCCESS_MSG) == 2)
        self.common.equal_sql_mdg(msg4, f'{self.config_item}',
                                  f'{self.check_default}', '(1 row)', flag='1')
        self.logger.info(
            '---Opengauss_Function_Security_Masking_Case0062 finish---')
vol4tim/DAO-IPCI | dapp/src/shared/dao/market/containers/main.js | <reponame>vol4tim/DAO-IPCI<gh_stars>10-100
import React, { Component } from 'react'
import { connect } from 'react-redux'
import { bindActionCreators } from 'redux'
import _ from 'lodash'
import { Main } from '../components'
import Spin from '../../../../shared/components/common/spin'
import { loadModule, approveLot, dealLot, removeLot } from '../../../../modules/market/actions';
// Container for one market module: kicks off loading of the module
// identified by `props.address` on mount and renders the main market view
// once the module is present in the store.
class Container extends Component {
  componentWillMount() {
    this.props.loadModule(this.props.address)
  }

  render() {
    // Show a spinner until the module has been loaded into the store.
    if (this.props.isModule) {
      return <Main {...this.props} />
    }
    return <Spin />
  }
}
// Maps the store to the props of the market module identified by
// `props.address`: the module record itself plus its lots, narrowed by the
// current sale/buy search filters.
function mapStateToProps(store, props) {
  const module = _.find(store.market.modules, ['address', props.address])
  let lots = [];
  if (!_.isEmpty(module)) {
    // Copy so filtering never mutates the store's array.
    lots = [...module.lots];
    if (!_.isEmpty(store.market.search.sale)) {
      lots = _.filter(lots, {
        sale_address: store.market.search.sale
      });
    }
    if (!_.isEmpty(store.market.search.buy)) {
      lots = _.filter(lots, {
        buy_address: store.market.search.buy
      });
    }
  }
  return {
    ...module,
    address: props.address,
    lots,
    search: store.market.search,
    role: store.app.role,
    // Drives the spinner-vs-content choice in the container above.
    isModule: !_.isEmpty(module)
  }
}
// Binds the market action creators, pre-applying the module address to the
// lot-level actions so callers only pass lot-specific arguments.
function mapDispatchToProps(dispatch, props) {
  const actions = bindActionCreators({
    loadModule, approveLot, dealLot, removeLot
  }, dispatch)
  return {
    loadModule: actions.loadModule,
    approveLot: (lot, token, value) => actions.approveLot(props.address, lot, token, value),
    dealLot: lotAddress => actions.dealLot(props.address, lotAddress),
    removeLot: lotAddress => actions.removeLot(props.address, lotAddress),
  }
}
export default connect(mapStateToProps, mapDispatchToProps)(Container)
|
lysyjakk/neuralchess | src/gameManager.cc | #include "../inc/gameManager.hh"
#define COORD_TO_BIT_POS(x , y) (x * 8 + y)
// Reset all game state to the standard chess starting position: piece
// bitboards for both sides, engine/move-lookup helpers, turn, castling and
// en-passant state, and the GUI mirror of the board.
void GameManager::start_new_game()
{
  TRACE_INFO("Initialization new game...");
  TRACE_INFO("Initialization bitboards...");

  // Standard starting squares encoded as 64-bit masks (rank 8 in the high
  // byte for black, rank 1/2 in the low bytes for white).
  m_black_board.king    = Bitboard(0x1000000000000000ULL);
  m_black_board.queens  = Bitboard(0x0800000000000000ULL);
  m_black_board.rooks   = Bitboard(0x8100000000000000ULL);
  m_black_board.knights = Bitboard(0x4200000000000000ULL);
  m_black_board.bishops = Bitboard(0x2400000000000000ULL);
  m_black_board.pawns   = Bitboard(0x00ff000000000000ULL);

  m_white_board.king    = Bitboard(0x0000000000000010ULL);
  m_white_board.queens  = Bitboard(0x0000000000000008ULL);
  m_white_board.rooks   = Bitboard(0x0000000000000081ULL);
  m_white_board.knights = Bitboard(0x0000000000000042ULL);
  m_white_board.bishops = Bitboard(0x0000000000000024ULL);
  m_white_board.pawns   = Bitboard(0x000000000000ff00ULL);

  TRACE_INFO("Initialization bitboards done");
  TRACE_INFO("Setting necessary moves varables...");

  m_bot          = NegaMax();
  m_move_checker = MoveLookup();
  m_move_checker.init();
  m_player_turn  = Site::WHITE;

  // No en-passant square yet; all four castling rights available.
  m_special_moves.en_passant = ENPASS_NO_SQ;
  m_special_moves.castle     = BLACK_KING_SIDE | BLACK_QUEEN_SIDE |
                               WHITE_KING_SIDE | WHITE_QUEEN_SIDE;

  // Pawns still on their home ranks may make a double move.
  m_special_moves.w_double_mv_pawn = Bitboard(Mask["FIELD_2"]);
  m_special_moves.b_double_mv_pawn = Bitboard(Mask["FIELD_7"]);

  //Clear GUI board
  for (int itr = 0; itr < MAX_BOARD_SQ; ++itr)
  {
    m_pieces_pos[itr] = NONE;
  }

  __set_up_GUI_board(m_white_board, Site::WHITE);
  __set_up_GUI_board(m_black_board, Site::BLACK);

  TRACE_INFO("Setting necessary moves varables done");
  TRACE_INFO("Initialization new game done");
  return;
}
// Executes one half-move. On white's turn the requested human move is
// validated and applied; on black's turn the coordinates are ignored and the
// NegaMax bot picks and plays its own move. After a successful move the GUI
// mirror board is rebuilt from the bitboards.
void GameManager::move_piece(uint8_t x_src,
                             uint8_t y_src,
                             uint8_t x_dest,
                             uint8_t y_dest)
{
    // NOTE(review): y is passed as the first macro argument, i.e. the rank
    // index is multiplied by 8 — confirm this matches the GUI's (x, y) axes.
    std::size_t sq_src  = COORD_TO_BIT_POS(y_src, x_src);
    std::size_t sq_dest = COORD_TO_BIT_POS(y_dest, x_dest);
    if (m_player_turn == Site::WHITE)
    {
        // Human (white) move: only applied if the validator accepts it;
        // an invalid move silently leaves the position and turn unchanged.
        MoveToMake move;
        move.sq_src     = sq_src;
        move.sq_dest    = sq_dest;
        move.piece_type = m_pieces_pos[sq_src];
        move.site       = Site::WHITE;
        move.special_mv = m_special_moves;
        if ( m_move_checker.is_move_valid(move, m_white_board, m_black_board) )
        {
            m_move_checker.make_move(&move, &m_white_board, &m_black_board);
            // make_move updates castling/en-passant state via move.special_mv.
            m_special_moves = move.special_mv;
            change_player_turn(m_player_turn);
            //Clear GUI board
            for (int itr = 0; itr < MAX_BOARD_SQ; ++itr)
            {
                m_pieces_pos[itr] = NONE;
            }
            __set_up_GUI_board(m_white_board, Site::WHITE);
            __set_up_GUI_board(m_black_board, Site::BLACK);
        }
    }
    else
    {
        // Bot (black) move: search for the best move, then apply it without
        // re-validation (the search is trusted to produce legal moves).
        MoveToMake move;
        BestMove best_move;
        best_move = m_bot.get_best_move(m_white_board,
                                        m_black_board,
                                        m_player_turn,
                                        m_special_moves);
        move.sq_src     = std::get<0>(best_move);
        move.sq_dest    = std::get<1>(best_move);
        move.piece_type = m_pieces_pos[std::get<0>(best_move)];
        move.site       = Site::BLACK;
        move.special_mv = m_special_moves;
        m_move_checker.make_move(&move, &m_black_board, &m_white_board);
        m_special_moves = move.special_mv;
        change_player_turn(m_player_turn);
        //Clear GUI board
        for (int itr = 0; itr < MAX_BOARD_SQ; ++itr)
        {
            m_pieces_pos[itr] = NONE;
        }
        __set_up_GUI_board(m_white_board, Site::WHITE);
        __set_up_GUI_board(m_black_board, Site::BLACK);
    }
    return;
}
// Returns the square->piece mirror board consumed by the GUI layer.
BitBoardToGUI GameManager::get_board() const
{
    return m_pieces_pos;
}
// Projects one side's piece bitboards onto the GUI mirror board
// (m_pieces_pos). Iterates over the ChessBoard's Bitboard members in
// declaration order and stamps the matching piece constant on every set bit.
// NOTE(review): the index->piece mapping (1=pawn ... 6=king) relies on the
// member declaration order of ChessBoard — confirm against its definition.
void GameManager::__set_up_GUI_board(ChessBoard board, Site site)
{
    //Set up all white pieces
    int index = 1;
    for (Bitboard* element = get_begin(&board);
         element < get_end(&board);
         ++element)
    {
        switch (index)
        {
        case 1: //Pawn
        {
            std::vector< std::size_t > sq_vector = element -> scan_for_bit_index();
            for (auto sq : sq_vector)
            {
                m_pieces_pos[sq] = site == Site::BLACK ? BLACK_PAWN
                                                       : WHITE_PAWN;
            }
        }
            break;
        case 2: //Rook
        {
            std::vector< std::size_t > sq_vector = element -> scan_for_bit_index();
            for (auto sq : sq_vector)
            {
                m_pieces_pos[sq] = site == Site::BLACK ? BLACK_ROOK
                                                       : WHITE_ROOK;
            }
        }
            break;
        case 3: //Knight
        {
            std::vector< std::size_t > sq_vector = element -> scan_for_bit_index();
            for (auto sq : sq_vector)
            {
                m_pieces_pos[sq] = site == Site::BLACK ? BLACK_KNIGHT
                                                       : WHITE_KNIGHT;
            }
        }
            break;
        case 4: //Bishop
        {
            std::vector< std::size_t > sq_vector = element -> scan_for_bit_index();
            for (auto sq : sq_vector)
            {
                m_pieces_pos[sq] = site == Site::BLACK ? BLACK_BISHOP
                                                       : WHITE_BISHOP;
            }
        }
            break;
        case 5: //Queen
        {
            std::vector< std::size_t > sq_vector = element -> scan_for_bit_index();
            for (auto sq : sq_vector)
            {
                m_pieces_pos[sq] = site == Site::BLACK ? BLACK_QUEEN
                                                       : WHITE_QUEEN;
            }
        }
            break;
        case 6: //King
        {
            std::vector< std::size_t > sq_vector = element -> scan_for_bit_index();
            for (auto sq : sq_vector)
            {
                m_pieces_pos[sq] = site == Site::BLACK ? BLACK_KING
                                                       : WHITE_KING;
            }
        }
            break;
        default:
            // Any member past the sixth bitboard is ignored.
            break;
        }
        ++index;
    }
}
yashlad27/codeforce-codechef-problems | codeforces/Probems/61A.cpp | <reponame>yashlad27/codeforce-codechef-problems<gh_stars>0
#include <iostream>
#include <cstring>
#include <algorithm>
using namespace std;
int main() {
// ultra - fast mathematician: 61A
string n1, n2;
cin>>n1>>n2;
//string res;
for(int i = 0; i<n1.length(); i++)
{
if(n1[i]==n2[i])
{
n1[i]='0';
}
else{
n1[i]='1';
}
}
cout<<n1<<endl;
return 0;
}
|
isaacbrodsky/freeze | freeze/src/main/java/com/isaacbrodsky/freeze/elements/Door.java | /**
*
*/
package com.isaacbrodsky.freeze.elements;
import com.isaacbrodsky.freeze.elements.data.InteractionRule;
import com.isaacbrodsky.freeze.elements.data.InteractionRulesSet;
import com.isaacbrodsky.freeze.elements.data.SaveData;
import com.isaacbrodsky.freeze.elements.oop.Message;
import com.isaacbrodsky.freeze.game.Board;
import com.isaacbrodsky.freeze.game.GameController;
import com.isaacbrodsky.freeze.graphics.ElementColoring;
/**
 * A colored door element. A door stays locked until the player holds the key
 * whose color matches the door's background color (back code 0 instead keys
 * off a flag bit kept in the gems counter). Touching an open door consumes
 * the key/flag.
 *
 * @author isaac
 */
public class Door extends AbstractElement {
	/** Display color; the background color code selects the matching key. */
	private ElementColoring color;
	/** Refreshed each tick: true when the player holds the matching key. */
	private boolean pickupable;

	/** Restores the door's color from saved element data. */
	public void createInstance(SaveData dat) {
		this.color = new ElementColoring(dat.getColor(),
				ElementColoring.ColorMode.CODOMINANT);
	}

	/** Serializes this door as element type 0x09 with its color. */
	public SaveData getSaveData() {
		return new SaveData(0x09, color);
	}

	@Override
	public ElementColoring getColoring() {
		return color;
	}

	/** Doors do not animate; they take no cycle updates. */
	@Override
	public int getCycle() {
		return 0;
	}

	/** Drawn as display character code 10. */
	@Override
	public int getDisplayCharacter() {
		return 10;
	}

	/**
	 * An openable door behaves like an item; a locked one returns the empty
	 * rule set and therefore blocks interaction.
	 */
	@Override
	public InteractionRulesSet getInteractionsRules() {
		if (pickupable) {
			return new InteractionRulesSet(InteractionRule.ITEM);
		} else {
			return new InteractionRulesSet();
		}
	}

	/**
	 * Handles touch messages: reports a locked door, or announces and opens
	 * an unlocked one (consuming the matching key).
	 */
	@Override
	public void message(GameController game, Board board, Message msg) {
		// Refresh 'pickupable' first so the check below sees current key state.
		tick(game, board);
		if (msg.isTouch()) {
			int code = color.getBackCode();
			// Map dark background codes to their bright equivalents so the
			// color-name lookup matches the key colors.
			if (code < 8)
				code += 8;
			if (!pickupable) {
				game.setMessage("The " + ElementColoring.nameFromCode(code)
						+ " door is locked!");
			} else {
				game.setMessage("The " + ElementColoring.nameFromCode(code)
						+ " door is now open.");
				toggleMe(game);
			}
		}
	}

	/**
	 * Toggle the associated status counter: back code 0 flips bit 8 of the
	 * gems counter, any other color clears the matching key slot.
	 */
	private void toggleMe(GameController game) {
		if (color.getBackCode() == 0) {
			game.getState().gems = game.getState().gems ^ 256;
		} else {
			// Key slots are 0-based; wrap bright codes back into range.
			int code = color.getBackCode() - 1;
			if (code > 6)
				code -= 8;
			game.getState().keys[code] = 0;
		}
	}

	/** @return whether the player currently holds the key matching this door */
	private boolean getPickupable(GameController game) {
		if (color.getBackCode() == 0) {
			// Bit 8 of the gems counter doubles as the "black key" flag.
			return ((game.getState().gems & 256) == 256);
		} else {
			int code = color.getBackCode() - 1;
			if (code > 6)
				code -= 8;
			return (game.getState().keys[code] != 0);
		}
	}

	/** Re-checks key possession every tick. */
	@Override
	public void tick(GameController game, Board board) {
		pickupable = getPickupable(game);
	}
}
|
AnupamaGupta01/kudu-1 | src/kudu/rpc/messenger.cc | <filename>src/kudu/rpc/messenger.cc<gh_stars>1-10
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
#include "kudu/rpc/messenger.h"

#include <arpa/inet.h>
#include <sys/socket.h>
#include <sys/types.h>
#include <unistd.h>

#include <functional>
#include <list>
#include <mutex>
#include <set>
#include <string>

#include <gflags/gflags.h>
#include <glog/logging.h>

#include "kudu/gutil/gscoped_ptr.h"
#include "kudu/gutil/map-util.h"
#include "kudu/gutil/stl_util.h"
#include "kudu/gutil/strings/substitute.h"
#include "kudu/rpc/acceptor_pool.h"
#include "kudu/rpc/connection.h"
#include "kudu/rpc/constants.h"
#include "kudu/rpc/reactor.h"
#include "kudu/rpc/rpc_header.pb.h"
#include "kudu/rpc/rpc_service.h"
#include "kudu/rpc/rpcz_store.h"
#include "kudu/rpc/sasl_common.h"
#include "kudu/rpc/transfer.h"
#include "kudu/util/errno.h"
#include "kudu/util/flag_tags.h"
#include "kudu/util/metrics.h"
#include "kudu/util/monotime.h"
#include "kudu/util/net/socket.h"
#include "kudu/util/status.h"
#include "kudu/util/threadpool.h"
#include "kudu/util/trace.h"
using std::string;
using std::shared_ptr;
using strings::Substitute;
DEFINE_int32(rpc_default_keepalive_time_ms, 65000,
"If an RPC connection from a client is idle for this amount of time, the server "
"will disconnect the client.");
TAG_FLAG(rpc_default_keepalive_time_ms, advanced);
namespace kudu {
namespace rpc {
class Messenger;
class ServerBuilder;
// Builder defaults: 4 reactor threads, 4 negotiation threads, 100ms timer
// granularity, and the keepalive taken from --rpc_default_keepalive_time_ms.
MessengerBuilder::MessengerBuilder(std::string name)
    : name_(std::move(name)),
      connection_keepalive_time_(
          MonoDelta::FromMilliseconds(FLAGS_rpc_default_keepalive_time_ms)),
      num_reactors_(4),
      num_negotiation_threads_(4),
      coarse_timer_granularity_(MonoDelta::FromMilliseconds(100)) {}

// Fluent setters: each overrides one builder default and returns *this so
// calls can be chained before Build().
MessengerBuilder& MessengerBuilder::set_connection_keepalive_time(const MonoDelta &keepalive) {
  connection_keepalive_time_ = keepalive;
  return *this;
}
MessengerBuilder& MessengerBuilder::set_num_reactors(int num_reactors) {
  num_reactors_ = num_reactors;
  return *this;
}
MessengerBuilder& MessengerBuilder::set_negotiation_threads(int num_negotiation_threads) {
  num_negotiation_threads_ = num_negotiation_threads;
  return *this;
}
MessengerBuilder& MessengerBuilder::set_coarse_timer_granularity(const MonoDelta &granularity) {
  coarse_timer_granularity_ = granularity;
  return *this;
}
MessengerBuilder &MessengerBuilder::set_metric_entity(
    const scoped_refptr<MetricEntity>& metric_entity) {
  metric_entity_ = metric_entity;
  return *this;
}
// Builds and initializes a Messenger from this builder's settings.
// On success, ownership of the new Messenger passes to the caller.
Status MessengerBuilder::Build(Messenger **msgr) {
  RETURN_NOT_OK(SaslInit(kSaslAppName)); // Initialize SASL library before we start making requests
  gscoped_ptr<Messenger> new_msgr(new Messenger(*this));
  RETURN_NOT_OK(new_msgr.get()->Init());
  *msgr = new_msgr.release();
  return Status::OK();
}
// Builds a Messenger and wraps it in a shared_ptr whose "deleter" does not
// delete, but instead notifies the Messenger that all external references
// are gone (see docs on Messenger::retain_self_ for this odd hack).
Status MessengerBuilder::Build(shared_ptr<Messenger> *msgr) {
  Messenger *ptr;
  RETURN_NOT_OK(Build(&ptr));
  // std::mem_fn replaces std::mem_fun, which was deprecated in C++11 and
  // removed in C++17; both invoke the member function on the raw pointer.
  *msgr = shared_ptr<Messenger>(
      ptr, std::mem_fn(&Messenger::AllExternalReferencesDropped));
  return Status::OK();
}
// See comment on Messenger::retain_self_ member.
// Called (as the shared_ptr deleter) when the last external reference goes
// away: shuts the messenger down and drops the self-reference so the object
// can destruct once the reactor threads release their references.
void Messenger::AllExternalReferencesDropped() {
  Shutdown();
  CHECK(retain_self_.get());
  // If we have no more external references, then we no longer
  // need to retain ourself. We'll destruct as soon as all our
  // internal-facing references are dropped (ie those from reactor
  // threads).
  retain_self_.reset();
}

// Idempotent teardown: stops acceptors, the negotiation pool and all
// reactors. Services must already have been unregistered.
void Messenger::Shutdown() {
  // Since we're shutting down, it's OK to block.
  ThreadRestrictions::ScopedAllowWait allow_wait;
  std::lock_guard<percpu_rwlock> guard(lock_);
  if (closing_) {
    return;
  }
  VLOG(1) << "shutting down messenger " << name_;
  closing_ = true;
  DCHECK(rpc_services_.empty()) << "Unregister RPC services before shutting down Messenger";
  rpc_services_.clear();
  for (const shared_ptr<AcceptorPool>& acceptor_pool : acceptor_pools_) {
    acceptor_pool->Shutdown();
  }
  acceptor_pools_.clear();
  // Need to shut down negotiation pool before the reactors, since the
  // reactors close the Connection sockets, and may race against the negotiation
  // threads' blocking reads & writes.
  negotiation_pool_->Shutdown();
  for (Reactor* reactor : reactors_) {
    reactor->Shutdown();
  }
}
// Binds a listening socket to 'accept_addr' (port 0 picks an ephemeral port)
// and registers an acceptor pool for it. The actual bound address is queried
// back from the socket so callers learn the chosen port.
// NOTE(review): 'sock' is stack-local and handed to AcceptorPool by pointer —
// presumably AcceptorPool takes over the underlying fd; confirm.
Status Messenger::AddAcceptorPool(const Sockaddr &accept_addr,
                                  shared_ptr<AcceptorPool>* pool) {
  Socket sock;
  RETURN_NOT_OK(sock.Init(0));
  RETURN_NOT_OK(sock.SetReuseAddr(true));
  RETURN_NOT_OK(sock.Bind(accept_addr));
  Sockaddr remote;
  RETURN_NOT_OK(sock.GetSocketAddress(&remote));
  shared_ptr<AcceptorPool> acceptor_pool(new AcceptorPool(this, &sock, remote));
  std::lock_guard<percpu_rwlock> guard(lock_);
  acceptor_pools_.push_back(acceptor_pool);
  *pool = acceptor_pool;
  return Status::OK();
}
// Register a new RpcService to handle inbound requests.
// Fails with AlreadyPresent if a service is registered under the same name.
Status Messenger::RegisterService(const string& service_name,
                                  const scoped_refptr<RpcService>& service) {
  DCHECK(service);
  std::lock_guard<percpu_rwlock> guard(lock_);
  // Guard-clause form: bail out early when the name is already taken.
  if (!InsertIfNotPresent(&rpc_services_, service_name, service)) {
    return Status::AlreadyPresent("This service is already present");
  }
  return Status::OK();
}
// Drops every registered RPC service. Always succeeds.
Status Messenger::UnregisterAllServices() {
  std::lock_guard<percpu_rwlock> guard(lock_);
  rpc_services_.clear();
  return Status::OK();
}

// Unregister an RpcService.
// Fails with ServiceUnavailable if no service is registered under that name.
Status Messenger::UnregisterService(const string& service_name) {
  std::lock_guard<percpu_rwlock> guard(lock_);
  if (rpc_services_.erase(service_name)) {
    return Status::OK();
  } else {
    return Status::ServiceUnavailable(Substitute("service $0 not registered on $1",
                 service_name, name_));
  }
}
// Routes an outbound call to the reactor responsible for its remote address.
void Messenger::QueueOutboundCall(const shared_ptr<OutboundCall> &call) {
  Reactor *reactor = RemoteToReactor(call->conn_id().remote());
  reactor->QueueOutboundCall(call);
}

// Dispatches an inbound call to the service it names, or responds with
// ERROR_NO_SUCH_SERVICE when the service is not registered here.
void Messenger::QueueInboundCall(gscoped_ptr<InboundCall> call) {
  shared_lock<rw_spinlock> guard(&lock_.get_lock());
  scoped_refptr<RpcService>* service = FindOrNull(rpc_services_,
                                                  call->remote_method().service_name());
  if (PREDICT_FALSE(!service)) {
    Status s = Status::ServiceUnavailable(Substitute("service $0 not registered on $1",
                                                     call->remote_method().service_name(), name_));
    LOG(INFO) << s.ToString();
    // Ownership is released here: the call object is expected to clean
    // itself up after responding with the failure.
    call.release()->RespondFailure(ErrorStatusPB::ERROR_NO_SUCH_SERVICE, s);
    return;
  }
  call->set_method_info((*service)->LookupMethod(call->remote_method()));
  // The RpcService will respond to the client on success or failure.
  WARN_NOT_OK((*service)->QueueInboundCall(std::move(call)), "Unable to handle RPC call");
}

// Hands a freshly accepted socket to the reactor that owns its remote peer.
void Messenger::RegisterInboundSocket(Socket *new_socket, const Sockaddr &remote) {
  Reactor *reactor = RemoteToReactor(remote);
  reactor->RegisterInboundSocket(new_socket, remote);
}
// Constructs the reactor threads and the connection-negotiation thread pool.
// retain_self_ starts as a self-reference so the Messenger stays alive until
// AllExternalReferencesDropped() runs (see that method's comments).
Messenger::Messenger(const MessengerBuilder &bld)
  : name_(bld.name_),
    closing_(false),
    rpcz_store_(new RpczStore()),
    metric_entity_(bld.metric_entity_),
    retain_self_(this) {
  for (int i = 0; i < bld.num_reactors_; i++) {
    reactors_.push_back(new Reactor(retain_self_, i, bld));
  }
  CHECK_OK(ThreadPoolBuilder("negotiator")
           .set_max_threads(bld.num_negotiation_threads_)
           .Build(&negotiation_pool_));
}

// Destruction is only legal after Shutdown() has completed.
Messenger::~Messenger() {
  std::lock_guard<percpu_rwlock> guard(lock_);
  CHECK(closing_) << "Should have already shut down";
  STLDeleteElements(&reactors_);
}
// Deterministically assigns a remote address to one reactor by hashing it,
// so all traffic for a given peer is handled by the same reactor thread.
Reactor* Messenger::RemoteToReactor(const Sockaddr &remote) {
  uint32_t hashCode = remote.HashCode();
  int reactor_idx = hashCode % reactors_.size();
  // This is just a static partitioning; we could get a lot
  // fancier with assigning Sockaddrs to Reactors.
  return reactors_[reactor_idx];
}
// Initializes every reactor thread, aborting on the first failure.
Status Messenger::Init() {
  // (An unused local 'Status status' was removed here.)
  for (Reactor* r : reactors_) {
    RETURN_NOT_OK(r->Init());
  }
  return Status::OK();
}
// Collects diagnostic info about in-flight RPCs from every reactor.
Status Messenger::DumpRunningRpcs(const DumpRunningRpcsRequestPB& req,
                                  DumpRunningRpcsResponsePB* resp) {
  shared_lock<rw_spinlock> guard(&lock_.get_lock());
  for (Reactor* reactor : reactors_) {
    RETURN_NOT_OK(reactor->DumpRunningRpcs(req, resp));
  }
  return Status::OK();
}

// Schedules 'func' to run on a reactor thread after delay 'when'.
// Prefers the current reactor thread (avoids a cross-thread hop); otherwise
// picks a random reactor.
void Messenger::ScheduleOnReactor(const boost::function<void(const Status&)>& func,
                                  MonoDelta when) {
  DCHECK(!reactors_.empty());
  // If we're already running on a reactor thread, reuse it.
  // (No early break: at most one reactor can be the current thread, so
  // scanning the rest is harmless.)
  Reactor* chosen = nullptr;
  for (Reactor* r : reactors_) {
    if (r->IsCurrentThread()) {
      chosen = r;
    }
  }
  if (chosen == nullptr) {
    // Not running on a reactor thread, pick one at random.
    chosen = reactors_[rand() % reactors_.size()];
  }
  // The DelayedTask is owned by the reactor once queued.
  DelayedTask* task = new DelayedTask(func, when);
  chosen->ScheduleReactorTask(task);
}

// Looks up a registered service by name; returns nullptr-wrapping ref when
// the service is not registered.
const scoped_refptr<RpcService> Messenger::rpc_service(const string& service_name) const {
  std::lock_guard<percpu_rwlock> guard(lock_);
  scoped_refptr<RpcService> service;
  if (FindCopy(rpc_services_, service_name, &service)) {
    return service;
  } else {
    return scoped_refptr<RpcService>(nullptr);
  }
}
} // namespace rpc
} // namespace kudu
|
kkcookies99/UAST | Dataset/Leetcode/valid/19/3.cpp | class Solution {
public:
ListNode* XXX(ListNode* head, int n) {
if(!head | !head -> next) return NULL;
ListNode * fast = head, *slow = head;
for(int i = 0; i < n; i++){
fast = fast -> next;
}
if(!fast){
return head -> next;
}
while(fast -> next){
fast = fast -> next;
slow = slow -> next;
}
slow -> next = slow -> next -> next;
return head;
}
};
|
tscher/PySDDP | PySDDP/newave/script/arquivos.py | <filename>PySDDP/newave/script/arquivos.py
import os
from typing import IO
from PySDDP.newave.script.templates.arquivos import ArquivosTemplate
class Arquivos(ArquivosTemplate):
    """Reader/writer for the NEWAVE master file that lists the names of all
    input and output files used by a run."""

    def __init__(self):
        super().__init__()
        # Names of every input file referenced by the deck (seeded with
        # defaults when `ler` runs).
        self.lista_entrada = list()
        # Mnemonic -> file-name mapping, set after a successful read.
        self._conteudo_ = None
        # Directory containing the file passed to `ler`.
        self.dir_base = None
        # Number of records read from the file.
        self._numero_registros_ = None

    def ler(self, file_name: str) -> None:
        """
        Read the file containing the names of the input and output files
        used by a NEWAVE execution.

        :param file_name: full path to the file
        """
        # Insert the name of the pivot (master) file into the input-file list,
        # followed by the two files that are always required.
        self.lista_entrada.clear()
        self.lista_entrada.append(os.path.split(file_name)[1])
        self.lista_entrada.append("HIDR.DAT")
        self.lista_entrada.append("VAZOES.DAT")
        # noinspection PyBroadException
        monitor: dict = dict()
        contador = 0
        try:
            with open(file_name, 'r', encoding='latin-1') as f:  # type: IO[str]
                continua = True
                contador = 1
                while continua:
                    self.next_line(f)
                    linha = self.linha.strip()
                    # File names start at column 30; the first 30 columns hold
                    # the record description.
                    nome = linha[30:].strip()
                    for mneumo in self.arquivos.keys():
                        if self.arquivos[mneumo]["ordem"] == contador:
                            self.arquivos[mneumo]["valor"] = nome
                            monitor[mneumo] = nome
                            # Expose each mnemonic as an attribute of the class.
                            setattr(self, mneumo, monitor[mneumo])
                            self.lista_entrada.append(nome)
                    contador += 1
                    if contador == 39:
                        break
        except Exception as err:
            print(self.linha)
            if isinstance(err, StopIteration):
                # StopIteration signals normal end of file: store the parsed
                # content (comments excluded) and fall through.
                self._conteudo_ = monitor
            else:
                raise
        self._numero_registros_ = contador - 1
        self.dir_base = os.path.split(file_name)[0]
        self.nome_arquivo = os.path.split(file_name)[1]
        print("OK! Leitura do", os.path.split(file_name)[1], "realizada com sucesso.")

    def escrever(self, file_out: str) -> None:
        """
        Write the file containing the file names used by a NEWAVE
        execution (creating the output directory if needed).

        :param file_out: full path to the output file
        """
        if not os.path.isdir(os.path.split(file_out)[0]):
            os.mkdir(os.path.split(file_out)[0])
        # Fixed-width record: 27-char description, separator, file name.
        formato = "{descricao: <27}: {valor: <12}\n"
        try:
            with open(file_out, 'w', encoding='latin-1') as f:  # type: IO[str]
                # Write one record per known mnemonic, in dictionary order.
                for mneumo in self.arquivos.keys():
                    linha = dict(
                        descricao=self.arquivos[mneumo]["descricao"],
                        valor=self.arquivos[mneumo]["valor"]
                    )
                    f.write(formato.format(**linha))
        except Exception:
            raise
        print("OK! Escrita do", os.path.split(file_out)[1], "realizada com sucesso.")
|
ddomingos-daitan/oss-models | authc-model/src/main/java/com/symphony/s2/authc/model/IMultiTenantServiceRegistry.java | <filename>authc-model/src/main/java/com/symphony/s2/authc/model/IMultiTenantServiceRegistry.java
/*
* Copyright 2020 Symphony Communication Services, LLC.
*
* All Rights Reserved
*/
package com.symphony.s2.authc.model;
import com.symphony.s2.authc.canon.IServiceInfo;
/**
 * A service to look up information about multi tenant services.
 *
 * @author <NAME>
 *
 */
public interface IMultiTenantServiceRegistry
{
  /**
   * Return service information for the given service.
   *
   * NOTE(review): behaviour for an unknown service (null return vs exception)
   * is not specified here — confirm with implementations.
   *
   * @param service The service for which info should be returned.
   *
   * @return Service information for the given service.
   */
  IServiceInfo fetchServiceInfo(IMultiTenantService service);
}
|
NilsRenaud/vert.x | src/main/java/io/vertx/core/impl/JavaVerticleFactory.java | /*
* Copyright (c) 2011-2019 Contributors to the Eclipse Foundation
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0
* which is available at https://www.apache.org/licenses/LICENSE-2.0.
*
* SPDX-License-Identifier: EPL-2.0 OR Apache-2.0
*/
package io.vertx.core.impl;
import io.vertx.core.Promise;
import io.vertx.core.Verticle;
import io.vertx.core.impl.verticle.CompilingClassLoader;
import io.vertx.core.spi.VerticleFactory;
import java.util.concurrent.Callable;
/**
 * Verticle factory for plain Java verticles: resolves either a
 * fully-qualified class name or a {@code .java} source file that is
 * compiled in memory.
 *
 * @author <a href="http://tfox.org"><NAME></a>
 */
public class JavaVerticleFactory implements VerticleFactory {

  @Override
  public String prefix() {
    return "java";
  }

  @Override
  public void createVerticle(String verticleName, ClassLoader classLoader, Promise<Callable<Verticle>> promise) {
    // Strip the "java:" prefix added by the factory mechanism.
    verticleName = VerticleFactory.removePrefix(verticleName);
    Class<Verticle> clazz;
    try {
      if (verticleName.endsWith(".java")) {
        // A raw .java source file: compile it in memory, then load the
        // resulting main class through the compiling class loader.
        CompilingClassLoader compilingLoader = new CompilingClassLoader(classLoader, verticleName);
        String className = compilingLoader.resolveMainClassName();
        clazz = (Class<Verticle>) compilingLoader.loadClass(className);
      } else {
        // Otherwise treat the name as a fully-qualified class name.
        clazz = (Class<Verticle>) classLoader.loadClass(verticleName);
      }
    } catch (ClassNotFoundException e) {
      promise.fail(e);
      return;
    }
    // NOTE(review): unchecked casts above and the deprecated
    // Class#newInstance here are cleanup candidates; replacing newInstance
    // with getDeclaredConstructor().newInstance() would change the thrown
    // exception types, so it is left as-is.
    promise.complete(clazz::newInstance);
  }
}
|
cenodis/InfinityLib | src/main/java/com/infinityraider/infinitylib/modules/dualwield/MouseClickHandler.java | <filename>src/main/java/com/infinityraider/infinitylib/modules/dualwield/MouseClickHandler.java<gh_stars>0
package com.infinityraider.infinitylib.modules.dualwield;
import net.minecraft.client.Minecraft;
import net.minecraft.client.entity.EntityPlayerSP;
import net.minecraft.client.multiplayer.PlayerControllerMP;
import net.minecraft.entity.*;
import net.minecraft.item.ItemStack;
import net.minecraft.util.EnumHand;
import net.minecraft.world.GameType;
import net.minecraftforge.client.event.MouseEvent;
import net.minecraftforge.fml.common.eventhandler.Event;
import net.minecraftforge.fml.common.eventhandler.SubscribeEvent;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
import org.lwjgl.input.Keyboard;
@SideOnly(Side.CLIENT)
public class MouseClickHandler {
    private static final MouseClickHandler INSTANCE = new MouseClickHandler();
    // Lazily captured from Minecraft on first attack; used for game-type checks.
    private PlayerControllerMP playerController;
    // MouseEvent fires on both press and release; these flags flip on every
    // event so actions only run on the press edge.
    private boolean leftButtonPressed = false;
    private boolean rightButtonPressed = false;
    private static final int LMB = 0;
    private static final int RMB = 1;

    private MouseClickHandler() {}

    /** @return the client-side singleton instance */
    public static MouseClickHandler getInstance() {
        return INSTANCE;
    }

    /**
     * Left mouse button: fires the off-hand weapon if it is dual-wielded.
     * The event is cancelled so vanilla handling does not also run.
     */
    @SubscribeEvent
    @SuppressWarnings("unused")
    public void onLeftClick(MouseEvent event) {
        EntityPlayerSP player = Minecraft.getMinecraft().player;
        ItemStack stack = player.getHeldItemOffhand();
        if(event.getButton() != LMB) {
            return;
        }
        leftButtonPressed = !leftButtonPressed;
        if(stack == null) {
            return;
        }
        if(stack.getItem() instanceof IDualWieldedWeapon) {
            if(leftButtonPressed) {
                // NOTE(review): left click reads raw LWJGL key state while
                // right click reads the configured keybinds — confirm the
                // asymmetry is intentional (breaks remapped sneak/sprint).
                boolean shift = Keyboard.isKeyDown(Keyboard.KEY_LSHIFT);
                boolean ctrl = Keyboard.isKeyDown(Keyboard.KEY_LCONTROL);
                IDualWieldedWeapon weapon = (IDualWieldedWeapon) stack.getItem();
                attackEntity(weapon, player, stack, true, shift, ctrl, EnumHand.OFF_HAND);
                weapon.onItemUsed(stack, player, shift, ctrl, EnumHand.OFF_HAND);
                // Mirror the click to the server so logic runs on both sides.
                new MessageMouseButtonPressed(true, shift, ctrl).sendToServer();
                Minecraft.getMinecraft().player.swingArm(EnumHand.OFF_HAND);
            }
            event.setResult(Event.Result.DENY);
            event.setCanceled(true);
        }
    }

    /**
     * Right mouse button: fires the main-hand weapon if it is dual-wielded.
     * The event is cancelled so vanilla handling does not also run.
     */
    @SubscribeEvent
    @SuppressWarnings("unused")
    public void onRightClick(MouseEvent event) {
        EntityPlayerSP player = Minecraft.getMinecraft().player;
        ItemStack stack = player.getHeldItemMainhand();
        if(event.getButton() != RMB) {
            return;
        }
        rightButtonPressed = !rightButtonPressed;
        if(stack == null) {
            return;
        }
        if(stack.getItem() instanceof IDualWieldedWeapon) {
            if(rightButtonPressed) {
                boolean shift = Minecraft.getMinecraft().gameSettings.keyBindSneak.isKeyDown();
                boolean ctrl = Minecraft.getMinecraft().gameSettings.keyBindSprint.isKeyDown();
                IDualWieldedWeapon weapon = (IDualWieldedWeapon) stack.getItem();
                attackEntity(weapon, player, stack, false, shift, ctrl, EnumHand.MAIN_HAND);
                weapon.onItemUsed(stack, player, shift, ctrl, EnumHand.MAIN_HAND);
                new MessageMouseButtonPressed(false, shift, ctrl).sendToServer();
                Minecraft.getMinecraft().player.swingArm(EnumHand.MAIN_HAND);
            }
            event.setResult(Event.Result.DENY);
            event.setCanceled(true);
        }
    }

    /**
     * Attacks the entity under the crosshair (if any) with the given weapon,
     * unless the weapon's own onItemAttack handler consumes the attack.
     * Also notifies the server of the attack.
     */
    private void attackEntity(IDualWieldedWeapon weapon, EntityPlayerSP player, ItemStack stack, boolean left, boolean shift, boolean ctrl, EnumHand hand) {
        if(Minecraft.getMinecraft().objectMouseOver == null) {
            return;
        }
        Entity target = Minecraft.getMinecraft().objectMouseOver.entityHit;
        if(target != null) {
            if(!weapon.onItemAttack(stack, player, target, shift, ctrl, left ? EnumHand.OFF_HAND : EnumHand.MAIN_HAND)) {
                if(this.playerController == null) {
                    this.playerController = Minecraft.getMinecraft().playerController;
                }
                if(this.playerController != null) {
                    new MessageAttackDualWielded(target, left, shift, ctrl).sendToServer();
                    // Spectators never attack.
                    if(this.playerController.getCurrentGameType() != GameType.SPECTATOR) {
                        ModuleDualWield.getInstance().attackTargetEntityWithCurrentItem(player, target, weapon, stack, hand);
                        player.resetCooldown();
                    }
                }
            }
        }
    }
}
|
AgeOfLearning/material-design-icons | av/icon-twotone-missed-video-call-element/index.js | import styles from './template.css';
import template from './template';
import AoflElement from '@aofl/web-components/aofl-element';
/**
 * Custom element rendering the Material Design "missed video call"
 * two-tone icon.
 *
 * @extends {AoflElement}
 */
class IconTwotoneMissedVideoCallElement extends AoflElement {
  /**
   * Tag name under which this element is registered.
   *
   * @readonly
   * @return {String}
   */
  static get is() {
    return 'icon-twotone-missed-video-call';
  }

  /**
   * Creates an instance of IconTwotoneMissedVideoCallElement.
   */
  constructor() {
    super();
  }

  /**
   * Renders the icon template with its scoped styles.
   *
   * @return {Object}
   */
  render() {
    return super.render(template, [styles]);
  }
}

window.customElements.define(IconTwotoneMissedVideoCallElement.is, IconTwotoneMissedVideoCallElement);

export default IconTwotoneMissedVideoCallElement;
|
Ecotrust/COMPASS | mp/data_manager/migrations/0002_auto_20190529_1612.py | <gh_stars>1-10
# Generated by Django 2.2.1 on 2019-05-29 16:12
from django.db import migrations, models
class Migration(migrations.Migration):
    """Allow every listed many-to-many relation to be left empty by adding
    ``blank=True`` (admin/form-level change only; no database schema change)."""

    dependencies = [
        ('data_manager', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='dataneed',
            name='themes',
            field=models.ManyToManyField(blank=True, to='data_manager.Theme'),
        ),
        migrations.AlterField(
            model_name='layer',
            name='attribute_fields',
            field=models.ManyToManyField(blank=True, to='data_manager.AttributeInfo'),
        ),
        migrations.AlterField(
            model_name='layer',
            name='lookup_table',
            field=models.ManyToManyField(blank=True, to='data_manager.LookupInfo'),
        ),
        migrations.AlterField(
            model_name='layer',
            name='sublayers',
            field=models.ManyToManyField(blank=True, related_name='_layer_sublayers_+', to='data_manager.Layer'),
        ),
        migrations.AlterField(
            model_name='layer',
            name='themes',
            field=models.ManyToManyField(blank=True, to='data_manager.Theme'),
        ),
        migrations.AlterField(
            model_name='toc',
            name='themes',
            field=models.ManyToManyField(blank=True, to='data_manager.TOCTheme'),
        ),
        migrations.AlterField(
            model_name='tocsubtheme',
            name='layers',
            field=models.ManyToManyField(blank=True, to='data_manager.Layer'),
        ),
        migrations.AlterField(
            model_name='toctheme',
            name='layers',
            field=models.ManyToManyField(blank=True, to='data_manager.Layer'),
        ),
        migrations.AlterField(
            model_name='toctheme',
            name='subthemes',
            field=models.ManyToManyField(blank=True, to='data_manager.TOCSubTheme'),
        ),
    ]
|
hitjl/trino | plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/glue/TestIcebergGlueCatalogConnectorSmokeTest.java | <gh_stars>1000+
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.plugin.iceberg.catalog.glue;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.DeleteObjectsRequest;
import com.amazonaws.services.s3.model.ListObjectsV2Request;
import com.amazonaws.services.s3.model.ListObjectsV2Result;
import com.amazonaws.services.s3.model.S3ObjectSummary;
import com.google.common.collect.ImmutableMap;
import io.trino.plugin.hive.metastore.glue.GlueMetastoreApiStats;
import io.trino.plugin.iceberg.BaseIcebergConnectorSmokeTest;
import io.trino.plugin.iceberg.IcebergQueryRunner;
import io.trino.plugin.iceberg.SchemaInitializer;
import io.trino.testing.QueryRunner;
import org.apache.iceberg.FileFormat;
import org.testng.annotations.AfterClass;
import org.testng.annotations.Parameters;
import org.testng.annotations.Test;
import java.util.List;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static io.trino.plugin.hive.metastore.glue.AwsSdkUtil.getPaginatedResults;
import static io.trino.testing.sql.TestTable.randomTableSuffix;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
/*
* TestIcebergGlueCatalogConnectorSmokeTest currently uses AWS Default Credential Provider Chain,
* See https://docs.aws.amazon.com/sdk-for-java/v1/developer-guide/credentials.html#credentials-default
* on ways to set your AWS credentials which will be needed to run this test.
*/
public class TestIcebergGlueCatalogConnectorSmokeTest
extends BaseIcebergConnectorSmokeTest
{
private final String bucketName;
private final String schemaName;
@Parameters("s3.bucket")
public TestIcebergGlueCatalogConnectorSmokeTest(String bucketName)
{
super(FileFormat.PARQUET);
this.bucketName = requireNonNull(bucketName, "bucketName is null");
this.schemaName = "test_iceberg_smoke_" + randomTableSuffix();
}
@Override
protected QueryRunner createQueryRunner()
throws Exception
{
return IcebergQueryRunner.builder()
.setIcebergProperties(
ImmutableMap.of(
"iceberg.catalog.type", "glue",
"hive.metastore.glue.default-warehouse-dir", schemaPath()))
.setSchemaInitializer(
SchemaInitializer.builder()
.withClonedTpchTables(REQUIRED_TPCH_TABLES)
.withSchemaName(schemaName)
.build())
.build();
}
@AfterClass(alwaysRun = true)
public void cleanup()
{
computeActual("SHOW TABLES").getMaterializedRows()
.forEach(table -> getQueryRunner().execute("DROP TABLE " + table.getField(0)));
getQueryRunner().execute("DROP SCHEMA IF EXISTS " + schemaName);
// DROP TABLES should clean up any files, but clear the directory manually to be safe
AmazonS3 s3 = AmazonS3ClientBuilder.standard().build();
ListObjectsV2Request listObjectsRequest = new ListObjectsV2Request()
.withBucketName(bucketName)
.withPrefix(schemaPath());
List<DeleteObjectsRequest.KeyVersion> keysToDelete = getPaginatedResults(
s3::listObjectsV2,
listObjectsRequest,
ListObjectsV2Request::setContinuationToken,
ListObjectsV2Result::getNextContinuationToken,
new GlueMetastoreApiStats())
.map(ListObjectsV2Result::getObjectSummaries)
.flatMap(objectSummaries -> objectSummaries.stream().map(S3ObjectSummary::getKey))
.map(DeleteObjectsRequest.KeyVersion::new)
.collect(toImmutableList());
if (!keysToDelete.isEmpty()) {
s3.deleteObjects(new DeleteObjectsRequest(bucketName).withKeys(keysToDelete));
}
}
@Test
@Override
public void testShowCreateTable()
{
assertThat((String) computeScalar("SHOW CREATE TABLE region"))
.isEqualTo(format("" +
"CREATE TABLE iceberg.%1$s.region (\n" +
" regionkey bigint,\n" +
" name varchar,\n" +
" comment varchar\n" +
")\n" +
"WITH (\n" +
" format = 'ORC',\n" +
" format_version = 2,\n" +
" location = '%2$s/%1$s.db/region'\n" +
")",
schemaName,
schemaPath()));
}
@Test
@Override
public void testRenameSchema()
{
    // Glue does not support renaming namespaces, so the base-class test is
    // expected to fail with this specific error.
    assertThatThrownBy(super::testRenameSchema)
            .hasStackTraceContaining("renameNamespace is not supported for Iceberg Glue catalogs");
}
/** Returns the S3 URI under which this test schema's data lives. */
private String schemaPath()
{
    return "s3://" + bucketName + "/" + schemaName;
}
}
|
dsabanin/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/impl/base/types/ScMatchTypeCasesImpl.scala | <filename>scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/impl/base/types/ScMatchTypeCasesImpl.scala
package org.jetbrains.plugins.scala.lang.psi.impl.base.types
import com.intellij.lang.ASTNode
import org.jetbrains.plugins.scala.lang.psi.api.base.types.{ScMatchTypeCase, ScMatchTypeCases}
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementImpl
/** PSI implementation of the case-clause sequence of a Scala 3 match type. */
class ScMatchTypeCasesImpl(node: ASTNode) extends ScalaPsiElementImpl(node) with ScMatchTypeCases {
  /** The first `case` clause found among this node's children. */
  override def firstCase: ScMatchTypeCase = findChildByClassScala(classOf[ScMatchTypeCase])
  /** All `case` clauses, in source order. */
  override def cases: Seq[ScMatchTypeCase] = findChildrenByClassScala(classOf[ScMatchTypeCase]).toSeq
}
|
gnafit/gna | tests/expression1/variable.py | #!/usr/bin/env python
from gna.expression import *

# Demo/smoke script: build Variables with index labels and check how
# products of variables combine and render via estr()/ident().

v0 = Variable('v0')
print( v0 )
v1 = Variable('v1', 'i')
print(v1)
v2 = Variable('v2', 'i', 'j')
print(v2)
v3 = Variable('v3', 'j', 'k')
print(v3)
v4 = Variable('v4', 'm')
print(v4)

pr1 = v2*v3
pr1.name = 'pr1'
print( 'pr1=v2*v3:', pr1, '=', pr1.estr() )
pr2 = pr1*v4
pr2.name = 'pr2'
print( 'pr2=pr1*v4:', pr2, '=', pr2.estr() )
pr3 = v4*pr1
pr3.name = 'pr3'
print( 'pr3=v4*pr1:', pr3, '=', pr3.estr() )

w1 = Variable('w1', 'k')
print(w1)
w2 = Variable('w2', 'l', 'm')
print(w2)
w3 = Variable('w3', 'm', 'n')
# Bug fix: this previously printed v3 (copy-paste from the block above)
# instead of the w3 just defined.
print(w3)
pr4=w1*w2*w3
pr4.name='pr4'
print('pr4:', pr4, '=', pr4.estr())

pr5 = pr3*pr4
pr5.name = 'pr5'
print( 'pr5=pr3*pr4:', pr5, '=', pr5.estr() )
pr6 = pr4*pr3
pr6.name = 'pr6'
print( 'pr6=pr4*pr3:', pr6, '=', pr6.estr() )

print()
pr6.name='?'
print('ident', 'pr6', pr6.ident())
print('ident full', 'pr6', pr6.ident_full())
|
mauricioklein/algorithm-exercises | challenge-24/test_solver.py | <gh_stars>1-10
import unittest
from solver import Solution
class TestSolver(unittest.TestCase):
    """Unit tests for Solution.move_zeros: zeros sink to the end, order of
    the remaining elements is preserved."""

    # (input, expected) pairs covering mixed, unchanged, negative,
    # all-zero, and empty lists.
    CASES = [
        ([1, 0, 3, 5, 0, 12], [1, 3, 5, 12, 0, 0]),
        ([3, 0, 1, 12, 0, 5], [3, 1, 12, 5, 0, 0]),
        ([1, 20, 3, 5, 8, 12], [1, 20, 3, 5, 8, 12]),
        ([-1, 0, 2, -3, 0, 0], [-1, 2, -3, 0, 0, 0]),
        ([0, 0, 0, 0], [0, 0, 0, 0]),
        ([], []),
    ]

    def test_move_zeros(self):
        for given, expected in self.CASES:
            self.assertEqual(Solution().move_zeros(given), expected)

if __name__ == "__main__":
    unittest.main()
|
sirinath/daffodil | daffodil-test/src/test/scala-new/edu/illinois/ncsa/daffodil/usertests/TestUserSubmittedTests.scala | <reponame>sirinath/daffodil<filename>daffodil-test/src/test/scala-new/edu/illinois/ncsa/daffodil/usertests/TestUserSubmittedTests.scala
package edu.illinois.ncsa.daffodil.usertests
/* Copyright (c) 2012-2013 Tresys Technology, LLC. All rights reserved.
*
* Developed by: Tresys Technology, LLC
* http://www.tresys.com
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal with
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
* of the Software, and to permit persons to whom the Software is furnished to do
* so, subject to the following conditions:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimers.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimers in the
* documentation and/or other materials provided with the distribution.
*
* 3. Neither the names of Tresys Technology, nor the names of its contributors
* may be used to endorse or promote products derived from this Software
* without specific prior written permission.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE
* SOFTWARE.
*/
import junit.framework.Assert._
import org.junit.Test
import scala.xml._
import edu.illinois.ncsa.daffodil.xml.XMLUtils
import edu.illinois.ncsa.daffodil.xml.XMLUtils._
import edu.illinois.ncsa.daffodil.compiler.Compiler
import edu.illinois.ncsa.daffodil.util._
import edu.illinois.ncsa.daffodil.tdml.DFDLTestSuite
import java.io.File
import edu.illinois.ncsa.daffodil.debugger.Debugger
import edu.illinois.ncsa.daffodil.debugger.InteractiveDebugger
import edu.illinois.ncsa.daffodil.japi.debugger.TraceRunner
import edu.illinois.ncsa.daffodil.japi.debugger.JavaInteractiveDebuggerRunner
import edu.illinois.ncsa.daffodil.debugger.InteractiveDebuggerRunner
import edu.illinois.ncsa.daffodil.japi.debugger.DebuggerRunner
/** User-submitted regression tests that need the interactive debugger attached. */
class TestUserSubmittedTestsNew {
  val testDir = "/edu/illinois/ncsa/daffodil/usertests/"
  val aa = testDir + "UserSubmittedTests.tdml"
  lazy val runner = new DFDLTestSuite(Misc.getRequiredResource(aa))

  @Test def test_dfdl_782() = {
    // Attach a trace-collecting interactive debugger before running the test,
    // then detach it afterwards so other tests are unaffected.
    val tr = new CustomTraceRunner
    tr.init
    val crunner = new CustomInteractiveDebuggerRunner(tr)
    val db = new InteractiveDebugger(crunner)
    Debugger.setDebugging(true)
    Debugger.setDebugger(db)
    runner.runOneTest("test_DFDL_782")
    // Comment out these two lines to see issue
    // documented in DFDL-790
    Debugger.setDebugging(false)
    Debugger.setDebugger(null)
  }
}
/**
 * Adapts a (Java-API) DebuggerRunner to the InteractiveDebuggerRunner
 * interface by straight delegation.
 */
class CustomInteractiveDebuggerRunner(dr: DebuggerRunner)
  extends InteractiveDebuggerRunner {
  def init(id: InteractiveDebugger): Unit = dr.init
  def getCommand(): String = dr.getCommand
  def lineOutput(line: String): Unit = dr.lineOutput(line)
  def fini(): Unit = dr.fini
}
/**
 * TraceRunner that records every trace line in memory so tests can inspect
 * the debugger's output after a run.
 */
class CustomTraceRunner extends TraceRunner {
  private var _lines = List.empty[String]

  /** All captured trace output concatenated; empty lines are dropped. */
  def getAllTheLines(): String = {
    val sb = new StringBuilder
    _lines.foreach(line => {
      if (line.length > 0) sb.append(line)
    })
    val allTheLines = sb.toString
    allTheLines
  }

  override def init: Unit = { _lines = List.empty[String] }

  // Bug fix: the original evaluated `_lines ++ (line + "\n")` and discarded
  // the result (++ on an immutable List returns a new collection), so no
  // line was ever recorded. Append and reassign instead.
  override def lineOutput(line: String) = _lines = _lines :+ (line + "\n")
}
|
concord-consortium/rigse | rails/spec/policies/portal/offering_policy_spec.rb | <reponame>concord-consortium/rigse
# frozen_string_literal: false
require 'spec_helper'
RSpec.describe Portal::OfferingPolicy do
let(:user) { FactoryBot.create(:user) }
let(:scope) { Pundit.policy_scope!(user, Portal::Offering) }
describe "Scope" do
  # Fixture graph: three projects, each linked to its own cohort.
  # Teachers 1 and 2 are in cohort1 (project1's cohort); teacher 3 is in
  # cohort2 (project2's cohort). cohort3/project3 have no teachers.
  # Each teacher has one class with one offering.
  before(:each) do
    @project1 = FactoryBot.create(:project)
    @project2 = FactoryBot.create(:project)
    @project3 = FactoryBot.create(:project)
    @cohort1 = FactoryBot.create(:admin_cohort)
    @cohort2 = FactoryBot.create(:admin_cohort)
    @cohort3 = FactoryBot.create(:admin_cohort)
    @project1.cohorts << @cohort1
    @project2.cohorts << @cohort2
    @project3.cohorts << @cohort3
    @teacher1 = FactoryBot.create(:portal_teacher)
    @teacher2 = FactoryBot.create(:portal_teacher)
    @teacher3 = FactoryBot.create(:portal_teacher)
    @runnable1 = FactoryBot.create(:external_activity)
    @runnable2 = FactoryBot.create(:external_activity)
    @runnable3 = FactoryBot.create(:external_activity)
    @offering1 = FactoryBot.create(:portal_offering, {clazz: @teacher1.clazzes[0], runnable: @runnable1})
    @offering2 = FactoryBot.create(:portal_offering, {clazz: @teacher2.clazzes[0], runnable: @runnable2})
    @offering3 = FactoryBot.create(:portal_offering, {clazz: @teacher3.clazzes[0], runnable: @runnable3})
    @teacher1.cohorts << @cohort1
    @teacher2.cohorts << @cohort1
    @teacher3.cohorts << @cohort2
  end
  context 'normal user' do
    it 'does not allow access to any offerings' do
      expect(scope.to_a.length).to eq 0
    end
  end
  context 'project researcher' do
    before(:each) do
      user.add_role_for_project('researcher', @project1)
    end
    # project1 -> cohort1 -> teachers 1 and 2 -> offerings 1 and 2.
    it 'allows access to project offerings' do
      expect(scope.to_a).to match_array([@offering1, @offering2])
    end
  end
  context 'project admin' do
    before(:each) do
      user.add_role_for_project('admin', @project2)
    end
    # project2 -> cohort2 -> teacher 3 -> offering 3 only.
    it 'allows access to project offerings' do
      expect(scope.to_a).to match_array([@offering3])
    end
  end
  context 'teacher' do
    let(:user) { @teacher1.user }
    it 'allows access to teacher offerings' do
      expect(scope.to_a).to match_array([@offering1])
    end
    context 'who is also a project admin' do
      before(:each) do
        # project3 is for @cohort3 and has no teachers in it.
        user.add_role_for_project('admin', @project3)
      end
      it 'allows access to teacher offerings' do
        # We still expect to see the teachers own offering here
        # Even though they are not an admin for @project1
        expect(scope.to_a).to match_array([@offering1])
      end
    end
  end
  context 'admin user' do
    let(:user) { FactoryBot.generate(:admin_user) }
    it 'allows access to all offerings' do
      expect(scope.to_a).to match_array([@offering1, @offering2, @offering3])
    end
  end
end
# Auto-generated smoke tests: each simple policy predicate, when the policy
# is constructed with a nil context and a nil record, is expected to return
# a nil (falsey) value.
%i[api_show? api_index? api_report? show? destroy? activate?
   deactivate? update? answers? student_report? report?].each do |predicate|
  describe "##{predicate}" do
    it predicate.to_s do
      offering_policy = described_class.new(nil, nil)
      result = offering_policy.public_send(predicate)
      expect(result).to be_nil
    end
  end
end
describe '#external_report?' do
  # A policy context whose params reference the report under test; the
  # subject attaches that report to the offering's runnable before asking
  # the policy whether the external report may be viewed.
  let(:report) {
    FactoryBot.create(:external_report)
  }
  let(:context) {
    double(
      user: user,
      original_user: nil,
      request: nil,
      params: {
        report_id: report.id
      })
  }
  let (:offering) {
    FactoryBot.create(:portal_offering,
      runnable: FactoryBot.create(:external_activity))
  }
  subject {
    # make sure the report of the offering is our report
    offering.runnable.external_reports << report
    offering_policy = described_class.new(context, offering)
    offering_policy.external_report?
  }
  context 'user is not part of clazz or admin' do
    it { is_expected.to be_falsey }
  end
  context 'user is a teacher of offering clazz' do
    let(:user) {
      teacher = FactoryBot.create(:portal_teacher, :clazzes => [offering.clazz])
      teacher.user
    }
    it { is_expected.to be_truthy }
  end
  context 'user is a teacher of a different clazz' do
    let(:user) {
      clazz = FactoryBot.create(:portal_clazz)
      teacher = FactoryBot.create(:portal_teacher, :clazzes => [clazz])
      teacher.user
    }
    it { is_expected.to be_falsey }
  end
  context 'user is a student in the clazz' do
    let(:user) {
      student = FactoryBot.create(:full_portal_student, :clazzes => [offering.clazz])
      student.user
    }
    # Students only see the report when it is explicitly flagged as
    # allowed_for_students.
    context 'report is not allowed for students' do
      it { is_expected.to be_falsey }
    end
    context 'report is allowed for students' do
      let(:report) {
        FactoryBot.create(:external_report, allowed_for_students: true)
      }
      it { is_expected.to be_truthy }
    end
  end
end
# TODO: auto-generated
describe '#offering_collapsed_status?' do
  it 'offering_collapsed_status?' do
    offering_policy = described_class.new(nil, nil)
    result = offering_policy.offering_collapsed_status?
    expect(result).to be_nil
  end
end
end
|
enrobyn/lookit-api | studies/tasks.py | import logging
import os
import re
import shutil
import subprocess
import time
import tempfile
import hashlib
import datetime
import zipfile
from io import BytesIO, StringIO
import requests
from django.conf import settings
from django.core.files import File
from django.utils import timezone
from google.cloud import storage as gc_storage
from project import storages
from project.celery import app
from studies.helpers import send_mail
import attachment_helpers
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
logger.setLevel(logging.DEBUG)
# setup a stream handler for capturing logs for db logging
# Everything this module logs is mirrored into log_buffer so that
# build_experiment can persist the full build log in a StudyLog record.
log_buffer = StringIO()
handler = logging.StreamHandler(log_buffer)
handler.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
def get_repo_path(full_repo_path):
    """Return the 'org/repo' portion of a GitHub repository URL,
    without any trailing slash."""
    match = re.search('https://github.com/(.*)', full_repo_path)
    return match.group(1).rstrip('/')
def get_branch_sha(repo_url, branch):
    """Look up the current commit sha of ``branch`` via the GitHub refs API."""
    logger.debug(f'Getting {branch} sha for {repo_url}...')
    api_url = f'https://api.github.com/repos/{get_repo_path(repo_url)}/git/refs'
    logger.debug(f'Making API request to {api_url}...')
    response = requests.get(api_url)
    # Pick the ref entry for the requested branch; IndexError if absent,
    # matching the original list(filter(...))[0] behavior.
    matching_refs = [ref for ref in response.json() if ref['ref'] == f'refs/heads/{branch}']
    sha = matching_refs[0]['object']['sha']
    logger.debug(f'Got sha of {sha}')
    return sha
def unzip_file(file, destination_folder):
    """
    Github puts all files into a f`{repo_name}_{sha}`/ directory.
    This strips off the top-level directory and uses the destination_folder
    in it's place.

    ``file`` is the raw bytes of the zip archive.
    """
    logger.debug(f'Unzipping into {destination_folder}...')
    os.makedirs(destination_folder, mode=0o777, exist_ok=True)
    with zipfile.ZipFile(BytesIO(file)) as zip_file:
        for member in zip_file.infolist():
            # Drop the leading '{repo}_{sha}/' segment from every entry.
            relative_name = member.filename.partition('/')[-1]
            target_path = os.path.join(destination_folder, relative_name)
            if member.is_dir():
                os.makedirs(target_path, mode=0o777, exist_ok=True)
                continue
            # Robustness fix: some archives omit explicit directory entries,
            # in which case the original open() failed because the parent
            # directory was never created.
            os.makedirs(os.path.dirname(target_path), mode=0o777, exist_ok=True)
            with open(target_path, 'wb') as outfile:
                outfile.write(zip_file.read(member))
def deploy_to_remote(local_path, storage):
    # Walk the built experiment directory and upload every file to the given
    # Django storage backend, preserving the layout relative to deployments/.
    for root_directory, dirs, files in os.walk(local_path, topdown=True):
        for filename in files:
            full_path = os.path.join(root_directory, filename)
            with open(full_path, mode='rb') as f:
                # NOTE(review): assumes local_path always lives under
                # '../ember_build/deployments/'; the split raises IndexError
                # for any other location -- confirm against callers.
                remote_path = full_path.split('../ember_build/deployments/')[1]
                logger.debug(f'Uploading {full_path} to {storage.location}/{remote_path}...')
                storage.save(remote_path, File(f))
def download_repos(addons_repo_url, addons_sha=None, player_sha=None):
    """Download (and cache) the ember player and addons repos at given shas.

    Missing or malformed shas fall back to the tip of the configured branch.
    Returns a tuple of (checkout directory name, addons sha, player sha).
    """
    if addons_sha is None or not re.match('([a-f0-9]{40})', addons_sha):
        addons_sha = get_branch_sha(addons_repo_url, settings.EMBER_ADDONS_BRANCH)
    if player_sha is None or not re.match('([a-f0-9]{40})', player_sha):
        player_sha = get_branch_sha(settings.EMBER_EXP_PLAYER_REPO, settings.EMBER_EXP_PLAYER_BRANCH)
    repo_destination_folder = f'{player_sha}_{addons_sha}'
    local_repo_destination_folder = os.path.join('./ember_build/checkouts/', repo_destination_folder)
    # Reuse an existing checkout: the folder name encodes both shas.
    if os.path.isdir(local_repo_destination_folder):
        logger.debug(f'Found directory {local_repo_destination_folder}')
        return (repo_destination_folder, addons_sha, player_sha)
    addons_zip_path = f'{addons_repo_url}/archive/{addons_sha}.zip'
    player_zip_path = f'{settings.EMBER_EXP_PLAYER_REPO}/archive/{player_sha}.zip'
    logger.debug(f'Downloading {player_zip_path}...')
    unzip_file(requests.get(player_zip_path).content, local_repo_destination_folder)
    logger.debug(f'Downloading {addons_zip_path}...')
    # The addons repo is unpacked inside the player checkout's lib/ directory.
    unzip_file(requests.get(addons_zip_path).content, os.path.join(local_repo_destination_folder, 'lib'))
    return (repo_destination_folder, addons_sha, player_sha)
def build_docker_image():
    """Build (or refresh) the 'ember_build' Docker image; returns the
    CompletedProcess with combined stdout/stderr captured."""
    # this is broken out so that it can be more complicated if it needs to be
    logger.debug(f'Running docker build...')
    build_command = [
        'docker', 'build',
        '--pull',
        '--cache-from', 'ember_build',
        '-t', 'ember_build',
        '.',
    ]
    return subprocess.run(
        build_command,
        cwd=settings.EMBER_BUILD_ROOT_PATH,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    )
@app.task(bind=True, max_retries=10, retry_backoff=10)
def build_experiment(self, study_uuid, researcher_uuid, preview=True):
    """Celery task: check out the ember player/addons, build the experiment
    in Docker, deploy it to cloud storage, notify admins/researchers, and
    record a StudyLog. Retries on any failure."""
    ex = None
    try:
        from studies.models import Study, StudyLog
        from accounts.models import User
        save_versions = preview
        now = timezone.now()
        try:
            study = Study.objects.get(uuid=study_uuid)
        except Study.DoesNotExist as ex:
            logger.error(f'Study with uuid {study_uuid} does not exist. {ex}')
            raise
        try:
            researcher = User.objects.get(uuid=researcher_uuid)
        except User.DoesNotExist as ex:
            logger.error(f'User with uuid {researcher_uuid} does not exist. {ex}')
            raise
        destination_directory = f'{study_uuid}'
        player_sha = study.metadata.get('last_known_player_sha', None)
        addons_sha = study.metadata.get('last_known_addons_sha', None)
        addons_repo_url = study.metadata.get('addons_repo_url', settings.EMBER_ADDONS_REPO)
        logger.debug(f"Got {addons_repo_url} from {study.metadata.get('addons_repo_url')}")
        if preview:
            # Remember the current state so it can be restored after preview.
            current_state = study.state
            study.state = 'previewing'
            study.save()
            if player_sha is None and addons_sha is None:
                # if they're previewing and the sha's on their study aren't set
                # save the latest master sha of both repos
                save_versions = True
        checkout_directory, addons_sha, player_sha = download_repos(addons_repo_url, addons_sha=addons_sha, player_sha=player_sha)
        if save_versions:
            study.metadata['last_known_addons_sha'] = addons_sha
            study.metadata['last_known_player_sha'] = player_sha
            study.save()
        # Paths as seen from inside the build container vs. the host.
        container_checkout_directory = os.path.join('/checkouts/', checkout_directory)
        container_destination_directory = os.path.join('/deployments/', destination_directory)
        build_image_comp_process = build_docker_image()
        local_checkout_path = os.path.join(settings.EMBER_BUILD_ROOT_PATH, 'checkouts')
        local_deployments_path = os.path.join(settings.EMBER_BUILD_ROOT_PATH, 'deployments')
        replacement_string = f"prepend: '/studies/{study_uuid}/'"
        build_command = [
            'docker',
            'run',
            '--rm',
            '-e', f'CHECKOUT_DIR={container_checkout_directory}',
            '-e', f'REPLACEMENT={re.escape(replacement_string)}',
            '-e', f'STUDY_OUTPUT_DIR={container_destination_directory}',
            '-e', f"SENTRY_DSN={os.environ.get('SENTRY_DSN_JS', None)}",
            '-e', f"PIPE_ACCOUNT_HASH={os.environ.get('PIPE_ACCOUNT_HASH', None)}",
            '-e', f"PIPE_ENVIRONMENT={os.environ.get('PIPE_ENVIRONMENT', None)}",
            '-v', f'{local_checkout_path}:/checkouts',
            '-v', f'{local_deployments_path}:/deployments',
            'ember_build'
        ]
        logger.debug(f'Running build.sh for {container_checkout_directory}...')
        ember_build_comp_process = subprocess.run(
            build_command,
            cwd=settings.EMBER_BUILD_ROOT_PATH,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT
        )
        if preview:
            # if they're previewing put things in the preview directory
            storage = storages.LookitPreviewExperimentStorage()
        else:
            # otherwise put them in the experiment directory
            storage = storages.LookitExperimentStorage()
        cloud_deployment_directory = os.path.join(local_deployments_path, destination_directory)
        deploy_to_remote(cloud_deployment_directory, storage)
        context = {
            'org_name': study.organization.name,
            'study_name': study.name,
            'study_id': study.pk,
            'study_uuid': str(study.uuid),
            'action': 'previewed' if preview else 'deployed'
        }
        send_mail.delay(
            'notify_admins_of_study_action',
            'Study Previewed' if preview else 'Study Deployed',
            settings.EMAIL_FROM_ADDRESS,
            bcc=list(study.study_organization_admin_group.user_set.values_list('username', flat=True)),
            **context
        )
        send_mail.delay(
            'notify_researchers_of_deployment',
            'Study Previewed' if preview else 'Study Deployed',
            settings.EMAIL_FROM_ADDRESS,
            bcc=list(study.study_admin_group.user_set.values_list('username', flat=True)),
            **context
        )
        if not preview:
            study.state = 'active'
        else:
            study.previewed = True
            study.state = current_state
        study.save()
    except Exception as e:
        ex = e
        logger.error(e)
    finally:
        # NOTE(review): if an exception occurred before study,
        # ember_build_comp_process or build_image_comp_process were assigned,
        # referencing them here raises NameError and masks the original
        # error -- confirm and guard if needed.
        StudyLog.objects.create(
            study=study,
            action='preview' if preview else 'deploy',
            user=researcher,
            extra={
                'ember_build': str(ember_build_comp_process.stdout),
                'image_build': str(build_image_comp_process.stdout),
                'ex': str(ex),
                'log': log_buffer.getvalue(),
            }
        )
        # NOTE(review): this closes the module-level log_buffer, so any later
        # logging in this worker process would fail -- confirm intended.
        log_buffer.close()
        if ex:
            raise self.retry(exc=ex, countdown=30)
def cleanup_old_directories(root_path, older_than):
    """Delete every direct subdirectory of ``root_path`` whose mtime is older
    than ``older_than`` (default: 24 hours ago)."""
    if not older_than:
        older_than = timezone.now() - timezone.timedelta(days=1)
    else:
        assert type(older_than) == timezone.datetime, 'older_than must be an instance of datetime'
    with os.scandir(root_path) as sd:
        for entry in sd:
            # Compare the directory's mtime against the cutoff as epoch seconds.
            if entry.is_dir() and entry.stat().st_mtime < time.mktime(older_than.timetuple()):
                logger.debug(f'Deleting {entry.path}...')
                shutil.rmtree(entry.path)
@app.task
def cleanup_builds(older_than=None):
    # Periodic task: remove stale built experiments from deployments/.
    logger.debug('Cleaning up builds...')
    deployments = os.path.join(settings.EMBER_BUILD_ROOT_PATH, 'deployments')
    cleanup_old_directories(deployments, older_than)

@app.task
def cleanup_checkouts(older_than=None):
    # Periodic task: remove stale repo checkouts from checkouts/.
    logger.debug('Cleaning up checkouts...')
    checkouts = os.path.join(settings.EMBER_BUILD_ROOT_PATH, 'checkouts')
    cleanup_old_directories(checkouts, older_than)
@app.task
def cleanup_docker_images():
    """Remove dangling Docker images left behind by experiment builds."""
    logger.debug('Cleaning up docker images...')
    listing = subprocess.run(
        ['docker', 'images', '--quiet', '--filter', 'dangling=true'],
        stdout=subprocess.PIPE)
    for image_id in listing.stdout.splitlines():
        subprocess.run(['docker', 'rmi', '--force', image_id])
@app.task(bind=True, max_retries=10, retry_backoff=10)
def build_zipfile_of_videos(self, filename, study_uuid, orderby, match, requesting_user_uuid, consent=False):
    """Assemble a zip of a study's videos, upload it to Google Cloud Storage
    (reusing an existing archive for an identical video set), and email the
    requesting user a 30-minute signed download URL."""
    from studies.models import Study
    from accounts.models import User
    # get the study in question
    study = Study.objects.get(uuid=study_uuid)
    # get the user
    requesting_user = User.objects.get(uuid=requesting_user_uuid)
    # find the requested attachments
    if consent:
        attachments = attachment_helpers.get_consent_videos(study.uuid)
    else:
        attachments = attachment_helpers.get_study_attachments(study, orderby, match)
    # create a sha256 of the included filenames so identical requests map to
    # the same archive object
    m = hashlib.sha256()
    for attachment in attachments:
        m.update(attachment.key.encode('utf-8'))
    sha = m.hexdigest()
    # Bug fix: the `filename` argument was previously ignored and a literal
    # placeholder was baked into the archive name; use the caller's prefix.
    zip_filename = f'{filename}_{sha}.zip'
    # get the gc client
    gs_client = gc_storage.client.Client(project=settings.GS_PROJECT_ID)
    # get the bucket
    gs_private_bucket = gs_client.get_bucket(settings.GS_PRIVATE_BUCKET_NAME)
    # instantiate a blob for the file
    gs_blob = gc_storage.blob.Blob(zip_filename, gs_private_bucket)
    # if the archive already exists, skip the build and just send the link
    if not gs_blob.exists():
        with tempfile.TemporaryDirectory() as temp_directory:
            zip_file_path = os.path.join(temp_directory, zip_filename)
            # Context managers guarantee the archive and temp files are
            # closed even if a download fails partway through.
            with zipfile.ZipFile(zip_file_path, 'w') as archive:
                for attachment in attachments:
                    temporary_file_path = os.path.join(temp_directory, attachment.key)
                    file_response = requests.get(
                        attachment_helpers.get_download_url(attachment.key),
                        stream=True
                    )
                    # Stream each video to disk so large files never have to
                    # fit in memory.
                    with open(temporary_file_path, mode='w+b') as local_file:
                        for chunk in file_response.iter_content(8192):
                            local_file.write(chunk)
                    archive.write(temporary_file_path, attachment.key)
            # upload the zip to GoogleCloudStorage
            gs_blob.upload_from_filename(zip_file_path)
    # then send the email with a 30m link
    signed_url = gs_blob.generate_signed_url(int(time.time() + datetime.timedelta(minutes=30).seconds))
    # send an email with the signed url and return
    context = dict(
        signed_url=signed_url,
        user=requesting_user,
        videos=attachments,
        zip_filename=zip_filename
    )
    send_mail(
        'download_zip',
        'Your video archive has been created',
        settings.EMAIL_FROM_ADDRESS,
        bcc=[requesting_user.username, ],
        from_email=settings.EMAIL_FROM_ADDRESS,
        **context
    )
|
lechium/tvOS135Headers | System/Library/PrivateFrameworks/GameCenterUI.framework/GKBubblePathAnimator.h | <filename>System/Library/PrivateFrameworks/GameCenterUI.framework/GKBubblePathAnimator.h
/*
* This header is generated by classdump-dyld 1.0
* on Sunday, June 7, 2020 at 11:27:23 AM Mountain Standard Time
* Operating System: Version 13.4.5 (Build 17L562)
* Image Source: /System/Library/PrivateFrameworks/GameCenterUI.framework/GameCenterUI
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by <NAME>.
*/
/// Class-dumped declaration of the private GameCenterUI bubble-path animator.
/// NOTE(review): method purposes below are inferred from names only; no
/// implementation is visible here -- confirm before relying on them.
@interface GKBubblePathAnimator : NSObject

@property (nonatomic,readonly) long long animatorType;

/// Shared singleton instance.
+(id)sharedBubblePathAnimator;
+(id)hiTimingFunction;
-(void)animateTransition:(id)arg1 ;
/// Variant of animateTransition: that invokes the block on completion.
-(void)animateTransition:(id)arg1 completionHandler:(/*^block*/id)arg2 ;
-(long long)animatorType;
-(BOOL)bubblesOnscreenAtBeginning;
-(BOOL)bubblesOnscreenAtEnd;
-(double)minimumDurationForViewAnimations;
-(id)effectiveViewForContext:(id)arg1 ;
@end
|
fgtorres/regadb-leishmaniasis | regadb-research/src/be/kuleuven/rega/research/conserved/groupers/SubtypeGrouper.java | <reponame>fgtorres/regadb-leishmaniasis<filename>regadb-research/src/be/kuleuven/rega/research/conserved/groupers/SubtypeGrouper.java
package be.kuleuven.rega.research.conserved.groupers;
import java.util.List;
import net.sf.regadb.db.DrugGeneric;
import net.sf.regadb.db.NtSequence;
import net.sf.regadb.db.TestResult;
import be.kuleuven.rega.research.conserved.Grouper;
/**
 * Groups a sequence by the subtype assigned to it by the "Rega Subtype Tool"
 * test result, or {@code null} when no such result exists.
 */
public class SubtypeGrouper implements Grouper {

    public String getGroup(NtSequence ntseq, List<DrugGeneric> genericDrugs) {
        return ntseq.getTestResults().stream()
                .filter(tr -> "Rega Subtype Tool".equals(tr.getTest().getDescription()))
                .map(TestResult::getValue)
                .findFirst()
                .orElse(null);
    }
}
|
scottdonaldau/QRL | src/qrl/core/misc/DependencyChecker.py | <filename>src/qrl/core/misc/DependencyChecker.py
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import sys
import os
import pkg_resources
class DependencyChecker:
    """Verifies that the installed packages satisfy requirements.txt."""

    @staticmethod
    def _get_requirements_path():
        """Return the absolute path of the repository's requirements.txt,
        four directories above this module."""
        return os.path.abspath(os.path.join(os.path.dirname(__file__),
                                            os.path.pardir,
                                            os.path.pardir,
                                            os.path.pardir,
                                            os.path.pardir,
                                            "requirements.txt"))

    @staticmethod
    def check():
        """Exit the process with an explanatory message if any requirement
        is not satisfied by the current environment."""
        requirements_path = DependencyChecker._get_requirements_path()
        requirements = []
        with open(requirements_path, "r") as fp:
            for line in fp.readlines():
                line = line.strip()
                # Robustness fix: skip blank lines as well as comments; the
                # original appended empty/whitespace entries, which
                # pkg_resources cannot parse as requirements.
                if not line or line.startswith("#"):
                    continue
                requirements.append(line)
        try:
            pkg_resources.require(requirements)
        except Exception as e:
            sys.exit("dependencies not satisfied, run [pip3 install -r requirements.txt] first. \n {}".format(e))
|
mayc2/PseudoKnot_research | RNAstructure_Source/java_interface/src/ur_rna/GUITester/GuiTools/Matchers/VisibilityMatcher.java | package ur_rna.GUITester.GuiTools.Matchers;
import java.awt.*;
/**
* Tests the visibility of the GUI component (whether it is showing or not)
*/
/**
 * Matches GUI components by whether they are currently showing on screen.
 * Defaults to matching visible (showing) components.
 */
public class VisibilityMatcher extends ComposableBase {
    private boolean expectedVisibility = true;

    /** Creates a matcher that requires the component to be showing. */
    public VisibilityMatcher() { }

    /** Creates a matcher for the given visibility state. */
    public VisibilityMatcher(boolean visible) {
        expectedVisibility = visible;
    }

    @Override
    public boolean matches(Component c) {
        return c.isShowing() == expectedVisibility;
    }

    public boolean getVisible() {
        return expectedVisibility;
    }

    public void setVisible(boolean value) {
        expectedVisibility = value;
    }

    @Override
    public String toString() {
        return "{ VisibilityMatcher vis=" + (expectedVisibility ? "showing" : "hidden") + " }";
    }
}
|
haoweiqiu/No1Tutoring | TutoringService-Backend/src/main/java/ca/mcgill/ecse321/tutoring_service/model/Student.java | <reponame>haoweiqiu/No1Tutoring
package ca.mcgill.ecse321.tutoring_service.model;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.ManyToOne;
import java.util.Set;
import javax.persistence.OneToMany;
import javax.persistence.Id;
@Entity
/**
 * JPA entity representing a student account in the tutoring service.
 * All mapping annotations live on the getters (property access), so any
 * restructuring must keep them there.
 */
@Entity
public class Student {

    private int id;
    private String name;
    private String email;
    private String password;
    private int year;
    private School school;
    private Set<Registration> registration;

    /** Auto-generated database identifier. */
    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getName() {
        return this.name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getEmail() {
        return this.email;
    }

    public void setEmail(String email) {
        this.email = email;
    }

    public String getPassword() {
        return this.password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    /** School year the student is in. */
    public int getYear() {
        return this.year;
    }

    public void setYear(int year) {
        this.year = year;
    }

    /** The school this student attends; may be absent. */
    @ManyToOne(optional = true)
    public School getSchool() {
        return this.school;
    }

    public void setSchool(School school) {
        this.school = school;
    }

    /** The student's session registrations. */
    @OneToMany(mappedBy = "student")
    public Set<Registration> getRegistration() {
        return this.registration;
    }

    public void setRegistration(Set<Registration> registrations) {
        this.registration = registrations;
    }
}
|
SynthSys/BioDare2-BACK | src/main/java/ed/biodare2/backend/web/rest/ScrabController.java | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package ed.biodare2.backend.web.rest;
import java.security.Principal;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
/**
*
* @author tzielins
*/
@RestController
@RequestMapping("api/scrab")
/**
 * Scratch/debug REST endpoints used to verify request and authentication
 * wiring.
 */
@RestController
@RequestMapping("api/scrab")
public class ScrabController {

    private static final String template = "Hello, %s called by %s!";

    /** Returns a greeting naming the caller; {@code name} defaults to "Tomek". */
    @RequestMapping(method = RequestMethod.GET)
    public Map<String, String> greeting(Principal principal, @RequestParam(required = false, defaultValue = "Tomek") String name) {
        Map<String, String> data = new HashMap<>();
        data.put("greeting", String.format(template, name, principalName(principal)));
        return data;
    }

    /** Echoes the posted "content" value back, tagged with the caller's name. */
    @RequestMapping(method = RequestMethod.POST)
    public Map<String, String> post(@RequestBody Map<String, String> request, Principal principal) {
        String content = request.getOrDefault("content", "empty");
        Map<String, String> data = new HashMap<>();
        data.put("greeting", content + " by: " + principalName(principal));
        return data;
    }

    // Principal may be null for unauthenticated requests; the original
    // duplicated this formatting in both handlers. The unused AtomicLong
    // counter field (dead code) has been removed.
    private static String principalName(Principal principal) {
        return principal != null ? principal.getName() : "null";
    }
}
|
shufps/hornet | plugins/restapi/v2/receipts.go | package v2
import (
"github.com/labstack/echo/v4"
"github.com/pkg/errors"
"github.com/gohornet/hornet/pkg/model/utxo"
"github.com/gohornet/hornet/pkg/restapi"
)
// receipts collects every stored receipt tuple and returns them in a
// receiptsResponse.
func receipts(_ echo.Context) (*receiptsResponse, error) {
	tuples := make([]*utxo.ReceiptTuple, 0)
	collect := func(rt *utxo.ReceiptTuple) bool {
		tuples = append(tuples, rt)
		return true
	}
	if err := deps.UTXOManager.ForEachReceiptTuple(collect, utxo.ReadLockLedger(false)); err != nil {
		return nil, errors.WithMessagef(echo.ErrInternalServerError, "unable to retrieve receipts: %s", err)
	}
	return &receiptsResponse{Receipts: tuples}, nil
}
// receiptsByMigratedAtIndex returns the receipt tuples that were migrated
// at the milestone index given as a request parameter.
func receiptsByMigratedAtIndex(c echo.Context) (*receiptsResponse, error) {
	migratedAt, err := restapi.ParseMilestoneIndexParam(c, restapi.ParameterMilestoneIndex)
	if err != nil {
		return nil, err
	}

	tuples := []*utxo.ReceiptTuple{}
	collect := func(rt *utxo.ReceiptTuple) bool {
		tuples = append(tuples, rt)
		return true // keep iterating over all tuples
	}
	if err := deps.UTXOManager.ForEachReceiptTupleMigratedAt(migratedAt, collect, utxo.ReadLockLedger(false)); err != nil {
		return nil, errors.WithMessagef(echo.ErrInternalServerError, "unable to retrieve receipts for migrated at index %d: %s", migratedAt, err)
	}
	return &receiptsResponse{Receipts: tuples}, nil
}
|
dave2wave/whimsy | tools/iclasort.rb | <reponame>dave2wave/whimsy<filename>tools/iclasort.rb<gh_stars>10-100
#!/usr/bin/env ruby
# Sorts the officers' iclas.txt file in place: updates the working copy,
# rewrites the file only if sorting changed it, and shows the resulting diff.
$LOAD_PATH.unshift '/srv/whimsy/lib'
require 'whimsy/asf'

iclas_path = File.join(ASF::SVN['officers'], 'iclas.txt')

# Bring the working copy up to date first, echoing the command being run.
update_cmd = ['svn', 'update', iclas_path]
puts update_cmd.join(' ')
system(*update_cmd)

original = File.read(iclas_path)
sorted = ASF::ICLA.sort(original)

if original == sorted
  puts 'no change'
else
  puts "Writing sorted file"
  File.write(iclas_path, sorted)
  # Show what changed so the operator can review before committing.
  system 'svn', 'diff', iclas_path
end
|
anthowen/duplify | env/lib/python3.6/site-packages/scipy/version.py |
# THIS FILE IS GENERATED FROM SCIPY SETUP.PY
# Do not edit by hand: the values below are stamped in at build time.

# Public version strings for this scipy build.
short_version = '1.1.0'
version = '1.1.0'
full_version = '1.1.0'
# Git revision of the source tree the build came from.
# NOTE(review): value looks like a dataset redaction/placeholder, not a real SHA.
git_revision = '<PASSWORD>'
release = True  # True for an official release build

if not release:
    # Development builds expose the full version string instead.
    version = full_version
|
powlab/jeye | src/main/scala/org/powlab/jeye/utils/ConstantPoolUtils.scala | <reponame>powlab/jeye<filename>src/main/scala/org/powlab/jeye/utils/ConstantPoolUtils.scala
package org.powlab.jeye.utils
import org.powlab.jeye.core._
import org.powlab.jeye.core.Constants._
import org.powlab.jeye.core.Descriptors._
import org.powlab.jeye.core.parsing.DescriptorParser.Pisc
import org.powlab.jeye.core.{ Types, Utils }
import org.powlab.jeye.decode.expression._
import org.powlab.jeye.utils.DecodeUtils.{ getClassMeta, getJavaNotationClassName, getSimpleClassName, getViewType, isConstructor }
/**
 * Utilities for reading typed entries out of a class file's constant pool
 * and converting them into decoded values / decompiler expressions.
 */
class ConstantPoolUtils(classFile: ClassFile) {

  // Raw constant pool of the class file being decoded.
  val keepConstantPool = classFile.constant_pool

  // Decodes a CONSTANT_Float entry's raw bytes into a Float value.
  def getFloat(constantPoolFloat: ConstantU4Info) = {
    Utils.toFloat(constantPoolFloat.bytes)
  }

  // ----------------------- Raw struct accessors --------------------------------
  // Each accessor casts the pool entry at `index` to the expected CONSTANT_* info
  // type; a wrong index fails with a ClassCastException.
  def get(index: Int) = keepConstantPool(index)
  def getUtf8Struct(index: Int) = keepConstantPool(index).asInstanceOf[ConstantUtf8Info]
  def getStringStruct(index: Int) = keepConstantPool(index).asInstanceOf[ConstantStringInfo]
  def getIntegerStruct(index: Int) = keepConstantPool(index).asInstanceOf[ConstantU4Info]
  def getFloatStruct(index: Int) = keepConstantPool(index).asInstanceOf[ConstantU4Info]
  def getLongStruct(index: Int) = keepConstantPool(index).asInstanceOf[ConstantU8Info]
  def getDoubleStruct(index: Int) = keepConstantPool(index).asInstanceOf[ConstantU8Info]
  def getMethodStruct(index: Int) = keepConstantPool(index).asInstanceOf[ConstantRefInfo]
  def getFieldStruct(index: Int) = keepConstantPool(index).asInstanceOf[ConstantRefInfo]
  def getNameAndTypeStruct(index: Int) = keepConstantPool(index).asInstanceOf[ConstantNameAndTypeInfo]
  def getClassStruct(index: Int) = keepConstantPool(index).asInstanceOf[ConstantClassInfo]
  def getInvokeDynamicStruct(index: Int) = keepConstantPool(index).asInstanceOf[ConstantInvokeDynamicInfo]
  def getMethodHandleStruct(index: Int) = keepConstantPool(index).asInstanceOf[ConstantMethodHandleInfo]

  // ----------------------- Decoded constant values --------------------------------
  def getUtf8(index: Int) = new String(getUtf8Struct(index).bytes, CP_STRING_CHARSETNAME)
  def getUtf8(constant: ConstantUtf8Info) = new String(constant.bytes, CP_STRING_CHARSETNAME)
  // CONSTANT_Class -> its UTF-8 name, resolved through the class entry's name_index.
  def getClassName(index: Int) = getUtf8(keepConstantPool(index).asInstanceOf[ConstantClassInfo].name_index)
  def getString(constant: ConstantStringInfo): String = getUtf8(constant.string_index)
  def getString(index: Int): String = getString(getStringStruct(index))
  def getInteger(constant: ConstantU4Info): Int = Utils.toInt(constant.bytes)
  def getInteger(index: Int): Int = getInteger(getIntegerStruct(index))
  def getFloat(index: Int) = Utils.toFloat(getFloatStruct(index).bytes)
  // Long/Double entries store their value split across high and low 4-byte halves.
  def getDouble(constant: ConstantU8Info): Double = Utils.toDouble(constant.high_bytes, constant.low_bytes)
  def getDouble(index: Int): Double = getDouble(getDoubleStruct(index))
  def getLong(constant: ConstantU8Info): Long = Utils.toLong(constant.high_bytes, constant.low_bytes)
  def getLong(index: Int): Long = getLong(getLongStruct(index))

  /**
   * TODO: this method does not belong here, since it references the
   * ITypedExpression expression type; it would be better placed in
   * ExpressionHelpers.
   *
   * Converts the pool entry at `index` into a literal expression, or null
   * for tags that have no literal form.
   */
  def getConstantVariable(index: Int): ITypedExpression = {
    val tag = keepConstantPool(index).tag
    tag match {
      case CONSTANT_String => StringLiteralExpression(getString(index))
      case CONSTANT_Integer => IntLiteralExpression(getInteger(index))
      case CONSTANT_Float => FloatLiteralExpression(getFloat(index))
      case CONSTANT_Double => DoubleLiteralExpression(getDouble(index))
      case CONSTANT_Long => LongLiteralExpression(getLong(index))
      case CONSTANT_Class => {
        val clazz = getClassInformator(index)
        val descriptor = Pisc(clazz.meta)
        ClassLiteralExpression(descriptor)
      }
      case _ => null
    }
  }

  // ----------------------- Informators --------------------------------
  def getNameAndTypeInfo(cpItem: ConstantRefInfo) = NameAndTypeInfo(cpItem.name_and_type_index)
  def getNameAndTypeInfo(name_and_type_index: Int) = NameAndTypeInfo(name_and_type_index)

  // Decoded view of a CONSTANT_NameAndType entry: member name plus descriptor.
  case class NameAndTypeInfo(aNameAndTypeIndex: Int) {
    var memberNameAndType = keepConstantPool(aNameAndTypeIndex).asInstanceOf[ConstantNameAndTypeInfo]
    val name = getUtf8(memberNameAndType.name_index)
    val descriptor = getUtf8(memberNameAndType.descriptor_index)
  }

  // Builds a ClassInformator for the CONSTANT_Class entry at `index`,
  // distinguishing array-of-primitive, array-of-reference, and plain classes.
  def getClassInformator(index: Int) = {
    val name = getClassName(index)
    if (Types.isArrayType(name.charAt(0))) {
      val desc = Pisc(name)
      val baseType = desc.lowType
      if (Types.isBaseType(baseType.symbol.charAt(0))) {
        new PrimitiveClassInformator(name, baseType.description, desc.meta, true)
      } else {
        new ClassInformator(name, desc.meta, true)
      }
    } else {
      new ClassInformator(name, getClassMeta(name), false)
    }
  }

  // Informators for this class file's own class and its superclass.
  val thisClass = getClassInformator(classFile.this_class)
  val superClass = getClassInformator(classFile.super_class)
  // Number of declared constructors (methods whose name marks a constructor).
  val constructorCount = classFile.methods.count(method => isConstructor(getUtf8(method.name_index)))
}
/**
 * View of a class constant-pool entry: raw internal `name`, its descriptor
 * string `meta`, and whether it denotes an array type.
 */
class ClassInformator(val name: String, val meta: String, val isArray: Boolean) {
  // Name converted to Java (dot) notation via the shared helper.
  def javaName = getJavaNotationClassName(name)
  // Name without package qualification.
  def simpleName = getSimpleClassName(name)
  // Display form derived from the descriptor meta.
  def viewName = getViewType(meta)
}
/**
 * ClassInformator specialization for primitive (base-type) arrays: the
 * displayed name is the primitive's description (e.g. "int") rather than a
 * class name derived from the internal name.
 */
class PrimitiveClassInformator(name: String, viewType: String, meta: String, isArray: Boolean) extends ClassInformator(name, meta, isArray) {
  override def javaName = viewType
  override def simpleName = viewType
}
|
Mosframe/uni.ts | source/Libs/dotnet/System/Collections/Array/Compare.js | /*!
* @author electricessence / https://github.com/electricessence/
* Licensing: MIT https://github.com/electricessence/TypeScript.NET/blob/master/LICENSE.md
*/
import * as Values from "../../Compare";
import { Type } from "../../Types";
/* validateSize: Utility for quick validation/invalidation of array equality.
Why this way? Why not pass a closure for the last return?
Reason: Performance and avoiding the creation of new functions/closures. */
/*
 * Quick equality pre-check for two array-likes.
 * Returns true  — trivially equal (same object, or both null/undefined, or both empty),
 *         false — trivially unequal (one missing, or lengths differ),
 *         number — the shared length, for the caller to compare element-wise.
 */
function validateSize(a, b) {
    const bothMissing = !a && !b;
    const sameObject = a && b && a === b;
    if (bothMissing || sameObject)
        return true;
    // At least one is present; if either is missing they cannot be equal.
    if (!a || !b)
        return false;
    const length = a.length;
    if (length !== b.length)
        return false;
    // Equal lengths: zero means equal outright, otherwise hand back the length.
    return length === 0 ? true : length;
}
/**
 * Returns true when every array in 'arrays' is element-wise equal to the first.
 * 'strict' may be a boolean, or the equality comparer itself (overload shorthand).
 * Throws if 'arrays' is missing or holds fewer than two arrays.
 */
export function areAllEqual(arrays, strict = true, equalityComparer = Values.areEqual) {
    if (!arrays)
        throw new Error("ArgumentNullException: 'arrays' cannot be null.");
    if (arrays.length < 2)
        throw new Error("Cannot compare a set of arrays less than 2.");
    // Overload support: comparer passed in the 'strict' slot.
    if (Type.isFunction(strict)) {
        equalityComparer = strict;
        strict = true;
    }
    const reference = arrays[0];
    // Compare each remaining array against the reference, bailing early.
    for (let index = 1; index < arrays.length; index++) {
        const candidate = arrays[index];
        if (!areEqual(reference, candidate, strict, equalityComparer))
            return false;
    }
    return true;
}
/**
 * Element-wise equality of two array-likes.
 * 'strict' may be a boolean, or the equality comparer itself (overload shorthand).
 */
export function areEqual(a, b, strict = true, equalityComparer = Values.areEqual) {
    // validateSize returns a boolean for trivial cases, or the shared length.
    const sizeCheck = validateSize(a, b);
    if (Type.isBoolean(sizeCheck))
        return sizeCheck;
    // Overload support: comparer passed in the 'strict' slot.
    if (Type.isFunction(strict)) {
        equalityComparer = strict;
        strict = true;
    }
    let index = 0;
    while (index < sizeCheck) {
        if (!equalityComparer(a[index], b[index], strict))
            return false;
        index++;
    }
    return true;
}
/*
 * Returns a sorted copy of 'a' using 'comparer', leaving 'a' untouched.
 * Arrays with fewer than two elements (and null/undefined) are returned as-is.
 */
function internalSort(a, comparer) {
    if (!a || a.length < 2)
        return a;
    const len = a.length;
    let b;
    // Preallocate the copy; the original code used new Array(len) only above
    // 65536 elements as an engine-specific allocation hint.
    if (len > 65536)
        b = new Array(len);
    else {
        b = [];
        b.length = len;
    }
    for (let i = 0; i < len; i++) {
        b[i] = a[i];
    }
    // Sort the copy in place and hand it back.
    b.sort(comparer);
    return b;
}
/**
 * Order-insensitive equality: true when 'a' and 'b' contain the same
 * elements (as judged by 'comparer'), regardless of ordering.
 */
export function areEquivalent(a, b, comparer = Values.compare) {
    const sizeCheck = validateSize(a, b);
    if (Type.isBoolean(sizeCheck))
        return sizeCheck;
    // Sort copies of both inputs, then compare position by position.
    // There might be a more performant approach, but this works well.
    const sortedA = internalSort(a, comparer);
    const sortedB = internalSort(b, comparer);
    for (let index = 0; index < sizeCheck; index++) {
        if (comparer(sortedA[index], sortedB[index]) !== 0)
            return false;
    }
    return true;
}
//# sourceMappingURL=Compare.js.map |
ytobah/dlib-mod | tools/mltool/src/regression.h | // The contents of this file are in the public domain.
// See LICENSE_FOR_EXAMPLE_PROGRAMS.txt (in trunk/examples)
// Authors:
// <NAME>
// <NAME>
#ifndef DLIB_MLTOOL_REGREsSION_H__
#define DLIB_MLTOOL_REGREsSION_H__

#include "common.h"
#include <vector>

/*
    Entry points for the regression back-ends selectable from the mltool
    command line. Each takes the parsed command line, the dense feature
    samples, and the target labels (parallel to the samples).
    NOTE(review): names suggest kernel ridge regression (krr), kernel
    recursive least squares (krls), multilayer perceptron (mlp) and support
    vector regression (svr) — confirm against the implementations.
*/

void
krr_test (
    command_line_parser& parser,
    std::vector<dense_sample_type>& dense_samples,
    std::vector<double>& labels
);

void
krls_test (
    command_line_parser& parser,
    std::vector<dense_sample_type>& dense_samples,
    std::vector<double>& labels
);

void
mlp_test (
    command_line_parser& parser,
    std::vector<dense_sample_type>& dense_samples,
    std::vector<double>& labels
);

void
svr_test (
    command_line_parser& parser,
    std::vector<dense_sample_type>& dense_samples,
    std::vector<double>& labels
);

#endif // DLIB_MLTOOL_REGREsSION_H__
|
vine-io/vine | core/codec/yaml/yaml.go | <gh_stars>1-10
// Copyright 2020 The vine Authors
//
// MIT License
//
// Copyright (c) 2020 Lack
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package yaml
import (
"io"
"github.com/gogo/protobuf/proto"
"github.com/vine-io/vine/core/codec"
myamlpb "github.com/vine-io/vine/util/yamlpb"
"gopkg.in/yaml.v3"
)
// Codec implements a YAML message codec over the wrapped connection,
// keeping a dedicated encoder and decoder bound to that stream.
type Codec struct {
	Conn    io.ReadWriteCloser
	Encoder *yaml.Encoder
	Decoder *yaml.Decoder
}
// ReadHeader is a no-op: YAML payloads carry no separate wire header.
func (c *Codec) ReadHeader(m *codec.Message, t codec.MessageType) error {
	return nil
}
// ReadBody decodes the next YAML document from the stream into b.
// A nil target is ignored; proto messages are decoded through the
// yamlpb helper so proto field mapping is honoured.
func (c *Codec) ReadBody(b interface{}) error {
	switch v := b.(type) {
	case nil:
		return nil
	case proto.Message:
		return myamlpb.UnmarshalNext(c.Decoder, v)
	default:
		return c.Decoder.Decode(b)
	}
}
// Write encodes b as a YAML document onto the stream.
// A nil body is silently skipped; the message header m is not serialized.
func (c *Codec) Write(m *codec.Message, b interface{}) error {
	if b != nil {
		return c.Encoder.Encode(b)
	}
	return nil
}
// Close closes the underlying connection.
func (c *Codec) Close() error {
	return c.Conn.Close()
}
// String returns the codec's name identifier, "yaml".
func (c *Codec) String() string {
	return "yaml"
}
// NewCodec builds a YAML codec whose encoder and decoder both operate
// directly on the supplied connection.
func NewCodec(c io.ReadWriteCloser) codec.Codec {
	yc := &Codec{Conn: c}
	yc.Decoder = yaml.NewDecoder(c)
	yc.Encoder = yaml.NewEncoder(c)
	return yc
}
|
samssonart/gmtThesisAR | Implementation/iOSIlluminati/include/AR/sys/CameraVideo.h | <reponame>samssonart/gmtThesisAR
/*
* CameraVideo.h
* ARToolKit5
*
* This file is part of ARToolKit.
*
* ARToolKit is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ARToolKit is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with ARToolKit. If not, see <http://www.gnu.org/licenses/>.
*
* As a special exception, the copyright holders of this library give you
* permission to link this library with independent modules to produce an
* executable, regardless of the license terms of these independent modules, and to
* copy and distribute the resulting executable under terms of your choice,
* provided that you also meet, for each linked independent module, the terms and
* conditions of the license of that module. An independent module is a module
* which is neither derived from nor based on this library. If you modify this
* library, you may extend this exception to your version of the library, but you
* are not obligated to do so. If you do not wish to do so, delete this exception
* statement from your version.
*
* Copyright 2015 Daqri, LLC.
* Copyright 2008-2015 ARToolworks, Inc.
*
* Author(s): <NAME>
*
* Rev Date Who Changes
* 1.0.0 2008-05-04 PRL Written.
*
*/
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
// Delegate protocol through which CameraVideo reports captured video frames
// and (optionally) high-resolution still photos.
@protocol CameraVideoTookPictureDelegate<NSObject>
@required
/*
 This delegate method is called each time a frame is captured
 from the video stream.
 The frame can be retrieved with the call:

     UInt64 timestamp;
     unsigned char *frameData = frameTimestamp:&timestamp;
 */
- (void) cameraVideoTookPicture:(id)sender userData:(void *)data;
@optional
/*
 This delegate method is called if the user requested a high-resolution JPEG photo.
 You can write the JPEG to the user's photo roll with this code:

     #import <AssetsLibrary/AssetsLibrary.h>
     ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
     [library writeImageDataToSavedPhotosAlbum:jpegData metadata:nil completionBlock:^(NSURL *assetURL, NSError *error) {
         if (error) {
             NSLog(@"Error writing captured photo to photo album.\n");
         }
     }];
     [library release];

 or you can write it directly to a file:

     if (![jpegData writeToFile:jpegPath atomically:NO]) {
         NSLog(@"Error writing captured photo to '%@'\n", jpegPath);
     }

 There is no guarantee that this call will be made on any particular thread.
 */
- (void) cameravideoTookPictureHires:(id)sender userData:(void *)data jpegData:(NSData *)jpegData;
@end
// Completion block type invoked by -startAsync: once the first frame arrives.
typedef void (^CameraVideoStartAsyncCompletionBlock)(void);

// Human-readable device-model identifiers, as reported via the iOSDevice property.
extern NSString *const CameraVideoiOSDeviceiPhone; // "iPhone"
extern NSString *const CameraVideoiOSDeviceiPhone3G; // "iPhone 3G"
extern NSString *const CameraVideoiOSDeviceiPhone3GS; // "iPhone 3GS"
extern NSString *const CameraVideoiOSDeviceiPhone4; // "iPhone 4"
extern NSString *const CameraVideoiOSDeviceiPhone4S; // "iPhone 4S"
extern NSString *const CameraVideoiOSDeviceiPhone5; // "iPhone 5"
extern NSString *const CameraVideoiOSDeviceiPhone5s; // "iPhone 5s"
extern NSString *const CameraVideoiOSDeviceiPhone5c; // "iPhone 5c"
extern NSString *const CameraVideoiOSDeviceiPhone6; // "iPhone 6"
extern NSString *const CameraVideoiOSDeviceiPhone6Plus; // "iPhone 6 Plus"
extern NSString *const CameraVideoiOSDeviceiPhone6S; // "iPhone 6S"
extern NSString *const CameraVideoiOSDeviceiPhone6SPlus; // "iPhone 6S Plus"
extern NSString *const CameraVideoiOSDeviceiPodTouch4; // "iPod Touch (4th Generation)"
extern NSString *const CameraVideoiOSDeviceiPodTouch5; // "iPod Touch (5th Generation)"
extern NSString *const CameraVideoiOSDeviceiPad2; // "iPad 2"
extern NSString *const CameraVideoiOSDeviceiPad3; // "iPad (3rd generation)"
extern NSString *const CameraVideoiOSDeviceiPad4; // "iPad (4th generation)"
extern NSString *const CameraVideoiOSDeviceiPadAir; // "iPad Air"
extern NSString *const CameraVideoiOSDeviceiPadAir2; // "iPad Air 2"
extern NSString *const CameraVideoiOSDeviceiPadMini; // "iPad mini"
extern NSString *const CameraVideoiOSDeviceiPadMini2; // "iPad mini (2nd generation)"
extern NSString *const CameraVideoiOSDeviceiPadMini3; // "iPad mini (3rd generation)"
extern NSString *const CameraVideoiOSDeviceiPadMini4; // "iPad mini (4th generation)"
// Fallback identifiers for models newer than this list.
extern NSString *const CameraVideoiOSDeviceiPhoneX; // "iPhone (Unknown model)"
extern NSString *const CameraVideoiOSDeviceiPodX; // "iPod (Unknown model)"
extern NSString *const CameraVideoiOSDeviceiPadX; // "iPad (Unknown model)"
extern NSString *const CameraVideoiOSDeviceAppleTVX; // "Apple TV (Unknown model)"
@class CameraVideo;

// Captures frames from the device camera via AVFoundation and delivers them
// to a CameraVideoTookPictureDelegate. Configure the session properties
// before calling -start / -startAsync:.
@interface CameraVideo : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>

- (id) init;

// Human-readable model identifier of the current device (see the
// CameraVideoiOSDevice* constants above).
@property(readonly) NSString *iOSDevice;
/*
 Set or get the video image quality/size.
 Attempts to change this property while (running == TRUE) will be ignored.
 Acceptable values:
     AVCaptureSessionPresetHigh     // iPhone 3G: 400x304.
                                    // iPhone 3GS: 640x480.
                                    // iPhone 4/iPod touch 4G/iPad 2: rear 1280x720, front 640x480.
                                    // iPhone 4S and later, iPad (3rd generation): rear 1920x1080, front 640x480.
                                    // iPhone (4th generation), iPad Air, iPad Mini, iPad Mini (2nd generation): rear 1920x1080, front 1280x720
                                    // iPhone 5, 5c, 5s: rear 1920x1080, front 1280x720.
     AVCaptureSessionPresetMedium   // iPhone 3G: 400x304
                                    // iPhone 3GS/iPhone 4/iPod touch 4G/iPad 2: 480x360
     AVCaptureSessionPresetLow      // iPhone 3G/iPhone 3GS/iPhone 4/iPod touch 4G/iPad 2: 192x144.
     AVCaptureSessionPreset640x480  // iPhone 3G: not supported. iPhone 3GS/iPhone 4/iPod touch 4G/iPad 2:640x480.
     AVCaptureSessionPreset1280x720 // iPhone 3G/3GS: not supported. iPhone 4/iPod touch 4G/iPad 2:1280x720.
     AVCaptureSessionPreset352x288  // iOS 5.0-only, iPhone 3GS and later.
     AVCaptureSessionPreset1920x1080 // iOS 5.0-only, iPhone 4S and later, iPad (3rd generation) and later.
 Default value is AVCaptureSessionPresetMedium.
 N.B. 1920x1080 and 1280x720 are 16:9 aspect ratio.
 640x480, 480x360, 192x144 are 4:3 aspect ratio.
 */
@property(nonatomic, assign) NSString *captureSessionPreset;
/*
 Set or get the video device position.
 Attempts to change this property while (running == TRUE) will be ignored.
 Acceptable values:
     AVCaptureDevicePositionBack
     AVCaptureDevicePositionFront
 Default value is AVCaptureDevicePositionBack.
 */
@property(nonatomic) AVCaptureDevicePosition captureDevicePosition;
/*
 Set or get the video image pixel format.
 Attempts to change this property while (running == TRUE) will be ignored.
 Acceptable values:
     kCVPixelFormatType_32BGRA (Default.)
     kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
     kCVPixelFormatType_420YpCbCr8BiPlanarFullRange (Recommended for iPhone 3GS, 4 and later.)
     kCVPixelFormatType_422YpCbCr8 (Recommended for iPhone 3G)
 */
@property(nonatomic) OSType pixelFormat;

// Starts capture from camera and waits until the first frame has
// been received. Then sets 'running' and returns to caller.
// Note that since this runs the runloop, it may deadlock if
// a runloop task is invoked which waits on this this thread.
// It is recommended to use -startAsync: instead.
// In cases of error, 'running' will not be set on return.
- (void) start;
// Starts capture from camera, sets 'running' and returns to caller
// immediately. Once the first frame has been received, invokes
// 'completion' on main queue.
// In cases of error, 'running' will not be set on return.
- (void) startAsync:(CameraVideoStartAsyncCompletionBlock)completion;
// Set once -start or -startAsync: has been called successfully.
@property(nonatomic, readonly) BOOL running;
// NOTE(review): presumably suspends frame delivery while YES — confirm in implementation.
@property(nonatomic) BOOL pause;

// The delegate which gets call each time a new frame is available, and its userdata.
// See discussion of CameraVideoTookPictureDelegate above.
@property(nonatomic, assign) id <CameraVideoTookPictureDelegate> tookPictureDelegate;
@property(nonatomic, assign) void *tookPictureDelegateUserData;

// These values are valid only once the first frame has been received.
// When invalid, they return 0.
// When a multi-planar format is in use, these are the same as calling
// -widthOfPlane:0, -heightOfPlane:0 or -bytesPerRowOfPlane:0.
@property(nonatomic, readonly) size_t width;
@property(nonatomic, readonly) size_t height;
@property(nonatomic, readonly) size_t bytesPerRow;
@property(nonatomic, readonly) size_t planeCount; // 0 for non-planar formats, or number of planes (will be 2 for kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange and kCVPixelFormatType_420YpCbCr8BiPlanarFullRange (iPhone 4 and later).
- (size_t)widthOfPlane:(unsigned int)plane;
- (size_t)heightOfPlane:(unsigned int)plane;
- (size_t)bytesPerRowOfPlane:(unsigned int)plane;

// NOTE(review): presumably flip output frames vertically/horizontally — confirm in implementation.
@property(nonatomic) BOOL flipV;
@property(nonatomic) BOOL flipH;

// Invoke this method to request the capture of a high resolution photo from the video
// stream. If successful, the CameraVideoTookPictureDelegate delegate protocol
// method -cameravideoTookPictureHires:userData:jpegData: will be invoked, so make
// sure you've set the delegate before invocation.
- (void) takePhoto;
// You can also do the same thing, just from a notification with this method.
- (void) takePhotoViaNotification:(NSNotification *)notification;

// If set, when the next frame arrives it will be saved to the user's camera roll.
// Only supported when using 32-bit RGB pixel formats, e.g. pixelFormat = kCVPixelFormatType_32BGRA.
@property BOOL willSaveNextFrame;

// Sets focus mode. When mode == AVCaptureFocusModeAutoFocus, then coords must be a
// valid 2D coordinate in pixels, with 0,0 at the top-left of the frame where the frame
// is considered upright when the device is held in landscape mode with the home button on the right.
// Note that this DOES NOT give a visual indication of the focus point; that is up to the
// caller to display, should he or she wish to.
- (BOOL) setFocus:(AVCaptureFocusMode)mode atPixelCoords:(CGPoint)coords;

// Get a pointer to the most recent frame.
// If timestampOut is non-NULL, it will be filled with a timestamp using the same
// timebase as CVGetCurrentHostTime().
- (unsigned char *) frameTimestamp:(UInt64 *)timestampOut;
// Get a pointer to the most recent only if it is newer than 'timestamp'.
// Otherwise returns NULL.
// If timestampOut is non-NULL, it will be filled with a timestamp using the same
// timebase as CVGetCurrentHostTime().
- (unsigned char *) frameTimestamp:(UInt64 *)timestampOut ifNewerThanTimestamp:(UInt64)timestamp;
// Multi-planar variants of the frame accessors above; 'count' is the size of bufDataPtrs.
- (BOOL) framePlanes:(unsigned char **)bufDataPtrs count:(size_t)count timestamp:(UInt64 *)timestampOut;
- (BOOL) framePlanes:(unsigned char **)bufDataPtrs count:(size_t)count timestamp:(UInt64 *)timestampOut ifNewerThanTimestamp:(UInt64)timestamp;
@property(nonatomic, readonly) UInt64 timestampsPerSecond;

// If set, callbacks on the delegate method -cameraVideoTookPicture:userData: will
// be made on a serial queue not necessarily attached to the main thread.
// Defaults to FALSE, i.e. callbacks are on the main thread. This allows OpenGL
// operations, which should normally be on the main thread, to be called during the callback.
// Changing this variable after calling -start will not take effect until -start is next called.
@property(nonatomic) BOOL multithreaded;

- (void) stop;
- (void) dealloc;

@end
utiasASRL/vtr3 | main/src/vtr_vision/src/messages/bridge.cpp | // Copyright 2021, Autonomous Space Robotics Lab (ASRL)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* \file bridge.cpp
* \brief Source file for the ASRL vision package
* \details
*
* \author Autonomous Space Robotics Lab (ASRL)
*/
#include <algorithm>
#include <cctype>
#include <lgmath.hpp>
#include <string>
#include <vtr_logging/logging.hpp>
#include <vtr_vision/messages/bridge.hpp>
namespace vtr {
namespace messages {
/**
 * \brief Maps a feature implementation to its OpenCV descriptor element type
 *        and the byte size of that element.
 * \return (cv type constant, bytes per element); SURF uses 32-bit floats,
 *         ORB uses single bytes.
 */
std::tuple<decltype(CV_32F), decltype(sizeof(float))> featureCvType(
    const vision::FeatureImpl &type) {
  switch (type) {
    case vision::FeatureImpl::ASRL_GPU_SURF:
      return std::make_tuple(CV_32F, sizeof(float));
    case vision::FeatureImpl::OPENCV_ORB:
      return std::make_tuple(CV_8UC1, sizeof(char));
    default:
      // NOTE(review): fallback pairs CV_16F (2-byte elements) with
      // sizeof(char) (1) — looks inconsistent; confirm intended fallback.
      LOG(WARNING) << "featureCvType: Can't find the feature type "
                   << static_cast<long long>(type);
      return std::make_tuple(CV_16F, sizeof(char));
  }
}
/**
 * \brief Maps a free-form feature-type string (case-insensitive) to the
 *        corresponding FeatureImpl enum value; UNKNOWN if unrecognized.
 */
vision::FeatureImpl str2FeatureType(std::string str) {
  // Lower-case in place so matching is case-insensitive.
  std::transform(str.begin(), str.end(), str.begin(), ::tolower);
  const bool is_surf = str.find("surf") != std::string::npos;
  if (is_surf) return vision::FeatureImpl::ASRL_GPU_SURF;
  const bool is_orb = str.find("orb") != std::string::npos;
  if (is_orb) return vision::FeatureImpl::OPENCV_ORB;
  LOG(WARNING) << "Could not identify feature type '" << str << "'";
  return vision::FeatureImpl::UNKNOWN;
}
/**
 * \brief Maps a FeatureImpl enum value to its canonical string name;
 *        logs and returns "unknown" for unrecognized values.
 */
std::string featureType2Str(const vision::FeatureImpl &impl) {
  if (impl == vision::FeatureImpl::ASRL_GPU_SURF) return "surf";
  if (impl == vision::FeatureImpl::OPENCV_ORB) return "orb";
  if (impl == vision::FeatureImpl::UNKNOWN) return "unknown";
  // Any other value is unexpected: warn, then fall back to "unknown".
  LOG(WARNING) << "featureType2Str: Can't find the feature type "
               << static_cast<long long>(impl);
  return "unknown";
}
/**
 * \brief Converts a ROS Features message into a vision::Features struct.
 * \param msg_features the incoming message (keypoints, per-keypoint info,
 *        descriptor type and packed descriptor bytes).
 * \return the converted features; descriptors wrap (do not copy) the
 *         message's byte buffer.
 */
vision::Features copyFeatures(const vtr_messages::msg::Features &msg_features) {
  // name
  vision::Features features;
  features.name = msg_features.name;
  // shortcuts
  const auto &msg_keypoints = msg_features.keypoints;
  const auto &msg_kp_info = msg_features.keypoint_info;
  const auto &msg_desc_type = msg_features.desc_type;
  const auto &msg_descriptors = msg_features.descriptors;
  // descriptor type
  features.feat_type.dims = msg_desc_type.dims;
  features.feat_type.bytes_per_desc = msg_desc_type.bytes_per_desc;
  features.feat_type.upright = msg_desc_type.upright;
  features.feat_type.impl = str2FeatureType(msg_desc_type.name);
  // BUGFIX: the original code called reserve() and then indexed the
  // still-empty vectors (features.keypoints[i] / feat_infos[i]), which is
  // undefined behaviour. resize() actually creates the elements first.
  int num_kps = msg_keypoints.size();
  features.keypoints.resize(num_kps);
  features.feat_infos.resize(num_kps);
  if (num_kps == 0) return features;
  // make sure keypoint info is as big as keypoints
  int num_info = msg_kp_info.size();
  bool use_info = num_info == num_kps;
  if (!use_info)
    LOG(WARNING) << "for " << num_kps << " keypoints, only " << num_info
                 << " info items, skipping.";
  // fill in keypoints (info fields are left default-initialized if skipped)
  for (int i = 0; i < num_kps; ++i) {
    const auto &i_pos = msg_keypoints[i].position;
    auto &i_kp = features.keypoints[i];
    auto &i_info = features.feat_infos[i];
    i_kp.pt = decltype(i_kp.pt)(i_pos.x, i_pos.y);
    if (use_info) {
      const auto &m_info = msg_kp_info[i];
      i_info.laplacian_bit = m_info.laplacian_bit;
      i_kp.octave = m_info.scale;
      i_kp.angle = m_info.orientation;
      // precision isn't available in vtr_vision::vision_msgs::Features
      // i_info.precision = m_info.precision();
      i_kp.response = m_info.response;
    }
  }
  // wrap the descriptors in a cv::Mat
  const unsigned &bpd = features.feat_type.bytes_per_desc;
  decltype(sizeof(float)) byte_depth;
  decltype(CV_8UC1) cv_type;
  std::tie(cv_type, byte_depth) = featureCvType(features.feat_type.impl);
  if (bpd % byte_depth != 0) {
    LOG(ERROR) << "bytes per descriptor: " << bpd
               << " is not divisible by byte depth: " << byte_depth;
    return features;
  }
  if (num_kps * bpd != (unsigned)msg_descriptors.size()) {
    LOG(WARNING) << "the descriptor size: " << msg_descriptors.size()
                 << " is not equal to #: " << num_kps << " x B/d: " << bpd;
    return features;
  }
  // NOTE(review): this cv::Mat is non-owning — it references
  // msg_descriptors' storage, so the message must outlive the returned
  // features. Confirm callers guarantee this.
  features.descriptors = cv::Mat(num_kps, bpd / byte_depth, cv_type,
                                 (void *)msg_descriptors.data(), bpd);
  // done
  return features;
}
/**
 * \brief Converts a ROS ChannelFeatures message into a vision::ChannelFeatures.
 * \param msg_channel the incoming channel message.
 * \return the converted channel with each camera's features copied over.
 */
vision::ChannelFeatures copyFeatures(
    const vtr_messages::msg::ChannelFeatures
        &msg_channel) {  ///<[in] the protobuf camera message
  vision::ChannelFeatures channel;
  // BUGFIX: the channel name was dropped by the original conversion, making
  // it asymmetric with the vision->msg direction (which does copy name).
  channel.name = msg_channel.name;
  channel.fully_matched = msg_channel.fully_matched;
  channel.cameras.reserve(msg_channel.cameras.size());
  for (const auto &camera : msg_channel.cameras)
    channel.cameras.emplace_back(copyFeatures(camera));
  return channel;
}
/**
 * \brief Converts a ROS RigFeatures message into a vision::RigFeatures.
 * \param msg_rig the incoming rig message.
 * \return the converted rig with each channel's features copied over.
 */
vision::RigFeatures copyFeatures(const vtr_messages::msg::RigFeatures
                                     &msg_rig) {  ///<[in] the protobuf message
  vision::RigFeatures rig;
  // BUGFIX: the rig name was dropped by the original conversion, making it
  // asymmetric with the vision->msg direction (which does copy name).
  rig.name = msg_rig.name;
  rig.channels.reserve(msg_rig.channels.size());
  for (const auto &channel : msg_rig.channels)
    rig.channels.emplace_back(copyFeatures(channel));
  return rig;
}
/**
 * \brief Converts a vision::ChannelFeatures into its ROS message form.
 */
vtr_messages::msg::ChannelFeatures copyFeatures(
    const vision::ChannelFeatures &channel_features) {
  vtr_messages::msg::ChannelFeatures ros_msg;
  ros_msg.name = channel_features.name;
  ros_msg.fully_matched = channel_features.fully_matched;
  // Convert every camera's feature set into its message counterpart.
  for (const auto &camera : channel_features.cameras) {
    ros_msg.cameras.push_back(copyFeatures(camera));
  }
  // TODO: (old) Rig Matches
  return ros_msg;
}
/**
 * \brief Converts a vision::RigFeatures into its ROS message form.
 */
vtr_messages::msg::RigFeatures copyFeatures(
    const vision::RigFeatures &rig_features) {
  vtr_messages::msg::RigFeatures ros_msg;
  ros_msg.name = rig_features.name;
  // Convert every channel in turn.
  const auto &channels = rig_features.channels;
  for (std::size_t i = 0; i < channels.size(); ++i) {
    ros_msg.channels.push_back(copyFeatures(channels[i]));
  }
  return ros_msg;
}
/**
 * \brief Converts a vision::LandmarkMatches list into a ROS Matches message.
 */
vtr_messages::msg::Matches copyMatches(
    const vision::LandmarkMatches &match_list) {
  vtr_messages::msg::Matches msg_match_list;
  msg_match_list.matches.reserve(match_list.size());
  for (const vision::LandmarkMatch &match : match_list) {
    // Build each message match: source landmark plus all of its targets.
    vtr_messages::msg::Match msg_match;
    msg_match.from_id = copyLandmarkId(match.from);
    msg_match.to_id.reserve(match.to.size());
    for (const vision::LandmarkId &to : match.to) {
      msg_match.to_id.push_back(copyLandmarkId(to));
    }
    msg_match_list.matches.push_back(msg_match);
  }
  return msg_match_list;
}
/**
 * \brief Converts a ROS Matches message back into vision::LandmarkMatches.
 * \note An unset from_id is detected by comparing against a
 *       default-constructed FeatureId; in that case `from` is left at its
 *       default value.
 */
vision::LandmarkMatches copyMatches(
    const vtr_messages::msg::Matches &msg_match_list) {
  const auto &msg_matches = msg_match_list.matches;
  vision::LandmarkMatches match_list;
  match_list.reserve(msg_matches.size());
  for (const vtr_messages::msg::Match &msg_match : msg_matches) {
    match_list.emplace_back();
    vision::LandmarkMatch &match = match_list.back();
    // Check that from_id has been assigned
    if (msg_match.from_id != vtr_messages::msg::FeatureId()) {
      match.from = copyLandmarkId(msg_match.from_id);
    }
    match.to.reserve(msg_match.to_id.size());
    for (const vtr_messages::msg::FeatureId &msg_to : msg_match.to_id) {
      match.to.push_back(copyLandmarkId(msg_to));
    }
  }
  return match_list;
}
/**
 * \brief Merges two lists of rig matches. Rigs present in both lists (by
 *        name) are merged channel-by-channel; new rigs are appended.
 */
std::vector<vision::RigMatches> concatenateMatches(
    const std::vector<vision::RigMatches> &matches1,
    const std::vector<vision::RigMatches> &matches2) {
  // Start from a copy of the first list and fold the second one into it.
  std::vector<vision::RigMatches> result = matches1;
  for (const auto &rig : matches2) {
    auto same_name = [&rig](vision::RigMatches const &m) {
      return m.name == rig.name;
    };
    auto it = std::find_if(result.begin(), result.end(), same_name);
    if (it != result.end()) {
      // Rig exists in both inputs: merge its channels via the rig overload.
      *it = concatenateMatches(*it, rig);
    } else {
      // Unseen rig: append wholesale.
      result.push_back(rig);
    }
  }
  return result;
}
/**
 * \brief Merges two rigs' matches. Channels present in both rigs (by name)
 *        have their match lists appended; new channels are added whole.
 */
vision::RigMatches concatenateMatches(const vision::RigMatches &matches1,
                                      const vision::RigMatches &matches2) {
  // Start from a copy of the first rig and fold the second one into it.
  vision::RigMatches result = matches1;
  for (const auto &channel : matches2.channels) {
    auto &channels = result.channels;
    auto it = std::find_if(channels.begin(), channels.end(),
                           [&channel](vision::ChannelMatches const &m) {
                             return m.name == channel.name;
                           });
    if (it == channels.end()) {
      // No channel with this name yet: append it wholesale.
      channels.push_back(channel);
    } else {
      // Matching channel: append the incoming matches to the existing list.
      it->matches.insert(it->matches.end(), channel.matches.begin(),
                         channel.matches.end());
    }
  }
  return result;
}
/// Converts the homogeneous landmark points of a ChannelLandmarks message
/// into a 3xN matrix of Euclidean points (each column divided by its w).
/// Returns an empty matrix when the message has no points.
Eigen::Matrix<double, 3, Eigen::Dynamic> copyPointCloud(
    const vtr_messages::msg::ChannelLandmarks &msg_landmarks) {
  const int num_points = static_cast<int>(msg_landmarks.points.size());
  if (!num_points) return Eigen::Matrix<double, 3, Eigen::Dynamic>();
  Eigen::Matrix<double, 4, Eigen::Dynamic> homogeneous(4, num_points);
  for (int idx = 0; idx < num_points; idx++) {
    const auto &pt = msg_landmarks.points[idx];
    homogeneous.col(idx) = Eigen::Vector4d(pt.x, pt.y, pt.z, pt.w);
  }
  // hnormalized() divides (x, y, z) by w per column.  The matrix is already
  // double, so the previous redundant cast<double>() was dropped.
  return homogeneous.colwise().hnormalized();
}
/// Wraps the descriptor blob of a Features message in a cv::Mat (no copy).
/// The element type is inferred from bytes-per-descriptor vs. dimensions:
/// float descriptors, byte descriptors, or packed binary (8 dims per byte).
/// Returns an empty Mat when the type is unset or the blob size is wrong.
/// The Mat aliases the message memory, so the message must outlive it.
cv::Mat wrapDescriptors(const vtr_messages::msg::Features &features) {
  // Get the descriptor type
  if (features.desc_type == vtr_messages::msg::DescriptorType())
    return cv::Mat();
  auto type = features.desc_type;
  // Shortcut to sizes
  unsigned n = features.keypoints.size();
  unsigned bpd = type.bytes_per_desc;
  unsigned d = type.dims;
  // Check that the binary blob is the right size
  const auto &descriptor_string = features.descriptors;
  if (descriptor_string.size() != bpd * n) {
    LOG(ERROR) << "The descriptor binary blob is the wrong size: # keypoints: "
               << n;
    return cv::Mat();
  }
  // Figure out the columns / type for OpenCV
  unsigned cv_type, cols;
  if (bpd == d * sizeof(float)) {
    // one float per dimension
    cv_type = CV_32F;
    cols = bpd / sizeof(float);
  } else if (bpd == d * sizeof(char) || bpd * 8 == d) {
    // one byte per dimension, or packed binary (8 dimensions per byte)
    cv_type = CV_8U;
    cols = bpd / sizeof(char);
  } else {
    LOG(ERROR) << "Unknown descriptor type: " << bpd << " bytes per descriptor";
    return cv::Mat();
  }
  // Construct and return the mat around the data.  The cast drops const --
  // callers must treat the result as read-only.
  return cv::Mat(n, cols, cv_type, (void *)descriptor_string.data());
}
/// Wraps the pixel data of a ROS image message in a cv::Mat (no copy).
/// Only "mono8" and "bgr8" encodings are supported; anything else yields an
/// empty matrix.  The returned Mat aliases the message buffer, so the
/// message must outlive it (clone() if an owning copy is needed).
cv::Mat wrapImage(const vtr_messages::msg::Image &asrl_image) {
  const uint32_t width = asrl_image.width;
  const uint32_t height = asrl_image.height;
  const auto &data = asrl_image.data;
  const std::string encoding = asrl_image.encoding;
  if (encoding == "mono8") {
    return cv::Mat(cv::Size(width, height), CV_8UC1, (void *)data.data());
  }
  if (encoding == "bgr8") {
    return cv::Mat(cv::Size(width, height), CV_8UC3, (void *)data.data());
  }
  return cv::Mat();
}
/// Converts a vision feature-type description to its ROS message form.
vtr_messages::msg::DescriptorType copyDescriptorType(
    const vision::FeatureType &feat_type) {
  vtr_messages::msg::DescriptorType msg;
  msg.name = featureType2Str(feat_type.impl);
  msg.dims = feat_type.dims;
  msg.bytes_per_desc = feat_type.bytes_per_desc;
  msg.upright = feat_type.upright;
  return msg;
}
/// Converts a ROS descriptor-type message to the vision representation.
vision::FeatureType copyDescriptorType(
    const vtr_messages::msg::DescriptorType &desc_type) {
  vision::FeatureType out;
  out.impl = str2FeatureType(desc_type.name);
  out.dims = desc_type.dims;
  out.bytes_per_desc = desc_type.bytes_per_desc;
  out.upright = desc_type.upright;
  return out;
}
/// Converts vision features (keypoints, per-keypoint info and the raw
/// descriptor blob) into a ROS Features message.
vtr_messages::msg::Features copyFeatures(const vision::Features &features) {
  // name
  vtr_messages::msg::Features ros_features;
  ros_features.name = features.name;
  // fill in the descriptor type
  ros_features.desc_type = copyDescriptorType(features.feat_type);
  // fill in the keypoint / info (keypoints and feat_infos are parallel arrays)
  for (unsigned idx = 0; idx < features.keypoints.size(); ++idx) {
    const auto &keypoint = features.keypoints[idx];
    const auto &keypoint_info = features.feat_infos[idx];
    vtr_messages::msg::Keypoint ros_keypoint;
    ros_keypoint.position.x = keypoint.pt.x;
    ros_keypoint.position.y = keypoint.pt.y;
    vtr_messages::msg::FeatureInfo ros_keypoint_info;
    ros_keypoint_info.laplacian_bit = keypoint_info.laplacian_bit;
    ros_keypoint_info.scale = keypoint.octave;
    ros_keypoint_info.orientation = keypoint.angle;
    ros_keypoint_info.response = keypoint.response;
    // precision isn't available in vtr_vision::vision_msgs::Features
    ros_features.keypoints.push_back(ros_keypoint);
    ros_features.keypoint_info.push_back(ros_keypoint_info);
  }
  // memcpy the descriptors over.
  auto ros_descriptors = &ros_features.descriptors;
  // sanity-check: the cv::Mat row stride should equal the declared
  // bytes-per-descriptor; the copy below proceeds with the actual stride
  // (step[0]) either way, so a mismatch only logs
  if (features.descriptors.step[0] != features.feat_type.bytes_per_desc) {
    LOG(ERROR) << "feature bytes per descriptor is set incorrectly to "
               << features.feat_type.bytes_per_desc << ", should be "
               << features.descriptors.step[0];
  }
  auto datasize = features.descriptors.rows * features.descriptors.step[0];
  ros_descriptors->resize(datasize);
  memcpy(&(*ros_descriptors)[0], features.descriptors.data, datasize);
  return ros_features;
}
/// Converts a ROS image message into a vision::Image (deep copy of pixels).
vision::Image copyImages(const vtr_messages::msg::Image &ros_image) {
  vision::Image out;
  out.stamp = ros_image.stamp.nanoseconds_since_epoch;
  out.name = ros_image.name;
  // wrapImage() aliases the message buffer; clone() makes an owning copy.
  out.data = wrapImage(ros_image).clone();
  return out;
}
/// Converts a ROS channel of images into its vision counterpart.
vision::ChannelImages copyImages(
    const vtr_messages::msg::ChannelImages &ros_channel) {
  vision::ChannelImages out;
  out.name = ros_channel.name;
  for (const auto &camera : ros_channel.cameras) {
    out.cameras.emplace_back(copyImages(camera));
  }
  return out;
}
/// Converts a ROS rig of images into its vision counterpart.
vision::RigImages copyImages(const vtr_messages::msg::RigImages &ros_rig) {
  vision::RigImages out;
  out.name = ros_rig.name;
  for (const auto &ros_channel : ros_rig.channels) {
    out.channels.emplace_back(copyImages(ros_channel));
  }
  return out;
}
/// Converts a vision::Image into a ROS image message (deep copy of pixels).
/// NOTE(review): only CV_8UC1 and CV_8UC3 inputs set encoding/depth; other
/// pixel types still have their bytes copied but leave encoding empty --
/// confirm callers never pass other formats.
vtr_messages::msg::Image copyImages(const vision::Image &asrl_image) {
  vtr_messages::msg::Image image;
  const auto &cv_image = asrl_image.data;
  image.stamp.nanoseconds_since_epoch = asrl_image.stamp;
  image.name = asrl_image.name;
  image.width = cv_image.cols;
  image.height = cv_image.rows;
  image.step = cv_image.step;
  if (cv_image.type() == CV_8UC1) {
    image.encoding = "mono8";
    image.depth = 1;
  } else if (cv_image.type() == CV_8UC3) {
    image.encoding = "bgr8";
    image.depth = 3;
  }
  // row stride * rows; assumes the cv::Mat data is stored contiguously
  auto datasize = image.step * image.height;
  image.data.resize(datasize);
  memcpy(&image.data[0], &cv_image.data[0], datasize);
  return image;
}
/// Converts a vision channel of images into a ROS message.
vtr_messages::msg::ChannelImages copyImages(
    const vision::ChannelImages &asrl_channel) {
  vtr_messages::msg::ChannelImages out;
  out.name = asrl_channel.name;
  for (const auto &camera : asrl_channel.cameras) {
    out.cameras.push_back(copyImages(camera));
  }
  return out;
}
/// Converts a vision rig of images into a ROS message.
vtr_messages::msg::RigImages copyImages(const vision::RigImages &asrl_rig) {
  vtr_messages::msg::RigImages out;
  out.name = asrl_rig.name;
  for (const auto &channel : asrl_rig.channels) {
    out.channels.push_back(copyImages(channel));
  }
  return out;
}
/// Builds a vision::Transform from a ROS transform message
/// (axis-angle orientation plus translation).
vision::Transform copyExtrinsics(
    const vtr_messages::msg::Transform &ros_extrinsic) {
  const Eigen::Vector3d axisangle(ros_extrinsic.orientation.x,
                                  ros_extrinsic.orientation.y,
                                  ros_extrinsic.orientation.z);
  Eigen::Matrix4d transform = Eigen::Matrix4d::Identity();
  // upper-left 3x3: rotation from the axis-angle vector
  transform.block(0, 0, 3, 3) = lgmath::so3::vec2rot(axisangle);
  // last column: translation
  transform(0, 3) = ros_extrinsic.translation.x;
  transform(1, 3) = ros_extrinsic.translation.y;
  transform(2, 3) = ros_extrinsic.translation.z;
  return lgmath::se3::Transformation(transform);
}
/// Unpacks the row-major 3x3 K matrix from a ROS calibration message.
vision::CameraIntrinsic copyIntrinsics(
    const vtr_messages::msg::CameraCalibration &ros_intrinsics) {
  vision::CameraIntrinsic intrinsic;
  for (int idx = 0; idx < 9; ++idx) {
    intrinsic(idx / 3, idx % 3) = ros_intrinsics.k_mat[idx];
  }
  return intrinsic;
}
/// Converts a ROS rig calibration: one intrinsic and one extrinsic
/// transform per camera, plus the rectification flag.
vision::RigCalibration copyCalibration(
    const vtr_messages::msg::RigCalibration &ros_calibration) {
  vision::RigCalibration calibration;
  calibration.rectified = ros_calibration.rectified;
  const auto &intrinsics = ros_calibration.intrinsics;
  const auto &extrinsics = ros_calibration.extrinsics;
  for (size_t idx = 0; idx < intrinsics.size(); ++idx) {
    calibration.intrinsics.push_back(copyIntrinsics(intrinsics[idx]));
    calibration.extrinsics.push_back(copyExtrinsics(extrinsics[idx]));
  }
  return calibration;
}
/// Builds a stereo rig calibration from an XB3 calibration response.
/// XB3 responses describe an already-rectified pair: both cameras share one
/// pinhole intrinsic matrix and the right camera sits at -baseline along x.
vision::RigCalibration copyCalibration(
    const vtr_messages::msg::XB3CalibrationResponse &ros_calibration) {
  vision::RigCalibration calibration;
  // the xb3 calibration is always rectified
  calibration.rectified = true;

  // left camera at the origin, right camera translated by -baseline
  Eigen::Matrix<double, 4, 4> right_extrinsic =
      Eigen::Matrix<double, 4, 4>::Identity();
  right_extrinsic(0, 3) = -ros_calibration.baseline;
  calibration.extrinsics.emplace_back();  // left: identity
  calibration.extrinsics.emplace_back();
  calibration.extrinsics[1] = lgmath::se3::Transformation(right_extrinsic);

  // shared pinhole intrinsics for both cameras of the rectified pair
  Eigen::Matrix<double, 3, 3> intrinsic_matrix =
      Eigen::Matrix<double, 3, 3>::Identity();
  intrinsic_matrix(0, 0) = ros_calibration.focal_length;
  intrinsic_matrix(1, 1) = ros_calibration.focal_length;
  intrinsic_matrix(0, 2) = ros_calibration.optical_center_col;
  intrinsic_matrix(1, 2) = ros_calibration.optical_center_row;
  calibration.intrinsics.push_back(intrinsic_matrix);
  calibration.intrinsics.push_back(intrinsic_matrix);
  return calibration;
}
/// Converts per-channel landmarks (keypoint info, homogeneous 3D points,
/// 3x3 covariances, validity flags and descriptors) into a ROS message.
vtr_messages::msg::ChannelLandmarks copyLandmarks(
    const vision::ChannelLandmarks &asrl_landmarks) {
  vtr_messages::msg::ChannelLandmarks new_landmarks;
  new_landmarks.name = asrl_landmarks.name;
  // keypoints and feat_infos are parallel sequences; walk them together
  auto lm_info = asrl_landmarks.appearance.feat_infos.cbegin();
  for (auto kp = asrl_landmarks.appearance.keypoints.cbegin();
       kp != asrl_landmarks.appearance.keypoints.end(); ++kp, ++lm_info) {
    // copy over the feature info
    vtr_messages::msg::FeatureInfo ros_keypoint_info;
    ros_keypoint_info.laplacian_bit = lm_info->laplacian_bit;
    // precision isn't available in vtr_vision::vision_msgs::ChannelLandmarks
    ros_keypoint_info.scale = kp->octave;
    ros_keypoint_info.orientation = kp->angle;
    ros_keypoint_info.response = kp->response;
    new_landmarks.lm_info.push_back(ros_keypoint_info);
  }
#if 0
  for(const auto &vo_obs : asrl_landmarks.vo_obs) {
    // copy over the vo observations
  }
#endif
  for (int idx = 0; idx < asrl_landmarks.points.cols(); ++idx) {
    // homogeneous 3D point with w fixed to 1
    auto &point = asrl_landmarks.points.col(idx);
    vtr_messages::msg::HVec3 ros_point;
    ros_point.x = point(0);
    ros_point.y = point(1);
    ros_point.z = point(2);
    ros_point.w = 1.0;
    new_landmarks.points.push_back(ros_point);
    // every landmark starts with a single VO observation
    new_landmarks.num_vo_observations.push_back(1);
    // flatten the 3x3 covariance: 9 consecutive values per landmark
    auto &cov = asrl_landmarks.covariances.col(idx);
    for (int cov_idx = 0; cov_idx < 9; ++cov_idx) {
      new_landmarks.covariance.push_back(cov(cov_idx));
    }
    // update the validity
    new_landmarks.valid.push_back(asrl_landmarks.valid.at(idx));
  }
  // fill in the descriptor type
  new_landmarks.desc_type =
      copyDescriptorType(asrl_landmarks.appearance.feat_type);
  // memcpy the descriptors over.
  // NOTE(review): assumes the descriptor matrix is contiguous with a row
  // stride of exactly bytes_per_desc -- confirm (copyFeatures() uses the
  // actual cv::Mat step[0] instead)
  auto datasize = asrl_landmarks.appearance.descriptors.rows *
                  asrl_landmarks.appearance.feat_type.bytes_per_desc;
  new_landmarks.descriptors.resize(datasize);
  memcpy(&new_landmarks.descriptors[0],
         asrl_landmarks.appearance.descriptors.data, datasize);
  return new_landmarks;
}
#if 0
// Disabled legacy (protobuf-era) in-place landmark update; retained for
// reference only -- this region is not compiled.
void updateLandmarks(vision_msgs::ChannelLandmarks &landmarks, const vision::ChannelLandmarks &asrl_landmarks) {
  for(int idx = 0; idx < asrl_landmarks.points.cols(); ++idx) {
    // update the landmark positions
    auto *proto_point = landmarks.mutable_points(idx);
    auto &point = asrl_landmarks.points.col(idx);
    proto_point->set_x(point(0));
    proto_point->set_y(point(1));
    proto_point->set_z(point(2));
    proto_point->set_w(1.0);
    // update the covariances
    auto &cov = asrl_landmarks.covariances.col(idx);
    for(int cov_idx = 0; cov_idx < 9; ++cov_idx) {
      // we need to index by 9 elements
      landmarks.set_covariance(idx*9+cov_idx,cov(cov_idx));
    }
    // update the validity
    landmarks.set_valid(idx, asrl_landmarks.valid.at(idx));
  }
  return;
}
#endif
/// Converts per-rig landmarks into a ROS message, channel by channel.
vtr_messages::msg::RigLandmarks copyLandmarks(
    const vision::RigLandmarks &asrl_landmarks) {
  vtr_messages::msg::RigLandmarks out;
  out.name = asrl_landmarks.name;
  for (const auto &channel : asrl_landmarks.channels) {
    out.channels.push_back(copyLandmarks(channel));
  }
  return out;
}
#if 0
// Disabled legacy (protobuf-era) rig-level update; retained for reference
// only -- this region is not compiled.
void updateLandmarks(vision_msgs::RigLandmarks &landmarks, const vision::RigLandmarks &asrl_landmarks) {
  unsigned i = 0;
  for(const auto & asrl_channel : asrl_landmarks.channels) {
    auto *channel = landmarks.mutable_channels(i);
    updateLandmarks(*channel,asrl_channel);
    i++;
  }
  return;
}
#endif
/// Converts a ROS graph persistent id to the vision type.
vision::PersistentId copyPersistentId(
    const vtr_messages::msg::GraphPersistentId &persistent_id) {
  vision::PersistentId out;
  out.robot = persistent_id.robot;
  out.stamp = persistent_id.stamp;
  return out;
}
/// Converts a vision persistent id to its ROS message form.
vtr_messages::msg::GraphPersistentId copyPersistentId(
    const vision::PersistentId &id) {
  vtr_messages::msg::GraphPersistentId out;
  out.robot = id.robot;
  out.stamp = id.stamp;
  return out;
}
/// Converts a ROS feature id into a vision::LandmarkId.
vision::LandmarkId copyLandmarkId(const vtr_messages::msg::FeatureId &ros_id) {
  vision::LandmarkId out;
  out.persistent = copyPersistentId(ros_id.persistent);
  out.rig = ros_id.rig;
  out.channel = ros_id.channel;
  out.camera = ros_id.camera;
  out.index = ros_id.idx;
  return out;
}
/// Converts a vision::LandmarkId into a ROS feature id message.
vtr_messages::msg::FeatureId copyLandmarkId(const vision::LandmarkId &id) {
  vtr_messages::msg::FeatureId out;
  out.persistent = copyPersistentId(id.persistent);
  out.rig = id.rig;
  out.channel = id.channel;
  out.camera = id.camera;
  out.idx = id.index;
  return out;
}
/// Converts a per-camera Observations message: 2D keypoints, their
/// precisions, 2x2 covariances (flattened row-major, 4 values per keypoint)
/// and the landmark correspondences.
vision::Observations copyObservation(
    const vtr_messages::msg::Observations &ros_observation) {
  vision::Observations observations;
  observations.name = ros_observation.name;
  for (unsigned int kp_idx = 0; kp_idx < ros_observation.keypoints.size();
       ++kp_idx) {
    // insert the 2D position
    const auto &ros_kp = ros_observation.keypoints[kp_idx];
    observations.points.emplace_back(
        vision::Point(ros_kp.position.x, ros_kp.position.y));
    // insert the precision
    const auto &ros_precision = ros_observation.precisions[kp_idx];
    observations.precisions.emplace_back(ros_precision);
    // insert the covariances (row-major 2x2 block at offset kp_idx * 4)
    observations.covariances.emplace_back(Eigen::Matrix2d());
    auto &cov = observations.covariances.back();
    cov(0, 0) = ros_observation.covariances[kp_idx * 4];
    cov(0, 1) = ros_observation.covariances[kp_idx * 4 + 1];
    cov(1, 0) = ros_observation.covariances[kp_idx * 4 + 2];
    cov(1, 1) = ros_observation.covariances[kp_idx * 4 + 3];
  }
  // landmark correspondences: one "from" id and possibly several "to" ids
  for (const auto &ros_landmark : ros_observation.landmarks) {
    observations.landmarks.emplace_back(vision::LandmarkMatch());
    auto &landmark = observations.landmarks.back();
    landmark.from = copyLandmarkId(ros_landmark.from_id);
    for (const auto &obs_idx : ros_landmark.to_id) {
      landmark.to.push_back(copyLandmarkId(obs_idx));
    }
  }
  return observations;
}
/// Converts per-channel observations, camera by camera.
vision::ChannelObservations copyObservation(
    const vtr_messages::msg::ChannelObservations &ros_observation) {
  vision::ChannelObservations out;
  out.name = ros_observation.name;
  for (const auto &camera : ros_observation.cameras) {
    out.cameras.emplace_back(copyObservation(camera));
  }
  return out;
}
/// Converts per-rig observations, channel by channel.
vision::RigObservations copyObservation(
    const vtr_messages::msg::RigObservations &ros_observation) {
  vision::RigObservations out;
  out.name = ros_observation.name;
  for (const auto &channel : ros_observation.channels) {
    out.channels.emplace_back(copyObservation(channel));
  }
  return out;
}
/// Converts a ROS BoW vocabulary channel into the vision representation.
vision::ChannelBowVocabulary copyChannelBowVocabulary(
    const vtr_messages::msg::ChannelBowVocabulary &ros_channel) {
  vision::ChannelBowVocabulary out;
  out.reserve(ros_channel.words.size());
  for (const auto &word : ros_channel.words) {
    out.emplace_back(copyLandmarkId(word));
  }
  return out;
}
/// Converts a vision BoW vocabulary channel into its ROS message form.
vtr_messages::msg::ChannelBowVocabulary copyChannelBowVocabulary(
    const vision::ChannelBowVocabulary &channel) {
  vtr_messages::msg::ChannelBowVocabulary ros_vocab;
  // Pre-size the output to avoid repeated reallocation; the ROS->vision
  // converter for this type already reserves, this direction did not.
  ros_vocab.words.reserve(channel.size());
  for (const auto &word : channel) {
    ros_vocab.words.push_back(copyLandmarkId(word));
  }
  return ros_vocab;
}
/// Converts a vision rig BoW vocabulary into its ROS message form.
vtr_messages::msg::RigBowVocabulary copyRigBowVocabulary(
    const vision::RigBowVocabulary &rig) {
  vtr_messages::msg::RigBowVocabulary ros_rig;
  // Pre-size the output to avoid repeated reallocation; the ROS->vision
  // converter for this type already reserves, this direction did not.
  ros_rig.channels.reserve(rig.size());
  for (const auto &channel : rig) {
    ros_rig.channels.push_back(copyChannelBowVocabulary(channel));
  }
  return ros_rig;
}
/// Converts a ROS rig BoW vocabulary into the vision representation.
vision::RigBowVocabulary copyRigBowVocabulary(
    const vtr_messages::msg::RigBowVocabulary &ros_rig) {
  vision::RigBowVocabulary out;
  out.reserve(ros_rig.channels.size());
  for (const auto &ros_channel : ros_rig.channels) {
    out.emplace_back(copyChannelBowVocabulary(ros_channel));
  }
  return out;
}
/// Converts a ROS word-count pair (feature id, count) to the vision pair.
vision::BowWordCount copyBowWordCount(
    const vtr_messages::msg::BowWordCount &ros_word_count) {
  vision::BowWordCount out;
  out.first = copyLandmarkId(ros_word_count.feature);
  out.second = ros_word_count.count;
  return out;
}
/// Converts a vision word-count pair into its ROS message form.
vtr_messages::msg::BowWordCount copyBowWordCount(
    const vision::BowWordCount &word_count) {
  vtr_messages::msg::BowWordCount out;
  out.feature = copyLandmarkId(word_count.first);
  out.count = word_count.second;
  return out;
}
/// Rebuilds a BoW descriptor from its ROS message form.
vision::BowDescriptor copyBowDescriptor(
    const vtr_messages::msg::BowDescriptor &ros_bow) {
  vision::BowDescriptor bow;
  for (const auto &word_count : ros_bow.word_counts) {
    // insert with an end() hint: amortized O(1) when the incoming word
    // counts are already sorted
    bow.insert(bow.end(), copyBowWordCount(word_count));
  }
  return bow;
}
/// Serializes a BoW descriptor into its ROS message form.
vtr_messages::msg::BowDescriptor copyBowDescriptor(
    const vision::BowDescriptor &bow) {
  vtr_messages::msg::BowDescriptor out;
  for (const auto &word_count : bow) {
    out.word_counts.push_back(copyBowWordCount(word_count));
  }
  return out;
}
} // namespace messages
} // namespace vtr
|
gidmoth/freeswitch-image-builder | freeswitch/src/mod/xml_int/mod_xml_rpc/ws.c | #include "ws.h"
#ifndef _MSC_VER
#include <fcntl.h>
#endif
#define SHA1_HASH_SIZE 20
static struct globals_s globals;
#ifndef WSS_STANDALONE
/* When built inside FreeSWITCH the core owns OpenSSL initialization, so
 * this is a no-op kept only for interface symmetry with the standalone
 * build below. */
void init_ssl(void)
{
}
/* No-op counterpart of init_ssl() for the non-standalone build. */
void deinit_ssl(void)
{
}
#else
static void pthreads_thread_id(CRYPTO_THREADID *id);
static void pthreads_locking_callback(int mode, int type, const char *file, int line);
static pthread_mutex_t *lock_cs;
static long *lock_count;
/* Allocates one mutex (and a debug counter) per OpenSSL lock slot and
 * installs the pthread-based callbacks OpenSSL needs to be thread-safe
 * (pre-1.1.0 locking API). */
static void thread_setup(void)
{
    int i, n = CRYPTO_num_locks();

    lock_cs = OPENSSL_malloc(n * sizeof(pthread_mutex_t));
    lock_count = OPENSSL_malloc(n * sizeof(long));

    for (i = 0; i < n; i++) {
        lock_count[i] = 0;
        pthread_mutex_init(&(lock_cs[i]), NULL);
    }

    CRYPTO_THREADID_set_callback(pthreads_thread_id);
    CRYPTO_set_locking_callback(pthreads_locking_callback);
}
/* Uninstalls the OpenSSL locking callback and releases the per-slot
 * mutexes allocated by thread_setup(). */
static void thread_cleanup(void)
{
    int i, n = CRYPTO_num_locks();

    CRYPTO_set_locking_callback(NULL);

    for (i = 0; i < n; i++) {
        pthread_mutex_destroy(&(lock_cs[i]));
    }

    OPENSSL_free(lock_cs);
    OPENSSL_free(lock_count);
}
/* OpenSSL locking callback: take or release the mutex for lock slot
 * 'type'.  'file'/'line' identify the OpenSSL call site and are unused. */
static void pthreads_locking_callback(int mode, int type, const char *file, int line)
{
    if (!(mode & CRYPTO_LOCK)) {
        pthread_mutex_unlock(&(lock_cs[type]));
        return;
    }

    pthread_mutex_lock(&(lock_cs[type]));
    lock_count[type]++;
}
/* OpenSSL thread-id callback backed by pthread_self(). */
static void pthreads_thread_id(CRYPTO_THREADID *id)
{
    CRYPTO_THREADID_set_numeric(id, (unsigned long) pthread_self());
}
/* Standalone build: initialize OpenSSL, create the TLS server context and
 * load the configured certificate/key pair.  Aborts on any setup failure,
 * matching the existing assert/abort error style, since the server cannot
 * run without a working TLS context. */
void init_ssl(void) {
    SSL_library_init();

    OpenSSL_add_all_algorithms();   /* load & register cryptos */
    SSL_load_error_strings();       /* load all error messages */
    globals.ssl_method = TLSv1_server_method();         /* create server instance */
    globals.ssl_ctx = SSL_CTX_new(globals.ssl_method);  /* create context */
    assert(globals.ssl_ctx);

    /* set the local certificate from CertFile; previously the return value
     * was ignored and a bad path only surfaced later at handshake time */
    if (SSL_CTX_use_certificate_file(globals.ssl_ctx, globals.cert, SSL_FILETYPE_PEM) != 1) {
        abort();
    }

    /* set the private key from KeyFile (also previously unchecked) */
    if (SSL_CTX_use_PrivateKey_file(globals.ssl_ctx, globals.key, SSL_FILETYPE_PEM) != 1) {
        abort();
    }

    /* verify private key */
    if (!SSL_CTX_check_private_key(globals.ssl_ctx)) {
        abort();
    }

    SSL_CTX_set_cipher_list(globals.ssl_ctx, "HIGH:!DSS:!aNULL@STRENGTH");

    thread_setup();
}
/* Standalone build teardown: remove the OpenSSL locking callbacks and
 * free the mutexes installed by init_ssl(). */
void deinit_ssl(void) {
    thread_cleanup();
}
#endif
static const char c64[65] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
/* Scans the header block in 'data' for a line starting with "<name>:" and
 * copies its (whitespace-trimmed, CR/LF-terminated) value into 'buf',
 * NUL-terminated and truncated to buflen-1.  Returns 1 when the variable
 * was found and extracted, 0 otherwise.
 * Note the sentinel arithmetic: when strstr() returns NULL the +1 makes
 * p == (char *)1, which is what the loop/guards test against. */
static int cheezy_get_var(char *data, char *name, char *buf, size_t buflen)
{
	char *p=data;

	/* the old way didnt make sure that variable values were used for the name hunt
	 * and didnt ensure that only a full match of the variable name was used
	 */

	do {
		/* match "<name>:" at the start of the current line only */
		if(!strncasecmp(p,name,strlen(name)) && *(p+strlen(name))==':') break;
	} while((p = (strstr(p,"\n")+1))!=(char *)1);

	if (p != (char *)1 && *p!='\0') {
		char *v, *e = 0;

		v = strchr(p, ':');
		if (v) {
			v++;
			/* skip spaces between ':' and the value */
			while(v && *v == ' ') {
				v++;
			}
			if (v)  {
				/* value ends at CR (or LF if no CR) */
				e = strchr(v, '\r');
				if (!e) {
					e = strchr(v, '\n');
				}
			}

			if (v && e) {
				size_t cplen;
				size_t len = e - v;

				/* truncate to the caller's buffer, leaving room for NUL */
				if (len > buflen - 1) {
					cplen = buflen -1;
				} else {
					cplen = len;
				}

				strncpy(buf, v, cplen);
				*(buf+cplen) = '\0';
				return 1;
			}
		}
	}
	return 0;
}
/* Base64-encodes 'ilen' bytes from 'in' into 'out', including trailing
 * '=' padding.  No newline wrapping is performed and the output is not
 * NUL-terminated; the caller must size 'out' accordingly ('olen' is
 * accepted only for interface compatibility).  Always returns 0. */
static int b64encode(unsigned char *in, size_t ilen, unsigned char *out, size_t olen)
{
    int pos = 0;
    size_t i;
    unsigned int acc = 0, nbits = 0;

    (void)olen;

    for (i = 0; i < ilen; i++) {
        /* shift each input byte into the accumulator and drain it six
         * bits at a time into output symbols */
        acc = (acc << 8) + in[i];
        nbits += 8;
        while (nbits >= 6) {
            nbits -= 6;
            out[pos++] = c64[(acc >> nbits) % 64];
        }
    }

    if (nbits > 0) {
        /* flush the 2 or 4 leftover bits, zero-padded to a full symbol */
        out[pos++] = c64[(acc << (6 - nbits)) % 64];
        /* emit '=' until the encoding reaches a 4-symbol boundary */
        while (nbits < 6) {
            out[pos++] = '=';
            nbits += 2;
        }
    }

    return 0;
}
#ifdef NO_OPENSSL
/* SHA-1 digest of the NUL-terminated string 'in' using the bundled SHA1
 * implementation (non-OpenSSL build).  The parameter types now match the
 * OpenSSL variant and the only call site (unsigned char *digest, char *in);
 * the unused locals 'p' and 'x' were removed. */
static void sha1_digest(unsigned char *digest, char *in)
{
    SHA1Context sha;

    SHA1Init(&sha);
    SHA1Update(&sha, in, strlen(in));
    SHA1Final(&sha, digest);
}
#else
/* SHA-1 digest of the NUL-terminated string 'in' (OpenSSL build).
 * 'digest' must hold SHA1_HASH_SIZE (20) bytes. */
static void sha1_digest(unsigned char *digest, char *in)
{
    SHA_CTX ctx;

    SHA1_Init(&ctx);
    SHA1_Update(&ctx, in, strlen(in));
    SHA1_Final(digest, &ctx);
}
#endif
/* Completes the websocket upgrade handshake.
 *
 * 'key' is the client's Sec-WebSocket-Key, 'version' its protocol version
 * and 'proto' the subprotocol to echo back.  Computes the RFC 6455 accept
 * token (base64 of SHA-1(key + GUID)) and writes the 101 response.
 * Returns 0 on success, -3 when no session is attached, and -1 (after
 * sending 400 and closing) when any argument is empty or the write fails. */
int ws_handshake_kvp(wsh_t *wsh, char *key, char *version, char *proto)
{
    char input[256] = "";
    unsigned char output[SHA1_HASH_SIZE] = "";
    char b64[256] = "";
    char respond[512] = "";

    if (!wsh->tsession) {
        return -3;
    }

    if (!*key || !*version || !*proto) {
        goto err;
    }

    /* accept token = base64(sha1(client-key + fixed websocket GUID)) */
    snprintf(input, sizeof(input), "%s%s", key, WEBSOCKET_GUID);
    sha1_digest(output, input);

    b64encode((unsigned char *)output, SHA1_HASH_SIZE, (unsigned char *)b64, sizeof(b64));

    snprintf(respond, sizeof(respond),
             "HTTP/1.1 101 Switching Protocols\r\n"
             "Upgrade: websocket\r\n"
             "Connection: Upgrade\r\n"
             "Sec-WebSocket-Accept: %s\r\n"
             "Sec-WebSocket-Protocol: %s\r\n\r\n",
             b64,
             proto);

    /* ws_raw_write() returns the byte count; non-zero means the response
     * went out, which flips the handle into framed mode */
    if (ws_raw_write(wsh, respond, strlen(respond))) {
        wsh->handshake = 1;
        return 0;
    }

 err:
    /* reject: advertise the supported version and tear the session down */
    snprintf(respond, sizeof(respond), "HTTP/1.1 400 Bad Request\r\n"
             "Sec-WebSocket-Version: 13\r\n\r\n");

    ws_raw_write(wsh, respond, strlen(respond));

    ws_close(wsh, WS_NONE);

    return -1;
}
/* Reads raw bytes from the underlying abyss connection.
 *
 * Pre-handshake: hands over whatever the connection already buffered (the
 * HTTP upgrade request) in one shot.  Post-handshake: serves bytes from
 * the abyss buffer, refilling via ConnRead() when empty, and returns at
 * most 'bytes'.  Returns the number of bytes copied, or 0 on error. */
issize_t ws_raw_read(wsh_t *wsh, void *data, size_t bytes)
{
    issize_t r;
    TConn *conn = wsh->tsession->connP;

    if (!wsh->handshake) {
        /* hand the already-buffered HTTP request to the caller verbatim */
        r = wsh->tsession->connP->buffersize;
        memcpy(data, conn->buffer.b, r);

        printf("%s\n", conn->buffer.t);

        ConnReadInit(conn);

        return r;
    } else {
        const char *readError = NULL;

        //	printf(" pos=%d size=%d need=%d\n", conn->bufferpos, conn->buffersize, bytes);

        /* bytes still unconsumed in the abyss buffer */
        r = conn->buffersize - conn->bufferpos;

        if (r < 0) {
            printf("286 Read Error %d!\n", r);
            return 0;
        } else if (r == 0) {
            /* buffer exhausted: pull more data (second arg = 2, presumably
             * a timeout in seconds -- confirm against the abyss ConnRead API) */
            ConnRead(conn, 2, NULL, NULL, &readError);
            if (readError) {
                //	printf("292 Read Error %s\n", readError);
                free((void *)readError);
                return 0;
            }
            r = conn->buffersize - conn->bufferpos;
        }

        if (r <= (issize_t)bytes) {
            /* hand over everything buffered and reset the abyss buffer */
            memcpy(data, conn->buffer.b + conn->bufferpos, r);
            //		ConnReadInit(conn);
            conn->bufferpos = conn->buffersize;
            ConnReadInit(conn);
            return r;
        } else {
            /* partial consume: advance the read cursor only */
            memcpy(data, conn->buffer.b + conn->bufferpos, bytes);
            conn->bufferpos += (uint32_t)bytes;
            return (issize_t)bytes;
        }
    }
}
/* Writes raw bytes to the connection, through SSL when present, otherwise
 * via the abyss ConnWrite().  Returns the byte count on success; on the
 * plain path 0 indicates failure, on the SSL path the SSL_write() result
 * is returned as-is.
 * Fix: 'r' was declared size_t, so the SSL_write() int result was stored
 * unsigned and "r == -1" only worked via wraparound; it is now an int. */
issize_t ws_raw_write(wsh_t *wsh, void *data, size_t bytes)
{
    int r;

    if (wsh->ssl) {
        /* retry while the SSL layer asks for the write to be repeated */
        do {
            r = SSL_write(wsh->ssl, data, (int)bytes);
        } while (r == -1 && SSL_get_error(wsh->ssl, r) == SSL_ERROR_WANT_WRITE);

        return (issize_t)r;
    }

    if (ConnWrite(wsh->tsession->connP, data, (uint32_t)bytes)) {
        return (issize_t)bytes;
    }

    return 0;
}
/* Allocates and zeroes a websocket handle bound to the given abyss
 * session.  Returns NULL on allocation failure. */
wsh_t * ws_init(ws_tsession_t *tsession)
{
    wsh_t *wsh = (wsh_t *)calloc(1, sizeof(*wsh));

    if (!wsh) {
        return NULL;
    }

    wsh->tsession = tsession;
    wsh->buflen = sizeof(wsh->buffer);

    return wsh;
}
/* Tears down a websocket handle: closes the session if still open and
 * shuts down / frees the SSL object.  Safe to call repeatedly; 'down' is
 * used as a state flag (1 = closed, 2 = fully destroyed) so later calls
 * are no-ops.
 * NOTE(review): the wsh_t itself is not free()d here -- confirm the
 * caller owns the allocation made by ws_init(). */
void ws_destroy(wsh_t *wsh)
{
    if (!wsh) {
        return;
    }

    if (!wsh->down) {
        ws_close(wsh, WS_NONE);
    }

    if (wsh->down > 1) {
        return;
    }

    wsh->down = 2;

    if (wsh->ssl) {
        int code;
        /* retry the bidirectional shutdown while the peer's close_notify
         * has not arrived yet */
        do {
            code = SSL_shutdown(wsh->ssl);
        } while (code == -1 && SSL_get_error(wsh->ssl, code) == SSL_ERROR_WANT_READ);

        SSL_free(wsh->ssl);
        wsh->ssl = NULL;
    }
}
/* Marks the session as down and returns the negated close reason, or -1
 * when the session was already closed. */
issize_t ws_close(wsh_t *wsh, int16_t reason)
{
    if (wsh->down) {
        return -1;
    }

    wsh->down = 1;

    return -1 * reason;
}
/* Reads one websocket frame from the connection into wsh->buffer.
 *
 * On success, *oc receives the frame opcode, *data points into
 * wsh->buffer at the unmasked, NUL-terminated payload, and the payload
 * length is returned.  PING frames are answered with a PONG and reading
 * restarts; CLOSE frames and protocol violations tear the session down
 * via ws_close() and return a non-positive value. */
issize_t ws_read_frame(wsh_t *wsh, ws_opcode_t *oc, uint8_t **data)
{
    issize_t need = 2;
    char *maskp;

 again:
    need = 2;
    maskp = NULL;
    *data = NULL;

    if (wsh->down) {
        return -1;
    }

    if (!wsh->handshake) {
        return ws_close(wsh, WS_PROTO_ERR);
    }

    /* read up to a maximal header (2 + 8 extended length + 4 mask = 14
     * bytes); spin until at least the 2 mandatory header bytes are in */
    if ((wsh->datalen = ws_raw_read(wsh, wsh->buffer, 14)) < need) {
        while (!wsh->down && (wsh->datalen += ws_raw_read(wsh, wsh->buffer + wsh->datalen, 14 - wsh->datalen)) < need) ;
#if 0
        if (0 && (wsh->datalen += ws_raw_read(wsh, wsh->buffer + wsh->datalen, 14 - wsh->datalen)) < need) {
            /* too small - protocol err */
            return ws_close(wsh, WS_PROTO_ERR);
        }
#endif
    }

    /* low nibble of byte 0 is the opcode */
    *oc = *wsh->buffer & 0xf;

    switch(*oc) {
    case WSOC_CLOSE:
        {
            wsh->plen = wsh->buffer[1] & 0x7f;
            *data = (uint8_t *) &wsh->buffer[2];
            /* 1000 = normal closure */
            return ws_close(wsh, 1000);
        }
        break;
    case WSOC_CONTINUATION:
    case WSOC_TEXT:
    case WSOC_BINARY:
    case WSOC_PING:
    case WSOC_PONG:
        {
            //int fin = (wsh->buffer[0] >> 7) & 1;
            /* high bit of byte 1: payload is masked (required for
             * client-to-server frames) */
            int mask = (wsh->buffer[1] >> 7) & 1;

            if (mask) {
                need += 4;

                if (need > wsh->datalen) {
                    /* too small - protocol err */
                    *oc = WSOC_CLOSE;
                    return ws_close(wsh, WS_PROTO_ERR);
                }
            }

            wsh->plen = wsh->buffer[1] & 0x7f;
            wsh->payload = &wsh->buffer[2];

            if (wsh->plen == 127) {
                /* 8-byte extended payload length follows */
                uint64_t *u64;

                need += 8;

                if (need > wsh->datalen) {
                    /* too small - protocol err */
                    *oc = WSOC_CLOSE;
                    return ws_close(wsh, WS_PROTO_ERR);
                }

                u64 = (uint64_t *) wsh->payload;
                wsh->payload += 8;
                /* NOTE(review): ntohl() converts only 32 bits, so the upper
                 * half of the 64-bit length field is dropped here -- confirm
                 * against RFC 6455 section 5.2 */
                wsh->plen = ntohl((u_long)*u64);
            } else if (wsh->plen == 126) {
                /* 2-byte extended payload length follows */
                uint16_t *u16;

                need += 2;

                if (need > wsh->datalen) {
                    /* too small - protocol err */
                    *oc = WSOC_CLOSE;
                    return ws_close(wsh, WS_PROTO_ERR);
                }

                u16 = (uint16_t *) wsh->payload;
                wsh->payload += 2;
                wsh->plen = ntohs(*u16);
            }

            if (mask) {
                /* 4-byte masking key precedes the payload */
                maskp = (char *)wsh->payload;
                wsh->payload += 4;
            }

            /* payload bytes still missing after the initial 14-byte read */
            need = (wsh->plen - (wsh->datalen - need));

            if ((need + wsh->datalen) > (issize_t)wsh->buflen) {
                /* too big - Ain't nobody got time fo' dat */
                *oc = WSOC_CLOSE;
                return ws_close(wsh, WS_DATA_TOO_BIG);
            }

            wsh->rplen = wsh->plen - need;

            /* keep reading until the whole payload is buffered */
            while(need) {
                issize_t r = ws_raw_read(wsh, wsh->payload + wsh->rplen, need);

                if (r < 1) {
                    /* invalid read - protocol err .. */
                    *oc = WSOC_CLOSE;
                    return ws_close(wsh, WS_PROTO_ERR);
                }

                wsh->datalen += r;
                wsh->rplen += r;
                need -= r;
            }

            if (mask && maskp) {
                issize_t i;

                /* NOTE(review): the unmask loop runs to datalen (header +
                 * payload bytes read) rather than rplen; payload points past
                 * the header so this XORs beyond the payload end -- confirm
                 * the intended bound */
                for (i = 0; i < wsh->datalen; i++) {
                    wsh->payload[i] ^= maskp[i % 4];
                }
            }

            if (*oc == WSOC_PING) {
                /* echo the payload back as a PONG, then wait for the next
                 * data frame */
                ws_write_frame(wsh, WSOC_PONG, wsh->payload, wsh->rplen);
                goto again;
            }

            /* NUL-terminate so text payloads can be used as C strings */
            *(wsh->payload+wsh->rplen) = '\0';
            *data = (uint8_t *)wsh->payload;

            //printf("READ[%ld][%d]-----------------------------:\n[%s]\n-------------------------------\n", wsh->rplen, *oc, (char *)*data);

            return wsh->rplen;
        }
        break;
    default:
        {
            /* invalid op code - protocol err .. */
            *oc = WSOC_CLOSE;
            return ws_close(wsh, WS_PROTO_ERR);
        }
        break;
    }
}
/* Appends 'bytes' of 'data' to the pending write buffer.  Returns the
 * number of bytes queued, or -1 if the buffer would overflow. */
issize_t ws_feed_buf(wsh_t *wsh, void *data, size_t bytes)
{
    if (wsh->wdatalen + bytes > wsh->buflen) {
        return -1;
    }

    memcpy(wsh->wbuffer + wsh->wdatalen, data, bytes);
    wsh->wdatalen += (issize_t)bytes;

    return (issize_t)bytes;
}
/* Flushes everything queued by ws_feed_buf() as a single frame with
 * opcode 'oc' and resets the queue.  Returns the frame write result, or
 * -1 when the queue is empty. */
issize_t ws_send_buf(wsh_t *wsh, ws_opcode_t oc)
{
    issize_t result;

    if (!wsh->wdatalen) {
        return -1;
    }

    result = ws_write_frame(wsh, oc, wsh->wbuffer, wsh->wdatalen);
    wsh->wdatalen = 0;

    return result;
}
/* Writes one unmasked (server-to-client) websocket frame with FIN set.
 *
 * Builds the 2-10 byte header for the three RFC 6455 length encodings
 * (7-bit, 16-bit, 64-bit), then writes header and payload.  Returns the
 * payload byte count on success, -1 on header write failure (or when the
 * session is already down) and -2 on payload write failure. */
issize_t ws_write_frame(wsh_t *wsh, ws_opcode_t oc, void *data, size_t bytes)
{
    uint8_t hdr[14] = { 0 };
    size_t hlen = 2;

    if (wsh->down) {
        return -1;
    }

    hdr[0] = (uint8_t)(oc | 0x80);   /* FIN + opcode */

    if (bytes < 126) {
        hdr[1] = (uint8_t)bytes;
    } else if (bytes < 0x10000) {
        uint16_t *u16;

        hdr[1] = 126;
        hlen += 2;
        u16 = (uint16_t *) &hdr[2];
        *u16 = htons((uint16_t) bytes);
    } else {
        int i;

        hdr[1] = 127;
        hlen += 8;
        /* Fix: serialize the full 64-bit length in network byte order.
         * The previous "*u64 = htonl((unsigned long)bytes)" byte-swapped
         * only 32 bits and stored them in host order, corrupting the
         * extended-length field for payloads >= 64KB (RFC 6455 5.2). */
        for (i = 0; i < 8; i++) {
            hdr[2 + i] = (uint8_t)(((uint64_t)bytes >> (8 * (7 - i))) & 0xff);
        }
    }

    if (ws_raw_write(wsh, (void *) &hdr[0], hlen) != (issize_t)hlen) {
        return -1;
    }

    if (ws_raw_write(wsh, data, bytes) != (issize_t)bytes) {
        return -2;
    }

    return (issize_t)bytes;
}
|
lazuli-inc/fess | src/main/java/org/codelibs/fess/util/ParameterUtil.java | /*
* Copyright 2012-2021 CodeLibs Project and the Others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.codelibs.fess.util;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.regex.Pattern;
import org.codelibs.core.lang.StringUtil;
import org.codelibs.core.misc.Pair;
import org.codelibs.fess.es.config.exentity.CrawlingConfig.ConfigName;
import org.lastaflute.core.security.PrimaryCipher;
public class ParameterUtil {
private static final String CIPHER_PREFIX = "{cipher}";
protected static final String XPATH_PREFIX = "field.xpath.";
protected static final String META_PREFIX = "field.meta.";
protected static final String VALUE_PREFIX = "field.value.";
protected static final String SCRIPT_PREFIX = "field.script.";
protected static final String CLIENT_PREFIX = "client.";
protected static final String CONFIG_PREFIX = "config.";
protected static final String FIELD_PREFIX = "field.config.";
    /**
     * Utility class with only static members; instantiation is restricted
     * to subclasses.
     */
    protected ParameterUtil() {
        // nothing
    }
public static Map<String, String> parse(final String value) {
final Map<String, String> paramMap = new LinkedHashMap<>();
if (value != null) {
int unknownKey = 0;
final Pattern properyPattern = Pattern.compile(ComponentUtil.getFessConfig().getAppEncryptPropertyPattern());
final PrimaryCipher cipher = ComponentUtil.getPrimaryCipher();
final String[] lines = value.split("[\r\n]");
for (final String line : lines) {
if (StringUtil.isNotBlank(line)) {
final int pos = line.indexOf('=');
if (pos == 0) {
paramMap.put("unknown." + (unknownKey + 1), line.substring(pos + 1).trim());
unknownKey++;
} else if (pos > 0) {
final String key = line.substring(0, pos).trim();
if (pos < line.length()) {
String data = line.substring(pos + 1).trim();
if (properyPattern.matcher(key).matches() && data.startsWith(CIPHER_PREFIX)) {
data = cipher.decrypt(data.substring(CIPHER_PREFIX.length()));
}
paramMap.put(key, data);
} else {
paramMap.put(key, StringUtil.EMPTY);
}
} else {
paramMap.put(line.trim(), StringUtil.EMPTY);
}
}
}
}
return paramMap;
}
public static String encrypt(final String value) {
final StringBuilder buf = new StringBuilder();
final Pattern properyPattern = Pattern.compile(ComponentUtil.getFessConfig().getAppEncryptPropertyPattern());
final PrimaryCipher cipher = ComponentUtil.getPrimaryCipher();
ParameterUtil.parse(value).entrySet().stream().map(e -> {
final String k = e.getKey();
final String v = e.getValue();
if (properyPattern.matcher(k).matches() && !v.startsWith(CIPHER_PREFIX)) {
return new Pair<>(k, CIPHER_PREFIX + cipher.encrypt(v));
}
return new Pair<>(k, v);
}).forEach(e -> {
if (buf.length() > 0) {
buf.append('\n');
}
buf.append(e.getFirst());
buf.append('=');
buf.append(e.getSecond());
});
return buf.toString();
}
/**
 * Parses {@code configParam} and merges the resulting entries into
 * {@code paramMap}. Existing keys in {@code paramMap} are overwritten.
 *
 * @param paramMap the destination map to merge into
 * @param configParam the raw multi-line parameter text (may be null)
 */
public static void loadConfigParams(final Map<String, Object> paramMap, final String configParam) {
    final Map<String, String> parsed = ParameterUtil.parse(configParam);
    if (parsed.isEmpty()) {
        return;
    }
    paramMap.putAll(parsed);
}
/**
 * Splits a flat parameter string into per-category maps keyed by {@link ConfigName}.
 * <p>
 * Each parsed key is routed by its prefix ("config.", "client.", xpath, meta,
 * value, "field.script.", "field.config.") into the corresponding sub-map with
 * the prefix stripped. Keys matching none of the known prefixes are silently
 * dropped. Every {@link ConfigName} always has a (possibly empty) entry in the
 * returned map.
 *
 * @param configParameters the raw multi-line parameter text (may be null)
 * @return map from category to its (insertion-ordered) stripped-key parameters
 */
public static Map<ConfigName, Map<String, String>> createConfigParameterMap(final String configParameters) {
    final Map<ConfigName, Map<String, String>> map = new HashMap<>();
    // LinkedHashMap keeps the original declaration order within each category.
    final Map<String, String> configConfigMap = new LinkedHashMap<>();
    final Map<String, String> clientConfigMap = new LinkedHashMap<>();
    final Map<String, String> xpathConfigMap = new LinkedHashMap<>();
    final Map<String, String> metaConfigMap = new LinkedHashMap<>();
    final Map<String, String> valueConfigMap = new LinkedHashMap<>();
    final Map<String, String> scriptConfigMap = new LinkedHashMap<>();
    final Map<String, String> fieldConfigMap = new LinkedHashMap<>();
    map.put(ConfigName.CONFIG, configConfigMap);
    map.put(ConfigName.CLIENT, clientConfigMap);
    map.put(ConfigName.XPATH, xpathConfigMap);
    map.put(ConfigName.META, metaConfigMap);
    map.put(ConfigName.VALUE, valueConfigMap);
    map.put(ConfigName.SCRIPT, scriptConfigMap);
    map.put(ConfigName.FIELD, fieldConfigMap);
    // Route each parsed entry into its category, stripping the category prefix.
    // Order of checks matters only if prefixes overlap (e.g. SCRIPT_PREFIX
    // "field.script." is tested before FIELD_PREFIX "field.config." — they are
    // disjoint, so routing is unambiguous).
    for (final Map.Entry<String, String> entry : ParameterUtil.parse(configParameters).entrySet()) {
        final String key = entry.getKey();
        if (key.startsWith(CONFIG_PREFIX)) {
            configConfigMap.put(key.substring(CONFIG_PREFIX.length()), entry.getValue());
        } else if (key.startsWith(CLIENT_PREFIX)) {
            clientConfigMap.put(key.substring(CLIENT_PREFIX.length()), entry.getValue());
        } else if (key.startsWith(XPATH_PREFIX)) {
            xpathConfigMap.put(key.substring(XPATH_PREFIX.length()), entry.getValue());
        } else if (key.startsWith(META_PREFIX)) {
            metaConfigMap.put(key.substring(META_PREFIX.length()), entry.getValue());
        } else if (key.startsWith(VALUE_PREFIX)) {
            valueConfigMap.put(key.substring(VALUE_PREFIX.length()), entry.getValue());
        } else if (key.startsWith(SCRIPT_PREFIX)) {
            scriptConfigMap.put(key.substring(SCRIPT_PREFIX.length()), entry.getValue());
        } else if (key.startsWith(FIELD_PREFIX)) {
            fieldConfigMap.put(key.substring(FIELD_PREFIX.length()), entry.getValue());
        }
    }
    return map;
}
}
|
tejeshMore-dev/compiled-leet-codes | problems/1539-kth-missing-positive-number/1539-kth-missing-positive-number.js | /**
* @param {number[]} arr
* @param {number} k
* @return {number}
*/
var findKthPositive = function(arr, k) {
    // arr is strictly increasing and positive. The count of positive integers
    // missing before arr[i] is arr[i] - (i + 1): arr[i] "should" be i+1 if
    // nothing were missing.
    //
    // The original version special-cased i === 0 with arr[0] > 1 and otherwise
    // read arr[i-1] (undefined at i === 0), relying on NaN being falsy to skip
    // the counting loop — it happened to work, but was fragile. This version
    // computes the missing count directly.
    for (let i = 0; i < arr.length; i++) {
        const missingBefore = arr[i] - (i + 1);
        if (missingBefore >= k) {
            // The k-th missing number lies before arr[i]. Exactly i array
            // values precede it, so it is k + i.
            return k + i;
        }
    }
    // All of arr precedes the answer: arr.length values are "used up",
    // so the k-th missing number is arr.length + k.
    return arr.length + k;
};
bsramin/shorty | client/src/lib/helpers.js | <gh_stars>1-10
// Returns a function, that, as long as it continues to be invoked, will not
// be triggered. The function will be called after it stops being called for
// N milliseconds. If `immediate` is passed, trigger the function on the
// leading edge, instead of the trailing.
import ky from 'ky'
import { API_V1_ENDPOINT } from '../constants/endpoint'
import Auth from './Authentication'
/**
 * Debounces `func`: the returned wrapper delays invocation until it has not
 * been called for `wait` ms. With `immediate`, `func` fires on the leading
 * edge instead of the trailing one.
 * @param {Function} func - Function to debounce
 * @param {Number} wait - Quiet period in milliseconds
 * @param {Boolean} immediate - Fire on the leading edge instead
 * @returns {Function} debounced wrapper (preserves `this` and arguments)
 */
export const debounce = (func, wait, immediate) => {
  let timer
  return function (...args) {
    const self = this
    // Leading-edge call only when no timer is pending.
    const shouldCallNow = immediate && !timer
    clearTimeout(timer)
    timer = setTimeout(() => {
      timer = null
      if (!immediate) func.apply(self, args)
    }, wait)
    if (shouldCallNow) func.apply(self, args)
  }
}
/**
 * Serializes an object into a URL query string ("?a=1&b=2"), skipping keys
 * whose value is null or undefined. Returns '' when nothing survives.
 * @param {Object} object - Key/value pairs to serialize
 * @returns {String} query string starting with '?', or '' if empty
 */
export const objectToQuery = (object) => {
  const pairs = Object.keys(object)
    .filter((key) => object[key] !== null && object[key] !== undefined)
    .map((key) => `${key}=${object[key]}`)
  return pairs.length ? `?${pairs.join('&')}` : ''
}
/**
* @name sessionStorage
* @description Simple SessionStorage wrapper with READ/SAVE/REMOVE
* READ: sessionStorage('<NAME>')
* REMOVE: sessionStorage('<NAME>', null)
* SAVE: sessionStorage('<NAME>', '<VALUE>')
* @param {String} key
* @param {String} value
*/
export const sessionStorage = (key, value = undefined) => {
  // READ: value omitted
  if (value === undefined) return window.sessionStorage.getItem(key)
  // REMOVE: explicit null
  if (value === null) return window.sessionStorage.removeItem(key)
  // SAVE: any other value
  return window.sessionStorage.setItem(key, value)
}
/**
* @name localStorage
* @description Simple LocalStorage wrapper with READ/SAVE/REMOVE
* READ: localStorage('<NAME>')
* REMOVE: localStorage('<NAME>', null)
* SAVE: localStorage('<NAME>', '<VALUE>')
* @param {String} key
* @param {String} value
*/
export const localStorage = (key, value = undefined) => {
  // READ: value omitted
  if (value === undefined) return window.localStorage.getItem(key)
  // REMOVE: explicit null
  if (value === null) return window.localStorage.removeItem(key)
  // SAVE: any other value
  return window.localStorage.setItem(key, value)
}
/**
* @name clearUrl
* @description Remove http(s):// from given url
* @param {String} url - Url to clear
* @returns {string|*}
*/
export const clearUrl = (url) => {
  // Strip everything up to and including '://' (e.g. "https://"); leave
  // scheme-less URLs untouched.
  const schemeEnd = url.indexOf('://')
  return schemeEnd === -1 ? url : url.slice(schemeEnd + 3)
}
/**
* @name removeInitialSlash
* @description Remove initial slash from path
* @param {String} path - Path to check
* @return {String}
*/
// Drops a single leading '/' from path, if present.
export const removeInitialSlash = (path) =>
  path.startsWith('/') ? path.slice(1) : path
/**
*
* @type {Ky}
*/
/**
 * Preconfigured ky instance for the v1 API. Before each request it refreshes
 * an expired token (unless a refresh is already pending) and attaches the
 * Authorization header.
 *
 * Requests are "secure" (token-refreshing) by default; pass `secure: false`
 * in the request options to opt out.
 * @type {Ky}
 */
export const api = ky.extend({
  prefixUrl: API_V1_ENDPOINT,
  hooks: {
    beforeRequest: [
      async (options) => {
        // FIX: was `options.secure || true`, which is always true, so
        // `secure: false` could never disable the refresh logic.
        const isSecure = options.secure !== false
        if (isSecure && !Auth.isRefreshPending() && Auth.isExpired()) {
          await Auth.refreshToken()
        }
        options.headers.set('Authorization', Auth.getAuthenticationHeader())
      },
    ],
  },
})
/**
 * Shared HTTP error handler: on 401 kicks off a token refresh; any other
 * status redirects to the error page. If inspecting the response itself
 * fails (e.g. network error, no response), the user is logged out and sent
 * back to the landing page.
 * @param {*} exception - error thrown by a ky/fetch call
 */
export const exceptionHandler = async (exception) => {
  try {
    const response = await exception.response
    if (response.status === 401) {
      await Auth.startRefreshToken()
      return
    }
    return window.location.assign('/500')
  } catch (error) {
    Auth.deauthenticate()
    window.location.assign('/')
  }
}
|
dbflute-test/dbflute-test-dbms-mysql | src/main/java/org/docksidestage/mysql/dbflute/bsentity/BsWhiteDeprecatedClsElement.java | <filename>src/main/java/org/docksidestage/mysql/dbflute/bsentity/BsWhiteDeprecatedClsElement.java
/*
* Copyright 2004-2013 the Seasar Foundation and the Others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.docksidestage.mysql.dbflute.bsentity;
import java.util.List;
import java.util.ArrayList;
import org.dbflute.dbmeta.DBMeta;
import org.dbflute.dbmeta.AbstractEntity;
import org.dbflute.dbmeta.accessory.DomainEntity;
import org.docksidestage.mysql.dbflute.allcommon.DBMetaInstanceHandler;
import org.docksidestage.mysql.dbflute.allcommon.CDef;
import org.docksidestage.mysql.dbflute.exentity.*;
/**
* The entity of WHITE_DEPRECATED_CLS_ELEMENT as TABLE. <br>
* <pre>
* [primary-key]
* DEPRECATED_CLS_ELEMENT_CODE
*
* [column]
* DEPRECATED_CLS_ELEMENT_CODE, DEPRECATED_CLS_ELEMENT_NAME
*
* [sequence]
*
*
* [identity]
*
*
* [version-no]
*
*
* [foreign table]
*
*
* [referrer table]
*
*
* [foreign property]
*
*
* [referrer property]
*
*
* [get/set template]
* /= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
* String deprecatedClsElementCode = entity.getDeprecatedClsElementCode();
* String deprecatedClsElementName = entity.getDeprecatedClsElementName();
* entity.setDeprecatedClsElementCode(deprecatedClsElementCode);
* entity.setDeprecatedClsElementName(deprecatedClsElementName);
* = = = = = = = = = =/
* </pre>
* @author DBFlute(AutoGenerator)
*/
// NOTE: DBFlute auto-generated entity base class — regenerated by the code
// generator; behavioral edits belong in the WhiteDeprecatedClsElement subclass.
public abstract class BsWhiteDeprecatedClsElement extends AbstractEntity implements DomainEntity {

    // ===================================================================================
    //                                                                          Definition
    //                                                                          ==========
    /** The serial version UID for object serialization. (Default) */
    private static final long serialVersionUID = 1L;

    // ===================================================================================
    //                                                                           Attribute
    //                                                                           =========
    /** DEPRECATED_CLS_ELEMENT_CODE: {PK, NotNull, CHAR(3), classification=DeprecatedMapCollaborationType} */
    protected String _deprecatedClsElementCode;

    /** DEPRECATED_CLS_ELEMENT_NAME: {VARCHAR(20)} */
    protected String _deprecatedClsElementName;

    // ===================================================================================
    //                                                                             DB Meta
    //                                                                             =======
    /** {@inheritDoc} */
    public DBMeta asDBMeta() {
        return DBMetaInstanceHandler.findDBMeta(asTableDbName());
    }

    /** {@inheritDoc} */
    public String asTableDbName() {
        return "white_deprecated_cls_element";
    }

    // ===================================================================================
    //                                                                        Key Handling
    //                                                                        ============
    /** {@inheritDoc} */
    public boolean hasPrimaryKeyValue() {
        if (_deprecatedClsElementCode == null) { return false; }
        return true;
    }

    // ===================================================================================
    //                                                             Classification Property
    //                                                             =======================
    /**
     * Get the value of deprecatedClsElementCode as the classification of DeprecatedMapCollaborationType. <br>
     * DEPRECATED_CLS_ELEMENT_CODE: {PK, NotNull, CHAR(3), classification=DeprecatedMapCollaborationType} <br>
     * has deprecated element
     * <p>It's treated as case insensitive and if the code value is null, it returns null.</p>
     * @return The instance of classification definition (as ENUM type). (NullAllowed: when the column value is null)
     */
    public CDef.DeprecatedMapCollaborationType getDeprecatedClsElementCodeAsDeprecatedMapCollaborationType() {
        return CDef.DeprecatedMapCollaborationType.codeOf(getDeprecatedClsElementCode());
    }

    /**
     * Set the value of deprecatedClsElementCode as the classification of DeprecatedMapCollaborationType. <br>
     * DEPRECATED_CLS_ELEMENT_CODE: {PK, NotNull, CHAR(3), classification=DeprecatedMapCollaborationType} <br>
     * has deprecated element
     * @param cdef The instance of classification definition (as ENUM type). (NullAllowed: if null, null value is set to the column)
     */
    public void setDeprecatedClsElementCodeAsDeprecatedMapCollaborationType(CDef.DeprecatedMapCollaborationType cdef) {
        setDeprecatedClsElementCode(cdef != null ? cdef.code() : null);
    }

    // ===================================================================================
    //                                                              Classification Setting
    //                                                              ======================
    /**
     * Set the value of deprecatedClsElementCode as FooName (FOO). <br>
     * FooName
     */
    public void setDeprecatedClsElementCode_FooName() {
        setDeprecatedClsElementCodeAsDeprecatedMapCollaborationType(CDef.DeprecatedMapCollaborationType.FooName);
    }

    /**
     * Set the value of deprecatedClsElementCode as BarName (BAR). <br>
     * BarBar: here (deprecated: test of deprecated)
     */
    @Deprecated
    public void setDeprecatedClsElementCode_BarName() {
        setDeprecatedClsElementCodeAsDeprecatedMapCollaborationType(CDef.DeprecatedMapCollaborationType.BarName);
    }

    /**
     * Set the value of deprecatedClsElementCode as QuxName (QUX). <br>
     * QuxQux: (deprecated: no original comment)
     */
    @Deprecated
    public void setDeprecatedClsElementCode_QuxName() {
        setDeprecatedClsElementCodeAsDeprecatedMapCollaborationType(CDef.DeprecatedMapCollaborationType.QuxName);
    }

    // ===================================================================================
    //                                                        Classification Determination
    //                                                        ============================
    /**
     * Is the value of deprecatedClsElementCode FooName? <br>
     * FooName
     * <p>It's treated as case insensitive and if the code value is null, it returns false.</p>
     * @return The determination, true or false.
     */
    public boolean isDeprecatedClsElementCodeFooName() {
        CDef.DeprecatedMapCollaborationType cdef = getDeprecatedClsElementCodeAsDeprecatedMapCollaborationType();
        return cdef != null ? cdef.equals(CDef.DeprecatedMapCollaborationType.FooName) : false;
    }

    /**
     * Is the value of deprecatedClsElementCode BarName? <br>
     * BarBar: here (deprecated: test of deprecated)
     * <p>It's treated as case insensitive and if the code value is null, it returns false.</p>
     * @return The determination, true or false.
     */
    @Deprecated
    public boolean isDeprecatedClsElementCodeBarName() {
        CDef.DeprecatedMapCollaborationType cdef = getDeprecatedClsElementCodeAsDeprecatedMapCollaborationType();
        return cdef != null ? cdef.equals(CDef.DeprecatedMapCollaborationType.BarName) : false;
    }

    /**
     * Is the value of deprecatedClsElementCode QuxName? <br>
     * QuxQux: (deprecated: no original comment)
     * <p>It's treated as case insensitive and if the code value is null, it returns false.</p>
     * @return The determination, true or false.
     */
    @Deprecated
    public boolean isDeprecatedClsElementCodeQuxName() {
        CDef.DeprecatedMapCollaborationType cdef = getDeprecatedClsElementCodeAsDeprecatedMapCollaborationType();
        return cdef != null ? cdef.equals(CDef.DeprecatedMapCollaborationType.QuxName) : false;
    }

    /**
     * contains deprecated element here <br>
     * The group elements:[FooName, BarName]
     * @return The determination, true or false.
     */
    public boolean isDeprecatedClsElementCode_ContainsDeprecated() {
        CDef.DeprecatedMapCollaborationType cdef = getDeprecatedClsElementCodeAsDeprecatedMapCollaborationType();
        return cdef != null && cdef.isContainsDeprecated();
    }

    // ===================================================================================
    //                                                           Classification Name/Alias
    //                                                           =========================
    /**
     * Get the value of the column 'deprecatedClsElementCode' as classification name.
     * @return The string of classification name. (NullAllowed: when the column value is null)
     */
    public String getDeprecatedClsElementCodeName() {
        CDef.DeprecatedMapCollaborationType cdef = getDeprecatedClsElementCodeAsDeprecatedMapCollaborationType();
        return cdef != null ? cdef.name() : null;
    }

    /**
     * Get the value of the column 'deprecatedClsElementCode' as classification alias.
     * @return The string of classification alias. (NullAllowed: when the column value is null)
     */
    public String getDeprecatedClsElementCodeAlias() {
        CDef.DeprecatedMapCollaborationType cdef = getDeprecatedClsElementCodeAsDeprecatedMapCollaborationType();
        return cdef != null ? cdef.alias() : null;
    }

    // ===================================================================================
    //                                                                    Foreign Property
    //                                                                    ================
    // ===================================================================================
    //                                                                   Referrer Property
    //                                                                   =================
    protected <ELEMENT> List<ELEMENT> newReferrerList() { // overriding to import
        return new ArrayList<ELEMENT>();
    }

    // ===================================================================================
    //                                                                      Basic Override
    //                                                                      ==============
    @Override
    protected boolean doEquals(Object obj) {
        // equality is based on the primary key column only
        if (obj instanceof BsWhiteDeprecatedClsElement) {
            BsWhiteDeprecatedClsElement other = (BsWhiteDeprecatedClsElement)obj;
            if (!xSV(_deprecatedClsElementCode, other._deprecatedClsElementCode)) { return false; }
            return true;
        } else {
            return false;
        }
    }

    @Override
    protected int doHashCode(int initial) {
        // hash combines table name and primary key, mirroring doEquals()
        int hs = initial;
        hs = xCH(hs, asTableDbName());
        hs = xCH(hs, _deprecatedClsElementCode);
        return hs;
    }

    @Override
    protected String doBuildStringWithRelation(String li) {
        return "";
    }

    @Override
    protected String doBuildColumnString(String dm) {
        StringBuilder sb = new StringBuilder();
        sb.append(dm).append(xfND(_deprecatedClsElementCode));
        sb.append(dm).append(xfND(_deprecatedClsElementName));
        if (sb.length() > dm.length()) {
            // drop the leading delimiter before wrapping in braces
            sb.delete(0, dm.length());
        }
        sb.insert(0, "{").append("}");
        return sb.toString();
    }

    @Override
    protected String doBuildRelationString(String dm) {
        return "";
    }

    @Override
    public WhiteDeprecatedClsElement clone() {
        return (WhiteDeprecatedClsElement)super.clone();
    }

    // ===================================================================================
    //                                                                            Accessor
    //                                                                            ========
    /**
     * [get] DEPRECATED_CLS_ELEMENT_CODE: {PK, NotNull, CHAR(3), classification=DeprecatedMapCollaborationType} <br>
     * @return The value of the column 'DEPRECATED_CLS_ELEMENT_CODE'. (basically NotNull if selected: for the constraint)
     */
    public String getDeprecatedClsElementCode() {
        checkSpecifiedProperty("deprecatedClsElementCode");
        return _deprecatedClsElementCode;
    }

    /**
     * [set] DEPRECATED_CLS_ELEMENT_CODE: {PK, NotNull, CHAR(3), classification=DeprecatedMapCollaborationType} <br>
     * @param deprecatedClsElementCode The value of the column 'DEPRECATED_CLS_ELEMENT_CODE'. (basically NotNull if update: for the constraint)
     */
    protected void setDeprecatedClsElementCode(String deprecatedClsElementCode) {
        // rejects values outside the classification definition before storing
        checkClassificationCode("DEPRECATED_CLS_ELEMENT_CODE", CDef.DefMeta.DeprecatedMapCollaborationType, deprecatedClsElementCode);
        registerModifiedProperty("deprecatedClsElementCode");
        _deprecatedClsElementCode = deprecatedClsElementCode;
    }

    /**
     * [get] DEPRECATED_CLS_ELEMENT_NAME: {VARCHAR(20)} <br>
     * @return The value of the column 'DEPRECATED_CLS_ELEMENT_NAME'. (NullAllowed even if selected: for no constraint)
     */
    public String getDeprecatedClsElementName() {
        checkSpecifiedProperty("deprecatedClsElementName");
        return _deprecatedClsElementName;
    }

    /**
     * [set] DEPRECATED_CLS_ELEMENT_NAME: {VARCHAR(20)} <br>
     * @param deprecatedClsElementName The value of the column 'DEPRECATED_CLS_ELEMENT_NAME'. (NullAllowed: null update allowed for no constraint)
     */
    public void setDeprecatedClsElementName(String deprecatedClsElementName) {
        registerModifiedProperty("deprecatedClsElementName");
        _deprecatedClsElementName = deprecatedClsElementName;
    }

    /**
     * For framework so basically DON'T use this method.
     * @param deprecatedClsElementCode The value of the column 'DEPRECATED_CLS_ELEMENT_CODE'. (basically NotNull if update: for the constraint)
     */
    public void mynativeMappingDeprecatedClsElementCode(String deprecatedClsElementCode) {
        setDeprecatedClsElementCode(deprecatedClsElementCode);
    }
}
|
duesenklipper/wicketstuff-security | swarm-parent/hive/src/main/java/org/apache/wicket/security/hive/SimpleCachingHive.java | <gh_stars>0
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.security.hive;
import java.util.Map;
import java.util.WeakHashMap;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.wicket.security.hive.authentication.Subject;
import org.apache.wicket.security.hive.authorization.Permission;
/**
* A very simple caching mechanism on top of {@link BasicHive}. If you want more control
* over your cache you could for example use EHCache and extend BasicHive yourself. This
* cache is cleared when a subject logs off but no guarantees are given that the cache
* will not be cleared prematurely or how long it takes after a user logs off to clear the
* cached results.
*
* @author marrink
*/
public class SimpleCachingHive extends BasicHive
{
	/**
	 * Cache of permission-check results per subject. The WeakHashMap lets an
	 * entry be collected once the subject is no longer referenced (e.g. after
	 * logoff). WeakHashMap is not thread safe, so all structural modifications
	 * happen under {@code synchronized (cache)}.
	 */
	private final WeakHashMap<Subject, Map<Permission, Boolean>> cache;

	/**
	 * Creates a hive with an empty result cache.
	 */
	public SimpleCachingHive()
	{
		// reasonable init cache size
		cache = new WeakHashMap<Subject, Map<Permission, Boolean>>(50);
	}

	@Override
	protected Boolean cacheLookUp(Subject subject, Permission permission)
	{
		// easier not use cache when subject is null, since there is no timeout
		// mechanism
		if (subject == null || permission == null)
			return null;
		// deliberately unsynchronized: a missed cache hit only costs a
		// recomputation, never a wrong answer
		Map<Permission, Boolean> result = cache.get(subject);
		if (result != null)
			return result.get(permission);
		return null;
	}

	@Override
	protected void cacheResult(Subject subject, Permission permission, boolean result)
	{
		if (subject == null || permission == null)
			return;
		Map<Permission, Boolean> resultMap;
		// FIX: get-or-create must happen atomically. The previous version
		// checked for the per-subject map outside the lock and put a fresh map
		// inside it, so two concurrent threads could each install their own
		// map for the same subject, silently dropping one thread's cached
		// results (and corrupting the non-thread-safe WeakHashMap on
		// concurrent puts for different subjects).
		synchronized (cache)
		{
			resultMap = cache.get(subject);
			if (resultMap == null)
			{
				// ConcurrentHashMap: per-subject maps are written below
				// without holding the cache lock
				resultMap = new ConcurrentHashMap<Permission, Boolean>();
				cache.put(subject, resultMap);
			}
		}
		resultMap.put(permission, result);
	}
}
|
XQDD/gdou-car | src/main/java/com/wteam/wx/utils/SSLUtils.java | package com.wteam.wx.utils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.conn.ssl.TrustStrategy;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.ssl.SSLContextBuilder;
import org.apache.http.util.EntityUtils;
import javax.net.ssl.SSLContext;
import java.io.IOException;
import java.net.URI;
import java.security.KeyManagementException;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
public class SSLUtils {
/**
* 创建ssl连接
*
* @return
*/
public static CloseableHttpClient createSSLClientDefault() {
try {
SSLContext sslContext = new SSLContextBuilder().loadTrustMaterial(null, new TrustStrategy() {
//信任所有
public boolean isTrusted(X509Certificate[] chain, String authType) throws CertificateException {
return true;
}
}).build();
SSLConnectionSocketFactory sslsf = new SSLConnectionSocketFactory(sslContext);
return HttpClients.custom().setSSLSocketFactory(sslsf).build();
} catch (KeyManagementException | NoSuchAlgorithmException | KeyStoreException e) {
e.printStackTrace();
}
return HttpClients.createDefault();
}
public static String httpsGet(URI uri) {
CloseableHttpClient httpClient = SSLUtils.createSSLClientDefault();
try {
HttpResponse response = httpClient.execute(new HttpGet(uri));
return new String(EntityUtils.toString(response.getEntity()).getBytes("iso8859-1"), "utf-8");
} catch (IOException e) {
e.printStackTrace();
return null;
}
}
public static String httpsPost(URI uri, HttpEntity entity) {
CloseableHttpClient httpClient = SSLUtils.createSSLClientDefault();
try {
HttpPost httpPost = new HttpPost(uri);
httpPost.setEntity(entity);
HttpResponse response = httpClient.execute(httpPost);
return new String(EntityUtils.toString(response.getEntity()).getBytes("iso8859-1"), "utf-8");
} catch (IOException e) {
e.printStackTrace();
return null;
}
}
} |
rrv-rafael/curso-java-basico | IntelliJ/src/com/rrvrafael/cursojava/aula17/LoopFor.java | <reponame>rrv-rafael/curso-java-basico
package com.rrvrafael.cursojava.aula17;
/**
 * Teaching demo of the different shapes a {@code for} loop can take.
 * (Console messages are intentionally in Portuguese; do not translate them.)
 */
public class LoopFor {

    public static void main(String[] args) {
        // Basic counting loop: prints i = 0..4.
        for (int i = 0; i < 5; i++)
        {
            System.out.println("i tem valor: " + i);
        }

        // Counting down: prints i = 5..1.
        for (int i = 5; i > 0; i--)
        {
            System.out.println("i tem valor: " + i);
        }

        // Two loop variables updated together; stops when they cross.
        for (int i = 0, j = 10; i < j; i++, j--)
        {
            System.out.println("i tem valor: " + i + " e j tem valor: " + j);
        }

        // Init and update may live outside the for header (only the
        // condition remains).
        int count = 0;
        for ( ; count < 10; )
        {
            System.out.println("valor de count: " + count);
            count += 2;
        }

        // Step size other than 1.
        for (int cont = 0; cont < 10; cont += 2)
        {
            System.out.println("valor de cont: " + cont);
        }

        // Empty-body loop (note the trailing ';'): all work happens in the
        // update expression. Sums 1+2+3+4 = 10.
        int soma = 0;
        for (int i = 1; i < 5; soma += i++);
        System.out.println("Valor da soma: " + soma);
    }
}
AudioVisualDistributors/Overture-Drivers | drivers/avpro_videoflux/index.js | <reponame>AudioVisualDistributors/Overture-Drivers
// Driver timing constants (all values in milliseconds).
const CMD_DEFER_TIME = 1000 // Timeout when using commandDefer
const TICK_PERIOD = 5000 // In-built tick interval
const POLL_PERIOD = 10000 // Continuous polling function interval
const TCP_TIMEOUT = 30000 // Will timeout after this length of inactivity
const TCP_RECONNECT_DELAY = 3000 // How long to wait before attempting to reconnect

// Overture host framework handle, injected via init() before createDevice().
let host

exports.init = _host => {
  host = _host
}
exports.createDevice = base => {
const logger = base.logger || host.logger
let config
let tcpClient
let frameParser = host.createFrameParser()
frameParser.setSeparator('\r\n')
frameParser.on('data', data => onFrame(data))
//-------------------------------------------------------------------------- BASE FUNCTIONS
function setup(_config) {
config = _config
base.setTickPeriod(TICK_PERIOD)
setPoll('getAllOutputs', POLL_PERIOD, isConnected)
}
function start() {
if (config.simulation) base.getVar('Status').string = 'Connected'
else initTcpClient()
}
function tick() {
if (!config.simulation && !tcpClient) initTcpClient()
}
function disconnect() {
base.getVar('Status').string = 'Disconnected'
base.stopPolling()
}
function stop() {
disconnect()
tcpClient && tcpClient.end()
tcpClient = null
}
//-------------------------------------------------------------------------- SEND/RECEIVE HANDLERS
function initTcpClient() {
if (tcpClient) return // Return if tcpClient already exists
tcpClient = host.createTCPClient()
tcpClient.setOptions({
receiveTimeout: TCP_TIMEOUT,
autoReconnectionAttemptDelay: TCP_RECONNECT_DELAY
})
tcpClient.connect(config.port, config.host)
tcpClient.on('connect', () => {
logger.silly('TCPClient connected')
base.getVar('Status').string = 'Connected'
base.startPolling()
})
tcpClient.on('data', data => {
frameParser.push(data.toString())
})
tcpClient.on('close', () => {
logger.silly('TCPClient closed')
let pending = base.getPendingCommand()
disconnect() // Triggered on timeout, this allows auto reconnect
if (pending) {
base.commandError('Lost Connection')
base.perform(pending.action, pending.params)
}
})
tcpClient.on('error', err => {
logger.error(`TCPClient: ${err}`)
stop() // Throw out the tcpClient and get a fresh connection
})
}
function send(data) {
logger.silly(`TCPClient send: ${data}`)
return tcpClient && tcpClient.write(data)
}
function sendDefer(data) {
if (send(data)) base.commandDefer(CMD_DEFER_TIME)
else base.commandError('Data not sent')
}
function onFrame(data) {
let match // Used for regex matching below
const pendingCommand = base.getPendingCommand()
logger.silly(`onFrame (pending = ${pendingCommand && pendingCommand.action}): ${data}`)
match = data.match(/OUT(\d).*IN(\d)/i)
if (match) {
base.getVar('').string = ''
base.commandDone()
}
}
//-------------------------------------------------------------------------- GET FUNCTIONS
function getAllOutputs() {
sendDefer('GET OUT0 VS\r\n') // Get all outputs
}
//-------------------------------------------------------------------------- SET FUNCTIONS
function selectSource(params) {
let output_name = config.model.output_names[params.Channel]
// Make sure params.Channel is in valid range
if (params.Channel < 1 || params.Channel > config.model.output_names.length) {
logger.error(`selectSource: params.Channel (${params.Channel}) is out of valid range (1-${config.model.output_names.length})`)
return
}
// If simulation mode, just set the variable
if (config.simulation) {
base.getVar(`Sources_${output_name}`).string = params.Name
return
}
// Find input number based on name
let input_number = 0
for (let input in config.model.input_names) {
if (params.Name == config.model.input_names[input]) {
input_number = parseInt(input)
}
}
// Send join command, or error message if input not found
if (input_number > 0) {
logger.debug(`Connecting "${params.Name}" (Input${input_number}) to "${output_name}" (Output${params.Channel})`)
sendDefer(`SET OUT${params.Channel} VS IN${input_number}\r\n`)
}
else {
logger.error(`selectSource: Could not find an input matching "${params.Name}"`)
}
}
//------------------------------------------------------------------------------- HELPER FUNCTIONS
function isConnected() {
return base.getVar('Status').string === 'Connected'
}
function setPoll(action, period, enableFn) {
base.setPoll({
action: action,
period: period,
enablePollFn: enableFn,
startImmediately: true
})
}
//-------------------------------------------------------------------------- EXPORTED FUNCTIONS
return {
setup, start, stop, tick,
getAllOutputs, selectSource
}
}
|
lmz199521/AtGuiGuDemo | app/src/main/java/com/example/hasee/shoppingdemo/Activity/autologin/AutoLoginPresenter.java | <filename>app/src/main/java/com/example/hasee/shoppingdemo/Activity/autologin/AutoLoginPresenter.java
package com.example.hasee.shoppingdemo.Activity.autologin;
import android.util.Log;
import com.example.hasee.shoppingdemo.Bean.LoginBean;
import com.example.hasee.shoppingdemo.Presenter.ContentPresenter;
import com.example.hasee.shoppingdemo.Utils.ErrorUtils;
import com.example.hasee.shoppingdemo.Utils.MVPObserver;
import com.example.hasee.shoppingdemo.Utils.RetrofitCreate;
import java.util.HashMap;
import java.util.Map;
import io.reactivex.Observer;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.disposables.Disposable;
import io.reactivex.schedulers.Schedulers;
/**
* Created by Lmz on 2019/05/22
*/
public class AutoLoginPresenter implements ContentPresenter.IAutoLoginUserPresenter {

    // View callback that receives the auto-login result.
    private ContentPresenter.IAutoLoginUserView iAutoLoginUserView;

    public AutoLoginPresenter(ContentPresenter.IAutoLoginUserView iAutoLoginUserView) {
        this.iAutoLoginUserView = iAutoLoginUserView;
    }

    /**
     * Attempts a token-based automatic login against the shopping service.
     * Subscribes on the IO scheduler, observes on the Android main thread,
     * and forwards success/failure to the attached view.
     *
     * @param token previously stored session token to authenticate with
     */
    @Override
    public void AutoLogin(String token) {
        Map<String, String> map = new HashMap<>();
        map.put("token", token);
        RetrofitCreate.getShoppingService().AutoLoginUser(map)
                .observeOn(AndroidSchedulers.mainThread())
                .subscribeOn(Schedulers.io())
                .subscribe(new Observer<LoginBean>() {
                    @Override
                    public void onSubscribe(Disposable d) {
                        // NOTE(review): the Disposable is not retained, so the
                        // request cannot be cancelled if the view is destroyed
                        // mid-flight — confirm lifecycle handling upstream.
                    }

                    @Override
                    public void onNext(LoginBean bean) {
                        Log.d("token", "AutoLoginPresenter" + bean.getMessage());
                        iAutoLoginUserView.onAutoLoginSuccess(bean);
                    }

                    @Override
                    public void onError(Throwable e) {
                        // Log the error centrally, then notify the view.
                        ErrorUtils.ErrorMess(e);
                        iAutoLoginUserView.onAutoLoginFailure(e.toString());
                    }

                    @Override
                    public void onComplete() {
                        // single-response call: nothing to do on completion
                    }
                });
    }
}
|
bobrutskovav/java_otus_homework | hw22-cache/src/main/java/ru/otus/cache/CacheHomeWork.java | package ru.otus.cache;
import org.flywaydb.core.Flyway;
import org.hibernate.SessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ru.otus.cache.cachehw.HwCache;
import ru.otus.cache.cachehw.HwListener;
import ru.otus.cache.cachehw.MyCache;
import ru.otus.cache.core.dao.UserDao;
import ru.otus.cache.core.model.AddressDataSet;
import ru.otus.cache.core.model.PhoneDataSet;
import ru.otus.cache.core.model.User;
import ru.otus.cache.core.service.DbServiceUserImpl;
import ru.otus.cache.flyway.MigrationsExecutor;
import ru.otus.cache.flyway.MigrationsExecutorFlyway;
import ru.otus.cache.hibernate.HibernateUtils;
import ru.otus.cache.hibernate.dao.UserDaoHibernate;
import ru.otus.cache.hibernate.sessionmanager.SessionManagerHibernate;
import javax.sql.DataSource;
import java.util.List;
import java.util.Optional;
public class CacheHomeWork {

    public static final String HIBERNATE_CFG_FILE = "hibernate.cfg.xml";
    private static final Logger logger = LoggerFactory.getLogger(CacheHomeWork.class);

    /**
     * Demo entry point: migrates the schema, saves a user with address and
     * phones through the cached DB service, then shows cache hit / miss /
     * re-cache behavior with a logging listener attached.
     */
    public static void main(String[] args) {
        MigrationsExecutor migrationsExecutor = new MigrationsExecutorFlyway(HIBERNATE_CFG_FILE);
        migrationsExecutor.executeMigrations();

        // Common part: Hibernate session factory over the migrated schema.
        SessionFactory sessionFactory = HibernateUtils.buildSessionFactory(HIBERNATE_CFG_FILE, User.class,
                AddressDataSet.class, PhoneDataSet.class);
        var sessionManager = new SessionManagerHibernate(sessionFactory);

        // User persistence layer.
        UserDao userDao = new UserDaoHibernate(sessionManager);

        HwCache<String, User> cache = new MyCache<>();
        // FIX: listener was declared with the raw type HwListener; the
        // parameterized declaration keeps addListener/removeListener type-safe.
        HwListener<String, User> listener = new HwListener<String, User>() {
            @Override
            public void notify(String key, User value, String action) {
                logger.info("Action Performed KEY: {} VALUE: {} ACTON: {}", key, value, action);
            }
        };
        cache.addListener(listener);

        // The code below must keep using userDao through the cached service.
        var dbServiceUser = new DbServiceUserImpl(userDao, cache);

        User newUser = new User(0, "dbServiceUser", 13);
        AddressDataSet addressDataSet = new AddressDataSet();
        addressDataSet.setStreet("USER STREET");
        newUser.setAddressDataSet(addressDataSet);
        PhoneDataSet phoneA = new PhoneDataSet();
        phoneA.setUser(newUser);
        phoneA.setNumber("+700909090");
        PhoneDataSet phoneB = new PhoneDataSet();
        phoneB.setUser(newUser);
        phoneB.setNumber("+99999999");
        List<PhoneDataSet> userPhones = List.of(phoneA, phoneB);
        newUser.setPhoneDataSets(userPhones);

        var id = dbServiceUser.saveUser(newUser);

        // First read: served from the cache populated by saveUser.
        Optional<User> user = dbServiceUser.getUser(id);
        user.ifPresentOrElse(
                crUser -> logger.info("FROM CACHE created user, name:{}", crUser.getName()),
                () -> logger.info("user was not created")
        );

        // Evict, forcing the next read to hit the database.
        // NOTE(review): assumes the service caches under key "1" (i.e. the
        // generated id is 1 on a fresh schema) — confirm against
        // DbServiceUserImpl's key scheme.
        cache.remove("1");
        user = dbServiceUser.getUser(id);
        user.ifPresentOrElse(
                crUser -> logger.info("FROM DB created user, name:{}", crUser.getName()),
                () -> logger.info("user was not created")
        );

        // The DB read above re-populated the cache.
        user = dbServiceUser.getUser(id);
        user.ifPresentOrElse(
                crUser -> logger.info("AGAIN FROM CACHE created user, name:{}", crUser.getName()),
                () -> logger.info("user was not created")
        );
        cache.removeListener(listener);
    }

    // NOTE: currently unused — kept as a reference for DataSource-based
    // Flyway migration (MigrationsExecutorFlyway is used above instead).
    private static void flywayMigrations(DataSource dataSource) {
        logger.info("db migration started...");
        var flyway = Flyway.configure()
                .dataSource(dataSource)
                .locations("classpath:/db/migration")
                .load();
        flyway.migrate();
        logger.info("db migration finished.");
        logger.info("***");
    }
}
|
zhangjun1992/rtt-bsp-hpm6750evkmini | libraries/hpm_sdk/drivers/inc/hpm_cam_drv.h | <reponame>zhangjun1992/rtt-bsp-hpm6750evkmini
/*
* Copyright (c) 2021 hpmicro
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
#ifndef HPM_CAM_DRV_H
#define HPM_CAM_DRV_H
#include "hpm_common.h"
#include "hpm_display_common.h"
#include "hpm_cam_regs.h"
/**
* @brief CAM driver APIs
* @defgroup cam_interface CAM driver APIs
* @ingroup io_interfaces
* @{
*/
/**
* @brief CAM data store mode
*/
#define CAM_DATA_STORE_MODE_NORMAL (0U)
#define CAM_DATA_STORE_MODE_Y_UV_PLANES (CAM_CR1_STORAGE_MODE_SET(1))
#define CAM_DATA_STORE_MODE_Y_ONLY (CAM_CR1_STORAGE_MODE_SET(2))
#define CAM_DATA_STORE_MODE_BINARY (CAM_CR1_STORAGE_MODE_SET(3))
/**
* @brief CAM sensor bitwidth
*/
#define CAM_SENSOR_BITWIDTH_8BITS (CAM_CR1_SENSOR_BIT_WIDTH_SET(0))
#define CAM_SENSOR_BITWIDTH_10BITS (CAM_CR1_SENSOR_BIT_WIDTH_SET(1))
/**
* @brief CAM IRQ flag
*/
#define CAM_IRQ_UNSUPPORTED_CONFIGURATION (CAM_INT_EN_ERR_CL_BWID_CFG_INT_EN_MASK)
#define CAM_IRQ_HIST_CALCULATION_DONE (CAM_INT_EN_HIST_DONE_INT_EN_MASK)
#define CAM_IRQ_HRESPONSE_ERROR (CAM_INT_EN_HRESP_ERR_EN_MASK)
#define CAM_IRQ_END_OF_FRAME (CAM_INT_EN_EOF_INT_EN_MASK)
#define CAM_IRQ_STAT_FIFO_OVERRUN (CAM_INT_EN_SF_OR_INT_EN_MASK)
#define CAM_IRQ_RX_FIFO_OVERRUN (CAM_INT_EN_RF_OR_INT_EN_MASK)
#define CAM_IRQ_STAT_FIFO_DMA_TRANSFER_DONE (CAM_INT_EN_SFF_DMA_DONE_INT_EN_MASK)
#define CAM_IRQ_FB2_DMA_TRANSFER_DONE (CAM_INT_EN_FB2_DMA_DONE_INT_EN_MASK)
#define CAM_IRQ_FB1_DMA_TRANSFER_DONE (CAM_INT_EN_FB1_DMA_DONE_INT_EN_MASK)
#define CAM_IRQ_START_OF_FRAME (CAM_INT_EN_SOF_INT_EN_MASK)
/**
* @brief CAM status flag
*/
#define CAM_STATUS_UNSUPPORTED_CONFIGURATION (CAM_STA_ERR_CL_BWID_CFG_MASK)
#define CAM_STATUS_HIST_CALCULATION_DONE (CAM_STA_HIST_DONE_MASK)
#define CAM_STATUS_STAT_FIFO_OVERRUN (CAM_STA_SF_OR_INT_MASK)
#define CAM_STATUS_RX_FIFO_OVERRUN (CAM_STA_RF_OR_INT_MASK)
#define CAM_STATUS_STAT_FIFO_DMA_TRANSFER_DONE (CAM_STA_DMA_TSF_DONE_SFF_MASK)
#define CAM_STATUS_STAT_FIFO_FULL (CAM_STA_STATFF_INT_MASK)
#define CAM_STATUS_FB2_DMA_TRANSFER_DONE (CAM_STA_DMA_TSF_DONE_FB2_MASK)
#define CAM_STATUS_FB1_DMA_TRANSFER_DONE (CAM_STA_DMA_TSF_DONE_FB1_MASK)
#define CAM_STATUS_RX_FIFO_FULL (CAM_STA_RXFF_INT_MASK)
#define CAM_STATUS_END_OF_FRAME (CAM_STA_EOF_INT_MASK)
#define CAM_STATUS_START_OF_FRAME (CAM_STA_SOF_INT_MASK)
#define CAM_STATUS_HRESPONSE_ERROR (CAM_STA_HRESP_ERR_INT_MASK)
#define CAM_STATUS_DATA_READY (CAM_STA_DRDY_MASK)
/**
* @brief CAM input color format
*/
#define CAM_COLOR_FORMAT_RGB888 (CAM_CR1_COLOR_FORMATS_SET(2))
#define CAM_COLOR_FORMAT_RGB565 (CAM_CR1_COLOR_FORMATS_SET(4))
#define CAM_COLOR_FORMAT_RGB555 (CAM_CR1_COLOR_FORMATS_SET(6))
#define CAM_COLOR_FORMAT_YCBCR422 (CAM_CR1_COLOR_FORMATS_SET(7))
#define CAM_COLOR_FORMAT_YUV444 (CAM_CR1_COLOR_FORMATS_SET(8))
/**
 * @brief CAM config
 */
typedef struct {
    uint32_t width;                      /**< frame width in pixels */
    uint32_t height;                     /**< frame height in pixels */
    bool color_ext;                      /**< color extension flag (name-based assumption — confirm against CR1) */
    bool data_pack_msb;                  /**< MSB-first data packing (name-based assumption — confirm against CR1) */
    bool enable_buffer2;                 /**< enable the second frame buffer */
    uint8_t data_store_mode;             /**< one of CAM_DATA_STORE_MODE_x */
    uint8_t color_format;                /**< one of CAM_COLOR_FORMAT_x */
    uint8_t sensor_bitwidth;             /**< one of CAM_SENSOR_BITWIDTH_x */
    uint32_t buffer1;                    /**< frame buffer 1 address */
    uint32_t buffer2;                    /**< frame buffer 2 address (used when enable_buffer2 is set) */
    display_yuv2rgb_config_t csc_config; /**< YUV-to-RGB color space conversion settings */
} cam_config_t;
#ifdef __cplusplus
extern "C" {
#endif
/**
 * @brief CAM set high and low limits of color key
 *
 * Both values are field-encoded through the CAM_CLRKEY_x_LIMIT_SET macros
 * before being written to the CLRKEY registers.
 *
 * @param [in] ptr CAM base address
 * @param [in] high color key high limits
 * @param [in] low color key low limits
 */
static inline void cam_set_color_key(CAM_Type *ptr, uint32_t high, uint32_t low)
{
    /* low limit is programmed first, then the high limit */
    ptr->CLRKEY_LOW = CAM_CLRKEY_LOW_LIMIT_SET(low);
    ptr->CLRKEY_HIGH = CAM_CLRKEY_HIGH_LIMIT_SET(high);
}
/**
* @brief CAM get default config
*
* @param [in] ptr CAM base address
* @param [out] config cam_config_t
* @param [in] pixel_format display_pixel_format_t
*/
void cam_get_default_config(CAM_Type *ptr, cam_config_t *config, display_pixel_format_t pixel_format);
/**
* @brief CAM init
*
* @param [in] ptr CAM base address
* @param [in] config cam_config_t
*
* @retval hpm_stat_t status_invalid_argument or status_success
*/
hpm_stat_t cam_init(CAM_Type *ptr, cam_config_t *config);
/**
* @brief CAM start
*
* @param [in] ptr CAM base address
*/
void cam_start(CAM_Type *ptr);
/**
* @brief CAM stop
*
* @param [in] ptr CAM base address
*/
void cam_stop(CAM_Type *ptr);
/**
* @}
*
*/
#ifdef __cplusplus
}
#endif
#endif /* HPM_CAM_DRV_H */
|
StableAgOH/solution-codes | luogu/P1424.cpp | <filename>luogu/P1424.cpp
#include <iostream>
#include <chrono>
using namespace std;
//==========================================
typedef long long ll;

// Luogu P1424: a fish swims 250 km per working day (Mon-Fri) and rests on
// Saturday (6) and Sunday (7). Returns the total distance swum over n days
// starting on weekday x (1 = Monday ... 7 = Sunday).
//
// Extracted as a pure helper for clarity/testability. The loop counter is
// ll (not int, as before) so an n larger than INT_MAX cannot overflow it.
static ll totalDistance(int x, ll n)
{
    ll sum = 0;
    for(ll i = 0; i < n; i++)
    {
        if(x != 6 && x != 7) sum += 250;  // weekdays only
        x = (x == 7) ? 1 : x + 1;         // next weekday, Sunday wraps to Monday
    }
    return sum;
}

signed main(signed argc, char const *argv[])
{
#ifdef LOCAL
    freopen("in.in", "r", stdin);
    freopen("out.out", "w", stdout);
    auto c1 = chrono::high_resolution_clock::now();
#endif
    ios::sync_with_stdio(false);
    cin.tie(nullptr);
    //======================================
    int x;   // starting weekday (1..7)
    ll n;    // number of days
    cin >> x >> n;
    cout << totalDistance(x, n) << endl;
    //======================================
#ifdef LOCAL
    auto c2 = chrono::high_resolution_clock::now();
    cerr << "Time Used:" << chrono::duration_cast<chrono::milliseconds>(c2 - c1).count() << "ms" << endl;
#endif
    return 0;
}
|
SlimKatLegacy/android_external_chromium_org | net/quic/quic_received_packet_manager.h | // Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Manages the packet entropy calculation for both sent and received packets
// for a connection.
#ifndef NET_QUIC_QUIC_RECEIVED_PACKET_MANAGER_H_
#define NET_QUIC_QUIC_RECEIVED_PACKET_MANAGER_H_
#include "net/quic/congestion_control/receive_algorithm_interface.h"
#include "net/quic/quic_framer.h"
#include "net/quic/quic_protocol.h"
namespace net {
namespace test {
class QuicConnectionPeer;
class QuicReceivedPacketManagerPeer;
} // namespace test
// Records all received packets by a connection and tracks their entropy.
// Also calculates the correct entropy for the framer when it truncates an ack
// frame being serialized.
class NET_EXPORT_PRIVATE QuicReceivedPacketManager :
    public QuicReceivedEntropyHashCalculatorInterface {
 public:
  explicit QuicReceivedPacketManager(CongestionFeedbackType congestion_type);
  virtual ~QuicReceivedPacketManager();

  // Updates the internal state concerning which packets have been received.
  // bytes: the packet size in bytes including Quic Headers.
  // header: the packet header.
  // receipt_time: the arrival time of the packet.
  // revived: true if the packet was lost and then recovered with help of a
  // FEC packet.
  void RecordPacketReceived(QuicByteCount bytes,
                            const QuicPacketHeader& header,
                            QuicTime receipt_time,
                            bool revived);

  // Checks whether |sequence_number| is missing and less than largest observed.
  bool IsMissing(QuicPacketSequenceNumber sequence_number);

  // Checks if we're still waiting for the packet with |sequence_number|.
  bool IsAwaitingPacket(QuicPacketSequenceNumber sequence_number);

  // Update the |received_info| for an outgoing ack.
  void UpdateReceivedPacketInfo(ReceivedPacketInfo* received_info,
                                QuicTime approximate_now);

  // Should be called before sending an ACK packet, to decide if we need
  // to attach a QuicCongestionFeedbackFrame block.
  // Returns false if no QuicCongestionFeedbackFrame block is needed.
  // Otherwise fills in feedback and returns true.
  virtual bool GenerateCongestionFeedback(
      QuicCongestionFeedbackFrame* feedback);

  // QuicReceivedEntropyHashCalculatorInterface
  // Called by QuicFramer, when the outgoing ack gets truncated, to recalculate
  // the received entropy hash for the truncated ack frame.
  virtual QuicPacketEntropyHash EntropyHash(
      QuicPacketSequenceNumber sequence_number) const OVERRIDE;

  // These two are called by OnAckFrame.
  //
  // Updates internal state based on |incoming_ack.received_info|.
  void UpdatePacketInformationReceivedByPeer(const QuicAckFrame& incoming_ack);
  // Updates internal state based on |incoming_ack.sent_info|.
  void UpdatePacketInformationSentByPeer(const QuicAckFrame& incoming_ack);

  // Returns whether the peer is missing packets.
  bool HasMissingPackets();

  // Returns true when there are new missing packets to be reported within 3
  // packets of the largest observed.
  bool HasNewMissingPackets();

  // Accessors for the peer state tracked below (see the matching member
  // comments in the private section for exact semantics).
  QuicPacketSequenceNumber peer_largest_observed_packet() {
    return peer_largest_observed_packet_;
  }
  QuicPacketSequenceNumber least_packet_awaited_by_peer() {
    return least_packet_awaited_by_peer_;
  }
  QuicPacketSequenceNumber peer_least_packet_awaiting_ack() {
    return peer_least_packet_awaiting_ack_;
  }

 private:
  friend class test::QuicConnectionPeer;
  friend class test::QuicReceivedPacketManagerPeer;

  // Maps received sequence numbers to the entropy hash of that packet.
  typedef std::map<QuicPacketSequenceNumber,
                   QuicPacketEntropyHash> ReceivedEntropyMap;

  // Record the received entropy hash against |sequence_number|.
  void RecordPacketEntropyHash(QuicPacketSequenceNumber sequence_number,
                               QuicPacketEntropyHash entropy_hash);

  // Recalculate the entropy hash and clears old packet entropies,
  // now that the sender sent us the |entropy_hash| for packets up to,
  // but not including, |peer_least_unacked|.
  void RecalculateEntropyHash(QuicPacketSequenceNumber peer_least_unacked,
                              QuicPacketEntropyHash entropy_hash);

  // Deletes all missing packets before least unacked. The connection won't
  // process any packets with sequence number before |least_unacked| that it
  // received after this call. Returns true if there were missing packets before
  // |least_unacked| unacked, false otherwise.
  bool DontWaitForPacketsBefore(QuicPacketSequenceNumber least_unacked);

  // TODO(satyamshekhar): Can be optimized using an interval set like data
  // structure.
  // Map of received sequence numbers to their corresponding entropy.
  // Every received packet has an entry, and packets without the entropy bit set
  // have an entropy value of 0.
  // TODO(ianswett): When the entropy flag is off, the entropy should not be 0.
  ReceivedEntropyMap packets_entropy_;

  // Cumulative hash of entropy of all received packets.
  QuicPacketEntropyHash packets_entropy_hash_;

  // The largest sequence number cleared by RecalculateEntropyHash.
  // Received entropy cannot be calculated for numbers less than it.
  QuicPacketSequenceNumber largest_sequence_number_;

  // Track some peer state so we can do less bookkeeping.
  // Largest sequence number that the peer has observed. Mostly received,
  // missing in case of truncated acks.
  QuicPacketSequenceNumber peer_largest_observed_packet_;

  // Least sequence number which the peer is still waiting for.
  QuicPacketSequenceNumber least_packet_awaited_by_peer_;

  // Least sequence number of the packet sent by the peer for which it
  // hasn't received an ack.
  QuicPacketSequenceNumber peer_least_packet_awaiting_ack_;

  // Received packet information used to produce acks.
  ReceivedPacketInfo received_info_;

  // The time we received the largest_observed sequence number, or zero if
  // no sequence numbers have been received since UpdateReceivedPacketInfo.
  // Needed for calculating delta_time_largest_observed.
  QuicTime time_largest_observed_;

  // Congestion-feedback generator selected by the constructor argument.
  scoped_ptr<ReceiveAlgorithmInterface> receive_algorithm_;
};
} // namespace net
#endif // NET_QUIC_QUIC_RECEIVED_PACKET_MANAGER_H_
|
cping/LGame | Java/old/AWT_ver/src/org/loon/framework/javase/game/core/store/RecordStore.java | <filename>Java/old/AWT_ver/src/org/loon/framework/javase/game/core/store/RecordStore.java
package org.loon.framework.javase.game.core.store;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Vector;
import org.loon.framework.javase.game.utils.FileUtils;
import org.loon.framework.javase.game.utils.StringUtils;
/**
* Copyright 2008 - 2010
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
* @project loonframework
* @author chenpeng
* @email:<EMAIL>
* @version 0.1
*/
/**
 * A simple file-backed record store modelled after the J2ME RMS API.
 * Records are serialized to "lgame-record-&lt;name&gt;.store" files in the
 * working directory; opened stores are cached in a static map.
 */
public class RecordStore {

    // 默认文件标识 (default file-name prefix)
    final static public String STORE_FILENAME_PREFIX = "lgame-record-";

    // 默认后缀 (default file-name suffix)
    final static public String STORE_FILENAME_SUFFIX = ".store";

    // 已查询数据缓冲 (cache of already-opened stores, keyed by store name)
    private static HashMap<String, RecordStore> stores = new HashMap<String, RecordStore>(10);

    // Store name, without prefix/suffix.
    private String name;

    // Number of open handles; the store counts as open while > 0.
    private int openCount = 0;

    // File backing this store.
    private File storeFile;

    /**
     * Opens (and caches) the store with the given name, creating the backing
     * file if necessary.
     */
    public static RecordStore openRecordStore(String recordStoreName,
            boolean createIfNecessary) throws RecordStoreException {
        synchronized (stores) {
            RecordStore store = (RecordStore) stores.get(recordStoreName);
            if (store == null) {
                store = new RecordStore(recordStoreName);
                stores.put(recordStoreName, store);
            }
            store.openRecordStore(createIfNecessary);
            return store;
        }
    }

    private RecordStore(String name) {
        this.name = name;
    }

    /** Deletes every record-store file found in the working directory. */
    public static synchronized void deleteStores() {
        String[] stores = listRecordStores();
        if (stores == null) {
            // Fix: listRecordStores() returns null when no store files exist
            // (or on IO failure); the previous code dereferenced it and NPE'd.
            return;
        }
        for (int i = 0; i < stores.length; i++) {
            String store = stores[i];
            try {
                deleteRecordStore(store);
            } catch (RecordStoreException e) {
                // best-effort bulk delete: skip stores that fail to delete
            }
        }
    }

    /**
     * Deletes the backing file of the named store (and evicts it from the
     * cache). Returns true if a matching store file was found and deleted.
     */
    public static synchronized boolean deleteRecordStore(String recordStoreName)
            throws RecordStoreException {
        try {
            List<?> list = FileUtils.getFiles(".", STORE_FILENAME_SUFFIX
                    .substring(1));
            if (list != null) {
                int size = list.size();
                String ret, name;
                for (int i = 0; i < size; i++) {
                    name = (String) list.get(i);
                    // Strip path, prefix and suffix to recover the store name.
                    ret = FileUtils.getFileName(((String) list.get(i)));
                    ret = StringUtils.replaceIgnoreCase(ret.substring(0, ret
                            .length()
                            - STORE_FILENAME_SUFFIX.length()),
                            STORE_FILENAME_PREFIX, "");
                    if (recordStoreName.equals(ret)) {
                        stores.remove(ret);
                        File file = new File(name);
                        file.delete();
                        return true;
                    }
                }
            } else {
                return false;
            }
        } catch (IOException e) {
            // best-effort: a failed directory scan is treated as "not found"
        } catch (Exception e) {
            // Fix: added the missing space before "deleteRecordStore".
            throw new RuntimeException("Store " + recordStoreName
                    + " deleteRecordStore Exception!");
        }
        return false;
    }

    /**
     * Lists the names of all record stores in the working directory, or null
     * when none exist (or the directory cannot be scanned).
     */
    public static synchronized String[] listRecordStores() {
        String[] result = null;
        try {
            List<?> list = FileUtils.getFiles(".", STORE_FILENAME_SUFFIX
                    .substring(1));
            if (list != null) {
                int size = list.size();
                result = new String[size];
                if (size == 0) {
                    result = null;
                } else {
                    String ret;
                    for (int i = 0; i < size; i++) {
                        ret = FileUtils.getFileName(((String) list.get(i)));
                        result[i] = StringUtils.replaceIgnoreCase(ret
                                .substring(0, ret.length()
                                        - STORE_FILENAME_SUFFIX.length()),
                                STORE_FILENAME_PREFIX, "");
                    }
                }
            }
        } catch (IOException e) {
            // best-effort: scan failure reported as "no stores" (null)
        }
        return result;
    }

    /**
     * Opens this store instance: loads the backing file if present, otherwise
     * (or when unreadable and createIfNecessary is set) starts empty and
     * writes a fresh file. Re-entrant: further opens only bump the counter.
     */
    public synchronized void openRecordStore(boolean createIfNecessary)
            throws RecordStoreException {
        if (openCount > 0) {
            openCount++;
            return;
        }
        storeFile = new File(STORE_FILENAME_PREFIX + name
                + STORE_FILENAME_SUFFIX);
        boolean readOk = false;
        if (storeFile.exists()) {
            try {
                readFromDisk();
                readOk = true;
            } catch (Exception ex) {
                if (!createIfNecessary)
                    throw new RecordStoreException("Store " + name
                            + " could not read/find backing file " + storeFile);
            }
        }
        if (!readOk) {
            clear();
            writeToDisk();
        }
        openCount = 1;
    }

    /** One serialized record: its id plus raw data bytes. */
    public static class RecordItem implements Serializable {

        private static final long serialVersionUID = 1L;

        int id;

        byte[] data;

        RecordItem() {
        }

        RecordItem(int id, byte[] data) {
            this.id = id;
            this.data = data;
        }
    }

    // Next id handed out by addRecord.
    private int nextRecordId = 1;

    // In-memory copy of all records, in insertion order.
    private Vector<RecordItem> records = new Vector<RecordItem>();

    // File-format header written at the start of every store file.
    public static final String HEADER = "RecordStore:1";

    /** Resets the in-memory state to an empty store. */
    private synchronized void clear() {
        nextRecordId = 1;
        records = new Vector<RecordItem>();
    }

    /**
     * Loads header, next id and all records from the backing file.
     * Fix: streams are now closed in a finally block; previously they leaked
     * whenever reading failed part-way.
     */
    private synchronized void readFromDisk() throws RecordStoreException {
        FileInputStream fis = null;
        ObjectInputStream os = null;
        try {
            fis = new FileInputStream(storeFile);
            os = new ObjectInputStream(fis);
            String header = os.readUTF();
            if (!header.equals(HEADER)) {
                extracted(header);
            }
            nextRecordId = os.readInt();
            int size = os.readInt();
            records = new Vector<RecordItem>();
            for (int i = 0; i < size; i++) {
                RecordItem ri = (RecordItem) os.readObject();
                records.addElement(ri);
            }
        } catch (Exception e) {
            throw new RecordStoreException("ERROR reading store from disk ("
                    + storeFile + "): " + e);
        } finally {
            try {
                if (os != null) {
                    os.close();
                } else if (fis != null) {
                    // ObjectInputStream construction failed; close the raw stream.
                    fis.close();
                }
            } catch (IOException ignored) {
                // best-effort close on the cleanup path
            }
        }
    }

    /** Throws for a store file whose header does not match {@link #HEADER}. */
    private void extracted(String header) throws RecordStoreException {
        throw new RecordStoreException("Store file header mismatch: "
                + header);
    }

    /**
     * Serializes header, next id and all records to the backing file.
     * Fix: streams are now closed in a finally block; previously they leaked
     * whenever writing failed part-way.
     */
    private synchronized void writeToDisk() throws RecordStoreException {
        FileOutputStream fos = null;
        ObjectOutputStream oos = null;
        try {
            fos = new FileOutputStream(storeFile);
            oos = new ObjectOutputStream(fos);
            oos.writeUTF(HEADER);
            oos.writeInt(nextRecordId);
            oos.writeInt(records.size());
            for (int i = 0; i < records.size(); i++) {
                RecordItem ri = (RecordItem) records.elementAt(i);
                oos.writeObject(ri);
            }
        } catch (Exception e) {
            throw new RecordStoreException("Error writing store to disk: " + e);
        } finally {
            try {
                if (oos != null) {
                    oos.close();
                } else if (fos != null) {
                    // ObjectOutputStream construction failed; close the raw stream.
                    fos.close();
                }
            } catch (IOException ignored) {
                // best-effort close on the cleanup path
            }
        }
    }

    /** Throws RecordStoreNotOpenException unless the store is currently open. */
    public void checkOpen(String message) throws RecordStoreNotOpenException {
        if (openCount <= 0) {
            throw new RecordStoreNotOpenException(message);
        }
    }

    /**
     * Appends a new record (a copy of data[offset..offset+numBytes)), persists
     * the store, and returns the new record's id.
     */
    public synchronized int addRecord(byte[] data, int offset, int numBytes)
            throws RecordStoreException {
        checkOpen("addRecord");
        byte buf[] = new byte[numBytes];
        if (numBytes != 0) {
            System.arraycopy(data, offset, buf, 0, numBytes);
        }
        RecordItem ri = new RecordItem(nextRecordId++, buf);
        records.addElement(ri);
        writeToDisk();
        return ri.id;
    }

    /** Decrements the open count; the store stays cached in {@code stores}. */
    public synchronized void closeRecordStore()
            throws RecordStoreNotOpenException, RecordStoreException {
        checkOpen("closeRecordStore");
        openCount--;
    }

    /** Removes the record with the given id and persists the store. */
    public synchronized void deleteRecord(int recordId)
            throws RecordStoreNotOpenException, RecordStoreException {
        checkOpen("deleteRecord");
        for (int i = 0; i < records.size(); i++) {
            RecordItem ri = (RecordItem) records.elementAt(i);
            if (ri.id == recordId) {
                records.removeElementAt(i);
                writeToDisk();
                return;
            }
        }
        throw new InvalidRecordIDException("deleteRecord " + recordId);
    }

    /**
     * Returns an enumeration over record ids in storage order. Filtering,
     * sorting and live updates are not implemented and are rejected.
     */
    public synchronized RecordEnumeration enumerateRecords(RecordFilter filter,
            RecordComparator comparator, boolean keepUpdated)
            throws RecordStoreNotOpenException {
        checkOpen("enumerateRecords");
        if (filter != null)
            throw new RuntimeException(
                    "enumerateRecords with RecordFilter Unimplemented");
        if (comparator != null)
            throw new RuntimeException(
                    "enumerateRecords with RecordComparator Unimplemented");
        if (keepUpdated)
            throw new RuntimeException(
                    "enumerateRecords with keepUpdated Unimplemented");
        return new RecordEnumerationImpl();
    }

    /**
     * Forward-only id enumeration; most of the RecordEnumeration contract
     * (previous/rebuild/record data access) is intentionally a stub.
     */
    class RecordEnumerationImpl implements RecordEnumeration {

        RecordEnumerationImpl() {
            nextIndex = 0;
        }

        // Index of the next record to be returned.
        private int nextIndex;

        public boolean hasNextElement() {
            synchronized (RecordStore.this) {
                return nextIndex < records.size();
            }
        }

        public int nextRecordId() throws InvalidRecordIDException {
            synchronized (RecordStore.this) {
                if (nextIndex >= records.size()) {
                    throw new InvalidRecordIDException("nextRecordId at index "
                            + nextIndex + "/" + records.size());
                }
                RecordItem ri = (RecordItem) records.elementAt(nextIndex);
                nextIndex++;
                return ri.id;
            }
        }

        public void destroy() {
        }

        public boolean hasPreviousElement() {
            return false;
        }

        public boolean isKeptUpdated() {
            return false;
        }

        public void keepUpdated(boolean keepUpdated) {
        }

        public byte[] nextRecord() throws InvalidRecordIDException,
                RecordStoreNotOpenException, RecordStoreException {
            return null;
        }

        public int numRecords() {
            return 0;
        }

        public byte[] previousRecord() throws InvalidRecordIDException,
                RecordStoreNotOpenException, RecordStoreException {
            return null;
        }

        public int previousRecordId() throws InvalidRecordIDException {
            return 0;
        }

        public void rebuild() {
        }

        public void reset() {
        }
    }

    /** Returns this store's name. */
    public String getName() throws RecordStoreNotOpenException {
        checkOpen("getName");
        return name;
    }

    /** Returns the number of records currently in the store. */
    public synchronized int getNumRecords() throws RecordStoreNotOpenException {
        checkOpen("getNumRecords");
        return records.size();
    }

    /** Linear lookup of a record by id; null when absent. */
    private RecordItem getRecordItem(int id) {
        Enumeration<RecordItem> rs = records.elements();
        while (rs.hasMoreElements()) {
            RecordItem ri = rs.nextElement();
            if (ri.id == id) {
                return ri;
            }
        }
        return null;
    }

    /**
     * Returns the raw data of the record with the given id (the internal
     * array, not a copy).
     */
    public synchronized byte[] getRecord(int recordId)
            throws RecordStoreNotOpenException, RecordStoreException {
        checkOpen("getRecord");
        RecordItem ri = getRecordItem(recordId);
        if (ri == null) {
            throw new InvalidRecordIDException("record " + recordId
                    + " not found");
        }
        return ri.data;
    }

    /**
     * Copies the record's data into buffer at offset and returns the number
     * of bytes copied.
     */
    public synchronized int getRecord(int recordId, byte[] buffer, int offset)
            throws RecordStoreNotOpenException, InvalidRecordIDException,
            RecordStoreException {
        checkOpen("getRecord");
        RecordItem ri = getRecordItem(recordId);
        if (ri == null) {
            throw new InvalidRecordIDException("record " + recordId
                    + " not found");
        }
        byte[] data = ri.data;
        int recordSize = data.length;
        System.arraycopy(data, 0, buffer, offset, recordSize);
        return recordSize;
    }

    /** Returns the size in bytes of the record with the given id. */
    public synchronized int getRecordSize(int recordId)
            throws RecordStoreNotOpenException, InvalidRecordIDException,
            RecordStoreException {
        checkOpen("getRecordSize");
        RecordItem ri = getRecordItem(recordId);
        if (ri == null) {
            throw new InvalidRecordIDException("record " + recordId
                    + " not found");
        }
        byte[] data = ri.data;
        if (data == null) {
            throw new InvalidRecordIDException();
        }
        return data.length;
    }

    /**
     * Returns the id the next added record will receive.
     * NOTE(review): unlike the RMS API, this does not check that the store is
     * open — kept as-is to avoid changing caller-visible behavior.
     */
    public synchronized int getNextRecordID()
            throws RecordStoreNotOpenException, RecordStoreException {
        return nextRecordId;
    }

    /**
     * Returns the total number of record data bytes currently held.
     * Fix: the previous implementation called getRecordSize(nextRecordId);
     * since no record with the next (not yet assigned) id ever exists, it
     * unconditionally threw RecordStoreNotOpenException.
     */
    public synchronized int getSize() throws RecordStoreNotOpenException {
        checkOpen("getSize");
        int size = 0;
        for (int i = 0; i < records.size(); i++) {
            size += records.elementAt(i).data.length;
        }
        return size;
    }

    /**
     * Replaces the record's data with a copy of
     * newData[offset..offset+numBytes) and persists the store.
     */
    public synchronized void setRecord(int recordId, byte[] newData,
            int offset, int numBytes) throws RecordStoreNotOpenException,
            RecordStoreException {
        checkOpen("setRecord");
        RecordItem ri = getRecordItem(recordId);
        if (ri == null) {
            throw new InvalidRecordIDException("record " + recordId
                    + " not found");
        }
        byte buf[] = new byte[numBytes];
        if (numBytes != 0) {
            System.arraycopy(newData, offset, buf, 0, numBytes);
        }
        ri.data = buf;
        writeToDisk();
    }
}
|
xzyaoi/AID | components/cmd/pkg/utilities/rotatefilehook.go | <filename>components/cmd/pkg/utilities/rotatefilehook.go
// Copyright (c) 2020 <NAME> & AICAMP.CO.,LTD
//
// This software is released under the MIT License.
// https://opensource.org/licenses/MIT
package utilities
import (
"io"
"github.com/sirupsen/logrus"
"gopkg.in/natefinch/lumberjack.v2"
)
// RotateFileConfig sets the config for rotate file.
// The size/backup/age fields are handed directly to lumberjack.Logger
// (per lumberjack's documentation MaxSize is megabytes and MaxAge is days).
type RotateFileConfig struct {
	Filename   string           // log file path to write and rotate
	MaxSize    int              // max file size before rotation
	MaxBackups int              // max number of rotated files to keep
	MaxAge     int              // max age of rotated files
	Level      logrus.Level     // most verbose level the hook fires for (see Levels)
	Formatter  logrus.Formatter // formats each entry before it is written
}
// RotateFileHook is the hook for logrus: it forwards formatted entries to a
// size-rotated log file.
type RotateFileHook struct {
	Config    RotateFileConfig // settings supplied at construction time
	logWriter io.Writer        // rotating writer (a *lumberjack.Logger, set by NewRotateFileHook)
}
// NewRotateFileHook creates a RotateFileHook whose entries are written to a
// lumberjack-managed, size-rotated file described by config.
// The error result is always nil; it exists for interface symmetry.
func NewRotateFileHook(config RotateFileConfig) (logrus.Hook, error) {
	writer := &lumberjack.Logger{
		Filename:   config.Filename,
		MaxSize:    config.MaxSize,
		MaxBackups: config.MaxBackups,
		MaxAge:     config.MaxAge,
	}
	return &RotateFileHook{Config: config, logWriter: writer}, nil
}
// Levels reports which log levels this hook handles: every level up to and
// including the configured Config.Level (logrus.AllLevels is ordered from
// most severe to most verbose).
func (hook *RotateFileHook) Levels() []logrus.Level {
	return logrus.AllLevels[:hook.Config.Level+1]
}
// Fire formats the entry with the configured Formatter and writes it to the
// rotating log file.
// Fix: the write error was previously discarded; it is now returned so logrus
// can report hook failures instead of losing log lines silently.
func (hook *RotateFileHook) Fire(entry *logrus.Entry) (err error) {
	b, err := hook.Config.Formatter.Format(entry)
	if err != nil {
		return err
	}
	_, err = hook.logWriter.Write(b)
	return err
}
|
desspert/mogupro | mogupro/game/include/Sound/Source.h | #pragma once
#define NOMINMAX
#pragma comment(lib,"OpenAL32.lib")
#include <OpenAL/al.h>
#include <OpenAL/alc.h>
#include <Sound/Buffer.h>
namespace Sound
{
	// Owning wrapper around a single OpenAL source handle.
	// Non-copyable because the class owns the underlying AL object.
	class Source
	{
	public:
		Source();
		// NOTE(review): presumably releases the AL source (e.g. via unbind()) —
		// confirm in Source.cpp.
		~Source();

		Source(const Source&) = delete;
		Source& operator=(const Source&) = delete;

		// Detaches any bound buffer and deletes the AL source.
		// NOTE(review): id_ is not reset to 0 afterwards, so a second call
		// would pass a stale handle to alDeleteSources — confirm intended usage.
		void unbind()
		{
			unbindBuffer();
			if (id_ != 0)
				alDeleteSources(1, &id_);
		}

		// Raw OpenAL source id.
		ALuint id() const;

		// Attach / detach a sound buffer to this source.
		void bindBuffer(const Buffer& buffer) const;
		void unbindBuffer() const;

		// Playback control.
		void play() const;
		void stop() const;
		void pause() const;

		// Per-source parameters: volume, pitch, loop flag.
		void gain(const float value) const;
		void pitch(const float value) const;
		void looping(const bool value) const;

		// 3D placement; the position()/soundPosition() distinction is defined
		// in the .cpp — document once confirmed.
		void position(const float x, const float y, const float z) const;
		void direction(const float x, const float y, const float z);
		void soundPosition(const float x, const float y, const float z);

		bool isPlaying() const;

		// Current playback offset (presumably seconds — confirm in the .cpp).
		float currentTime() const;

	private:
		ALuint id_; // OpenAL source handle (0 = none)
	};
}
|
Warm-men/caizi-ad | src/view/dataBoard/echart/option.js | import echarts from 'echarts'
// Builds the ECharts option object for a data-board trend line chart.
//   data         - series points: [{ date: 'YYYY-MM-DD', value, ... }]
//   isRange      - true when item.value is a percentage string like '12%'
//   tooltipExtra - metric label shown in tooltips and point labels
export default (data = [], isRange = false, tooltipExtra) => {
  // X-axis labels: keep only the 'MM-DD' tail of each date string.
  const name = data.map((item) => item.date.slice(-5))
  return {
    grid: {
      containLabel: true,
      top: 40,
      left: 20,
      right: 20,
      bottom: 20
    },
    xAxis: {
      data: name,
      axisLabel: {
        textStyle: {
          color: '#999'
        }
      },
      axisTick: {
        show: false
      },
      axisLine: {
        lineStyle: {
          color: '#ddd'
        }
      }
    },
    yAxis: {
      type: 'value',
      axisLine: {
        show: false
      },
      show: true,
      axisLabel: {
        show: true,
        interval: 'auto',
        color: '#999',
        // Percentage metrics get a '%' suffix on the axis ticks.
        formatter: isRange ? '{value}%' : '{value}'
      },
      axisTick: {
        show: false
      },
      splitLine: {
        lineStyle: {
          color: 'rgba(153,153,153,0.1)'
        }
      }
    },
    tooltip: {
      trigger: 'axis',
      axisPointer: {
        type: 'none'
      },
      padding: 0,
      backgroundColor: 'transparent',
      // Custom tooltip: date + metric label + value; for the click-to-buy
      // rate metric ('产品点击购买率') it also shows the hit/visit counts.
      formatter (info) {
        const { data } = info[0]
        const text = isRange ? data.value + '%' : data.value
        const rangeText = tooltipExtra === '产品点击购买率' && data.visitProductClientNum
          ? `(${data.hitBuyProductClientNum}/${data.visitProductClientNum})`
          : ``
        return `<div class="data-plane-tooltip">
          ${data.date}<br/>
          ${tooltipExtra}:<br/> ${text || 0} ${rangeText}
        </div>`
      }
    },
    series: [
      {
        name: '使用人数',
        type: 'line',
        itemStyle: {
          normal: {
            color: '#CFBB9A',
            label: {
              show: true,
              position: 'top',
              // Point labels mirror the tooltip: percentages keep their '%'
              // and the click-to-buy rate also shows hit/visit counts.
              // Zero/negative values render no label.
              formatter: (item) => {
                if (item.value > 0) {
                  const rangeText = tooltipExtra === '产品点击购买率' && item.data.visitProductClientNum
                    ? `${item.value}%\n (${item.data.hitBuyProductClientNum}/${item.data.visitProductClientNum})`
                    : `${item.value}%`
                  return isRange ? rangeText : `${item.value}`
                } else {
                  return ''
                }
              }
            }
          }
        },
        label: { show: true, color: '#CFBB9A' },
        // Strip the trailing '%' so the chart receives plain numeric strings;
        // the rest of each item is kept for the formatters above.
        // NOTE(review): assumes percentage values always end with '%'.
        data: data.map((item) => {
          const value = isRange ? item.value.slice(0, -1) : item.value
          return {...item, value}
        }),
        areaStyle: {
          // Vertical fade under the line, from gold down to white.
          color: new echarts.graphic.LinearGradient(0, 0, 0, 1, [{
            offset: 0,
            color: '#ebe2d4'
          }, {
            offset: 1,
            color: '#fff'
          }])
        }
      }
    ]
  }
}
|
xincao9/Zebra | zebra-client/src/main/java/com/dianping/zebra/util/JDBCUtils.java | /*
* Copyright (c) 2011-2018, <NAME>. All Rights Reserved.
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dianping.zebra.util;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;
/**
* Dozer @ 2015-02
* <EMAIL>
* http://www.dozer.cc
*/
/**
 * Static helpers for JDBC error aggregation, MySQL read-only detection and
 * quiet resource cleanup.
 */
public class JDBCUtils {

    // ERROR 1290 (HY000): The MySQL server is running with the --read-only option so it cannot execute this statement
    private final static int READ_ONLY_ERROR_CODE = 1290;

    private final static String READ_ONLY_ERROR_MESSAGE = "read-only";

    /**
     * Collapses the collected SQLExceptions into one SQLException whose
     * message contains every stack trace. No-op when the list is null/empty.
     */
    public static void throwSQLExceptionIfNeeded(List<SQLException> exceptions) throws SQLException {
        if (exceptions == null || exceptions.isEmpty()) {
            return;
        }
        StringWriter buffer = new StringWriter();
        try (PrintWriter out = new PrintWriter(buffer)) {
            for (SQLException exception : exceptions) {
                exception.printStackTrace(out);
            }
        }
        throw new SQLException(buffer.toString());
    }

    /**
     * Rethrows {@code e}, wrapping it with a friendlier message when it is
     * the MySQL read-only error (write datasource under maintenance).
     */
    public static void throwWrappedSQLException(SQLException e) throws SQLException {
        if (isReadOnlyException(e)) {
            throw new SQLException("Write dataSource is currently in the maintaining stage. ", e);
        } else {
            throw e;
        }
    }

    /**
     * True when {@code e} is MySQL error 1290 with "read-only" in its message.
     * Fix: null-safe — SQLExceptions constructed without a message no longer
     * cause a NullPointerException here.
     */
    public static boolean isReadOnlyException(SQLException e) {
        return e.getErrorCode() == READ_ONLY_ERROR_CODE && e.getMessage() != null
                && e.getMessage().contains(READ_ONLY_ERROR_MESSAGE);
    }

    /** Closes statement then connection, swallowing any close failure. */
    public static void closeAll(Statement statement, Connection connection) {
        closeAll(null, statement, connection);
    }

    /** Closes result set, statement and connection in that order, swallowing close failures. */
    public static void closeAll(ResultSet resultSet, Statement statement, Connection connection) {
        closeQuietly(resultSet);
        closeQuietly(statement);
        closeQuietly(connection);
    }

    /** Best-effort close: null-safe and exception-silent (cleanup path only). */
    private static void closeQuietly(AutoCloseable resource) {
        if (resource == null) {
            return;
        }
        try {
            resource.close();
        } catch (Exception ignored) {
            // deliberately ignored: a failing close must not mask the original error
        }
    }
}
|
gmsmirnov/gsmirnov | chapter_2-001/src/test/java/ru/job4j/trie/ReaderTest.java | <reponame>gmsmirnov/gsmirnov
package ru.job4j.trie;
import org.junit.Before;
import org.junit.Test;
import static org.hamcrest.collection.IsArrayContainingInAnyOrder.arrayContainingInAnyOrder;
import static org.junit.Assert.*;
/**
 * Tests for {@code Reader}: a trie index over "text.txt" is rebuilt before
 * each test via {@link #init()}.
 */
public class ReaderTest {

    // Reader under test; re-created (and its trie rebuilt) before every test.
    Reader reader;

    @Before
    public void init() {
        this.reader = new Reader();
        this.reader.createTree("text.txt");
    }

    // The test below was disabled because it fails on TravisCI (likely
    // environment-dependent word offsets); kept for local reference.
    /* TravisCi fail this test
    @Test
    public void testTrieWithTextFile() {
        System.out.println("size: " + this.reader.trie.getIndexes("size"));
        System.out.println("buffer: " + this.reader.trie.getIndexes("buffer"));
        System.out.println("reads: " + this.reader.trie.getIndexes("reads"));
        System.out.println("from: " + this.reader.trie.getIndexes("from"));
        System.out.println("reading: " + this.reader.trie.getIndexes("reading"));
        System.out.println("characters: " + this.reader.trie.getIndexes("characters"));
        System.out.println("arrays: " + this.reader.trie.getIndexes("arrays"));
        System.out.println("the: " + this.reader.trie.getIndexes("the"));
        Integer[] expectedSize = {153, 191};
        Integer[] expectedBuffer = {146};
        Integer[] expectedReads = {1};
        Integer[] expectedFrom = {12};
        Integer[] expectedReading = {99};
        Integer[] expectedCharacters = {53, 110};
        Integer[] expectedArrays = {122};
        Integer[] expectedThe = {85, 142, 179, 209, 354};
        assertThat(this.reader.trie.getIndexes("size").toArray(), arrayContainingInAnyOrder(expectedSize));
        assertThat(this.reader.trie.getIndexes("buffer").toArray(), arrayContainingInAnyOrder(expectedBuffer));
        assertThat(this.reader.trie.getIndexes("reads").toArray(), arrayContainingInAnyOrder(expectedReads));
        assertThat(this.reader.trie.getIndexes("from").toArray(), arrayContainingInAnyOrder(expectedFrom));
        assertThat(this.reader.trie.getIndexes("reading").toArray(), arrayContainingInAnyOrder(expectedReading));
        assertThat(this.reader.trie.getIndexes("characters").toArray(), arrayContainingInAnyOrder(expectedCharacters));
        assertThat(this.reader.trie.getIndexes("arrays").toArray(), arrayContainingInAnyOrder(expectedArrays));
        assertThat(this.reader.trie.getIndexes("the").toArray(), arrayContainingInAnyOrder(expectedThe));
    }*/
}
Thalhammer/ttl | ttl/include/ttl/config.h | #pragma once
#include <string>
#include <unordered_map>
#include <fstream>
#include <functional>
#include <memory>
#include "string_util.h"
namespace ttl {
/* A very simple configuration parser */
class config {
public:
	class transaction;
	/* Callback invoked for "include <file>" lines: receives the include
	 * argument and the active transaction; returning false aborts the read. */
	typedef std::function<bool(const std::string&, transaction&)> include_handler_t;
private:
	// Committed key/value entries.
	std::unordered_map<std::string, std::string> entries;
	// Description of the last error reported by a read* call.
	std::string error;
	include_handler_t include_handler;
public:
	/* Read config entries from a std::istream. */
	inline bool read(std::istream& input);
	/* Read config entries from a std::istream, using an existing transaction. */
	inline bool read(transaction& trans, std::istream& input);
	/* Read config entries from a file. Returns false (with errormsg() set)
	 * if the file cannot be opened or parsing fails. */
	bool read_file(const std::string& file) {
		std::ifstream stream(file, std::ios::in);
		if (!stream.is_open() || !stream.good()) {
			this->error = "Could not open file";
			return false;
		}
		return read(stream);
	}
	/* Read config entries from a file, using an existing transaction. */
	bool read_file(transaction& trans, const std::string& file) {
		std::ifstream stream(file, std::ios::in);
		if (!stream.is_open() || !stream.good()) {
			this->error = "Could not open file";
			return false;
		}
		return read(trans, stream);
	}
	/* Try to get a config entry. Returns true if the key existed; if not
	 * found, value remains unchanged. */
	bool get(const std::string& key, std::string& value) const {
		auto it = entries.find(key);
		if (it == entries.end())
			return false;
		value = it->second;
		return true;
	}
	/* Try to get a config entry and return an alternative value if not found. */
	std::string get_optional(const std::string& key, const std::string& alternative) const {
		std::string res;
		if (get(key, res)) return res;
		return alternative;
	}
	/* Get a string description of the last error encountered by read. */
	const std::string& errormsg() const {
		return this->error;
	}
	/* Set a config entry. Existing entries get overwritten. */
	void set(const std::string& key, const std::string& value) {
		entries[key]=value;
	}
	// Read-only iteration and size, forwarded to the underlying entry map.
	auto cbegin() const -> decltype(entries.cbegin()) { return entries.cbegin(); }
	auto cend() const -> decltype(entries.cend()) { return entries.cend(); }
	auto begin() const -> decltype(cbegin()) { return cbegin(); }
	auto end() const -> decltype(cend()) { return cend(); }
	auto size() const -> decltype(entries.size()) { return entries.size(); }
	/* Start a transaction bound to this config. */
	inline transaction begin_transaction();
	/* Set a function to handle includes. */
	void set_include_handler(include_handler_t fn) { include_handler = fn; }
	/* Get the current include handler. */
	include_handler_t get_include_handler() const { return include_handler; }
};
/* A config transaction: buffers changes and publishes them to the parent
 * config atomically on commit(). */
class config::transaction {
	// Entries changed within this transaction; not visible in cfg until commit().
	std::unordered_map<std::string, std::string> tentries;
	// Parent config this transaction was started on.
	config& cfg;
	friend class config;
	// Only config::begin_transaction() may create instances.
	explicit transaction(config& pcfg)
		: cfg(pcfg)
	{
	}
public:
	/* Rollback all changes. */
	void rollback() {
		tentries.clear();
	}
	/* Commit all changed entries to the parent config. */
	void commit() {
		if (!changed())
			return;
		// Insert untouched entries (changed keys already in tentries win).
		for (auto& e : cfg) {
			if (!tentries.count(e.first)) {
				tentries.insert(e);
			}
		}
		// And exchange the parent's map with the merged one.
		cfg.entries.swap(tentries);
		tentries.clear();
	}
	/* Number of changes in this transaction. */
	auto changes() const -> decltype(tentries.size()) { return tentries.size(); }
	/* changes() != 0 */
	bool changed() const { return changes() != 0; }
	/* Try to get a value. Returns the transaction's copy if changed or checks parent config if not. */
	bool get(const std::string& key, std::string& value) const {
		auto it = tentries.find(key);
		if (it == tentries.end()) {
			// Not changed here: fall back to the original config.
			return cfg.get(key, value);
		}
		value = it->second;
		return true;
	}
	/* Try to get a value or return alternative if not found. */
	std::string get_optional(const std::string& key, const std::string& alternative) const {
		std::string res;
		if (get(key, res)) return res;
		return alternative;
	}
	/* Update a value in transaction. This will not affect parent config until commit is called. */
	void set(const std::string& key, const std::string& value) {
		tentries[key]=value;
	}
};
bool config::read(std::istream& input) {
auto trans = this->begin_transaction();
if (this->read(trans, input))
{
trans.commit();
return true;
}
return false;
}
/* Parse "key = value" lines from input into trans. Lines starting with '#'
 * and blank lines are skipped; "include <file>" lines are delegated to the
 * include handler (if installed). Returns false on the first error. */
bool config::read(transaction& trans, std::istream& input) {
	static const std::string include_start = "include ";
	size_t cnt_line = 0;
	for (std::string line; std::getline(input, line);) {
		cnt_line++;
		string::trim(line);
		// Skip comments and blank lines.
		if (line.empty() || line.substr(0, 1) == "#")
			continue;
		if (string::starts_with(line, include_start)) {
			// Include directives are ignored unless a handler is installed.
			if (!include_handler)
				continue;
			auto file = line.substr(include_start.size());
			string::trim(file);
			if (!include_handler(file, trans))
				return false;
			continue;
		}
		auto parts = string::split(line, std::string("="), 2);
		if (parts.size() != 2) {
			this->error = "Invalid entry on line " + std::to_string(cnt_line) + ": Missing \"=\"";
			return false;
		}
		string::trim(parts[0]);
		string::trim(parts[1]);
		trans.set(parts[0], parts[1]);
	}
	return true;
}
/* Start a transaction bound to this config instance (the transaction's
 * constructor is private; this is the only way to obtain one). */
config::transaction config::begin_transaction()
{
	return transaction(*this);
}
}
#ifdef TTL_OLD_NAMESPACE
namespace thalhammer = ttl;
#endif
|
potassco/gringo | libgringo/gringo/output/aggregates.hh | <filename>libgringo/gringo/output/aggregates.hh
// {{{ MIT License
// Copyright 2017 <NAME>
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
// }}}
#ifndef _GRINGO_OUTPUT_AGGREGATES_HH
#define _GRINGO_OUTPUT_AGGREGATES_HH
#include <gringo/terms.hh>
#include <gringo/domain.hh>
#include <gringo/intervals.hh>
#include <gringo/output/literal.hh>
namespace Gringo { namespace Output {
struct TupleId;

// Elements of a body aggregate: tuple id paired with the formula under which
// the tuple holds (presumably its derivation condition — see literal.hh).
using BodyAggregateElements = UniqueVec<std::pair<TupleId, Formula>, HashFirst<TupleId>, EqualToFirst<TupleId>>;
// Pairs of (head literal, condition clause) for head aggregate elements.
using HeadFormula = std::vector<std::pair<LiteralId, ClauseId>>;
using HeadAggregateElements = UniqueVec<std::pair<TupleId, HeadFormula>, HashFirst<TupleId>, EqualToFirst<TupleId>>;
// Aggregate bounds represented as a set of symbol intervals.
using DisjunctiveBounds = IntervalSet<Symbol>;
using Interval = DisjunctiveBounds::Interval;
using ConjunctiveBounds = std::vector<std::pair<Interval, Interval>>;
using PlainBounds = std::vector<std::pair<Relation, Symbol>>;
using LitValVec = std::vector<std::pair<LiteralId, Symbol>>;
using LitUintVec = std::vector<std::pair<LiteralId, unsigned>>;

// Static analysis of a body aggregate: classifies its monotonicity, the sign
// of its element weights, and whether it is trivially true/false over the
// given value range.
struct AggregateAnalyzer {
    enum Monotonicity { MONOTONE, ANTIMONOTONE, CONVEX, NONMONOTONE };
    enum WeightType { MIXED, POSITIVE, NEGATIVE };
    enum Truth { True, False, Open };
    using ConjunctiveBounds = std::vector<std::pair<Interval, Interval>>;

    AggregateAnalyzer(DomainData &data, NAF naf, DisjunctiveBounds const &disjunctiveBounds, AggregateFunction fun, Interval range, BodyAggregateElements const &elems);
    // Debug dump of the analysis results.
    void print(std::ostream &out);
    LitValVec translateElems(DomainData &data, Translator &x, AggregateFunction fun, BodyAggregateElements const &bdElems, bool incomplete);

    Monotonicity monotonicity;
    WeightType weightType;
    Truth truth;
    ConjunctiveBounds bounds;
    Interval range;
};
// Return the neutral element of an aggregate function, i.e. its value over
// an empty element set: 0 for count/sum, #sup for min, #inf for max.
inline Symbol getNeutral(AggregateFunction fun) {
    if (fun == AggregateFunction::MIN) {
        return Symbol::createSup();
    }
    if (fun == AggregateFunction::MAX) {
        return Symbol::createInf();
    }
    if (fun == AggregateFunction::COUNT ||
        fun == AggregateFunction::SUMP ||
        fun == AggregateFunction::SUM) {
        return Symbol::createNum(0);
    }
    // Unreachable for a valid AggregateFunction value.
    assert(false);
    return {};
}
// Obtain a single literal equivalent (or, if equivalence is false, merely
// implied — TODO confirm exact direction) to a clause / formula / aggregate.
LiteralId getEqualClause(DomainData &data, Translator &x, std::pair<Id_t, Id_t> clause, bool conjunctive, bool equivalence);
LiteralId getEqualFormula(DomainData &data, Translator &x, Formula const &formula, bool conjunctive, bool equivalence);
LiteralId getEqualAggregate(DomainData &data, Translator &x, AggregateFunction fun, NAF naf, DisjunctiveBounds const &bounds, Interval const &range, BodyAggregateElements const &bdElems, bool recursive);

// Translator for #min/#max body aggregates (isMin selects which).
class MinMaxTranslator {
public:
    LiteralId translate(DomainData &data, Translator &x, AggregateAnalyzer &res, bool isMin, LitValVec &&elems, bool incomplete);
};

// Translator for sum-like aggregates; literals are bucketed by weight sign
// and by whether they are recursive or stratified.
struct SumTranslator {
    SumTranslator() { }
    void addLiteral(DomainData &data, LiteralId const &lit, Potassco::Weight_t weight, bool recursive);
    void translate(DomainData &data, Translator &x, LiteralId const &head, Potassco::Weight_t bound, LitUintVec const &litsPosRec, LitUintVec const &litsNegRec, LitUintVec const &litsPosStrat, LitUintVec const &litsNegStrat);
    LiteralId translate(DomainData &data, Translator &x, ConjunctiveBounds &bounds, bool convex, bool invert);

    LitUintVec litsPosRec;
    LitUintVec litsNegRec;
    LitUintVec litsPosStrat;
    LitUintVec litsNegStrat;
};
} } // namespace Output Gringo
#endif // _GRINGO_OUTPUT_AGGREGATES_HH
|
SanHime/bfassist | bfassist/standalone/monitoring/realtimeevent.py | <reponame>SanHime/bfassist
#############################################################################
#
#
# Module of BFA that manages server statistics in realtime
#
#
#############################################################################
""" This module implements the real-time logging of in-game statistics specifically that of bf events.
Dependencies:
None
note:: Author(s): Mitch last-check: 08.07.2021 """
# noinspection PyUnusedLocal
def __preload__(forClient: bool = True):
    """BFA module pre-load hook; this module needs no preparation."""
    pass


# noinspection PyUnusedLocal
def __postload__(forClient: bool = True):
    """BFA module post-load hook; this module needs no finalisation."""
    pass
class RealTimeEvent:
    """ Any event in the event log is supposed to be instantiated as real time event and saved in the event dictionary
    of the corresponding real time round.

        :param eventType:   Type of the event.
        :param parameters:  All parameters of this event and their values.

    note::  Author(s): Mitch """

    def __init__(self, eventType: str, parameters: dict = None):
        self.eventType = eventType
        # Fall back to a fresh dict so instances never share parameter state.
        self.parameters = parameters if parameters else {}
|
lafer-m/containerd | pkg/dacscri/server/list.go | package server
import (
"context"
criapi "github.com/containerd/containerd/api/services/dacscri/v1"
"github.com/containerd/containerd/namespaces"
)
// List returns every container known to the underlying containerd client in
// the "default" namespace, converted to the CRI API wire representation.
func (c *service) List(ctx context.Context, req *criapi.ListContainersRequest) (*criapi.ListContainersResponse, error) {
	ctx = namespaces.WithNamespace(ctx, "default")
	ctx, cancel := context.WithCancel(ctx)
	defer cancel()

	resp, err := c.client.ContainerService().List(ctx, req.Filters...)
	if err != nil {
		return nil, err
	}

	containers := make([]criapi.Container, 0, len(resp))
	for _, ct := range resp {
		containers = append(containers, criapi.Container{
			ID:     ct.ID,
			Labels: ct.Labels,
			Image:  ct.Image,
			Runtime: &criapi.Container_Runtime{
				Name:    ct.Runtime.Name,
				Options: ct.Runtime.Options,
			},
			Spec:      ct.Spec,
			CreatedAt: ct.CreatedAt,
			UpdatedAt: ct.UpdatedAt,
		})
	}

	return &criapi.ListContainersResponse{Containers: containers}, nil
}
|
dj3vande/tendra | lexi/src/adt/group.c | <filename>lexi/src/adt/group.c
/*
* Copyright 2002-2011, The TenDRA Project.
* Copyright 1997, United Kingdom Secretary of State for Defence.
*
* See doc/copyright/ for the full copyright terms.
*/
#include <assert.h>
#include <string.h>
#include <shared/string.h>
#include <shared/xalloc.h>
#include <shared/error.h>
#include <adt/group.h>
#include <adt/zone.h>
#include <adt/trie.h>
#include "../ast.h"
/*
 * Expand the escaped group-definition string s into the 256-entry membership
 * table o (o[c] nonzero iff byte value c belongs to the group).
 *
 * Syntax: "[name]" / "[^name]" merges (the complement of) a previously
 * defined group; "\x" adds the byte given by find_escape(); any other byte
 * adds itself. Errors are reported via error() and the construct is skipped.
 *
 * NOTE(review): s is truncated in place when a "[...]" reference is closed
 * (*e = '\0' is never undone), so callers must not reuse s afterwards.
 */
static void
unescape_string(struct zone *z, int *o, char *s)
{
	const char *p;
	unsigned int i;

	/* Start from the empty group. */
	for (i = 0; i <= 255; i++) {
		o[i] = 0;
	}

	/* TODO: this is strikingly similar to add_string(). fold both into the .lxi file? */
	for (p = s; *p != '\0'; p++) {
		struct group_name *gn;
		char *e;
		int not;
		int c;

		switch (*p) {
		case '[':
			not = 0;

			p++;
			if (*p == '^') {
				not = 1;
				p++;
			}

			e = strchr(p, ']');
			if (e == NULL || *p == '\0') {
				error(ERR_SERIOUS, "Unterminated group");
				break;
			}

			/* Terminate the name so it can be looked up directly. */
			*e = '\0';

			gn = find_group(z, p);
			if (gn == NULL) {
				error(ERR_SERIOUS, "Unknown group '%s'", p);
				break;
			}

			/* merge in the named group */
			for (i = 0; i <= 255; i++) {
				o[i] |= not ? !in_group(gn->g, i) : in_group(gn->g, i);
			}

			/* Resume scanning after the closing bracket. */
			p = e;
			break;

		case '\\':
			p++;
			if (*p == '\0') {
				error(ERR_SERIOUS, "Missing escape");
				break;
			}

			c = find_escape(*p);
			if (c == EOF) {
				error(ERR_SERIOUS, "Groups may not contain EOF");
				break;
			}

			i = (unsigned char) c;
			o[i] = 1;
			break;

		default:
			i = (unsigned char ) *p;
			o[i] = 1;
			break;
		}
	}
}
/*
* CREATE A NEW GROUP
*
* This routine creates a new character group with definition s, which is
* a null-terminated string of escaped values as per find_escape(). It may
* not contain "\e" (for EOF), since EOF is not permitted in groups.
*
* s may be NULL to indicate the empty group.
*/
struct group_name *
make_group(struct zone *z, char *name, char *defn)
{
	struct group_name *gn;

	assert(z != NULL);
	assert(name != NULL);

	/*
	 * Treat an empty definition the same as no definition at all.
	 * Bug fix: the NULL check must come first — the original called
	 * strlen(defn) unconditionally, crashing for defn == NULL even
	 * though the code below explicitly supports a NULL definition.
	 */
	if (defn != NULL && *defn == '\0') {
		defn = NULL;
	}

	/* Reject duplicate names within the same zone (renamed from gn to
	 * avoid shadowing the outer variable). */
	{
		struct group_name *existing;

		existing = find_group(z, name);
		if (existing != NULL && existing->z == z) {
			error(ERR_SERIOUS, "Group '%s' already defined for this zone", name);
			return NULL;
		}
	}

	/* Link the new name into the zone's group-name list. */
	gn = xmalloc(sizeof *gn);
	gn->name = name;
	gn->z = z;
	gn->next = z->groups;
	z->groups = gn;

	{
		struct group *new;
		struct group *old;

		new = xmalloc(sizeof *new);
		new->next = NULL;

		unescape_string(z, new->defn, defn == NULL ? "" : defn);

		/* Share storage with an existing identical group, if any. */
		old = tree_find_group(z->ast, new);
		if (old != NULL) {
			xfree(new);
			gn->g = old;
		} else {
			gn->g = new;
			new->next = z->ast->groups;
			z->ast->groups = new;
		}
	}

	return gn;
}
/*
* IS A LETTER IN A GROUP?
*/
int
in_group(struct group *g, char c)
{
	unsigned char uc = (unsigned char) c;

	assert(g != NULL);

	/* Membership is a direct lookup in the 256-entry definition table. */
	return g->defn[uc];
}
/*
* IS A GROUP EMPTY?
*/
int
is_group_empty(struct group *g)
{
	unsigned int i;

	assert(g != NULL);

	/* Empty iff no byte value is a member. */
	i = 0;
	while (i <= 255) {
		if (g->defn[i] != 0) {
			return 0;
		}
		i++;
	}

	return 1;
}
/*
* ARE TWO GROUPS EQUIVALENT?
*/
int
is_group_equal(struct group *a, struct group *b)
{
	unsigned int i;

	assert(a != NULL);
	assert(b != NULL);

	/* Equal iff membership agrees for every byte value (entries are
	 * compared as booleans, not by their exact integer values). */
	for (i = 0; i <= 255; i++) {
		int in_a = (a->defn[i] != 0);
		int in_b = (b->defn[i] != 0);

		if (in_a != in_b) {
			return 0;
		}
	}

	return 1;
}
/*
* FIND A GROUP BY NAME
*
* This searches within the list of groups specific to a zone and its parent
* zones, rather than in all groups globally.
*/
struct group_name *
find_group(const struct zone *z, const char *name)
{
	const struct zone *cur;

	/* Walk from the given zone up through its ancestors, returning the
	 * first name match (iterative form of the original recursion). */
	for (cur = z; cur != NULL; cur = cur->up) {
		struct group_name *p;

		for (p = cur->groups; p != NULL; p = p->next) {
			if (streq(name, p->name)) {
				return p;
			}
		}
	}

	return NULL;
}
|
koshacool/api | client/src/redux/answers/answersReducer.js | import {
CREATE_ANSWER, REMOVE_ANSWER, UPDATE_ANSWER,
UPDATE_ANSWERS,
} from './types';
// Initial slice state: no answers loaded yet.
const initialState = {
  answers: [],
};
const updpateAnswers = (answers, answer) => {
const newArr = [...answers];
const index = newArr.findIndex(({ _id }) => _id === answer._id);
if (index < 0) {
newArr.push(answer);
return newArr;
}
newArr[index] = answer;
return newArr;
};
// Apply updpateAnswers once per incoming answer, threading the result.
const updateManyAnswers = (answers, newAnswers) => {
  let result = answers;
  for (const answer of newAnswers) {
    result = updpateAnswers(result, answer);
  }
  return result;
};
// Reducer for the answers slice. State shape: { answers: Answer[] }.
// Every branch returns a new state object; unknown actions pass through.
const answersReducer = (state = initialState, { type, payload }) => {
  if (type === CREATE_ANSWER) {
    return { ...state, answers: [...state.answers, payload.answer] };
  }
  if (type === UPDATE_ANSWER) {
    return { ...state, answers: updpateAnswers(state.answers, payload.answer) };
  }
  if (type === UPDATE_ANSWERS) {
    return { ...state, answers: updateManyAnswers(state.answers, payload.answers) };
  }
  if (type === REMOVE_ANSWER) {
    return { ...state, answers: state.answers.filter(({ _id }) => _id !== payload.id) };
  }
  return state;
};

export default answersReducer;
|
minseongg/dynamatic | Regression_test/examples/simple_example_1/src/simple_example_1.cpp | #include "simple_example_1.h"
// Counts to 100 in a loop and stores the result in a[0].
// Deliberately naive: this is a Dynamatic/HLS regression kernel, so the loop
// must stay as-is (do not fold it to a constant).
void simple_example_1(inout_int_t a[100]){
	int x = 0;
	for (int i = 0; i < 100; ++i){
		x++;
	}
	a[0] = x;
}
#define AMOUNT_OF_TEST 1

// Test driver: initialises the inputs and invokes the kernel once per test.
// NOTE(review): b and c are initialised but never consumed — confirm whether
// they are placeholders shared with the other regression examples.
int main(void){
	in_int_t a[AMOUNT_OF_TEST][100];
	in_int_t b[AMOUNT_OF_TEST][100];
	in_int_t c[AMOUNT_OF_TEST];

	for(int i = 0; i < AMOUNT_OF_TEST; ++i){
		c[i] = 3;
		for(int j = 0; j < 100; ++j){
			a[i][j] = j;
			b[i][j] = 99 - j;
		}
	}

	for(int i = 0; i < AMOUNT_OF_TEST; ++i){
		// NOTE(review): always passes a[0], not a[i] — harmless while
		// AMOUNT_OF_TEST == 1, but looks like it was meant to be a[i].
		simple_example_1(a[0]);
	}
} |
pa28/iir1 | docs/search/all_7.js | <reponame>pa28/iir1
// Doxygen-generated search index (term -> documentation anchors).
// Do not edit by hand; it is regenerated by the documentation build.
var searchData=
[
  ['butterworth_41',['Butterworth',['../namespaceIir_1_1Butterworth.html',1,'Iir']]],
  ['chebyshevi_42',['ChebyshevI',['../namespaceIir_1_1ChebyshevI.html',1,'Iir']]],
  ['chebyshevii_43',['ChebyshevII',['../namespaceIir_1_1ChebyshevII.html',1,'Iir']]],
  ['custom_44',['Custom',['../namespaceIir_1_1Custom.html',1,'Iir']]],
  ['iir_45',['Iir',['../namespaceIir.html',1,'']]],
  ['iirnotch_46',['IIRNotch',['../structIir_1_1RBJ_1_1IIRNotch.html',1,'Iir::RBJ']]],
  ['ismatchedpair_47',['isMatchedPair',['../structIir_1_1ComplexPair.html#a79d121320c8b042faebcc0364398b071',1,'Iir::ComplexPair']]]
];
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.