repo_name stringlengths 6 101 | path stringlengths 4 300 | text stringlengths 7 1.31M |
|---|---|---|
yeepio/yeep | admin_ui/app/role/RoleEditPage.js | <reponame>yeepio/yeep
import React from 'react';
import PropTypes from 'prop-types';
import { Link } from '@reach/router';
import useDocumentTitle from '@rehooks/document-title';
import { useDispatch, useSelector } from 'react-redux';
import find from 'lodash/find';
import RoleDeleteModal from './RoleDeleteModal';
import RoleForm from './RoleForm';
import {
updateRole,
setRoleUpdateRecord,
clearRoleUpdateForm,
setRoleDeleteRecord,
showRoleDeleteForm,
} from './roleStore';
import LoadingIndicator from '../../components/LoadingIndicator';
import yeepClient from '../yeepClient';
import { gotoRoleListPage } from './roleURL';
/**
 * Fetch a single role's details from the Yeep API.
 *
 * Issues a fresh cancel token keyed on this function and redeems (cancels)
 * any previous in-flight request for the same operation, so only the latest
 * lookup can resolve.
 *
 * @param {Object} props
 * @param {string} props.id - ID of the role to retrieve
 * @returns {Promise<Object>} resolves with the role object
 */
async function getRoleInfo({ id }) {
  const api = await yeepClient.api();
  const data = await api.role.info({
    id,
    cancelToken: yeepClient.issueCancelTokenAndRedeemPrevious(getRoleInfo),
  });
  return data.role;
}
/**
 * Edit page for a single role.
 *
 * Populates the role-update form either from the already-fetched role list in
 * the redux store or, failing that, from the API, then renders the edit form
 * together with the delete-confirmation modal.
 */
const RoleEditPage = ({ roleId }) => {
  const records = useSelector((state) => state.role.list.records);
  const errors = useSelector((state) => state.role.update.errors);
  const record = useSelector((state) => state.role.update.record);
  const isSavePending = useSelector((state) => state.role.update.isSavePending);
  const dispatch = useDispatch();

  useDocumentTitle(`Edit role #${roleId}`);

  React.useEffect(() => {
    // check if role info already exists in store
    const role = find(records, (e) => e.id === roleId);
    if (role) {
      dispatch(setRoleUpdateRecord(role));
    } else {
      // role does not exist in memory - retrieve from API
      getRoleInfo({ id: roleId }).then((role) => {
        dispatch(setRoleUpdateRecord(role));
      });
    }
    // Cleanup: cancel any in-flight info request and reset the update-form
    // slice so stale data never leaks into the next edit page.
    return () => {
      yeepClient.redeemCancelToken(getRoleInfo);
      dispatch(clearRoleUpdateForm());
    };
  }, [roleId, records, dispatch]);

  // Open the delete-confirmation modal for the given role.
  const onRoleDelete = React.useCallback(
    (role) => {
      dispatch(setRoleDeleteRecord(role));
      dispatch(showRoleDeleteForm());
    },
    [dispatch]
  );

  // Persist the form; the updateRole thunk resolves with a success flag and
  // we only navigate back to the list when the save actually succeeded.
  const submitForm = React.useCallback(
    (values) => {
      dispatch(
        updateRole({
          id: roleId,
          name: values.name,
          description: values.description,
          permissions: values.permissions.map((e) => e.id),
        })
      ).then((isRoleUpdated) => {
        if (isRoleUpdated) {
          gotoRoleListPage();
        }
      });
    },
    [dispatch, roleId]
  );

  // Until the record has been loaded into the store there is nothing to edit.
  if (record.id == null) {
    return <LoadingIndicator />;
  }

  return (
    <React.Fragment>
      <RoleDeleteModal onSuccess={gotoRoleListPage} onError={(err) => console.error(err)} />
      <h1 className="font-semibold text-3xl mb-6">Edit role #{roleId}</h1>
      <RoleForm
        defaultValues={record}
        isSavePending={isSavePending}
        errors={errors}
        onCancel={gotoRoleListPage}
        onSubmit={submitForm}
        onDelete={onRoleDelete}
      />
      <Link to="/roles">Return to the list of roles</Link>
    </React.Fragment>
  );
};
// `roleId` arrives as a path parameter from @reach/router.
RoleEditPage.propTypes = {
  roleId: PropTypes.string,
};

export default RoleEditPage;
|
centraldesktop/capistrano | lib/capistrano/configuration/actions/inspect.rb | require 'capistrano/errors'
module Capistrano
  class Configuration
    module Actions
      module Inspect
        # Streams the output of +command+ from every server targeted by the
        # current task, merging all streams into one. Stdout lines are
        # printed as-is; stderr lines are prefixed with the server name.
        # Bandwidth-expensive — use with care.
        #
        # The command is invoked via #invoke_command.
        #
        # Usage:
        #
        #   desc "Run a tail on multiple log files at the same time"
        #   task :tail_fcgi, :roles => :app do
        #     stream "tail -f #{shared_path}/log/fastcgi.crash.log"
        #   end
        def stream(command, options={})
          invoke_command(command, options) do |ch, kind, out|
            if kind == :out
              puts out
            elsif kind == :err
              warn "[err :: #{ch[:server]}] #{out}"
            end
          end
        end

        # Runs +command+ on the first server targeted by the current task
        # (via #invoke_command with :once), accumulates its stdout into a
        # string and returns it. Stderr is echoed with a server prefix.
        def capture(command, options={})
          buffer = ""
          invoke_command(command, options.merge(:once => true)) do |ch, kind, data|
            if kind == :out
              buffer << data
            elsif kind == :err
              warn "[err :: #{ch[:server]}] #{data}"
            end
          end
          buffer
        end
      end
    end
  end
end
|
Praetonus/ShaderWriter | source/ShaderWriter/BaseTypes/Sampler.cpp | <gh_stars>100-1000
/*
See LICENSE file in root folder
*/
#include "ShaderWriter/BaseTypes/Sampler.hpp"
namespace sdw
{
	// Wrap an existing sampler expression as a shader-writer Value;
	// simply forwards everything to the Value base constructor.
	Sampler::Sampler( ShaderWriter & writer
		, expr::ExprPtr expr
		, bool enabled )
		: Value{ writer, std::move( expr ), enabled }
	{
	}

	// Integral conversion always yields 0.
	// NOTE(review): looks like a placeholder to satisfy an interface that
	// expects a binding index — confirm against callers.
	Sampler::operator uint32_t()
	{
		return 0u;
	}

	// Retrieve the (cached) AST type describing a sampler.
	ast::type::TypePtr Sampler::makeType( ast::type::TypesCache & cache )
	{
		return cache.getSampler();
	}
}
|
bavardage/plottable | quicktests/quicktests/animate_line.js | <gh_stars>0
// Will receive function arguments: (svg, data, Plottable)
// Will receive function arguments: (svg, data, Plottable)
// Quicktest: renders the first 20 points of the first dataset as an
// animated, semi-transparent line chart with linear axes.
var doAnimate = true;

var xScale = new Plottable.Scale.Linear();
var xAxis = new Plottable.Axis.XAxis(xScale, "bottom");
var yScale = new Plottable.Scale.Linear();
var yAxis = new Plottable.Axis.YAxis(yScale, "left");

var lineRenderer = new Plottable.Plot.Line(data[0].slice(0, 20), xScale, yScale);
lineRenderer.project("opacity", 0.75);
lineRenderer.animate(doAnimate);

// 2x2 layout: axes on the outside, the line plot in the top-right cell.
var lineChart = new Plottable.Component.Table([
  [yAxis, lineRenderer],
  [null, xAxis]
]);
lineChart.renderTo(svg);
|
jddixon/xlCoreXml_java | src/java/org/xlattice/corexml/bind/TextBinding.java | <gh_stars>0
/* TextBinding.java */
package org.xlattice.corexml.bind;
import org.xlattice.corexml.CoreXmlException;
import org.xlattice.corexml.om.Element;
import org.xlattice.corexml.om.Node;
import org.xlattice.corexml.om.Text;
/**
* Bind a text value in an XML element to the value of the named
* field in the associated class. The field being bound to must
* be of type String.
*
* If the <code>repeats()</code> modifier is present on this Binding
* in the Mapping definition, it will be ignored. The <code>maxOccur</code>
* parameter will always be 1.
*
* @author <NAME>
*/
public class TextBinding extends Binding {
    /**
     * Create the binding.
     * @param fieldName name of the field in the associated class
     */
    public TextBinding (String fieldName) {
        // Text content has no element/attribute name of its own.
        super ("", fieldName);
    }

    // OTHER METHODS ////////////////////////////////////////////////
    /**
     * Called by recursion from the <code>Mapping.join()</code> call
     * after completing definition of the Mapping.
     * @param clazz parent in a field of which the text appears
     */
    protected void join (Class clazz) throws CoreXmlException {
        // Text bindings never repeat; silently override any repeats() set
        // in the Mapping definition (see class javadoc).
        maxOccur = 1;           // just ignore any change
        super.join(clazz);
    }

    /** @return the Join type index for a TextBinding */
    protected int joinType() {
        return TEXT;
    }

    /**
     * Use the text from an XML Text node to set a field in the
     * bound object, the field being of type String.
     *
     * @param node the Text node
     * @param o the object whose field is to be set
     * @throws IllegalArgumentException if node is not a Text node
     */
    protected void apply (Node node, Object o) throws CoreXmlException {
        checkJoinObject(o);
        if (! (node instanceof Text) )
            throw new IllegalArgumentException (
                    "binding requires Text node but found: " + node);
        setField (o, ((Text)node).getText());
    }

    /**
     * Generate a Text node from the bound String field of the object
     * and append it to the parent element's node list.
     *
     * @param parent the Element whose NodeList the Text node is added to
     * @param o the object whose value is checked
     */
    protected void generate (Node parent, Object o)
                                            throws CoreXmlException {
        checkElmJoin(parent, o)
            .addChild(new Text(getField(o)));
    }
}
|
mimifitz/IOMED | node_modules/@elastic/eui/test-env/components/datagrid/data_grid_inmemory_renderer.js | "use strict";
var _interopRequireWildcard = require("@babel/runtime/helpers/interopRequireWildcard");
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.EuiDataGridInMemoryRenderer = void 0;
var _slicedToArray2 = _interopRequireDefault(require("@babel/runtime/helpers/slicedToArray"));
var _react = _interopRequireWildcard(require("react"));
var _propTypes = _interopRequireDefault(require("prop-types"));
var _reactDom = require("react-dom");
var _react2 = require("../../services/react");
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// Stable no-op passed as `setCellProps`: off-screen in-memory cells never
// need to alter their props.
function noop() {}
// Read the rendered text of a DOM element. Browsers expose `innerText`, but
// the jsdom test environment only implements `textContent`; in that fallback
// an empty string is normalized to `undefined`.
function getElementText(element) {
  if ('innerText' in element) {
    return element.innerText;
  }
  // TS thinks element.innerText always exists, however it doesn't in jest/jsdom enviornment
  // @ts-ignore-next-line
  return element.textContent || undefined;
}
/**
 * Renders one cell's value into a hidden div and reports its text content
 * back through `onCellRender` — once on mount and again on every DOM
 * mutation observed inside the cell. (Transpiled output; do not hand-tune.)
 */
var ObservedCell = function ObservedCell(_ref) {
  var renderCellValue = _ref.renderCellValue,
      i = _ref.i,
      column = _ref.column,
      onCellRender = _ref.onCellRender,
      isExpandable = _ref.isExpandable;

  // Transpiled useState pair: `ref` holds the rendered div, filled in via
  // the callback ref below so the effect re-runs when the node appears.
  var _useState = (0, _react.useState)(),
      _useState2 = (0, _slicedToArray2.default)(_useState, 2),
      ref = _useState2[0],
      setRef = _useState2[1];

  (0, _react.useEffect)(function () {
    if (ref) {
      // this is part of React's component lifecycle, onCellRender->setState are automatically batched
      onCellRender(i, column, getElementText(ref));
      var observer = new MutationObserver(function () {
        // onMutation callbacks aren't in the component lifecycle, intentionally batch any effects
        (0, _react2.enqueueStateChange)(onCellRender.bind(null, i, column, getElementText(ref)));
      });
      observer.observe(ref, {
        characterData: true,
        subtree: true,
        attributes: true,
        childList: true
      });
      // Stop observing when the cell unmounts or any dependency changes.
      return function () {
        observer.disconnect();
      };
    }
  }, [column, i, onCellRender, ref]);
  var CellElement = renderCellValue;
  return _react.default.createElement("div", {
    ref: setRef
  }, _react.default.createElement(CellElement, {
    rowIndex: i,
    columnId: column.id,
    setCellProps: noop,
    isExpandable: isExpandable,
    isExpanded: false,
    isDetails: false
  }));
};
// Runtime prop validation for ObservedCell (transpiler-generated; the inline
// doc comments describe the column shape shared with the main data grid).
ObservedCell.propTypes = {
  renderCellValue: _propTypes.default.oneOfType([_propTypes.default.func.isRequired, _propTypes.default.func.isRequired]).isRequired,
  onCellRender: _propTypes.default.func.isRequired,
  i: _propTypes.default.number.isRequired,
  column: _propTypes.default.shape({
    /**
     * The unique identifier for this column
     */
    id: _propTypes.default.string.isRequired,

    /**
     * A `ReactNode` used when rendering the column header. When providing complicated content, please make sure to utilize CSS to respect truncation as space allows. Check the docs example.
     */
    display: _propTypes.default.node,

    /**
     * A Schema to use for the column. Built-in values are ['boolean', 'currency', 'datetime', 'numeric', 'json'] but can be expanded by defining your own #EuiDataGrid `schemaDetectors` (for in-memory detection). In general, it is advised to pass in a value here when you are sure of the schema ahead of time, so that you don't need to rely on the automatic detection.
     */
    schema: _propTypes.default.string,

    /**
     * Defaults to true. Defines whether or not the column's cells can be expanded with a popup onClick / keydown.
     */
    isExpandable: _propTypes.default.bool,

    /**
     * Whether this column's width can be changed by the user, defaults to true
     */
    isResizable: _propTypes.default.bool,

    /**
     * Initial width (in pixels) of the column
     */
    initialWidth: _propTypes.default.number,

    /**
     * Whether this column is sortable
     */
    isSortable: _propTypes.default.bool,

    /**
     * Default sort direction of the column
     */
    defaultSortDirection: _propTypes.default.oneOf(["asc", "desc"]),

    /**
     * Display name as text for column. This can be used to display column name in column selector and column sorting where `display` won't be used. If not used `id` will be shown as column name in column selector and column sorting.
     */
    displayAsText: _propTypes.default.string
  }).isRequired,
  isExpandable: _propTypes.default.bool.isRequired
};
/**
 * Renders every non-skipped cell of the grid into a detached
 * DocumentFragment via a portal, so cell text can be collected through
 * `onCellRender` for in-memory processing without touching the visible DOM.
 */
var EuiDataGridInMemoryRenderer = function EuiDataGridInMemoryRenderer(_ref2) {
  var inMemory = _ref2.inMemory,
      columns = _ref2.columns,
      rowCount = _ref2.rowCount,
      renderCellValue = _ref2.renderCellValue,
      onCellRender = _ref2.onCellRender;

  // Create the off-screen fragment exactly once per component instance
  // (useState with an initializer function).
  var _useState3 = (0, _react.useState)(function () {
    return document.createDocumentFragment();
  }),
      _useState4 = (0, _slicedToArray2.default)(_useState3, 1),
      documentFragment = _useState4[0];

  var rows = (0, _react.useMemo)(function () {
    var rows = [];

    // Transpiled loop body: build one Fragment of ObservedCells per row.
    var _loop = function _loop(i) {
      rows.push(_react.default.createElement(_react.Fragment, {
        key: i
      }, columns.map(function (column) {
        // Honor inMemory.skipColumns: those cells are never rendered.
        var skipThisColumn = inMemory.skipColumns && inMemory.skipColumns.indexOf(column.id) !== -1;

        if (skipThisColumn) {
          return null;
        }

        // Columns are expandable unless they explicitly opt out.
        var isExpandable = column.isExpandable !== undefined ? column.isExpandable : true;
        return _react.default.createElement(ObservedCell, {
          key: column.id,
          i: i,
          renderCellValue: renderCellValue,
          column: column,
          onCellRender: onCellRender,
          isExpandable: isExpandable
        });
      }).filter(function (cell) {
        return cell != null;
      })));
    };

    for (var i = 0; i < rowCount; i++) {
      _loop(i);
    }

    return rows;
  }, [rowCount, columns, inMemory.skipColumns, renderCellValue, onCellRender]);
  return (0, _reactDom.createPortal)(_react.default.createElement(_react.Fragment, null, rows), documentFragment);
};
exports.EuiDataGridInMemoryRenderer = EuiDataGridInMemoryRenderer;

// Runtime prop validation for the exported component (transpiler-generated).
EuiDataGridInMemoryRenderer.propTypes = {
  inMemory: _propTypes.default.shape({
    /**
       Given the data flow Sorting->Pagination:
       Each step can be performed by service calls or in-memory by the grid.
       However, we cannot allow any service calls after an in-memory operation.
       E.g. if Pagination requires a service call the grid cannot perform
       in-memory Sorting. This means a single value representing the
       service / in-memory boundary can be used. Thus there are four states for in-memory's level:
       * "enhancements" - no in-memory operations, but use the available data to enhance the grid
       * "pagination" - only pagination is performed in-memory
       * "sorting" - sorting & pagination is performed in-memory
     */
    level: _propTypes.default.oneOf(["enhancements", "pagination", "sorting"]).isRequired,

    /**
     * An array of column ids for the in-memory processing to skip
     */
    skipColumns: _propTypes.default.arrayOf(_propTypes.default.string.isRequired)
  }).isRequired,
  columns: _propTypes.default.arrayOf(_propTypes.default.shape({
    id: _propTypes.default.string.isRequired,
    display: _propTypes.default.node,
    schema: _propTypes.default.string,
    isExpandable: _propTypes.default.bool,
    isResizable: _propTypes.default.bool,
    initialWidth: _propTypes.default.number,
    isSortable: _propTypes.default.bool,
    defaultSortDirection: _propTypes.default.oneOf(["asc", "desc"]),
    displayAsText: _propTypes.default.string
  }).isRequired).isRequired,
  rowCount: _propTypes.default.number.isRequired,
  renderCellValue: _propTypes.default.oneOfType([_propTypes.default.func.isRequired, _propTypes.default.func.isRequired]).isRequired,
  onCellRender: _propTypes.default.func.isRequired
};
simphony/osp-core | osp/wrappers/sqlite/sqlite_session.py | """A session to connect osp-core to a SQLite backend."""
import sqlite3
import rdflib
from osp.core.ontology.cuba import rdflib_cuba
from osp.core.session.db.sql_util import EqualsCondition, AndCondition, \
JoinCondition
from osp.core.session.db.sql_wrapper_session import SqlWrapperSession
class SqliteSession(SqlWrapperSession):
    """A session to connect osp-core to a SQLite backend.

    This SQLite backend can be used to store CUDS in an SQLite database.
    """

    def __init__(self, path, check_same_thread=True, **kwargs):
        """Initialize the SqliteSession.

        Args:
            path (str): The path to the sqlite database file. Will be created
                if it doesn't exist.
            check_same_thread (bool, optional): Argument of sqlite.
                Defaults to True.
        """
        # isolation_level=None puts sqlite3 in autocommit mode; transactions
        # are driven explicitly via _init_transaction / _commit / _rollback.
        conn = sqlite3.connect(path,
                               isolation_level=None,
                               check_same_thread=check_same_thread)
        super().__init__(engine=conn, **kwargs)

    def __str__(self):
        """Convert the Session to a static string."""
        return "Sqlite Wrapper Session"

    # OVERRIDE
    def close(self):
        """Close the connection to the SQLite database."""
        self._engine.close()

    # OVERRIDE
    def _commit(self):
        """Commit the current transaction to the SQLite database."""
        self._engine.commit()

    # OVERRIDE
    def _init_transaction(self):
        """Begin an explicit transaction (required in autocommit mode)."""
        c = self._engine.cursor()
        c.execute("BEGIN;")

    # OVERRIDE
    def _rollback_transaction(self):
        """Discard all changes made since the transaction began."""
        c = self._engine.cursor()
        c.execute("ROLLBACK;")

    @staticmethod
    def _sql_list_pattern(prefix, values, join_pattern=True):
        """Transform a list of values to corresponding pattern and value dict.

        Args:
            prefix (str): The prefix to use for the pattern
            values (List[Any]): The list of values
            join_pattern (bool): Whether to join the pattern by a comma,
                defaults to True

        Returns:
            Tuple[str, Dict]: The pattern (named placeholders) and the
                value dict mapping placeholder names to values.
        """
        pattern = [":%s_%s" % (prefix, i) for i in range(len(values))]
        if join_pattern:
            pattern = ", ".join(pattern)
        values = {
            ("%s_%s" % (prefix, i)): val for i, val in enumerate(values)
        }
        return pattern, values

    # OVERRIDE
    def _db_select(self, query):
        """Execute a SELECT described by the query object; return the cursor."""
        cond_pattern, cond_values = self._get_condition_pattern(
            query.condition)
        columns = ["`%s`.`%s`" % (a, c) for a, c in query.columns]
        tables = ["`%s` AS `%s`" % (t, a)
                  for a, t in query.tables.items()]
        # Identifiers come from osp-core itself (not user input); values are
        # bound via named parameters.
        sql_pattern = "SELECT %s FROM %s WHERE %s;" % (  # nosec
            ", ".join(columns), ", ".join(tables), cond_pattern
        )
        c = self._engine.cursor()
        c.execute(sql_pattern, cond_values)
        return c

    # OVERRIDE
    def _db_create(self, table_name, columns, datatypes,
                   primary_key, generate_pk, foreign_key, indexes):
        """Create a table (and its indexes) if it does not exist yet."""
        columns = [
            c if c not in datatypes
            else "`%s` `%s`" % (c, self._to_sqlite_datatype(datatypes[c]))
            for c in columns
        ]
        constraints = []
        if primary_key:
            constraints += [
                "PRIMARY KEY(%s)" % ", ".join(
                    map(lambda x: "`%s`" % x, primary_key)
                )
            ]
        constraints += [
            "FOREIGN KEY(`%s`) REFERENCES `%s`(`%s`)" % (col, ref[0], ref[1])
            for col, ref in foreign_key.items()
        ]
        c = self._engine.cursor()
        sql = "CREATE TABLE IF NOT EXISTS `%s` (%s);" % (
            table_name,
            ", ".join(columns + constraints)
        )
        c.execute(sql)
        for index in indexes:
            sql = "CREATE INDEX IF NOT EXISTS `idx_%s_%s` ON `%s`(%s)" % (
                table_name, "_".join(index),
                table_name, ", ".join(map(lambda x: "`%s`" % x, index))
            )
            c.execute(sql)

    # OVERRIDE
    def _db_insert(self, table_name, columns, values, datatypes):
        """Insert a row; return its rowid, or None on a UNIQUE conflict."""
        val_pattern, val_values = self._sql_list_pattern("val", values)
        columns = map(lambda x: "`%s`" % x, columns)
        sql_pattern = "INSERT INTO `%s` (%s) VALUES (%s);" % (  # nosec
            table_name, ", ".join(columns), val_pattern
        )
        c = self._engine.cursor()
        try:
            c.execute(sql_pattern, val_values)
            return c.lastrowid
        except sqlite3.IntegrityError as e:
            # Duplicate rows are silently ignored; any other integrity
            # violation (e.g. foreign key) is re-raised.
            if "UNIQUE constraint failed" in str(e):
                return
            raise e

    # OVERRIDE
    def _db_update(self, table_name, columns, values, condition, datatypes):
        """Update rows matching the condition with the given column values."""
        cond_pattern, cond_values = self._get_condition_pattern(condition)
        val_pattern, val_values = self._sql_list_pattern("val", values, False)
        update_pattern = ", ".join(
            ("`%s` = %s" % (c, v) for c, v in zip(columns, val_pattern))
        )
        sql_pattern = "UPDATE `%s` SET %s WHERE %s;" % (  # nosec
            table_name, update_pattern, cond_pattern
        )
        sql_values = dict(**val_values, **cond_values)
        c = self._engine.cursor()
        c.execute(sql_pattern, sql_values)

    # OVERRIDE
    def _db_delete(self, table_name, condition):
        """Delete the rows matching the given condition."""
        cond_pattern, cond_values = self._get_condition_pattern(condition)
        sql_pattern = ("DELETE FROM `%s` WHERE %s;"  # nosec
                       % (table_name, cond_pattern))
        c = self._engine.cursor()
        c.execute(sql_pattern, cond_values)

    # OVERRIDE
    def _db_drop(self, table_name):
        """Drop the given table if it exists."""
        sql_command = (f"DROP TABLE IF EXISTS `{table_name}`")
        c = self._engine.cursor()
        c.execute(sql_command)

    # OVERRIDE
    def _get_table_names(self, prefix):
        """Return the names of all tables starting with the given prefix."""
        c = self._engine.cursor()
        sql = "SELECT name FROM sqlite_master WHERE type='table';"
        tables = c.execute(sql)
        return set([x[0] for x in tables if x[0].startswith(prefix)])

    def _get_condition_pattern(self, condition, prefix="cond"):
        """Convert the given condition.

        It should be converted to a Sqlite condition pattern
        and the corresponding values.

        Args:
            condition (Union[AndCondition, EqualsCondition, JoinCondition]):
                The Condition. None means "match everything".
            prefix (str): Placeholder-name prefix, used to keep nested
                condition parameters unique.

        Raises:
            NotImplementedError: Unknown condition type

        Returns:
            Tuple[str, Dict]: The resulting condition pattern and its values.
        """
        if condition is None:
            # "1" is always true in SQL, i.e. no filtering.
            return "1", dict()
        if isinstance(condition, EqualsCondition):
            value = condition.value
            pattern = "`%s`.`%s`=:%s_value" % (
                condition.table_name, condition.column, prefix
            )
            values = {
                "%s_value" % prefix: value
            }
            return pattern, values
        if isinstance(condition, JoinCondition):
            return f"`{condition.table_name1}`.`{condition.column1}` = " \
                f"`{condition.table_name2}`.`{condition.column2}`", {}
        if isinstance(condition, AndCondition):
            if not condition.conditions:
                return "1", dict()
            pattern = ""
            values = dict()
            for i, sub_condition in enumerate(condition.conditions):
                if pattern:
                    pattern += " AND "
                # Extend the prefix per sub-condition so placeholder names
                # stay unique across the whole (possibly nested) condition.
                sub_prefix = prefix + str(i)
                sub_pattern, sub_values = self._get_condition_pattern(
                    sub_condition, sub_prefix
                )
                pattern += sub_pattern
                values.update(sub_values)
            return pattern, values
        raise NotImplementedError(f"Unsupported condition {condition}")

    def _to_sqlite_datatype(self, rdflib_datatype):
        """Convert the given Cuds datatype to a datatype of sqlite.

        rdflib_datatype: The given cuds_object datatype.
        :type rdflib_datatype: URIRef
        :raises NotImplementedError: Unsupported datatype given.
        :return: A sqlite datatype.
        :rtype: str
        """
        if rdflib_datatype is None:
            return "TEXT"
        # "UID" marks osp-core's UUID datatype — presumably compared by its
        # string form; stored as TEXT.
        if rdflib_datatype == "UID":
            return "TEXT"
        if rdflib_datatype == rdflib.XSD.integer:
            return "INTEGER"
        if rdflib_datatype == rdflib.XSD.boolean:
            return "BOOLEAN"
        if rdflib_datatype == rdflib.XSD.float:
            return "REAL"
        if rdflib_datatype == rdflib.XSD.string:
            # Bug fix: xsd:string was previously mapped to "REAL", giving
            # string columns numeric affinity so SQLite coerces
            # numeric-looking strings to floats. Strings belong in TEXT.
            return "TEXT"
        if str(rdflib_datatype).startswith(
                str(rdflib_cuba["_datatypes/STRING-"])):
            return "TEXT"
        else:
            raise NotImplementedError(f"Unsupported data type "
                                      f"{rdflib_datatype}!")
|
SURAJGUPTA0414/javaprojects | women-empowerment-master-main-main/src/main/java/com/capgemini/exception/NoSuchRecordException.java | <reponame>SURAJGUPTA0414/javaprojects<gh_stars>0
package com.capgemini.exception;
/**
 * Thrown when a requested record cannot be found in the data store.
 */
public class NoSuchRecordException extends RuntimeException
{
	private static final long serialVersionUID = 1L;

	/** Creates the exception without a detail message. */
	public NoSuchRecordException()
	{}

	/**
	 * Creates the exception with a detail message.
	 * @param msg description of the missing record
	 */
	public NoSuchRecordException(String msg)
	{
		super(msg);
	}

	/**
	 * Creates the exception with a detail message and the underlying cause,
	 * preserving the original stack trace for diagnostics.
	 * @param msg description of the missing record
	 * @param cause the exception that triggered this one
	 */
	public NoSuchRecordException(String msg, Throwable cause)
	{
		super(msg, cause);
	}
}
|
BytemarkHosting/bytemark-client | cmd/bytemark/commands/admin/delete_test.go | <reponame>BytemarkHosting/bytemark-client<filename>cmd/bytemark/commands/admin/delete_test.go
package admin_test
import (
"fmt"
"testing"
"github.com/BytemarkHosting/bytemark-client/cmd/bytemark/commands/admin"
"github.com/BytemarkHosting/bytemark-client/cmd/bytemark/testutil"
"github.com/cheekybits/is"
)
// TestDeleteVLAN checks that `bytemark delete vlan <num>` parses the VLAN
// number and calls the client's DeleteVLAN exactly once, succeeding when
// the client does.
func TestDeleteVLAN(t *testing.T) {
	assert := is.New(t)
	_, client, app := testutil.BaseTestAuthSetup(t, true, admin.Commands)

	client.When("DeleteVLAN", 25).Return(nil).Times(1)

	err := app.Run([]string{"bytemark", "delete", "vlan", "25"})
	assert.Nil(err)

	if ok, verifyErr := client.Verify(); !ok {
		t.Fatal(verifyErr)
	}
}
// TestDeleteVLANError checks that a client-side failure from DeleteVLAN is
// propagated as an error from the command.
func TestDeleteVLANError(t *testing.T) {
	assert := is.New(t)
	_, client, app := testutil.BaseTestAuthSetup(t, true, admin.Commands)

	client.When("DeleteVLAN", 204).Return(fmt.Errorf("Could not delete VLAN")).Times(1)

	err := app.Run([]string{"bytemark", "delete", "vlan", "204"})
	assert.NotNil(err)

	if ok, verifyErr := client.Verify(); !ok {
		t.Fatal(verifyErr)
	}
}
|
DemetriusHR/AutoAtendimentoMarcusVinicius | packages/Server/src/core/repositories/login/index.js | <filename>packages/Server/src/core/repositories/login/index.js
const pool = require('../../../shared/connection');
/**
 * Authenticate a user via the `login_atendimento` stored procedure.
 *
 * Fixes over the previous version:
 * - no async Promise executor (rejections inside it were not guaranteed
 *   to propagate);
 * - `done()` is now called on every query outcome, so pooled clients are
 *   released instead of leaking (passing the error lets the pool discard
 *   a possibly-broken connection);
 * - `parseInt` is given an explicit radix.
 *
 * @param {string} cpf - user's CPF
 * @param {string} tel - user's phone number
 * @param {string} senha - user's password
 * @returns {Promise<{idUsuario: number, funcionario: boolean}>}
 */
function loginRepository(cpf, tel, senha) {
  return new Promise(function (resolve, reject) {
    pool.connect(function (err, client, done) {
      if (err) {
        console.log(err);
        reject(err);
        return;
      }
      client.query(
        `SELECT login_atendimento($1, $2, $3)`,
        [cpf, tel, senha],
        function (erro, result) {
          // Always release the client back to the pool.
          done(erro);
          if (erro) {
            console.log(erro);
            reject(erro);
            return;
          }
          // The procedure returns a composite value rendered like
          // "(42,t)" — i.e. "(idUsuario,funcionario)"; parse both fields.
          const { login_atendimento } = result.rows[0];
          const idUsuario = parseInt(
            login_atendimento.substring(1, login_atendimento.indexOf(',')),
            10
          );
          const funcionario =
            login_atendimento.substring(
              login_atendimento.indexOf(',') + 1,
              login_atendimento.indexOf(')')
            ) === 't';
          resolve({
            idUsuario,
            funcionario,
          });
        }
      );
    });
  });
}
/**
 * Register a new user via the `cadastrar_usuario` stored procedure.
 *
 * Fixes over the previous version:
 * - removed the async Promise executor and the meaningless `await`s on
 *   callback-style APIs (`pool.connect`/`client.query` with callbacks do
 *   not return promises);
 * - removed the dead outer `idUsuario` accumulator;
 * - `done()` is now called on every query outcome, so pooled clients are
 *   released instead of leaking.
 *
 * @param {string} nome - user's full name
 * @param {string} cpf - user's CPF
 * @param {string} senha - user's password
 * @param {string} tel - user's phone number
 * @returns {Promise<{idUsuario: number}>} resolves with the new user's id
 */
function cadastrarUsuarioRepository(nome, cpf, senha, tel) {
  return new Promise(function (resolve, reject) {
    pool.connect(function (err, client, done) {
      if (err) {
        console.log(err);
        reject(err);
        return;
      }
      client.query(
        `SELECT cadastrar_usuario($1, $2, $3, $4)`,
        [nome, cpf, senha, tel],
        function (erro, result) {
          // Always release the client back to the pool.
          done(erro);
          if (erro) {
            console.log(erro);
            reject(erro);
            return;
          }
          resolve({
            idUsuario: result.rows[0].cadastrar_usuario,
          });
        }
      );
    });
  });
}
// Public repository API: keep the exported names stable — callers import
// `login` and `cadastrarUsuario`.
module.exports = {
  login: loginRepository,
  cadastrarUsuario: cadastrarUsuarioRepository,
};
|
consulo-trash/consulo-hibernate | plugin/src/main/java/com/intellij/hibernate/model/xml/mapping/HbmLoadCollection.java | <filename>plugin/src/main/java/com/intellij/hibernate/model/xml/mapping/HbmLoadCollection.java
// Generated on Fri Nov 17 19:09:30 MSK 2006
// DTD/Schema : hibernate-mapping-3.0.dtd
package com.intellij.hibernate.model.xml.mapping;
import java.util.List;
import javax.annotation.Nonnull;
import com.intellij.hibernate.model.enums.LockModeType;
import com.intellij.jam.model.common.CommonDomModelElement;
import com.intellij.util.xml.GenericAttributeValue;
import com.intellij.util.xml.Required;
/**
 * hibernate-mapping-3.0.dtd:load-collection interface.
 *
 * Generated DOM-model interface mirroring the {@code load-collection}
 * element of the Hibernate 3.0 mapping DTD; each getter corresponds to an
 * attribute or child element.
 */
public interface HbmLoadCollection extends CommonDomModelElement
{

	/**
	 * Returns the value of the alias child.
	 * Attribute alias
	 * @return the value of the alias child.
	 */
	@Nonnull
	@Required
	GenericAttributeValue<String> getAlias();

	/**
	 * Returns the value of the lock-mode child.
	 * Attribute lock-mode
	 * @return the value of the lock-mode child.
	 */
	@Nonnull
	GenericAttributeValue<LockModeType> getLockMode();

	/**
	 * Returns the value of the role child.
	 * Attribute role
	 * @return the value of the role child.
	 */
	@Nonnull
	@Required
	GenericAttributeValue<String> getRole();

	/**
	 * Returns the list of return-property children.
	 * @return the list of return-property children.
	 */
	@Nonnull
	List<HbmReturnProperty> getReturnProperties();

	/**
	 * Adds new child to the list of return-property children.
	 * @return created child
	 */
	HbmReturnProperty addReturnProperty();
}
|
Hendrikto/jena | jena-geosparql/src/test/java/org/apache/jena/geosparql/spatial/ConvertLatLonBoxTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.geosparql.spatial;
import org.apache.jena.geosparql.implementation.datatype.WKTDatatype;
import org.apache.jena.graph.Node;
import org.apache.jena.graph.NodeFactory;
import org.apache.jena.rdf.model.Literal;
import org.apache.jena.rdf.model.ResourceFactory;
import org.apache.jena.sparql.expr.NodeValue;
import org.junit.After;
import org.junit.AfterClass;
import static org.junit.Assert.*;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
/**
*
*
*/
public class ConvertLatLonBoxTest {

    public ConvertLatLonBoxTest() {
    }

    @BeforeClass
    public static void setUpClass() {
    }

    @AfterClass
    public static void tearDownClass() {
    }

    @Before
    public void setUp() {
    }

    @After
    public void tearDown() {
    }

    /**
     * Test of toWKT method, of class ConvertLatLonBox.
     * Coordinates appear as (lat lon) pairs in the expected WKT —
     * presumably the EPSG:4326 axis order; verify against the implementation
     * if this test is changed.
     */
    @Test
    public void testToWKT() {
        float latMin = 0.0F;
        float lonMin = 1.0F;
        float latMax = 10.0F;
        float lonMax = 11.0F;
        String expResult = "<http://www.opengis.net/def/crs/EPSG/0/4326> POLYGON((0 1, 10 1, 10 11, 0 11, 0 1))";
        String result = ConvertLatLonBox.toWKT(latMin, lonMin, latMax, lonMax);
        assertEquals(expResult, result);
    }

    /**
     * Test of toLiteral method, of class ConvertLatLonBox.
     */
    @Test
    public void testToLiteral() {
        float latMin = 0.0F;
        float lonMin = 1.0F;
        float latMax = 10.0F;
        float lonMax = 11.0F;
        Literal expResult = ResourceFactory.createTypedLiteral("<http://www.opengis.net/def/crs/EPSG/0/4326> POLYGON((0 1, 10 1, 10 11, 0 11, 0 1))", WKTDatatype.INSTANCE);
        Literal result = ConvertLatLonBox.toLiteral(latMin, lonMin, latMax, lonMax);
        assertEquals(expResult, result);
    }

    /**
     * Test of toNodeValue method, of class ConvertLatLonBox,
     * with float NodeValue inputs.
     */
    @Test
    public void testConvert_4args_1() {
        NodeValue v1 = NodeValue.makeFloat(0.0f);
        NodeValue v2 = NodeValue.makeFloat(1.0f);
        NodeValue v3 = NodeValue.makeFloat(10.0f);
        NodeValue v4 = NodeValue.makeFloat(11.0f);
        NodeValue expResult = NodeValue.makeNode("<http://www.opengis.net/def/crs/EPSG/0/4326> POLYGON((0 1, 10 1, 10 11, 0 11, 0 1))", WKTDatatype.INSTANCE);
        NodeValue result = ConvertLatLonBox.toNodeValue(v1, v2, v3, v4);
        assertEquals(expResult, result);
    }

    /**
     * Test of toNode method, of class ConvertLatLonBox
     * (raw Node inputs and output).
     */
    @Test
    public void testConvert_4args_2() {
        Node n1 = NodeValue.makeFloat(0.0f).asNode();
        Node n2 = NodeValue.makeFloat(1.0f).asNode();
        Node n3 = NodeValue.makeFloat(10.0f).asNode();
        Node n4 = NodeValue.makeFloat(11.0f).asNode();
        Node expResult = NodeFactory.createLiteral("<http://www.opengis.net/def/crs/EPSG/0/4326> POLYGON((0 1, 10 1, 10 11, 0 11, 0 1))", WKTDatatype.INSTANCE);
        Node result = ConvertLatLonBox.toNode(n1, n2, n3, n4);
        assertEquals(expResult, result);
    }

    /**
     * Test of toNodeValue method, of class ConvertLatLonBox,
     * with string NodeValue inputs (exercises numeric coercion).
     */
    @Test
    public void testConvert_4args_3() {
        NodeValue v1 = NodeValue.makeString("0.0");
        NodeValue v2 = NodeValue.makeString("1.0");
        NodeValue v3 = NodeValue.makeString("10.0");
        NodeValue v4 = NodeValue.makeString("11.0");
        NodeValue expResult = NodeValue.makeNode("<http://www.opengis.net/def/crs/EPSG/0/4326> POLYGON((0 1, 10 1, 10 11, 0 11, 0 1))", WKTDatatype.INSTANCE);
        NodeValue result = ConvertLatLonBox.toNodeValue(v1, v2, v3, v4);
        assertEquals(expResult, result);
    }
}
|
Sajaki/intellij-community | java/java-tests/testData/refactoring/extractMethodNew/VerboseArrayAccess.java | class Test {
void foo(String[] ss) {
for(int i = 0; i < ss.length; i++) {
<selection>
System.out.println(ss[i]);
System.out.println(ss[i + 1]);
</selection>
}
}
} |
asposemarketplace/Aspose-Pdf-Java | src/programmersguide/workingwithasposepdf/workingwithannotations/getparticularannotationfrompdffile/java/GetParticularAnnotationFromPDFFile.java | <reponame>asposemarketplace/Aspose-Pdf-Java
/*
* Copyright 2001-2013 Aspose Pty Ltd. All Rights Reserved.
*
* This file is part of Aspose.Pdf. The source code in this file
* is only intended as a supplement to the documentation, and is provided
* "as is", without warranty of any kind, either expressed or implied.
*/
package programmersguide.workingwithasposepdf.workingwithannotations.getparticularannotationfrompdffile.java;
import com.aspose.pdf.*;
/**
 * Example: read a particular annotation from a PDF file and print its
 * title, subject and contents.
 */
public class GetParticularAnnotationFromPDFFile
{
    public static void main(String[] args) throws Exception
    {
        // The path to the documents directory.
        String dataDir = "src/programmersguide/workingwithasposepdf/workingwithannotations/getparticularannotationfrompdffile/data/";

        //open source PDF document
        com.aspose.pdf.Document pdfDocument = new com.aspose.pdf.Document(dataDir + "input.pdf");

        //get particular annotation (first annotation of the first page)
        com.aspose.pdf.TextAnnotation textAnnotation = (com.aspose.pdf.TextAnnotation)pdfDocument.getPages().get_Item(1).getAnnotations().get_Item(1);

        // Print annotation properties. Bug fix: the annotation text was
        // previously concatenated into the printf FORMAT string, so any '%'
        // in the document's annotation would be interpreted as a format
        // specifier and crash (or corrupt) the output. Pass it as a %s
        // argument instead — the printed bytes are otherwise unchanged.
        System.out.printf("Title :- %s", textAnnotation.getTitle());
        System.out.printf("Subject :- %s", textAnnotation.getSubject());
        System.out.printf("Contents :- %s", textAnnotation.getContents());
    }
}
|
windystrife/UnrealEngine_NVIDIAGameWork | Engine/Source/Runtime/Engine/Classes/Engine/PendingNetGame.h | <gh_stars>1-10
// Copyright 1998-2017 Epic Games, Inc. All Rights Reserved.
#pragma once
#include "CoreMinimal.h"
#include "UObject/ObjectMacros.h"
#include "UObject/UObjectGlobals.h"
#include "UObject/Object.h"
#include "Engine/EngineBaseTypes.h"
#include "NetworkDelegates.h"
#include "PendingNetGame.generated.h"
class UEngine;
class UNetConnection;
class UNetDriver;
struct FWorldContext;
UCLASS(customConstructor, transient)
class UPendingNetGame :
	public UObject,
	public FNetworkNotify
{
	GENERATED_UCLASS_BODY()

	/**
	 * Net driver created for contacting the new server
	 * Transferred to world on successful connection
	 */
	UPROPERTY()
	class UNetDriver* NetDriver;

	/**
	 * Demo Net driver created for loading demos, but we need to go through pending net game
	 * Transferred to world on successful connection
	 */
	UPROPERTY()
	class UDemoNetDriver* DemoNetDriver;

	/**
	 * Setup the connection for encryption with a given key
	 * All future packets are expected to be encrypted
	 *
	 * @param Response response from the game containing its encryption key or an error message
	 * @param WeakConnection the connection related to the encryption request
	 */
	void FinalizeEncryptedConnection(const FEncryptionKeyResponse& Response, TWeakObjectPtr<UNetConnection> WeakConnection);

public:
	/** URL associated with this level. */
	FURL URL;

	/** Whether the connection handshake completed successfully. NOTE(review): inferred from the name — confirm against Tick()/NotifyControlMessage(). */
	bool bSuccessfullyConnected;

	/** Whether the initial JOIN request has already been sent. NOTE(review): inferred from the name — confirm against SendJoin()/SendInitialJoin(). */
	bool bSentJoinRequest;

	/** Human-readable description of a connection failure, if any. NOTE(review): inferred from the name — confirm call sites. */
	FString ConnectionError;

	// Constructor.
	void Initialize(const FURL& InURL);

	// Constructor.
	UPendingNetGame(const FObjectInitializer& ObjectInitializer = FObjectInitializer::Get());

	/** Creates the NetDriver used to contact the new server (see NetDriver above). */
	void InitNetDriver();

	/**
	 * Send the packet for triggering the initial join
	 */
	void SendInitialJoin();

	//~ Begin FNetworkNotify Interface.
	virtual EAcceptConnection::Type NotifyAcceptingConnection() override;
	virtual void NotifyAcceptedConnection( class UNetConnection* Connection ) override;
	virtual bool NotifyAcceptingChannel( class UChannel* Channel ) override;
	virtual void NotifyControlMessage(UNetConnection* Connection, uint8 MessageType, class FInBunch& Bunch) override;
	//~ End FNetworkNotify Interface.

	/** Update the pending level's status. */
	virtual void Tick( float DeltaTime );

	/** Accessor for the pending connection's net driver. */
	virtual UNetDriver* GetNetDriver() { return NetDriver; }

	/** Send JOIN to other end */
	virtual void SendJoin();

	//~ Begin UObject Interface.
	virtual void Serialize( FArchive& Ar ) override;

	virtual void FinishDestroy() override
	{
		// Drop the raw pointer; the driver itself is kept alive/collected via
		// AddReferencedObjects below, not via this member.
		NetDriver = NULL;
		Super::FinishDestroy();
	}

	static void AddReferencedObjects(UObject* InThis, FReferenceCollector& Collector);
	//~ End UObject Interface.

	/** Create the peer net driver and a socket to listen for new client peer connections. */
	void InitPeerListen();

	/** Called by the engine after it calls LoadMap for this PendingNetGame. */
	virtual void LoadMapCompleted(UEngine* Engine, FWorldContext& Context, bool bLoadedMapSuccessfully, const FString& LoadMapError);
};
|
TacocaTYT/Tetra-Pak | src/main/java/mod/noobulus/tetrapak/loot/GenericModifierSerializer.java | package mod.noobulus.tetrapak.loot;
import com.google.gson.JsonObject;
import net.minecraft.loot.conditions.ILootCondition;
import net.minecraft.util.ResourceLocation;
import net.minecraftforge.common.loot.GlobalLootModifierSerializer;
import net.minecraftforge.common.loot.LootModifier;
import java.util.function.Function;
/**
 * A reusable {@link GlobalLootModifierSerializer} for loot modifiers whose JSON
 * form carries no data beyond their loot conditions: reading simply delegates
 * to a factory function, and writing produces an empty JSON object.
 *
 * @param <T> the concrete loot modifier type produced by the factory
 */
public class GenericModifierSerializer<T extends LootModifier> extends GlobalLootModifierSerializer<T> {

	/** Factory creating the modifier from its parsed loot conditions. */
	private final Function<ILootCondition[], T> factory;

	public GenericModifierSerializer(Function<ILootCondition[], T> lootModifier) {
		this.factory = lootModifier;
	}

	@Override
	public T read(ResourceLocation location, JsonObject object, ILootCondition[] ailootcondition) {
		// Everything this modifier needs is in the conditions array.
		return factory.apply(ailootcondition);
	}

	@Override
	public JsonObject write(T instance) {
		// No extra state to persist.
		return new JsonObject();
	}
}
|
myArea51/binnavi | src/main/java/com/google/security/zynamics/binnavi/disassembly/INaviTextNodeListener.java | <gh_stars>10-100
/*
Copyright 2011-2016 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.google.security.zynamics.binnavi.disassembly;
import com.google.security.zynamics.binnavi.Gui.GraphWindows.CommentDialogs.Interfaces.IComment;
import com.google.security.zynamics.zylib.gui.zygraph.nodes.ITextNodeListener;
import java.util.List;
/**
 * Interface for classes that want to be informed about changes in text nodes.
 *
 * @author timkornau
 *
 */
public interface INaviTextNodeListener extends INaviViewNodeListener, ITextNodeListener<CTextNode> {
  /**
   * Invoked after a text node comment has been appended.
   *
   * @param node The text node where the comment has been appended.
   * @param comment The comment which has been appended.
   */
  void appendedTextNodeComment(INaviTextNode node, IComment comment);

  /**
   * Invoked after a text node comment has been deleted.
   *
   * @param node The text node where the comment has been deleted.
   * @param comment The comment which has been deleted.
   */
  void deletedTextNodeComment(INaviTextNode node, IComment comment);

  /**
   * Invoked after a text node comment has been edited.
   *
   * @param node The text node where the comment has been edited.
   * @param comment The comment which has been edited.
   */
  void editedTextNodeComment(INaviTextNode node, IComment comment);

  /**
   * Invoked after the comments of a text node have been initialized.
   *
   * @param node The text node where the comments have been initialized.
   * @param comments The comments with which the text node comments have been initialized.
   */
  void initializedTextNodeComment(INaviTextNode node, List<IComment> comments);
}
|
peeesspee/BitOJ | Client_Linux/manage_code.py | <gh_stars>10-100
import time
import os
import json
from login import authenticate_login
from connection import manage_connection
# Class to handle sending of code
class send_code():
    """Publishes code submissions and queries to the judge over RabbitMQ.

    NOTE(review): the methods are written without ``self`` and appear to be
    intended to be called on the class itself, e.g.
    ``send_code.solution_request(...)`` — confirm against callers.
    """
    client_id, username = authenticate_login.get_user_details()
    channel, host = manage_connection.channel_host()
    extention = None

    # UI language name -> source-file extension. Anything not listed is
    # treated as PYTHON-2 (which also uses '.py').
    _LANGUAGE_EXTENSIONS = {
        'C': '.c',
        'C++': '.cpp',
        'JAVA': '.java',
        'PYTHON-3': '.py',
    }

    # Solution request function
    def solution_request(problem_Code, selected_language, time_stamp, code, local_run_id, client_key, username, ip):
        """Build the SUBMT JSON payload for one submission and publish it on
        the 'connection_manager' exchange with routing key 'client_requests'.

        Side effect: updates the class attribute ``send_code.extention`` with
        the extension matching ``selected_language``.
        """
        # Resolve extension / language code via the lookup table
        # (previously a long if/elif chain).
        if selected_language in send_code._LANGUAGE_EXTENSIONS:
            send_code.extention = send_code._LANGUAGE_EXTENSIONS[selected_language]
            language_code = selected_language
        else:
            send_code.extention = '.py'
            language_code = 'PYTHON-2'

        final_data = {
            'Code': 'SUBMT',
            'IP': ip,
            'Username': username,
            'Client Key': client_key,
            'Local Run ID': local_run_id,
            'ID': authenticate_login.client_id,
            'PCode': problem_Code,
            'Language': language_code,
            'Time': time_stamp,
            'Source': code,
            'Type': "CLIENT"
        }
        final_data = json.dumps(final_data)

        print("[ Sending CODE ] " + problem_Code + ' ' + language_code + ' ' + time_stamp)
        try:
            authenticate_login.channel.basic_publish(
                exchange='connection_manager',
                routing_key='client_requests',
                body=final_data,
            )
        except Exception as error:
            # Best-effort: report the failure but do not crash the client UI.
            print('[ Error ] ' + str(error))
        print("Your code is running \nWait for the judgement")

    def query_request(client_id, client_key, query, username, ip):
        """Build the QUERY JSON payload and publish it on the
        'connection_manager' exchange with routing key 'client_requests'."""
        final_data = {
            'Code': 'QUERY',
            "IP": ip,
            "Username": username,
            'ID': client_id,
            'Client Key': client_key,
            'Query': query,
            'Type': 'CLIENT'
        }
        final_data = json.dumps(final_data)

        print('[QUERY] Sending.....')
        try:
            authenticate_login.channel.basic_publish(
                exchange='connection_manager',
                routing_key='client_requests',
                body=final_data,
            )
        except Exception as error:
            # Best-effort: report the failure but do not crash the client UI.
            print('[ Error ] ' + str(error))
        print('[QUERY] Successfully Send')
        print('[QUERY] Waiting for response .....')
bjhall/loqusdb | tests/vcf_tools/test_check_vcf.py | <reponame>bjhall/loqusdb
import pytest
from loqusdb.utils.vcf import (check_vcf, get_vcf)
from loqusdb.exceptions import VcfError
def test_check_vcf_correct(vcf_path):
    """check_vcf on a well-formed SNV file reports the right count and type."""
    # Establish the expected variant count by counting non-header lines.
    with open(vcf_path, 'r') as handle:
        expected_count = sum(1 for line in handle if not line.startswith('#'))

    # Collect the VCF info.
    vcf_info = check_vcf(vcf_path)

    # The reported number of variants and the variant type must match.
    assert vcf_info['nr_variants'] == expected_count
    assert vcf_info['variant_type'] == 'snv'
def test_check_vcf_double_variant(double_vcf_path):
    """A VCF containing the same variant twice must be rejected."""
    ## GIVEN a variant file where a variant is duplicated
    ## WHEN checking the vcf
    ## THEN assert that the function raises a VcfError
    with pytest.raises(VcfError):
        check_vcf(double_vcf_path)
def test_check_vcf_unsorted(unsorted_vcf_path):
    """A VCF whose variants are not coordinate-sorted must be rejected."""
    ## GIVEN a vcf file with unsorted variants
    ## WHEN checking the vcf
    ## THEN assert that the function raises a VcfError
    with pytest.raises(VcfError):
        check_vcf(unsorted_vcf_path)
def test_check_sv_vcf(sv_vcf_path):
    """check_vcf on an SV file reports the right count and type."""
    # Establish the expected variant count by counting non-header lines.
    with open(sv_vcf_path, 'r') as handle:
        expected_count = sum(1 for line in handle if not line.startswith('#'))

    # Collect the VCF info, declaring the file as structural variants.
    vcf_info = check_vcf(sv_vcf_path, 'sv')

    # The reported number of variants and the variant type must match.
    assert vcf_info['nr_variants'] == expected_count
    assert vcf_info['variant_type'] == 'sv'
def test_check_vcf_wrong_type(sv_vcf_path):
    """Declaring an SV file as 'snv' must raise VcfError."""
    ## GIVEN a sv vcf file
    ## WHEN collecting the VCF info with wrong variant type
    ## THEN assert that a VcfError is raised
    with pytest.raises(VcfError):
        # The call is made only for its exception; the previous version bound
        # the return value to an unused variable (flake8 F841).
        check_vcf(sv_vcf_path, 'snv')
|
KihongHeo/petablox | src/petablox/analyses/syntax/RelDivExpr.java | package petablox.analyses.syntax;
import petablox.program.visitors.IExprVisitor;
import petablox.project.Petablox;
import petablox.project.analyses.ProgramRel;
import soot.SootClass;
import soot.SootMethod;
import soot.Unit;
import soot.Value;
import soot.jimple.DivExpr;
@Petablox(name = "DivExpr", sign = "EXPR0,EXPR1,EXPR2:EXPR0_EXPR1xEXPR2")
public class RelDivExpr extends ProgramRel implements IExprVisitor {
    // Program relation DivExpr(e, op1, op2): populated with every Jimple
    // division expression e, its dividend op1 and its divisor op2.

    /** Nothing to collect at the unit (statement) level. */
    @Override
    public void visit(Unit q) { }

    /** Nothing to collect at the method level. */
    @Override
    public void visit(SootMethod m) { }

    /** Nothing to collect at the class level. */
    @Override
    public void visit(SootClass c) { }

    /** Adds (expr, op1, op2) to the relation when the value is a DivExpr. */
    @Override
    public void visit(Value e) {
        if (e instanceof DivExpr) {
            DivExpr de = (DivExpr) e;
            add(e, de.getOp1(), de.getOp2());
        }
    }
}
|
grinisrit/tnl-dev | src/TNL/base64.h | <reponame>grinisrit/tnl-dev
// Copyright (c) 2004-2022 <NAME> et al.
//
// This file is part of TNL - Template Numerical Library (https://tnl-project.org/)
//
// SPDX-License-Identifier: MIT
#pragma once
#include <cstddef>
#include <cstdint>
#include <memory>
#include <utility>
#include <stdexcept>
#include <cmath> // std::ceil
namespace TNL {
/**
* \brief Namespace for base64 encoding and decoding functions.
*
* The actual algorithms are based on these sources:
*
* - http://web.mit.edu/freebsd/head/contrib/wpa/src/utils/base64.c
* - https://stackoverflow.com/questions/180947/base64-decode-snippet-in-c/
* - https://stackoverflow.com/questions/342409/how-do-i-base64-encode-decode-in-c
*/
namespace base64 {
/**
 * \brief Get the length of base64-encoded block for given data byte length.
 *
 * Base64 maps every 3 input bytes to 4 output characters and pads the final
 * group with '=' so the result is always a multiple of 4.
 *
 * Implementation note: computed in exact integer arithmetic instead of the
 * previous std::ceil on a double, which can round incorrectly once the size
 * exceeds the integers a double represents exactly (2^53).
 */
inline std::size_t
get_encoded_length( std::size_t byte_length )
{
   // number of 3-byte groups, rounded up, times 4 output chars per group
   return ( byte_length + 2 ) / 3 * 4;
}
/**
* \brief Static table for base64 encoding.
*/
static constexpr unsigned char encoding_table[ 65 ] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
/**
* \brief Static table for base64 decoding.
*
* Can be built with the following code:
*
* \code
* std::uint8_t decoding_table[256];
* for( int i = 0; i < 256; i++ )
* decoding_table[i] = 128;
* for( std::uint8_t i = 0; i < sizeof(encoding_table) - 1; i++ )
* decoding_table[encoding_table[i]] = i;
* decoding_table[(int) '='] = 0;
* \endcode
*/
static constexpr std::uint8_t decoding_table[ 256 ] = {
128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128,
128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 62, 128, 128, 128, 63,
52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 128, 128, 128, 0, 128, 128, 128, 0, 1, 2, 3, 4, 5, 6,
7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 128, 128, 128, 128, 128,
128, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48,
49, 50, 51, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128,
128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128,
128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128,
128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128,
128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128,
128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128
};
/**
 * \brief Do a base64 encoding of the given data.
 *
 * \param data Pointer to the data to be encoded.
 * \param data_size Length of the input data (in bytes).
 * \return A \ref std::unique_ptr to the encoded, NUL-terminated data.
 */
inline std::unique_ptr< char[] >
encode( const std::uint8_t* data, std::size_t data_size )
{
   const std::size_t output_length = get_encoded_length( data_size );
   std::unique_ptr< char[] > result{ new char[ output_length + 1 ] };

   const std::uint8_t* src = data;
   const std::uint8_t* const src_end = data + data_size;
   char* dst = result.get();

   // Every full 3-byte group becomes 4 output characters.
   while( src_end - src >= 3 ) {
      *dst++ = encoding_table[ src[ 0 ] >> 2 ];
      *dst++ = encoding_table[ ( ( src[ 0 ] & 0x03 ) << 4 ) | ( src[ 1 ] >> 4 ) ];
      *dst++ = encoding_table[ ( ( src[ 1 ] & 0x0f ) << 2 ) | ( src[ 2 ] >> 6 ) ];
      *dst++ = encoding_table[ src[ 2 ] & 0x3f ];
      src += 3;
   }

   // A trailing 1- or 2-byte group is padded with '=' up to 4 characters.
   const std::ptrdiff_t remaining = src_end - src;
   if( remaining != 0 ) {
      *dst++ = encoding_table[ src[ 0 ] >> 2 ];
      if( remaining == 1 ) {
         *dst++ = encoding_table[ ( src[ 0 ] & 0x03 ) << 4 ];
         *dst++ = '=';
      }
      else {
         *dst++ = encoding_table[ ( ( src[ 0 ] & 0x03 ) << 4 ) | ( src[ 1 ] >> 4 ) ];
         *dst++ = encoding_table[ ( src[ 1 ] & 0x0f ) << 2 ];
      }
      *dst++ = '=';
   }

   *dst++ = '\0';
   return result;
}
/**
 * \brief Internal base64 decoding function.
 *
 * \param input Pointer to the encoded data (C string).
 * \param input_length Length of the input string.
 * \param output Pointer to a pre-allocated output buffer.
 * \param output_length Length of the output buffer.
 * \return Size of the decoded data (in bytes).
 * \throws std::logic_error when the output buffer is too small.
 * \throws std::invalid_argument on malformed input (bad padding or length).
 */
inline std::ptrdiff_t
decode_block( const char* input, std::size_t input_length, std::uint8_t* output, std::size_t output_length )
{
   // exact integer form of ceil( input_length * 3 / 4 ) — avoids the
   // floating-point rounding of the previous std::ceil-based computation
   const std::size_t min_buffer_size = ( input_length * 3 + 3 ) / 4;
   if( output_length < min_buffer_size )
      throw std::logic_error( "base64: insufficient output buffer size " + std::to_string( output_length )
                              + " (needed at least " + std::to_string( min_buffer_size ) + " bytes)" );

   std::size_t count = 0;
   int pad = 0;
   std::uint8_t block[ 4 ];
   std::uint8_t* pos = output;
   for( std::size_t i = 0; i < input_length; i++ ) {
      // Bug fix: index with an unsigned value. Plain char may be signed, so
      // input bytes >= 0x80 produced a *negative* array index (undefined
      // behavior) with the previous (int) cast.
      const std::uint8_t tmp = decoding_table[ static_cast< unsigned char >( input[ i ] ) ];
      // 128 marks characters outside the base64 alphabet; they are skipped
      if( tmp == 128 )
         continue;

      if( input[ i ] == '=' )
         pad++;

      block[ count ] = tmp;
      count++;
      if( count == 4 ) {
         *pos++ = ( block[ 0 ] << 2 ) | ( block[ 1 ] >> 4 );
         *pos++ = ( block[ 1 ] << 4 ) | ( block[ 2 ] >> 2 );
         *pos++ = ( block[ 2 ] << 6 ) | block[ 3 ];
         count = 0;
         if( pad > 2 )
            // invalid padding
            throw std::invalid_argument( "base64: decoding error: input has invalid padding" );
         if( pad > 0 ) {
            // '=' padding carries no payload — drop those bytes and stop
            pos -= pad;
            break;
         }
      }
   }
   // check left-over chars
   if( count > 0 )
      throw std::invalid_argument( "base64: decoding error: invalid input (length not padded to a multiple of 4)" );

   return pos - output;
}
/**
 * \brief Do a base64 decoding of the given data.
 *
 * \param data Pointer to the encoded data (C string).
 * \param data_size Length of the input string.
 * \return A pair of the decoded data length and a \ref std::unique_ptr to the
 * decoded data.
 */
inline std::pair< std::size_t, std::unique_ptr< std::uint8_t[] > >
decode( const char* data, const std::size_t data_size )
{
   // exact integer form of ceil( data_size * 3 / 4 ) — avoids the
   // floating-point rounding of the previous std::ceil-based computation
   const std::size_t buffer_size = ( data_size * 3 + 3 ) / 4;
   std::unique_ptr< std::uint8_t[] > decoded_data{ new std::uint8_t[ buffer_size + 1 ] };
   const std::size_t decoded_length_data = decode_block( data, data_size, decoded_data.get(), buffer_size );
   return { decoded_length_data, std::move( decoded_data ) };
}
/**
 * \brief Write a base64-encoded block of data into the given stream.
 *
 * The encoded data is prepended with a short header, which is the base64-encoded
 * byte length of the data. The type of the byte length value is `HeaderType`.
 *
 * \tparam HeaderType integer type used for the byte-length header.
 * \tparam T element type of the data array.
 * \param data Pointer to the elements to encode.
 * \param data_length Number of elements (not bytes) pointed to by \e data.
 * \param output_stream Stream receiving the header and the encoded payload.
 */
template< typename HeaderType = std::uint64_t, typename T >
void
write_encoded_block( const T* data, const std::size_t data_length, std::ostream& output_stream )
{
   // header: the payload size in bytes, itself base64-encoded
   const HeaderType size = data_length * sizeof( T );
   std::unique_ptr< char[] > encoded_size =
      base64::encode( reinterpret_cast< const std::uint8_t* >( &size ), sizeof( HeaderType ) );
   output_stream << encoded_size.get();
   // payload: the raw bytes of the array, base64-encoded
   std::unique_ptr< char[] > encoded_data = base64::encode( reinterpret_cast< const std::uint8_t* >( data ), size );
   output_stream << encoded_data.get();
}
} // namespace base64
} // namespace TNL
|
CezaryGoralski/samoa-test | samoa-api/src/main/java/org/apache/samoa/streams/generators/HyperplaneGenerator.java | package org.apache.samoa.streams.generators;
/*
* #%L
* SAMOA
* %%
* Copyright (C) 2014 - 2015 Apache Software Foundation
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.util.Random;
import org.apache.samoa.instances.Attribute;
import org.apache.samoa.instances.DenseInstance;
import org.apache.samoa.instances.Instance;
import org.apache.samoa.instances.Instances;
import org.apache.samoa.instances.InstancesHeader;
import org.apache.samoa.moa.core.Example;
import org.apache.samoa.moa.core.FastVector;
import org.apache.samoa.moa.core.InstanceExample;
import org.apache.samoa.moa.core.ObjectRepository;
import org.apache.samoa.moa.options.AbstractOptionHandler;
import org.apache.samoa.moa.tasks.TaskMonitor;
import org.apache.samoa.streams.InstanceStream;
import com.github.javacliparser.FloatOption;
import com.github.javacliparser.IntOption;
/**
 * Stream generator for Hyperplane data stream.
 *
 * Labels each random point by which side of a (slowly rotating) hyperplane it
 * falls on; the hyperplane's weights drift over time (concept drift).
 *
 * @author <NAME> (abifet at cs dot waikato dot ac dot nz)
 * @version $Revision: 7 $
 */
public class HyperplaneGenerator extends AbstractOptionHandler implements InstanceStream {

    @Override
    public String getPurposeString() {
        return "Generates a problem of predicting class of a rotating hyperplane.";
    }

    private static final long serialVersionUID = 1L;

    public IntOption instanceRandomSeedOption = new IntOption("instanceRandomSeed", 'i',
            "Seed for random generation of instances.", 1);

    // NOTE(review): nextInstance() below only ever assigns class labels 0 or 1,
    // so values > 2 here only add unused labels to the header.
    public IntOption numClassesOption = new IntOption("numClasses", 'c', "The number of classes to generate.", 2, 2,
            Integer.MAX_VALUE);

    public IntOption numAttsOption = new IntOption("numAtts", 'a', "The number of attributes to generate.", 10, 0,
            Integer.MAX_VALUE);

    public IntOption numDriftAttsOption = new IntOption("numDriftAtts", 'k', "The number of attributes with drift.", 2,
            0, Integer.MAX_VALUE);

    public FloatOption magChangeOption = new FloatOption("magChange", 't', "Magnitude of the change for every example",
            0.0, 0.0, 1.0);

    public IntOption noisePercentageOption = new IntOption("noisePercentage", 'n',
            "Percentage of noise to add to the data.", 5, 0, 100);

    public IntOption sigmaPercentageOption = new IntOption("sigmaPercentage", 's',
            "Percentage of probability that the direction of change is reversed.", 10,
            0, 100);

    protected InstancesHeader streamHeader;

    protected Random instanceRandom;

    // current hyperplane weights, one per attribute
    protected double[] weights;

    // drift direction per attribute: +1/-1 for drifting attributes, 0 otherwise
    protected int[] sigma;

    public int numberInstance;

    @Override
    protected void prepareForUseImpl(TaskMonitor monitor, ObjectRepository repository) {
        monitor.setCurrentActivity("Preparing hyperplane...", -1.0);
        generateHeader();
        restart();
    }

    // Builds the stream header: numeric attributes att1..attN plus a nominal
    // class attribute class1..classC as the last attribute.
    @SuppressWarnings({ "rawtypes", "unchecked" })
    protected void generateHeader() {
        FastVector attributes = new FastVector();
        for (int i = 0; i < this.numAttsOption.getValue(); i++) {
            attributes.addElement(new Attribute("att" + (i + 1)));
        }

        FastVector classLabels = new FastVector();
        for (int i = 0; i < this.numClassesOption.getValue(); i++) {
            classLabels.addElement("class" + (i + 1));
        }
        attributes.addElement(new Attribute("class", classLabels));
        this.streamHeader = new InstancesHeader(new Instances(getCLICreationString(InstanceStream.class), attributes, 0));
        this.streamHeader.setClassIndex(this.streamHeader.numAttributes() - 1);
    }

    @Override
    public long estimatedRemainingInstances() {
        // synthetic stream: unbounded
        return -1;
    }

    @Override
    public InstancesHeader getHeader() {
        return this.streamHeader;
    }

    @Override
    public boolean hasMoreInstances() {
        return true;
    }

    @Override
    public boolean isRestartable() {
        return true;
    }

    @Override
    public Example<Instance> nextInstance() {
        int numAtts = this.numAttsOption.getValue();
        double[] attVals = new double[numAtts + 1];
        // weighted sum of uniformly random attribute values
        double sum = 0.0;
        double sumWeights = 0.0;
        for (int i = 0; i < numAtts; i++) {
            attVals[i] = this.instanceRandom.nextDouble();
            sum += this.weights[i] * attVals[i];
            sumWeights += this.weights[i];
        }
        // classify by which side of the hyperplane the point falls on
        int classLabel;
        if (sum >= sumWeights * 0.5) {
            classLabel = 1;
        } else {
            classLabel = 0;
        }
        // Add Noise: flip the label with noisePercentage probability
        if ((1 + (this.instanceRandom.nextInt(100))) <= this.noisePercentageOption.getValue()) {
            classLabel = (classLabel == 0 ? 1 : 0);
        }

        Instance inst = new DenseInstance(1.0, attVals);
        inst.setDataset(getHeader());
        inst.setClassValue(classLabel);
        addDrift();
        return new InstanceExample(inst);
    }

    // Moves each drifting weight by sigma * magChange and reverses the drift
    // direction with sigmaPercentage probability.
    private void addDrift() {
        for (int i = 0; i < this.numDriftAttsOption.getValue(); i++) {
            this.weights[i] += (double) ((double) sigma[i]) * ((double) this.magChangeOption.getValue());
            if (// this.weights[i] >= 1.0 || this.weights[i] <= 0.0 ||
            (1 + (this.instanceRandom.nextInt(100))) <= this.sigmaPercentageOption.getValue()) {
                this.sigma[i] *= -1;
            }
        }
    }

    @Override
    public void restart() {
        // Reseed the RNG and re-randomize the weights; only the first
        // numDriftAtts attributes get a non-zero drift direction.
        this.instanceRandom = new Random(this.instanceRandomSeedOption.getValue());
        this.weights = new double[this.numAttsOption.getValue()];
        this.sigma = new int[this.numAttsOption.getValue()];
        for (int i = 0; i < this.numAttsOption.getValue(); i++) {
            this.weights[i] = this.instanceRandom.nextDouble();
            this.sigma[i] = (i < this.numDriftAttsOption.getValue() ? 1 : 0);
        }
    }

    @Override
    public void getDescription(StringBuilder sb, int indent) {
        // TODO Auto-generated method stub
    }
}
|
0katekate0/WxJava | weixin-java-mp/src/main/java/me/chanjar/weixin/mp/bean/guide/WxMpGuideGroup.java | <reponame>0katekate0/WxJava<filename>weixin-java-mp/src/main/java/me/chanjar/weixin/mp/bean/guide/WxMpGuideGroup.java
package me.chanjar.weixin.mp.bean.guide;
import com.google.gson.annotations.SerializedName;
import lombok.Data;
import me.chanjar.weixin.common.bean.ToJson;
import me.chanjar.weixin.common.util.json.WxGsonBuilder;
import java.io.Serializable;
/**
 * Guide (advisor) group information.
 *
 * @author <a href="https://www.sacoc.cn">广州跨界-宋心成</a>
 * @date 2021/5/8/008
 */
@Data
public class WxMpGuideGroup implements ToJson, Serializable {
    private static final long serialVersionUID = 6235142804489175294L;

    /**
     * Guide group id.
     */
    @SerializedName("id")
    private Long id;

    /**
     * Guide group name.
     */
    @SerializedName("name")
    private String name;

    /**
     * Creation timestamp.
     */
    @SerializedName("create_time")
    private Long createTime;

    /**
     * Last-update timestamp.
     */
    @SerializedName("update_time")
    private Long updateTime;

    /** Serializes this object to JSON via the shared WxGsonBuilder instance. */
    @Override
    public String toJson() {
        return WxGsonBuilder.create().toJson(this);
    }

    /** Parses a JSON string into a {@link WxMpGuideGroup}. */
    public static WxMpGuideGroup fromJson(String json) {
        return WxGsonBuilder.create().fromJson(json, WxMpGuideGroup.class);
    }
}
|
trc492/Frc2011Logomotion | code/WPILib/ChipObject/ExpectedFPGASignature.h | // Copyright (c) National Instruments 2008. All Rights Reserved.
// Do Not Edit... this file is generated!
#ifndef __n2EAA5E59CAF1A8A966853A011B61CC91_ExpectedFPGASignature_h__
#define __n2EAA5E59CAF1A8A966853A011B61CC91_ExpectedFPGASignature_h__
namespace nFPGA
{
namespace n2EAA5E59CAF1A8A966853A011B61CC91
{
static const unsigned short g_ExpectedFPGAVersion = 2011;
static const unsigned int g_ExpectedFPGARevision = 0x00105003;
static const unsigned int g_ExpectedFPGASignature[] =
{
0x2EAA5E59,
0xCAF1A8A9,
0x66853A01,
0x1B61CC91,
};
}
}
#endif // __n2EAA5E59CAF1A8A966853A011B61CC91_ExpectedFPGASignature_h__
|
fengli12321/Socket.io-FLSocketIM-Android | app/src/main/java/com/foxpower/flchatofandroid/model/MessageBody.java | package com.foxpower.flchatofandroid.model;
import com.foxpower.flchatofandroid.enums.MessageType;
import org.json.JSONObject;
import java.util.HashMap;
/**
 * Created by fengli on 2018/2/8.
 */
public class MessageBody extends BaseModel {
    // Chat message payload. Which of the fields below are populated depends
    // on the message's MessageType; the rest stay at their defaults.

    /* common */
    private MessageType type;

    /* text message */
    private String msg;

    /* image message */
    // image dimensions — NOTE(review): presumably width/height keys, confirm at call sites
    private HashMap<String, Integer> size;
    private String thumbnailRemotePath;
    private String originImagePath;

    /* voice message */
    private long duration;

    /* location message */
    private double latitude;
    private double longitude;
    private String locationName;
    private String detailLocationName;

    /* file message */
    private String fileName;
    private String fileRemotePath;

    // Plain getters and setters below; no additional logic.

    public void setLatitude(double latitude) {
        this.latitude = latitude;
    }

    public void setLongitude(double longitude) {
        this.longitude = longitude;
    }

    public void setLocationName(String locationName) {
        this.locationName = locationName;
    }

    public void setDetailLocationName(String detailLocationName) {
        this.detailLocationName = detailLocationName;
    }

    public double getLatitude() {
        return latitude;
    }

    public void setType(MessageType type) {
        this.type = type;
    }

    public void setMsg(String msg) {
        this.msg = msg;
    }

    public void setSize(HashMap size) {
        this.size = size;
    }

    public void setThumbnailRemotePath(String thumbnailRemotePath) {
        this.thumbnailRemotePath = thumbnailRemotePath;
    }

    public void setFileName(String fileName) {
        this.fileName = fileName;
    }

    public void setFileRemotePath(String fileRemotePath) {
        this.fileRemotePath = fileRemotePath;
    }

    public void setOriginImagePath(String originImagePath) {
        this.originImagePath = originImagePath;
    }

    public MessageType getType() {
        return type;
    }

    public void setDuration(long duration) {
        this.duration = duration;
    }

    public String getMsg() {
        return msg;
    }

    public HashMap<String, Integer> getSize() {
        return size;
    }

    public String getThumbnailRemotePath() {
        return thumbnailRemotePath;
    }

    public String getFileName() {
        return fileName;
    }

    public String getFileRemotePath() {
        return fileRemotePath;
    }

    public String getOriginImagePath() {
        return originImagePath;
    }

    public long getDuration() {
        return duration;
    }

    public double getLongitude() {
        return longitude;
    }

    public String getLocationName() {
        return locationName;
    }

    public String getDetailLocationName() {
        return detailLocationName;
    }
}
|
aalbinclark/doi | app/models/doi_registration_agent.rb | <gh_stars>10-100
# ActiveRecord model for a DOI registration agent.
# NOTE(review): presumably identifies the agency that registered the
# associated DOIs — confirm against the schema and seed data.
class DoiRegistrationAgent < ActiveRecord::Base
  # One agent owns many Record rows.
  has_many :records
end
jnvshubham7/CPP_Programming | A_Permutation_Grid.cpp | #include <iostream>
#include <set>
#include <algorithm>
#include <vector>
#include <map>
#include <string>
using namespace std;
int main(void){
int n,q,i,x,y;
vector<int> r,c;
cin >> n;
r.resize(n);
c.resize(n);
for(i=0;i<n;i++) cin >> r[i];
for(i=0;i<n;i++) cin >> c[i];
cin >> q;
for(i=0;i<q;i++){
cin >> x >> y;
x--; y--;
if(r[x]+c[y] <= n) cout << '.';
else cout << '#';
}
cout << '\n';
}
|
phatblat/macOSPrivateFrameworks | PrivateFrameworks/HomeKitDaemon/HMDAccountHandleResolveOperation.h | //
// Generated by class-dump 3.5 (64 bit).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by <NAME>.
//
#import "HMFOperation.h"
#import "HMFLogging.h"
#import "IDSServiceDelegate.h"
@class HMDAccountHandle, HMDIDSMessageContext, IDSService, NSMutableArray, NSString;
@interface HMDAccountHandleResolveOperation : HMFOperation <HMFLogging, IDSServiceDelegate>
{
HMDAccountHandle *_handle;
IDSService *_service;
HMDIDSMessageContext *_messageContext;
NSMutableArray *_resolveBlocks;
}
+ (id)logCategory;
+ (double)timeout;
@property(readonly) NSMutableArray *resolveBlocks; // @synthesize resolveBlocks=_resolveBlocks;
@property(retain, nonatomic) HMDIDSMessageContext *messageContext; // @synthesize messageContext=_messageContext;
@property(readonly) IDSService *service; // @synthesize service=_service;
@property(readonly, copy) HMDAccountHandle *handle; // @synthesize handle=_handle;
- (void).cxx_destruct;
- (void)service:(id)arg1 account:(id)arg2 incomingMessage:(id)arg3 fromID:(id)arg4 context:(id)arg5;
- (void)service:(id)arg1 account:(id)arg2 identifier:(id)arg3 didSendWithSuccess:(BOOL)arg4 error:(id)arg5 context:(id)arg6;
- (void)addResolveBlock:(CDUnknownBlockType)arg1;
- (void)main;
- (id)initWithHandle:(id)arg1;
- (id)initWithTimeout:(double)arg1;
- (id)init;
// Remaining properties
@property(readonly, copy) NSString *debugDescription;
@property(readonly, copy) NSString *description;
@property(readonly) unsigned long long hash;
@property(readonly) Class superclass;
@end
|
bogus-sudo/ONE-1 | runtime/onert/backend/cpu/kernel/CompareLayer.cc | <reponame>bogus-sudo/ONE-1<filename>runtime/onert/backend/cpu/kernel/CompareLayer.cc
/*
* Copyright (c) 2020 Samsung Electronics Co., Ltd. All Rights Reserved
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "CompareLayer.h"
#include "OperationUtils.h"
#include <cker/operation/Comparison.h>
namespace onert
{
namespace backend
{
namespace cpu
{
namespace kernel
{
namespace
{
using OpType = onert::ir::operation::Comparison::ComparisonType;
using namespace onert::backend::cpu;
template <typename T>
void compareScalar(const operand::Tensor *lhs, const operand::Tensor *rhs, operand::Tensor *output,
                   OpType op_type)
{
  // Dispatches an elementwise comparison of `lhs` against `rhs`, writing
  // boolean results into `output`. The broadcasting kernels are used when the
  // two input shapes differ; otherwise the plain elementwise kernels are used.
  //
  // Hoist the shape conversions and buffer casts that every branch of the
  // original implementation repeated verbatim.
  const auto lhs_shape = convertToExtendedCkerShape(lhs);
  const auto rhs_shape = convertToExtendedCkerShape(rhs);
  const auto out_shape = convertToExtendedCkerShape(output);
  const T *lhs_buffer = reinterpret_cast<const T *>(lhs->buffer());
  const T *rhs_buffer = reinterpret_cast<const T *>(rhs->buffer());
  bool *out_buffer = reinterpret_cast<bool *>(output->buffer());

  bool requires_broadcast = !HaveSameShapes(lhs, rhs);
  if (requires_broadcast)
  {
    switch (op_type)
    {
      case OpType::Equal:
        Broadcast4DSlowEqual(lhs_shape, lhs_buffer, rhs_shape, rhs_buffer, out_shape, out_buffer);
        break;
      case OpType::NotEqual:
        Broadcast4DSlowNotEqual(lhs_shape, lhs_buffer, rhs_shape, rhs_buffer, out_shape,
                                out_buffer);
        break;
      case OpType::Greater:
        Broadcast4DSlowGreater(lhs_shape, lhs_buffer, rhs_shape, rhs_buffer, out_shape,
                               out_buffer);
        break;
      case OpType::GreaterEqual:
        Broadcast4DSlowGreaterEqual(lhs_shape, lhs_buffer, rhs_shape, rhs_buffer, out_shape,
                                    out_buffer);
        break;
      case OpType::Less:
        Broadcast4DSlowLess(lhs_shape, lhs_buffer, rhs_shape, rhs_buffer, out_shape, out_buffer);
        break;
      case OpType::LessEqual:
        Broadcast4DSlowLessEqual(lhs_shape, lhs_buffer, rhs_shape, rhs_buffer, out_shape,
                                 out_buffer);
        break;
      default:
        throw std::runtime_error{"Invalid OpType for CompareLayer"};
    }
  }
  else // if (requires_broadcast == false)
  {
    switch (op_type)
    {
      case OpType::Equal:
        EqualNoScaling(lhs_shape, lhs_buffer, rhs_shape, rhs_buffer, out_shape, out_buffer);
        break;
      case OpType::NotEqual:
        NotEqualNoScaling(lhs_shape, lhs_buffer, rhs_shape, rhs_buffer, out_shape, out_buffer);
        break;
      case OpType::Greater:
        GreaterNoScaling(lhs_shape, lhs_buffer, rhs_shape, rhs_buffer, out_shape, out_buffer);
        break;
      case OpType::GreaterEqual:
        GreaterEqualNoScaling(lhs_shape, lhs_buffer, rhs_shape, rhs_buffer, out_shape,
                              out_buffer);
        break;
      case OpType::Less:
        LessNoScaling(lhs_shape, lhs_buffer, rhs_shape, rhs_buffer, out_shape, out_buffer);
        break;
      case OpType::LessEqual:
        LessEqualNoScaling(lhs_shape, lhs_buffer, rhs_shape, rhs_buffer, out_shape, out_buffer);
        break;
      default:
        throw std::runtime_error{"Invalid OpType for CompareLayer"};
    }
  }
}
} // namespace
// Default-constructs the layer with unset operands and Equal as the
// comparison kind; configure() must be called before run().
CompareLayer::CompareLayer()
    : _lhs(nullptr), _rhs(nullptr), _output(nullptr),
      _op_type(ir::operation::Comparison::ComparisonType::Equal)
{
  // DO NOTHING
}
// Quantized (QUANT8_ASYMM) comparison is not yet implemented.
void CompareLayer::compareQuant8() { throw std::runtime_error{"Compare NYI for quantized"}; }
// Stash the operand tensors and the comparison kind; no computation happens
// until run() is invoked.
void CompareLayer::configure(const operand::Tensor *lhs, const operand::Tensor *rhs,
                             const OpType op_type, operand::Tensor *output)
{
  _op_type = op_type;
  _output = output;
  _lhs = lhs;
  _rhs = rhs;
}
// Dispatch on the element type of the left-hand operand. Output is written as
// booleans by compareScalar(); quantized input is not supported yet.
void CompareLayer::run()
{
  if (_lhs->data_type() == OperandType::FLOAT32)
  {
    compareScalar<float>(_lhs, _rhs, _output, _op_type);
  }
  else if (_lhs->data_type() == OperandType::INT32)
  {
    compareScalar<int32_t>(_lhs, _rhs, _output, _op_type);
  }
  else if (_lhs->data_type() == OperandType::BOOL8)
  {
    // BOOL8 tensors are byte-backed, hence the uint8_t instantiation.
    compareScalar<uint8_t>(_lhs, _rhs, _output, _op_type);
  }
  else if (_lhs->data_type() == OperandType::QUANT8_ASYMM)
  {
    // Not implemented; throws.
    compareQuant8();
  }
  else
  {
    throw std::runtime_error{"Compare: unsupported data type"};
  }
}
} // namespace kernel
} // namespace cpu
} // namespace backend
} // namespace onert
|
n1603/metalk8s | salt/tests/unit/modules/test_metalk8s_checks.py | <gh_stars>0
import os.path
from unittest import TestCase
from unittest.mock import MagicMock, patch
from parameterized import parameterized
from salt.exceptions import CheckError
import yaml
import metalk8s_checks
from tests.unit import mixins
from tests.unit import utils
# Absolute path to the YAML fixture holding the test case definitions.
YAML_TESTS_FILE = os.path.join(
    os.path.dirname(os.path.abspath(__file__)),
    "files", "test_metalk8s_checks.yaml"
)

# Load the shared fixture once at import time. `safe_load` avoids executing
# arbitrary YAML tags; the encoding is pinned so parsing does not depend on
# the platform's locale.
with open(YAML_TESTS_FILE, encoding="utf-8") as fd:
    YAML_TESTS_CASES = yaml.safe_load(fd)
class Metalk8sChecksTestCase(TestCase, mixins.LoaderModuleMockMixin):
    """
    TestCase for the `metalk8s_checks` execution module.
    """

    # Module under test; consumed by LoaderModuleMockMixin to set up the salt
    # dunder attributes (__salt__, ...) for patching.
    loader_module = metalk8s_checks

    def test_virtual(self):
        """
        Tests the return of `__virtual__` function
        """
        self.assertEqual(metalk8s_checks.__virtual__(), 'metalk8s_checks')

    @utils.parameterized_from_cases(YAML_TESTS_CASES["sysctl"])
    def test_sysctl(self, params, data, result, raises=False):
        """
        Tests the return of `sysctl` function

        Each case comes from the "sysctl" section of the YAML fixture:
        - params: argument passed to `metalk8s_checks.sysctl`
        - data: mapping backing the mocked `sysctl.get` salt function
        - result: expected return value, or the error pattern when `raises`
        - raises: when True, expect a CheckError matching `result`
          (relies on `sysctl` raising by default — no `raises=` is passed)
        """
        # Mock `sysctl.get` so lookups resolve against the case's `data` map.
        sysctl_get_mock = MagicMock(side_effect=data.get)
        patch_dict = {
            'sysctl.get': sysctl_get_mock
        }
        with patch.dict(metalk8s_checks.__salt__, patch_dict):
            if raises:
                self.assertRaisesRegex(
                    CheckError,
                    result,
                    metalk8s_checks.sysctl,
                    params
                )
            else:
                self.assertEqual(
                    metalk8s_checks.sysctl(params, raises=False),
                    result
                )
|
fountainment/fountain-engine-improved | fountain/base/funcUtil.h | <reponame>fountainment/fountain-engine-improved<gh_stars>0
#ifndef _FEI_FUNCUTIL_H_
#define _FEI_FUNCUTIL_H_
#include "base/basedef.h"
namespace fei {
// Build a deferred deleter: the returned callable deletes the pointer that
// was captured (by value) at creation time.
template <class T>
inline std::function<void()> deleteFunc(T t)
{
	auto deleter = [t]() {
		delete t;
	};
	return deleter;
}
// Combine a vector of callables into a single callable that invokes them in
// order. The vector is captured by value, so the returned std::function owns
// its own copy and remains valid after the argument goes out of scope.
inline std::function<void()> funcVector(std::vector<std::function<void()>> v)
{
	return [v]() {
		// const& avoids copying each std::function on every invocation
		// (the original `for (auto f : v)` copied every element).
		for (const auto& f : v) {
			f();
		}
	};
}
} // namespace fei
#endif // _FEI_FUNCUTIL_H_
|
dram/metasfresh | backend/de.metas.handlingunits.base/src/main/java/de/metas/handlingunits/exceptions/HUNotAssignableException.java | <reponame>dram/metasfresh<filename>backend/de.metas.handlingunits.base/src/main/java/de/metas/handlingunits/exceptions/HUNotAssignableException.java<gh_stars>1000+
package de.metas.handlingunits.exceptions;
import de.metas.handlingunits.model.I_M_HU;
import de.metas.util.Check;
/**
 * Exception thrown when an HU (handling unit) is not assignable to a given
 * document line. The message is assembled from the caller-supplied reason,
 * the document line and the HU, one part per line, using placeholders
 * (@Line@, @M_HU_ID@) — presumably AD_Message keys resolved to translated
 * text downstream; confirm against HUException handling.
 *
 * @author tsa
 */
public class HUNotAssignableException extends HUException
{
	// Serialization id for this Throwable subclass.
	private static final long serialVersionUID = -438979286865114772L;

	/**
	 * @param message reason why the HU is not assignable (may be blank; trimmed before use)
	 * @param documentLineModel document line on which HU was tried to be assigned (may be null)
	 * @param hu HU that wanted to be assigned (may be null)
	 */
	public HUNotAssignableException(final String message, final Object documentLineModel, final I_M_HU hu)
	{
		super(buildMsg(message, documentLineModel, hu));
	}

	/** Builds the multi-line exception message from the non-null parts. */
	private static final String buildMsg(final String message, final Object documentLineModel, final I_M_HU hu)
	{
		final StringBuilder sb = new StringBuilder();
		if (!Check.isEmpty(message, true))
		{
			sb.append(message.trim());
		}

		//
		// Document Line Info
		if (documentLineModel != null)
		{
			if (sb.length() > 0)
			{
				sb.append("\n");
			}
			sb.append("@Line@: ").append(documentLineModel);
		}

		//
		// HU Info
		if (hu != null)
		{
			if (sb.length() > 0)
			{
				sb.append("\n");
			}
			sb.append("@M_HU_ID@: ").append(hu.getValue()).append(" (ID=").append(hu.getM_HU_ID()).append(")");
		}

		return sb.toString();
	}
}
|
Dylan-haiji/javayh-cloud | javayh-mq/javayh-rabbit/src/main/java/com/javayh/config/TopicRabbitConfig.java | package com.javayh.config;
import org.springframework.amqp.core.Binding;
import org.springframework.amqp.core.BindingBuilder;
import org.springframework.amqp.core.Queue;
import org.springframework.amqp.core.TopicExchange;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import static com.javayh.util.StaticNumber.*;
/**
 * RabbitMQ topic-exchange (routing by pattern) configuration: declares two
 * queues, one topic exchange, and the bindings between them.
 *
 * @author <NAME>
 * @Description: topic mode
 * @Title: TopicRabbitConfig
 * @ProjectName javayh-cloud
 * @date 2019/7/20 16:34
 */
@Configuration
public class TopicRabbitConfig {

    /** Queue named after the JAVAYOHO_TOPIC routing key. */
    @Bean("queueMessage")
    public Queue queueMessage() {
        return new Queue(JAVAYOHO_TOPIC);
    }

    /** Queue named after the YHJ_TOPIC routing key. */
    @Bean("queueMessages")
    public Queue queueMessages() {
        return new Queue(YHJ_TOPIC);
    }

    /** The shared topic exchange both queues bind to. */
    @Bean
    TopicExchange exchange() {
        return new TopicExchange(TOPIC_EXCHANGE);
    }

    /** Binds queueMessage to the exchange with the exact JAVAYOHO_TOPIC key. */
    @Bean
    Binding bindingExchangeMessage(Queue queueMessage, TopicExchange exchange) {
        return BindingBuilder.bind(queueMessage).to(exchange).with(JAVAYOHO_TOPIC);
    }

    /**
     * Binds queueMessages to the exchange with TOPIC rather than YHJ_TOPIC.
     * NOTE(review): presumably TOPIC is a wildcard pattern covering YHJ_TOPIC
     * so this queue receives a broader message set — confirm against the
     * StaticNumber constants.
     */
    @Bean
    Binding bindingExchangeMessages(Queue queueMessages, TopicExchange exchange) {
        return BindingBuilder.bind(queueMessages).to(exchange).with(TOPIC);
    }
}
|
sonnguyenxcii/PdfViewer | office/src/main/java/com/wxiwei/office/thirdpart/emf/EMFTag.java | <filename>office/src/main/java/com/wxiwei/office/thirdpart/emf/EMFTag.java
// Copyright 2001, FreeHEP.
package com.wxiwei.office.thirdpart.emf;
import java.io.IOException;
import com.wxiwei.office.thirdpart.emf.data.GDIObject;
import com.wxiwei.office.thirdpart.emf.io.Tag;
import com.wxiwei.office.thirdpart.emf.io.TaggedInputStream;
/**
 * EMF specific tag, from which all other EMF Tags inherit.
 *
 * @author <NAME>
 * @version $Id: EMFTag.java 10367 2007-01-22 19:26:48Z duns $
 */
public abstract class EMFTag extends Tag implements GDIObject
{
    /**
     * Constructs a EMFTag.
     *
     * @param id id of the element
     * @param version emf version in which this element was first supported
     */
    protected EMFTag(int id, int version)
    {
        super(id, version);
    }

    /**
     * Adapts the generic TaggedInputStream entry point to the EMF-specific
     * overload below (the stream is always an EMFInputStream here).
     */
    public Tag read(int tagID, TaggedInputStream input, int len) throws IOException
    {
        return read(tagID, (EMFInputStream)input, len);
    }

    /**
     * Reads this tag's payload from the EMF stream.
     *
     * @param tagID tag number being read
     * @param emf source stream positioned at the tag payload
     * @param len payload length in bytes
     * @return the decoded tag instance
     */
    public abstract EMFTag read(int tagID, EMFInputStream emf, int len) throws IOException;

    /**
     * @return a description of the tagName and tagID
     */
    public String toString()
    {
        return "EMFTag " + getName() + " (" + getTag() + ")";
    }

    /**
     * displays the tag using the renderer; no-op by default, subclasses that
     * draw override this.
     *
     * @param renderer EMFRenderer storing the drawing session data
     */
    public void render(EMFRenderer renderer)
    {
    }
}
|
thong-hoczita/bigbluebutton | bigbluebutton-web/src/java/org/bigbluebutton/api/messaging/converters/messages/EndMeetingMessage.java | package org.bigbluebutton.api.messaging.converters.messages;
/** Immutable message asking that the meeting with the given id be ended. */
public class EndMeetingMessage {
    /** Event name used to route this message. */
    public static final String END_MEETING_REQUEST_EVENT = "end_meeting_request_event";
    /** Message format version. */
    public static final String VERSION = "0.0.1";

    // Id of the meeting to end.
    public final String meetingId;

    public EndMeetingMessage(String meetingId) {
        this.meetingId = meetingId;
    }
}
|
TUZIHULI/magnum | src/Magnum/Math/Geometry/Test/IntersectionTest.cpp | <reponame>TUZIHULI/magnum
/*
This file is part of Magnum.
Copyright © 2010, 2011, 2012, 2013, 2014
<NAME> <<EMAIL>>
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
*/
#include <limits>
#include <Corrade/TestSuite/Tester.h>
#include "Magnum/Math/Geometry/Intersection.h"
namespace Magnum { namespace Math { namespace Geometry { namespace Test {
// Test fixture registering the plane/line and line/line intersection checks.
class IntersectionTest: public Corrade::TestSuite::Tester {
    public:
        IntersectionTest();
        void planeLine();
        void lineLine();
};
// Shorthand for the float vector types used by the tests below.
typedef Math::Vector2<Float> Vector2;
typedef Math::Vector3<Float> Vector3;

// Registers the test cases with the Corrade test runner.
IntersectionTest::IntersectionTest() {
    addTests({&IntersectionTest::planeLine,
              &IntersectionTest::lineLine});
}
// Checks the intersection parameter t returned by Intersection::planeLine():
// t in [0, 1] means the segment crosses the plane, NaN marks a line lying in
// the plane, infinity marks a parallel line.
void IntersectionTest::planeLine() {
    const Vector3 planePosition(-1.0f, 1.0f, 0.5f);
    const Vector3 planeNormal(0.0f, 0.0f, 1.0f);

    /* Inside line segment */
    CORRADE_COMPARE(Intersection::planeLine(planePosition, planeNormal,
        {0.0f, 0.0f, -1.0f}, {0.0f, 0.0f, 2.0f}), 0.75f);

    /* Outside line segment */
    CORRADE_COMPARE(Intersection::planeLine(planePosition, planeNormal,
        {0.0f, 0.0f, 1.0f}, {0.0f, 0.0f, 1.0f}), -0.5f);

    /* Line lies on the plane */
    // NOTE(review): comparing against quiet_NaN() relies on CORRADE_COMPARE
    // treating NaN == NaN as a match — confirm with Corrade's comparator.
    CORRADE_COMPARE(Intersection::planeLine(planePosition, planeNormal,
        {1.0f, 0.5f, 0.5f}, {-1.0f, 0.5f, 0.0f}), std::numeric_limits<Float>::quiet_NaN());

    /* Line is parallel to the plane */
    CORRADE_COMPARE(Intersection::planeLine(planePosition, planeNormal,
        {1.0f, 0.0f, 1.0f}, {-1.0f, 0.0f, 0.0f}), -std::numeric_limits<Float>::infinity());
}
// Checks the (t, u) parameters returned by the 2D line/line intersection
// helpers for the line through p with direction r: values in [0, 1] mean the
// intersection lies inside the segment, NaN marks collinear lines, infinity
// marks parallel non-collinear lines.
void IntersectionTest::lineLine() {
    const Vector2 p(-1.0f, -1.0f);
    const Vector2 r(1.0, 2.0f);

    /* Inside both line segments */
    CORRADE_COMPARE(Intersection::lineSegmentLineSegment(p, r,
        {0.0f, 0.0f}, {-1.0f, 0.0f}), std::make_pair(0.5f, 0.5f));
    CORRADE_COMPARE(Intersection::lineSegmentLine(p, r,
        {0.0f, 0.0f}, {-1.0f, 0.0f}), 0.5);

    /* Outside both line segments */
    CORRADE_COMPARE(Intersection::lineSegmentLineSegment(p, r,
        {0.0f, -2.0f}, {-1.0f, 0.0f}), std::make_pair(-0.5f, 1.5f));
    CORRADE_COMPARE(Intersection::lineSegmentLine(p, r,
        {0.0f, -2.0f}, {-1.0f, 0.0f}), -0.5f);

    /* Collinear lines */
    // NOTE(review): these NaN comparisons assume CORRADE_COMPARE treats
    // NaN == NaN as a match — confirm with Corrade's comparator.
    const auto tu = Intersection::lineSegmentLineSegment(p, r,
        {0.0f, 1.0f}, {-1.0f, -2.0f});
    CORRADE_COMPARE(tu.first, -std::numeric_limits<Float>::quiet_NaN());
    CORRADE_COMPARE(tu.second, -std::numeric_limits<Float>::quiet_NaN());
    CORRADE_COMPARE(Intersection::lineSegmentLine(p, r,
        {0.0f, 1.0f}, {-1.0f, -2.0f}), -std::numeric_limits<Float>::quiet_NaN());

    /* Parallel lines */
    CORRADE_COMPARE(Intersection::lineSegmentLineSegment(p, r,
        {0.0f, 0.0f}, {1.0f, 2.0f}), std::make_pair(std::numeric_limits<Float>::infinity(),
                                                    std::numeric_limits<Float>::infinity()));
    CORRADE_COMPARE(Intersection::lineSegmentLine(p, r,
        {0.0f, 0.0f}, {1.0f, 2.0f}), std::numeric_limits<Float>::infinity());
}
}}}}
CORRADE_TEST_MAIN(Magnum::Math::Geometry::Test::IntersectionTest)
|
Kyle9021/trireme-lib | controller/internal/supervisor/iptablesctrl/instance.go | <reponame>Kyle9021/trireme-lib
package iptablesctrl
import (
"context"
"fmt"
"os"
"strings"
"go.aporeto.io/enforcerd/trireme-lib/controller/constants"
provider "go.aporeto.io/enforcerd/trireme-lib/controller/pkg/aclprovider"
"go.aporeto.io/enforcerd/trireme-lib/controller/pkg/ebpf"
"go.aporeto.io/enforcerd/trireme-lib/controller/pkg/fqconfig"
"go.aporeto.io/enforcerd/trireme-lib/controller/pkg/ipsetmanager"
"go.aporeto.io/enforcerd/trireme-lib/controller/runtime"
"go.aporeto.io/enforcerd/trireme-lib/policy"
"go.uber.org/zap"
)
const (
	// IPV4 version for ipv4
	IPV4 = iota
	// IPV6 version for ipv6
	IPV6
)

// Instance is the structure holding the ipv4 and ipv6 iptables handles; every
// exported method fans out the operation to both handles.
type Instance struct {
	iptv4 *iptables
	iptv6 *iptables
}
// SetTargetNetworks propagates the updated target networks to both the IPv4
// and IPv6 handles. There are three different types of target networks:
//   - TCPTargetNetworks for TCP traffic (by default 0.0.0.0/0)
//   - UDPTargetNetworks for UDP traffic (by default empty)
//   - ExcludedNetworks that are always ignored (by default empty)
func (i *Instance) SetTargetNetworks(c *runtime.Configuration) error {
	for _, impl := range []*iptables{i.iptv4, i.iptv6} {
		if err := impl.SetTargetNetworks(c); err != nil {
			return err
		}
	}
	return nil
}
// Run starts the iptables controller for both IP families; the first failure
// aborts and is returned.
func (i *Instance) Run(ctx context.Context) error {
	for _, impl := range []*iptables{i.iptv4, i.iptv6} {
		if err := impl.Run(ctx); err != nil {
			return err
		}
	}
	return nil
}
// ConfigureRules implements the ConfigureRules interface. It creates the
// port sets and then installs all the ACLs for the given chains, on both the
// IPv4 and IPv6 handles. Port sets are only created here; updates use the
// exact same logic.
func (i *Instance) ConfigureRules(version int, contextID string, pu *policy.PUInfo) error {
	for _, impl := range []*iptables{i.iptv4, i.iptv6} {
		if err := impl.ConfigureRules(version, contextID, pu); err != nil {
			return err
		}
	}
	return nil
}
// DeleteRules implements the DeleteRules interface. It cleans all ACLs,
// associated chains and sets created for the given processing unit, for both
// IP families. Failures are logged but deliberately not propagated, so a v4
// cleanup problem does not prevent the v6 cleanup from running.
func (i *Instance) DeleteRules(version int, contextID string, tcpPorts, udpPorts string, mark string, username string, containerInfo *policy.PUInfo) error {
	if err := i.iptv4.DeleteRules(version, contextID, tcpPorts, udpPorts, mark, username, containerInfo); err != nil {
		// Include the underlying error so the warning is actionable
		// (it was previously dropped).
		zap.L().Warn("Delete rules for iptables v4 returned error", zap.Error(err))
	}
	if err := i.iptv6.DeleteRules(version, contextID, tcpPorts, udpPorts, mark, username, containerInfo); err != nil {
		zap.L().Warn("Delete rules for iptables v6 returned error", zap.Error(err))
	}
	return nil
}
// UpdateRules implements the update part of the interface: it installs the
// new rules and then deletes the old ones, for both IP families. On
// installations without a recent iptables-restore the operations are ordered
// so the switch is almost atomic (new rules first); recent versions apply
// everything in one shot.
func (i *Instance) UpdateRules(version int, contextID string, containerInfo *policy.PUInfo, oldContainerInfo *policy.PUInfo) error {
	for _, impl := range []*iptables{i.iptv4, i.iptv6} {
		if err := impl.UpdateRules(version, contextID, containerInfo, oldContainerInfo); err != nil {
			return err
		}
	}
	return nil
}
// CleanUp removes all ACLs and destroys all the IP sets for both families.
// Errors are logged (with the underlying cause) but not returned, so the v6
// cleanup always runs even when the v4 one fails.
func (i *Instance) CleanUp() error {
	if err := i.iptv4.CleanUp(); err != nil {
		// Attach the error to the log entry (it was previously dropped).
		zap.L().Error("Failed to cleanup ipv4 rules", zap.Error(err))
	}
	if err := i.iptv6.CleanUp(); err != nil {
		zap.L().Error("Failed to cleanup ipv6 rules", zap.Error(err))
	}
	return nil
}
// CreateCustomRulesChain creates the custom QOS chain (for both IPv4 and
// IPv6) if it doesn't exist, and ensures the netfilter hook chain jumps to it
// for locally originated traffic.
func (i *Instance) CreateCustomRulesChain() error {
	// Non-batched providers on purpose: the chain must be created
	// immediately, outside any batched commit.
	// BUG FIX: the creation errors used to be discarded with `_`, which
	// would have caused a nil-pointer panic on the calls below.
	nonbatchedv4tableprovider, err := provider.NewGoIPTablesProviderV4([]string{}, CustomQOSChain)
	if err != nil {
		return fmt.Errorf("unable to create ipv4 iptables provider: %s", err)
	}
	nonbatchedv6tableprovider, err := provider.NewGoIPTablesProviderV6([]string{}, CustomQOSChain)
	if err != nil {
		return fmt.Errorf("unable to create ipv6 iptables provider: %s", err)
	}

	// IPv4: create the chain (idempotent) and hook it up if not present.
	if err := nonbatchedv4tableprovider.NewChain(customQOSChainTable, CustomQOSChain); err != nil {
		zap.L().Debug("Chain already exists", zap.Error(err))
	}
	postroutingchainrulesv4, err := nonbatchedv4tableprovider.ListRules(customQOSChainTable, customQOSChainNFHook)
	if err != nil {
		zap.L().Error("ListRules returned error", zap.Error(err))
		return err
	}
	hasV4Rule := false
	for _, rule := range postroutingchainrulesv4 {
		if strings.Contains(rule, CustomQOSChain) {
			hasV4Rule = true
			break
		}
	}
	if !hasV4Rule {
		if err := nonbatchedv4tableprovider.Insert(customQOSChainTable, customQOSChainNFHook, 1,
			"-m", "addrtype",
			"--src-type", "LOCAL",
			"-j", CustomQOSChain,
		); err != nil {
			zap.L().Debug("Unable to create ipv4 custom rule", zap.Error(err))
		}
	}

	// IPv6: same dance. NOTE: v6 uses Append while v4 uses Insert at
	// position 1 — preserved as-is since rule position may matter.
	if err := nonbatchedv6tableprovider.NewChain(customQOSChainTable, CustomQOSChain); err != nil {
		zap.L().Debug("Chain already exists", zap.Error(err))
	}
	postroutingchainrulesv6, err := nonbatchedv6tableprovider.ListRules(customQOSChainTable, customQOSChainNFHook)
	if err != nil {
		zap.L().Error("ListRules returned error", zap.Error(err))
		return err
	}
	hasV6Rule := false
	for _, rule := range postroutingchainrulesv6 {
		if strings.Contains(rule, CustomQOSChain) {
			hasV6Rule = true
			break
		}
	}
	if !hasV6Rule {
		if err := nonbatchedv6tableprovider.Append(customQOSChainTable, customQOSChainNFHook,
			"-m", "addrtype",
			"--src-type", "LOCAL",
			"-j", CustomQOSChain,
		); err != nil {
			zap.L().Debug("Unable to create ipv6 custom rule", zap.Error(err))
		}
	}
	return nil
}
// NewInstance creates a new iptables controller instance covering both IPv4
// and IPv6.
func NewInstance(fqc fqconfig.FilterQueue, mode constants.ModeType, ipv6Enabled bool, ebpf ebpf.BPFModule, iptablesLockfile string, serviceMeshType policy.ServiceMesh) (*Instance, error) {
	// our iptables binary `aporeto-iptables` uses the environment variable XT_LOCK_NAME
	// to set the iptables lockfile. Standard iptables does not look at this environment variable
	if iptablesLockfile != "" {
		if err := os.Setenv("XT_LOCK_NAME", iptablesLockfile); err != nil {
			return nil, fmt.Errorf("unable to set XT_LOCK_NAME: %s", err)
		}
	}

	// Build the per-family implementation and ipset manager, then wrap them.
	ipv4Impl, err := GetIPv4Impl()
	if err != nil {
		return nil, fmt.Errorf("unable to create ipv4 instance: %s", err)
	}
	ipsetV4 := ipsetmanager.V4()
	iptInstanceV4 := createIPInstance(ipv4Impl, ipsetV4, fqc, mode, ebpf, serviceMeshType)

	ipv6Impl, err := GetIPv6Impl(ipv6Enabled)
	if err != nil {
		return nil, fmt.Errorf("unable to create ipv6 instance: %s", err)
	}
	ipsetV6 := ipsetmanager.V6()
	iptInstanceV6 := createIPInstance(ipv6Impl, ipsetV6, fqc, mode, ebpf, serviceMeshType)

	return newInstanceWithProviders(iptInstanceV4, iptInstanceV6)
}
// newInstanceWithProviders wraps already-constructed per-family iptables
// handles into an Instance. It is kept separate from NewInstance so unit
// tests can inject mocked providers.
func newInstanceWithProviders(iptv4 *iptables, iptv6 *iptables) (*Instance, error) {
	return &Instance{iptv4: iptv4, iptv6: iptv6}, nil
}
// ACLProvider returns the current ACL providers (v4 first, then v6) so that
// other entities can re-use them.
func (i *Instance) ACLProvider() []provider.IptablesProvider {
	providers := []provider.IptablesProvider{
		i.iptv4.impl,
		i.iptv6.impl,
	}
	return providers
}
|
kokizzu/thrust | src/browser/session/thrust_session.cc | // Copyright (c) 2014 <NAME>.
// See the LICENSE file.
#include "src/browser/session/thrust_session.h"
#include "base/command_line.h"
#include "base/file_util.h"
#include "base/logging.h"
#include "base/path_service.h"
#include "base/threading/thread.h"
#include "base/strings/stringprintf.h"
#include "base/strings/string_util.h"
#include "net/base/escape.h"
#include "content/public/browser/browser_thread.h"
#include "content/public/browser/resource_context.h"
#include "content/public/browser/storage_partition.h"
#include "content/public/common/content_switches.h"
#include "src/common/switches.h"
#include "src/net/url_request_context_getter.h"
#include "src/browser/dialog/download_manager_delegate.h"
#include "src/browser/browser_main_parts.h"
#include "src/browser/browser_client.h"
#include "src/browser/web_view/web_view_guest.h"
#include "src/browser/session/thrust_session_proxy_config_service.h"
using namespace content;
namespace thrust_shell {
/******************************************************************************/
/* RESOURCE CONTEXT */
/******************************************************************************/
// ResourceContext implementation for a session. It proxies host resolution
// and URL request context access to the session's URLRequestContextGetter,
// and unconditionally denies mic/camera access.
class ThrustSession::ExoResourceContext : public content::ResourceContext {
 public:
  ExoResourceContext()
    : getter_(NULL) {}

  virtual ~ExoResourceContext() {}

  // ResourceContext implementation:
  virtual net::HostResolver* GetHostResolver() OVERRIDE {
    CHECK(getter_);  // set_url_request_context_getter() must have run first.
    return getter_->host_resolver();
  }

  virtual net::URLRequestContext* GetRequestContext() OVERRIDE {
    CHECK(getter_);
    return getter_->GetURLRequestContext();
  }

  // Media capture is denied for all origins in this embedder.
  virtual bool AllowMicAccess(const GURL& origin) OVERRIDE {
    return false;
  }

  virtual bool AllowCameraAccess(const GURL& origin) OVERRIDE {
    return false;
  }

  // Late-bound because the getter is created after this context.
  void set_url_request_context_getter(
      ThrustShellURLRequestContextGetter* getter) {
    getter_ = getter;
  }

 private:
  ThrustShellURLRequestContextGetter* getter_;

  DISALLOW_COPY_AND_ASSIGN(ExoResourceContext);
};
/******************************************************************************/
/* EXO SESSION */
/******************************************************************************/
// Builds a session: wires up its cookie/visited-link stores and proxy config
// service, then registers itself with the global browser client.
ThrustSession::ThrustSession(
    ThrustSessionBinding* binding,
    const bool off_the_record,
    const std::string& path,
    bool dummy_cookie_store)
  : binding_(binding),
    off_the_record_(off_the_record),
    ignore_certificate_errors_(false),
    resource_context_(new ExoResourceContext),
    cookie_store_(new ThrustSessionCookieStore(this, dummy_cookie_store)),
    visitedlink_store_(new ThrustSessionVisitedLinkStore(this)),
    current_instance_id_(0)
{
  // Honour the --ignore-certificate-errors command line switch.
  CommandLine* cmd_line = CommandLine::ForCurrentProcess();
  if (cmd_line->HasSwitch(switches::kIgnoreCertificateErrors)) {
    ignore_certificate_errors_ = true;
  }
  // NOTE(review): the Windows branch widens char-by-char, which only round-
  // trips ASCII paths — confirm non-ASCII profile paths are not expected.
#if defined(OS_WIN)
  std::wstring tmp(path.begin(), path.end());
  path_ = base::FilePath(tmp);
#else
  path_ = base::FilePath(path);
#endif
  visitedlink_store_->Init();
  /* It will be owned by the URLRequestContextGetter (proxy service owns */
  /* a scoped_ptr on it) as soon as it is initiated. */
  proxy_config_service_ = new ThrustSessionProxyConfigService(this);
  ThrustShellBrowserClient::Get()->RegisterThrustSession(this);
  LOG(INFO) << "ThrustSession Constructor " << this;
}
// Tears down the session: hands the resource context to the IO thread for
// deletion, unregisters from the browser client, and severs back-pointers
// from objects that may outlive this session.
ThrustSession::~ThrustSession()
{
  LOG(INFO) << "ThrustSession Destructor " << this;
  /* NOTE: We don't delete the proxy_config_service_ as it is owned by the */
  /* UrlRequestContextGetter as soon as it is initialized */

  /* The ResourceContext is created on the UI thread but live son the IO */
  /* thread, so it must be deleted there. */
  if(resource_context_) {
    BrowserThread::DeleteSoon(
        BrowserThread::IO, FROM_HERE, resource_context_.release());
  }
  ThrustShellBrowserClient::Get()->UnRegisterThrustSession(this);
  /* We remove ourselves from the CookieStore as it may oulive us but we dont */
  /* want it to call into the API anymore. */
  cookie_store_->parent_ = NULL;
  if(url_request_getter_.get())
    url_request_getter_.get()->parent_ = NULL;
}
// Returns the on-disk profile path this session was constructed with.
base::FilePath
ThrustSession::GetPath() const
{
  return path_;
}
// Whether this session was created in off-the-record (incognito) mode.
bool
ThrustSession::IsOffTheRecord() const
{
  return off_the_record_;
}
// Lazily creates the download manager delegate and binds it to this
// context's DownloadManager on first use.
content::DownloadManagerDelegate*
ThrustSession::GetDownloadManagerDelegate()
{
  if (!download_manager_delegate_.get()) {
    DownloadManager* manager = BrowserContext::GetDownloadManager(this);
    download_manager_delegate_.reset(new ThrustShellDownloadManagerDelegate());
    download_manager_delegate_->SetDownloadManager(manager);
  }
  return download_manager_delegate_.get();
}
// The session acts as its own guest manager (see the guest-manager methods
// below).
BrowserPluginGuestManager*
ThrustSession::GetGuestManager()
{
  return this;
}
// Returns the IO-thread resource context owned by this session.
content::ResourceContext*
ThrustSession::GetResourceContext()
{
  return resource_context_.get();
}
// Creates the (single) URLRequestContextGetter for this session and hands it
// to the resource context. Must only be called once (DCHECK enforced).
net::URLRequestContextGetter*
ThrustSession::CreateRequestContext(
    ProtocolHandlerMap* protocol_handlers,
    URLRequestInterceptorScopedVector request_interceptors)
{
  DCHECK(!url_request_getter_.get());
  url_request_getter_ = new ThrustShellURLRequestContextGetter(
      this,
      ignore_certificate_errors_,
      GetPath(),
      protocol_handlers,
      request_interceptors.Pass(),
      ThrustShellMainParts::Get()->net_log());
  resource_context_->set_url_request_context_getter(url_request_getter_.get());
  return url_request_getter_.get();
}
// Per-StoragePartition request contexts are not supported; reaching this is
// a programming error (DCHECK) and NULL is returned.
net::URLRequestContextGetter*
ThrustSession::CreateRequestContextForStoragePartition(
    const base::FilePath& partition_path,
    bool in_memory,
    ProtocolHandlerMap* protocol_handlers,
    URLRequestInterceptorScopedVector request_interceptors)
{
  DCHECK(false);
  /* TODO(spolu): Add Support URLRequestContextGetter per StoragePartition. */
  /*              This might be made easier once http://crbug.com/159193 */
  /*              has landed in a release branch. */
  return NULL;
}
// Returns the session's cookie store (owned by this session).
ThrustSessionCookieStore*
ThrustSession::GetCookieStore()
{
  return cookie_store_.get();
}
// Returns the session's visited-link store (owned by this session).
ThrustSessionVisitedLinkStore*
ThrustSession::GetVisitedLinkStore()
{
  return visitedlink_store_.get();
}
// Returns the proxy config service. Ownership transfers to the
// URLRequestContextGetter once that is initialized (see constructor note).
ThrustSessionProxyConfigService*
ThrustSession::GetProxyConfigService()
{
  return proxy_config_service_;
}
/******************************************************************************/
/* BROWSER_PLUGIN_GUEST_MANAGER */
/******************************************************************************/
// Looks up the guest WebContents for `guest_instance_id` and invokes the
// callback with it (or NULL when unknown).
// NOTE(review): despite the interface name, this implementation never kills
// the embedder process on a bad id — confirm that is intentional.
void
ThrustSession::MaybeGetGuestByInstanceIDOrKill(
    int guest_instance_id,
    int embedder_render_process_id,
    const GuestByInstanceIDCallback& callback)
{
  content::WebContents* guest_web_contents =
    GetGuestByInstanceID(guest_instance_id, embedder_render_process_id);
  callback.Run(guest_web_contents);
}
// Looks up a guest WebContents by its instance id; returns NULL when the id
// is not registered. The embedder render process id is not consulted here.
WebContents*
ThrustSession::GetGuestByInstanceID(
    int guest_instance_id,
    int embedder_render_process_id)
{
  std::map<int, content::WebContents*>::const_iterator it =
    guest_web_contents_.find(guest_instance_id);
  return (it == guest_web_contents_.end()) ? NULL : it->second;
}
// Runs `callback` over every guest whose embedder is `embedder_web_contents`.
// Stops and returns true as soon as a callback returns true; returns false
// if no callback did.
bool
ThrustSession::ForEachGuest(
    WebContents* embedder_web_contents,
    const GuestCallback& callback)
{
  for (std::map<int, content::WebContents*>::iterator it =
         guest_web_contents_.begin();
       it != guest_web_contents_.end(); ++it) {
    WebContents* guest = it->second;
    WebViewGuest* guest_view = WebViewGuest::FromWebContents(guest);
    // Skip guests embedded by someone else.
    if(embedder_web_contents != guest_view->embedder_web_contents()) {
      continue;
    }
    if(callback.Run(guest)) {
      return true;
    }
  }
  return false;
}
/******************************************************************************/
/* GUEST_MANAGER INTERFACE*/
/******************************************************************************/
// Registers a guest WebContents under its instance id. Registering the same
// id twice is a programming error (CHECK).
void
ThrustSession::AddGuest(
    int guest_instance_id,
    WebContents* guest_web_contents)
{
  CHECK(!ContainsKey(guest_web_contents_, guest_instance_id));
  guest_web_contents_[guest_instance_id] = guest_web_contents;
}
// Unregisters a guest by instance id; the id must be registered (DCHECK).
void
ThrustSession::RemoveGuest(
    int guest_instance_id)
{
  std::map<int, content::WebContents*>::iterator it =
    guest_web_contents_.find(guest_instance_id);
  DCHECK(it != guest_web_contents_.end());
  guest_web_contents_.erase(it);
}
// Hands out monotonically increasing guest instance ids starting at 1, so an
// id is always truthy when exposed to JavaScript.
int
ThrustSession::GetNextInstanceID()
{
  current_instance_id_ += 1;
  return current_instance_id_;
}
} // namespace thrust_shell
|
tusharchoudhary0003/Custom-Football-Game | sources/com/flurry/sdk/C7609v.java | package com.flurry.sdk;
/* renamed from: com.flurry.sdk.v */
// Decompiled/obfuscated Flurry SDK class. Names below are decompiler
// artifacts; intent inferred from structure only.
class C7609v extends C7364Cd {

    /* renamed from: d */
    // Enclosing C7382G instance this callback delegates to.
    final /* synthetic */ C7382G f15109d;

    C7609v(C7382G g) {
        this.f15109d = g;
    }

    /* renamed from: a */
    // Forwards to the outer object's m16341k(); presumably a deferred task
    // body (C7364Cd looks like a runnable-style base) — confirm against the
    // decompiled C7364Cd.
    public final void mo23803a() {
        this.f15109d.m16341k();
    }
}
|
NaturalHistoryMuseum/taxonworks | db/migrate/20140306152916_change_col_ne_geo_item_id.rb | <gh_stars>10-100
class ChangeColNeGeoItemId < ActiveRecord::Migration[4.2]
  def change
    # Drop and re-create the column so it ends up typed as integer.
    # NOTE(review): this discards any existing ne_geo_item_id values, and
    # remove_column without a type makes the migration irreversible on
    # rollback — presumably acceptable for this dataset; confirm.
    remove_column :geographic_areas, :ne_geo_item_id
    add_column :geographic_areas, :ne_geo_item_id, :integer
  end
end
|
abdzitter/Daily-Coding-DS-ALGO-Practice | Gfg/C++/rangeQuery.cpp | <filename>Gfg/C++/rangeQuery.cpp
#include <bits/stdc++.h>
using namespace std;
// Block size for Mo's algorithm; set to sqrt(n) by queryResults().
int block;

// A query range [L, R] plus its position in the input, so answers can be
// reported in the original order after Mo's reordering.
struct Query {
    int L, R, index;
};

// Mo's ordering: group queries by the block of their left endpoint, then by
// right endpoint within a block. Queries are now taken by const reference —
// the original passed them by value, copying both structs on every
// comparison during sort.
bool compare(const Query& x, const Query& y)
{
    // Different blocks, sort by block.
    if (x.L / block != y.L / block)
        return x.L / block < y.L / block;

    // Same block, sort by R value
    return x.R < y.R;
}
// Fold one occurrence of x into the current window, keeping currentAns — the
// count of values occurring exactly "value" times — consistent.
void add(int x, int& currentAns,
         unordered_map<int, int>& freq)
{
    const int updated = ++freq[x];
    if (updated == x + 1) {
        // x previously qualified (its frequency was exactly x); not anymore.
        --currentAns;
    } else if (updated == x) {
        // x now occurs exactly x times.
        ++currentAns;
    }
}
// Drop one occurrence of x from the current window, keeping currentAns — the
// count of values occurring exactly "value" times — consistent.
void remove(int x, int& currentAns,
            unordered_map<int, int>& freq)
{
    const int updated = --freq[x];
    if (updated == x) {
        // x now occurs exactly x times.
        ++currentAns;
    } else if (updated == x - 1) {
        // x previously qualified (its frequency was exactly x); not anymore.
        --currentAns;
    }
}
// Answer all m queries offline with Mo's algorithm.  For query [L, R]
// (inclusive) the answer is how many distinct values x have exactly x
// occurrences in a[L..R].  q must already be sorted in Mo's order;
// ans[q[i].index] receives the answer for the i-th sorted query.
void queryResultsUtil(int a[], Query q[],
                      int ans[], int m)
{
    // Frequency table for the sliding window [currL, currR).
    // Fixed: `freq` was referenced but never declared anywhere in the
    // original file, so the code did not compile.
    unordered_map<int, int> freq;

    int currL = 0, currR = 0;
    int currentAns = 0;

    // Traverse through all queries, morphing the window incrementally.
    for (int i = 0; i < m; i++) {
        // L and R values of current range
        int L = q[i].L, R = q[i].R;
        int index = q[i].index;

        // Shrink window from the left.
        while (currL < L) {
            remove(a[currL], currentAns, freq);
            currL++;
        }
        // Grow window to the left.
        while (currL > L) {
            currL--;
            add(a[currL], currentAns, freq);
        }
        // Grow window to the right (window is half-open on the right).
        while (currR <= R) {
            add(a[currR], currentAns, freq);
            currR++;
        }
        // Shrink window from the right.
        while (currR > R + 1) {
            currR--;
            remove(a[currR], currentAns, freq);
        }

        ans[index] = currentAns;
    }
}
// Sort the queries into Mo's order, answer them, and print each answer
// in the queries' original (index) order.
void queryResults(int a[], int n, Query q[], int m)
{
    // Find block size used by the Mo's-order comparator.
    block = (int)sqrt(n);

    sort(q, q + m, compare);

    // Fixed: the original allocated `new int[m]` and never freed it;
    // a vector releases the storage automatically.
    vector<int> ans(m);
    queryResultsUtil(a, q, ans.data(), m);

    for (int i = 0; i < m; i++) {
        cout << "Answer for Query " << (i + 1)
             << " = " << ans[i] << endl;
    }
}
// Driver program
int main()
{
    // Sample data: values are small enough that a value's frequency in a
    // sub-range can plausibly equal the value itself.
    int A[] = { 1, 2, 2, 3, 3, 3 };
    int n = sizeof(A) / sizeof(A[0]);

    // Queries as {L, R, original-index} triples (0-based, inclusive).
    Query queries[] = { { 0, 1, 0 },
                        { 1, 1, 1 },
                        { 0, 2, 2 },
                        { 1, 3, 3 },
                        { 3, 5, 4 },
                        { 0, 5, 5 } };
    int q = sizeof(queries) / sizeof(queries[0]);

    queryResults(A, n, queries, q);
    return 0;
}
linyingzhen/btnew | src/shop/auction/Detail/ds.js | <gh_stars>0
/**
* const prefixCls = 'style-390394';
* const images = '/static/images/src/shop/auction/Detail';
* @Author: czy0729
* @Date: 2018-09-11 14:15:36
* @Last Modified by: czy0729
* @Last Modified time: 2018-11-15 09:28:13
* @Path m.benting.com.cn /src/shop/auction/Detail/ds.js
*/
import Utils from '@utils';
// CDN-prefixed base path for this page's static image assets.
export const images = Utils.cdn('/static/images/src/shop/auction/Detail');

// Auction rule copy shown to the user (Chinese UI text — runtime strings,
// do not alter).
export const ruleDS = [
  '第一次出价将支付全部出价,后续出价仅需要补足到出价金额既本次出价与历史出价总额的差价。',
  '在时间完全结束时,出价最高者竞拍成功,获得竞拍物品;竞拍失败的用户在活动结束后金币全额返回到账户中',
  '每次竞拍加价数量不得少于加价幅度',
  '为了体现竞拍的公平性,使所有用户都用充分的时间拍下自己心仪的商品。若用户在结束时间倒计时最后1分钟内参与竞拍,竞拍结束时间将顺延一分钟让其他竞拍者有时间决定是否出价',
  '本活动最终解释权归本汀所有'
];

// Field whitelists (key: 1 means "keep this field") applied to the auction
// detail payload and the bidding-record list respectively.
export const filter = {
  detail: {
    addPrice: 1,
    appType: 1,
    auctionType: 1,
    beginTime: 1,
    currentPrice: 1,
    endTime: 1,
    goodsImg: 1,
    nowTime: 1,
    ownUser: 1,
    showState: 1,
    title: 1
  },
  record: {
    auctionPriceTotal: 1,
    auctionTime: 1,
    faceImg: 1,
    niname: 1,
    type: 1,
    userId: 1
  }
};
|
maciejg-git/vue-bootstrap-icons | dist-mdi/mdi/table-eye-off.js | import { h } from 'vue'
// Auto-generated Material Design Icons "table-eye-off" icon wrapped as a
// Vue 3 component; renders a 24x24 SVG that inherits its color via
// fill="currentColor".
export default {
  name: "TableEyeOff",
  vendor: "Mdi",
  type: "",
  tags: ["table","eye","off"],
  render() {
    // Path data is injected through innerHTML to keep this generated file
    // compact; the attribute object must stay byte-identical to the source.
    return h(
      "svg",
      {"xmlns":"http://www.w3.org/2000/svg","width":"24","height":"24","viewBox":"0 0 24 24","class":"v-icon","fill":"currentColor","data-name":"mdi-table-eye-off","innerHTML":"<path d='M12 8.8L6.2 3H18C19.1 3 20 3.9 20 5V12.59C19.07 12.21 18.06 12 17 12C16.45 12 15.91 12.07 15.38 12.18L14.2 11H18V7H12V8.8M17.09 13.89L22.31 19.11C22.58 18.73 22.82 18.32 23 17.88C22.07 15.57 19.78 13.92 17.09 13.89M22.11 21.46L20.84 22.73L19.5 21.37C18.71 21.69 17.88 21.88 17 21.88C14.27 21.88 11.94 20.22 11 17.88C11.45 16.76 12.23 15.8 13.21 15.1L11.93 13.82C11.19 14.42 10.53 15.14 10 15.97V13H4V17H9.42C9.26 17.32 9.12 17.66 9 18C9.12 18.34 9.26 18.68 9.42 19H4C2.9 19 2 18.1 2 17V5C2 4.68 2.08 4.38 2.22 4.11L1.11 3L2.39 1.73L22.11 21.46M9.11 11L5.11 7H4V11H9.11M18.18 20.07L14.81 16.7C14.61 17.05 14.5 17.45 14.5 17.88C14.5 19.26 15.62 20.38 17 20.38C17.43 20.38 17.83 20.27 18.18 20.07Z' />"},
    )
  }
}
tfilo/my-cookery-book | src/main/java/sk/filo/recipes/so/UnitCategorySO.java | <gh_stars>0
package sk.filo.recipes.so;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import javax.validation.constraints.NotBlank;
import javax.validation.constraints.Size;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
/**
 * Service object (DTO) for a unit category: an id, a validated display
 * name, and the list of units that belong to the category.
 *
 * @author tomas
 */
@Getter
@Setter
@ToString
public class UnitCategorySO {

    private Long id;

    // Required display name, at most 80 characters (bean validation).
    @NotBlank
    @Size(max=80)
    private String name;

    private List<UnitSO> units;

    /**
     * Lazily initialises the unit list so callers never observe null.
     *
     * @return the mutable list of units, never {@code null}
     */
    public List<UnitSO> getUnits() {
        if (Objects.isNull(units)) {
            units = new ArrayList<>();
        }
        return units;
    }
}
|
jamacanbacn/splits-io | app/javascript/colors.js | const blue = 'rgba(14, 144, 210, 1)'
const purple = 'rgba(128, 87, 165, 1)'
const green = 'rgba(94, 185, 94, 1)'
const yellow = 'rgba(250, 200, 50, 1)'
const red = 'rgba(221, 81, 76, 1)'
const orange = 'rgba(243, 123, 29, 1)'
const logoYellow = 'rgba(245, 185, 70, 1)'
const logoBlue = 'rgba(70, 156, 230, 1)'
const logoGreen = 'rgba(109, 229, 136, 1)'
const logoColors = [logoYellow, logoBlue, logoGreen]
export {blue, purple, green, yellow, red, orange, logoColors, logoYellow, logoBlue, logoGreen}
|
WCry/demo | springboot-demo/springboot2.X/springboot2.2.x/springboot-first/src/main/java/com/zxp/controller/HelloController.java | package com.zxp.controller;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.zxp.dto.TestDateDTO;
import com.zxp.scope.SingleAService;
import com.zxp.scope.SingleCService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cache.annotation.CachePut;
import org.springframework.format.annotation.DateTimeFormat;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import javax.servlet.http.HttpServletRequest;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
@Slf4j
@RestController
// Demo controller exercising property injection, bean scopes, caching
// annotations, date binding and Jackson serialization.
public class HelloController {

    // 注解方式获取配置文件中的属性 — property injected from application config.
    @Value( "${server.port}" )
    private String port;

    // Constructor-injected scope-demo services.
    private final SingleAService singleAService;
    private final SingleCService singleCService;

    public HelloController(SingleAService singleAService, SingleCService singleCService) {
        this.singleAService = singleAService;
        this.singleCService = singleCService;
    }

    @GetMapping("/dsad/{name}")
    @CachePut("dsad")
    public String hello(@PathVariable(name = "name") String name) {
        // NOTE(review): this intentionally never returns for name == "666"
        // (busy loop printing to stdout) — presumably a CPU/thread-dump test
        // endpoint; confirm before any production use.
        if(name.equals("666")){
            while (true){
                System.out.println("dsadaddddddddddddd");
            }
        }
        return "hello , " +port+":"+ name;
    }

    // Logging-level demo endpoint.
    @GetMapping("/getName")
    public String hello() {
        log.info("测试输出");
        log.error("测试输ddd出");
        log.debug("classC::"+ singleCService.getName());
        log.debug("classAService::"+ singleAService.getName());
        return "hello";
    }

    // Field-injected current HTTP request.
    @Autowired
    protected HttpServletRequest request;

    // Demonstrates @DateTimeFormat parsing of a query param and Jackson
    // serialization of the same Date both bare and wrapped in a DTO.
    @GetMapping("/dataTest")
    @JsonFormat
    public Date getDataTest(@RequestParam @DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
    Date start) throws JsonProcessingException {
        System.out.println(request.getServletPath());
        log.info(start.toString());
        TestDateDTO testDateDTO = new TestDateDTO();
        testDateDTO.setDate(start);
        ObjectMapper objectMapper=new ObjectMapper();
        System.out.println(objectMapper.writeValueAsString(start));
        System.out.println(objectMapper.writeValueAsString(testDateDTO));
        return start;
    }

    // Returns an Optional wrapped in a map to observe its JSON rendering.
    @GetMapping("/getOptional")
    public Map helloOptional(String name) {
        Map<String,Object> dasd=new HashMap<>();
        log.debug("classC::"+ singleCService.getName());
        log.debug("classAService::"+ singleAService.getName());
        dasd.put("data",Optional.ofNullable(name));
        return dasd;
    }
}
|
cetico/lit-remixicon | svg/system/download-line.js | import { svg } from 'lit';
// Auto-generated Remix Icon "download-line" (system category) as a lit svg
// template; embed it directly inside another lit html/svg template.
export const file = svg`<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24">
    <g>
        <path fill="none" d="M0 0h24v24H0z"/>
        <path d="M3 19h18v2H3v-2zm10-5.828L19.071 7.1l1.414 1.414L12 17 3.515 8.515 4.929 7.1 11 13.17V2h2v11.172z"/>
    </g>
</svg>
`;
export default file;
inexistence/VideoMeeting | Client/VMeeting/app/src/main/java/com/jb/vmeeting/mvp/presenter/RoomFilesPresenter.java | <gh_stars>10-100
package com.jb.vmeeting.mvp.presenter;
import com.jb.vmeeting.R;
import com.jb.vmeeting.app.App;
import com.jb.vmeeting.mvp.model.apiservice.RoomService;
import com.jb.vmeeting.mvp.model.entity.Result;
import com.jb.vmeeting.mvp.model.entity.Room;
import com.jb.vmeeting.mvp.model.entity.RoomFiles;
import com.jb.vmeeting.mvp.model.helper.ProgressRequestListener;
import com.jb.vmeeting.mvp.model.helper.ProgressResponseListener;
import com.jb.vmeeting.mvp.model.helper.RetrofitHelper;
import com.jb.vmeeting.mvp.model.helper.SimpleCallback;
import com.jb.vmeeting.mvp.view.IRoomFilesView;
import com.jb.vmeeting.page.utils.ToastUtil;
import com.jb.vmeeting.tools.netfile.DownloadManager;
import com.jb.vmeeting.tools.netfile.UploadManager;
import com.jb.vmeeting.tools.task.TaskExecutor;
import java.io.File;
/**
 * Presenter that mediates room-file operations (download, upload, listing
 * and metadata update) between the network layer and an IRoomFilesView.
 *
 * Created by Jianbin on 2016/5/18.
 */
public class RoomFilesPresenter extends BasePresenter {

    RoomService roomService;
    IRoomFilesView mView;

    // Guards against a second transfer starting while one is in flight.
    // NOTE(review): set/read from both UI and callback threads without
    // synchronization — presumably acceptable here; confirm.
    boolean isStreamingFile = false;

    public RoomFilesPresenter(IRoomFilesView view) {
        mView = view;
        roomService = RetrofitHelper.createService(RoomService.class);
    }

    /**
     * Downloads a server file; fileName is expected to contain '_'-separated
     * segments, the last of which is the display name shown to the view.
     * Progress callbacks are marshalled onto the UI thread; failure
     * callbacks are delivered on the caller thread.
     */
    public void downloadFile(String fileName) {
        if (isStreamingFile) {
            return;     // one transfer at a time
        }
        if (mView != null) {
            String[] split = fileName.split("_");
            mView.onDownloadStart(split[split.length - 1]);
        }
        isStreamingFile = true;
        DownloadManager.getInstance().download(App.getInstance().getString(R.string.file_base_url) + fileName, new ProgressResponseListener() {
            @Override
            public void onResponseProgress(final long bytesRead, final long contentLength, final boolean done) {
                if (mView != null) {
                    TaskExecutor.runTaskOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            mView.onDownloading(bytesRead, contentLength);
                            if (done) {
                                isStreamingFile = false;
                                mView.onDownloadSuccess();
                            }
                        }
                    });
                }
            }
            @Override
            public void onResponseFailed(Throwable throwable) {
                isStreamingFile = false;
                if (mView != null) {
                    mView.onDownloadFailed(-1, throwable.getMessage());
                }
            }
        });
    }

    /**
     * Uploads a local file; view is notified of start, progress (on the UI
     * thread), and success (with the resulting URL) or failure.
     */
    public void uploadFile(File file) {
        if (isStreamingFile) {
            return;
        }
        if (mView != null) {
            mView.onUploadStart(file);
        }
        isStreamingFile = true;
        // 开始上传 — start the upload.
        UploadManager.getInstance().upload(file, new SimpleCallback<String>() {
            @Override
            public void onSuccess(int statusCode, Result<String> result) {
                isStreamingFile = false;
                String url = result.body;
                if (mView != null) {
                    mView.onUploadSuccess(url);
                }
            }
            @Override
            public void onFailed(int statusCode, Result<String> result) {
                isStreamingFile = false;
                if (mView != null) {
                    mView.onUploadFailed(result.code, result.message);
                }
            }
        }, new ProgressRequestListener() {
            @Override
            public void onRequestProgress(final long bytesWritten, final long contentLength, boolean done) {
                if (mView != null) {
                    TaskExecutor.runTaskOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            mView.onUploading(bytesWritten, contentLength);
                        }
                    });
                }
            }
        });
    }

    // Persists changed file metadata; outcome is reported via toasts only.
    public void updateRoomFile(RoomFiles roomFiles) {
        roomService.updateRoomFiles(roomFiles).enqueue(new SimpleCallback<Void>() {
            @Override
            public void onSuccess(int statusCode, Result<Void> result) {
                ToastUtil.toast("更新成功");
            }
            @Override
            public void onFailed(int statusCode, Result<Void> result) {
                ToastUtil.toast("更新失败(statusCode:" + statusCode + ";code:" + result.code + ";msg:" + result.message + ")");
            }
        });
    }

    // Fetches the room's file listing and hands it to the view on success.
    public void getRoomFiles(Room room) {
        roomService.getRoomFiles(room).enqueue(new SimpleCallback<RoomFiles>() {
            @Override
            public void onSuccess(int statusCode, Result<RoomFiles> result) {
                RoomFiles files = result.body;
                if (mView != null) {
                    mView.onGetRoomFiles(files);
                }
            }
            @Override
            public void onFailed(int statusCode, Result<RoomFiles> result) {
                ToastUtil.toast("获取文件列表失败");
            }
        });
    }

    // Drops the view reference so late callbacks become no-ops.
    @Override
    public void onDestroy() {
        mView = null;
    }
}
|
Wimoto/iOS | code/Wimoto/Shared/Classes/SensorDetailsScreens/SensorViewController.h | //
// SensorViewController.h
// Wimoto
//
// Created by MC700 on 12/13/13.
//
//
// Placeholder string displayed while no sensor value has been received.
#define SENSOR_VALUE_PLACEHOLDER @"--"

#import "AppViewController.h"
#import "Sensor.h"
#import "NMRangeSlider.h"
#import "AlarmSlider.h"
#import "LastUpdateLabel.h"
#import <MessageUI/MessageUI.h>
#import <MessageUI/MFMailComposeViewController.h>

// Base view controller for a single-sensor detail screen: edits the sensor
// name, shows the last-update time, and receives alarm-slider changes,
// sensor data readings and mail-compose results via its delegate protocols.
@interface SensorViewController : AppViewController <AlarmSliderDelegate, UITextFieldDelegate, SensorDataReadingDelegate, MFMailComposeViewControllerDelegate>

// Model object presented by this screen.
@property (nonatomic, strong) Sensor *sensor;
@property (nonatomic, strong) UISwitch *currentSwitch;
@property (nonatomic, weak) IBOutlet UITextField *sensorNameField;
@property (nonatomic, weak) IBOutlet LastUpdateLabel *lastUpdateLabel;

// Designated initializer.
- (id)initWithSensor:(Sensor *)sensor;

@end
|
rnarla123/aliyun-openapi-java-sdk | aliyun-java-sdk-dts/src/main/java/com/aliyuncs/dts/transform/v20200101/StopDtsJobsResponseUnmarshaller.java | <reponame>rnarla123/aliyun-openapi-java-sdk
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aliyuncs.dts.transform.v20200101;
import com.aliyuncs.dts.model.v20200101.StopDtsJobsResponse;
import com.aliyuncs.transform.UnmarshallerContext;
/**
 * Copies the raw StopDtsJobs API response fields out of the unmarshaller
 * context into the typed response object (generated-style mapper).
 */
public class StopDtsJobsResponseUnmarshaller {

    public static StopDtsJobsResponse unmarshall(StopDtsJobsResponse stopDtsJobsResponse, UnmarshallerContext _ctx) {
        stopDtsJobsResponse.setRequestId(_ctx.stringValue("StopDtsJobsResponse.RequestId"));
        stopDtsJobsResponse.setHttpStatusCode(_ctx.integerValue("StopDtsJobsResponse.HttpStatusCode"));
        stopDtsJobsResponse.setErrCode(_ctx.stringValue("StopDtsJobsResponse.ErrCode"));
        stopDtsJobsResponse.setSuccess(_ctx.booleanValue("StopDtsJobsResponse.Success"));
        stopDtsJobsResponse.setErrMessage(_ctx.stringValue("StopDtsJobsResponse.ErrMessage"));
        stopDtsJobsResponse.setDynamicMessage(_ctx.stringValue("StopDtsJobsResponse.DynamicMessage"));
        stopDtsJobsResponse.setDynamicCode(_ctx.stringValue("StopDtsJobsResponse.DynamicCode"));
        return stopDtsJobsResponse;
    }
}
schregardusc/dishtiny | include/dish2/peripheral/readable_state/writable_state/controller_mapped_state/ResourceSendLimit.hpp | <gh_stars>1-10
#pragma once
#ifndef DISH2_PERIPHERAL_READABLE_STATE_WRITABLE_STATE_CONTROLLER_MAPPED_STATE_RESOURCESENDLIMIT_HPP_INCLUDE
#define DISH2_PERIPHERAL_READABLE_STATE_WRITABLE_STATE_CONTROLLER_MAPPED_STATE_RESOURCESENDLIMIT_HPP_INCLUDE
#include "../../../../../../third-party/conduit/include/uitsl/datastructs/PodLeafNode.hpp"
#include "../../../../../../third-party/conduit/include/uitsl/meta/TypeName.hpp"
namespace dish2 {

// Controller-mapped writable state cell holding a single float.
// Presumably a cap on outgoing resource transfer (inferred from the name;
// confirm against the controller documentation).  Stored as a POD leaf
// node so it participates in the peripheral state tree's serialization.
struct ResourceSendLimit : public uitsl::PodLeafNode<float> {

  // inherit constructors
  using parent_t = uitsl::PodLeafNode<float>;
  using parent_t::parent_t;

};

} // namespace dish2

namespace uitsl {

// Register a human-readable type name for introspection/debug output.
UITSL_ENABLE_TYPENAME( dish2::ResourceSendLimit );

} // namespace uitsl
#endif // #ifndef DISH2_PERIPHERAL_READABLE_STATE_WRITABLE_STATE_CONTROLLER_MAPPED_STATE_RESOURCESENDLIMIT_HPP_INCLUDE
|
kupl/starlab-benchmarks | Benchmarks_with_Safety_Bugs/C/recutils-1.8/src/src/rec-sex.c | /* -*- mode: C -*-
*
* File: rec-sex.c
* Date: Sat Jan 9 20:28:43 2010
*
* GNU recutils - Record Selection Expressions.
*
*/
/* Copyright (C) 2010-2019 <NAME> */
/* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <config.h>
#include <stdlib.h>
#include <string.h>
#include <regex.h>
#include <parse-datetime.h>
#include <rec.h>
#include <rec-utils.h>
#include <rec-sex-ast.h>
#include <rec-sex-parser.h>
#include <rec-sex-tab.h>
/*
* Data structures
*/
/* A compiled selection expression: the parser that produced it plus the
   resulting abstract syntax tree. */
struct rec_sex_s
{
  rec_sex_ast_t ast;
  rec_sex_parser_t parser;
};

/* Type tags for the dynamically-typed values produced while evaluating
   an expression tree. */
#define REC_SEX_VAL_INT 0
#define REC_SEX_VAL_REAL 1
#define REC_SEX_VAL_STR 2

/* A tagged evaluation result; only the member selected by `type` is
   meaningful. */
struct rec_sex_val_s
{
  int type;
  int int_val;
  double real_val;
  char *str_val;
};

/* Static functions declarations. */
static struct rec_sex_val_s rec_sex_eval_node (rec_sex_t sex,
                                               rec_record_t record,
                                               rec_sex_ast_node_t node,
                                               bool *status);
/* Returns true when the binary operation on the two operands should be
   carried out in floating point rather than integer arithmetic. */
static bool rec_sex_op_real_p (struct rec_sex_val_s op1,
                               struct rec_sex_val_s op2);
/*
* Public functions.
*/
/* Allocate a fresh selection expression with an initialized parser and
   an empty AST.  Returns NULL on allocation failure. */
rec_sex_t
rec_sex_new (bool case_insensitive)
{
  rec_sex_t sex = malloc (sizeof (struct rec_sex_s));

  if (sex)
    {
      sex->ast = NULL;

      /* Initialize a new parser honouring the requested case mode. */
      sex->parser = rec_sex_parser_new ();
      rec_sex_parser_set_case_insensitive (sex->parser,
                                           case_insensitive);
    }

  return sex;
}
/* Release a selection expression and everything it owns.  NULL is a
   harmless no-op. */
void
rec_sex_destroy (rec_sex_t sex)
{
  if (!sex)
    {
      return;
    }

  if (sex->parser)
    {
      rec_sex_parser_destroy (sex->parser);
    }

  if (sex->ast)
    {
      rec_sex_ast_destroy (sex->ast);
    }

  free (sex); /* yeah! :D */
}
/* Parse EXPR into SEX's AST.  Returns false (leaving the AST untouched)
   when the expression does not parse. */
bool
rec_sex_compile (rec_sex_t sex,
                 const char *expr)
{
  const bool parsed_ok = rec_sex_parser_run (sex->parser, expr);

  if (parsed_ok)
    sex->ast = rec_sex_parser_ast (sex->parser);

  return parsed_ok;
}
/* Evaluate the compiled AST against RECORD, storing the raw result in the
   local `val` and its boolean interpretation in the local `res` (a
   non-zero integer is true; real and string results count as false).
   Requires `sex`, `status`, `val` and `res` to be in scope. */
#define EXEC_AST(RECORD)                        \
  do                                            \
    {                                           \
      val = rec_sex_eval_node (sex,             \
                               (RECORD),        \
                               rec_sex_ast_top (sex->ast), \
                               status);         \
                                                \
      switch (val.type)                         \
        {                                       \
        case REC_SEX_VAL_INT:                   \
          {                                     \
            res = (val.int_val != 0);           \
            break;                              \
          }                                     \
        case REC_SEX_VAL_REAL:                  \
        case REC_SEX_VAL_STR:                   \
          {                                     \
            res = false;                        \
            break;                              \
          }                                     \
        }                                       \
    }                                           \
  while (0)
/* Evaluate SEX against RECORD and render the result as a newly allocated
   string (integer, real or copied string value).  Returns NULL when the
   evaluation fails.  NOTE(review): the asprintf results are not checked
   for allocation failure — presumably acceptable in this codebase. */
char *
rec_sex_eval_str (rec_sex_t sex,
                  rec_record_t record)
{
  char *res;
  struct rec_sex_val_s val;
  bool status;

  /* Clear any memoised field lookups from a previous record. */
  rec_sex_ast_node_unfix (rec_sex_ast_top (sex->ast));

  val = rec_sex_eval_node (sex,
                           record,
                           rec_sex_ast_top (sex->ast),
                           &status);
  if (!status)
    {
      /* Error evaluating the expression. */
      return NULL;
    }

  res = NULL;
  switch (val.type)
    {
    case REC_SEX_VAL_INT:
      {
        asprintf (&res, "%d", val.int_val);
        break;
      }
    case REC_SEX_VAL_REAL:
      {
        asprintf (&res, "%f", val.real_val);
        break;
      }
    case REC_SEX_VAL_STR:
      {
        res = strdup (val.str_val);
        break;
      }
    }

  return res;
}
/* Evaluate SEX against RECORD as a boolean.  First the whole record is
   tried; if that fails and some field name occurs several times, the
   record is re-tried once per occurrence with the other occurrences
   removed, so a match on any single occurrence selects the record.
   *STATUS is set to false on evaluation error (and the result forced to
   false). */
bool
rec_sex_eval (rec_sex_t sex,
              rec_record_t record,
              bool *status)
{
  bool res;
  rec_field_t field;
  rec_field_t wfield;
  rec_record_t wrec;
  rec_mset_iterator_t iter;
  int j, nf;
  struct rec_sex_val_s val;

  res = false;
  wrec = NULL;

  rec_sex_ast_node_unfix (rec_sex_ast_top (sex->ast));

  EXEC_AST (record);
  if (res)
    {
      goto exit;
    }

  rec_record_reset_marks (record);

  iter = rec_mset_iterator (rec_record_mset (record));
  while (rec_mset_iterator_next (&iter, MSET_FIELD, (const void**) &field, NULL))
    {
      nf = rec_record_get_num_fields_by_name (record, rec_field_name (field));

      /* Only multi-occurrence, not-yet-tried field names that appear in
         the expression (and are not used via the #name count syntax)
         trigger the per-occurrence retry. */
      if ((nf > 1)
          && (rec_record_field_mark (record, field) == 0)
          && (rec_sex_ast_name_p (sex->ast, rec_field_name (field), nf))
          && (!rec_sex_ast_hash_name_p (sex->ast, rec_field_name (field))))
        {
          for (j = 0; j < nf; j++)
            {
              wfield = rec_record_get_field_by_name (record,
                                                     rec_field_name (field),
                                                     j);

              if (wrec)
                {
                  rec_record_destroy (wrec);
                }

              rec_record_mark_field (record, wfield, 1);

              /* Build a working copy keeping only occurrence j. */
              wrec = rec_record_dup (record);
              rec_record_remove_field_by_name (wrec,
                                               rec_field_name (field),
                                               -1); /* Delete all. */
              rec_mset_append (rec_record_mset (wrec), MSET_FIELD, (void *) rec_field_dup (wfield), MSET_ANY);

              EXEC_AST(wrec);
              if (res)
                {
                  rec_record_destroy (wrec);
                  /* NOTE(review): this goto skips
                     rec_mset_iterator_free, and the last wrec is not
                     destroyed when the loop ends without a match —
                     both look like small leaks; confirm. */
                  goto exit;
                }
            }
        }
    }
  rec_mset_iterator_free (&iter);

 exit:

  if (!*status)
    {
      res = false;
    }

  return res;
}
/* Debug helper: dump the compiled AST via the owning parser. */
void
rec_sex_print_ast (rec_sex_t sex)
{
  rec_sex_parser_print_ast (sex->parser);
}
/*
* Private functions.
*/
/* Evaluate child NUM of the local `node` into DEST, propagating failure
   by returning the local `res` with *status already false.  Requires
   `sex`, `record`, `node`, `status` and `res` in scope. */
#define GET_CHILD_VAL(DEST,NUM)                                 \
  do                                                            \
    {                                                           \
      (DEST) = rec_sex_eval_node (sex,                          \
                                  record,                       \
                                  rec_sex_ast_node_child (node, (NUM)), \
                                  status);                      \
      if (!*status)                                             \
        {                                                       \
          return res;                                           \
        }                                                       \
    }                                                           \
  while (0)

/* Coerce VAL to an int in DEST.  An empty string becomes 0; a
   non-numeric string is an evaluation error.  (A REC_SEX_VAL_REAL input
   is deliberately not handled here.) */
#define ATOI_VAL(DEST, VAL)                        \
  do                                               \
    {                                              \
      switch ((VAL).type)                          \
        {                                          \
        case REC_SEX_VAL_INT:                      \
          {                                        \
            (DEST) = (VAL).int_val;                \
            break;                                 \
          }                                        \
        case REC_SEX_VAL_STR:                      \
          {                                        \
            if (strcmp ((VAL).str_val, "") == 0)   \
              {                                    \
                (DEST) = 0;                        \
              }                                    \
            else                                   \
              {                                    \
                if (!rec_atoi ((VAL).str_val, &(DEST))) \
                  {                                \
                    *status = false;               \
                    return res;                    \
                  }                                \
              }                                    \
            break;                                 \
          }                                        \
        }                                          \
    }                                              \
  while (0)

/* Coerce VAL to a double in DEST.  Accepts int, real, and numeric
   strings; an empty string becomes 0.0. */
#define ATOD_VAL(DEST, VAL)                        \
  do                                               \
    {                                              \
      switch ((VAL).type)                          \
        {                                          \
        case REC_SEX_VAL_REAL:                     \
          {                                        \
            (DEST) = (VAL).real_val;               \
            break;                                 \
          }                                        \
        case REC_SEX_VAL_INT:                      \
          {                                        \
            (DEST) = (VAL).int_val;                \
            break;                                 \
          }                                        \
        case REC_SEX_VAL_STR:                      \
          {                                        \
            if (strcmp ((VAL).str_val, "") == 0)   \
              {                                    \
                (DEST) = 0.0;                      \
              }                                    \
            else                                   \
              {                                    \
                if (!rec_atod ((VAL).str_val, &(DEST))) \
                  {                                \
                    *status = false;               \
                    return res;                    \
                  }                                \
              }                                    \
            break;                                 \
          }                                        \
        }                                          \
    }                                              \
  while (0)

/* Parse VAL (which must be a string) into a struct timespec in DEST
   using parse_datetime; any non-string or unparsable input is an
   evaluation error. */
#define ATOTS_VAL(DEST, VAL)                            \
  do                                                    \
    {                                                   \
      switch ((VAL).type)                               \
        {                                               \
        case REC_SEX_VAL_REAL:                          \
          {                                             \
            *status = false;                            \
            return res;                                 \
            break;                                      \
          }                                             \
        case REC_SEX_VAL_INT:                           \
          {                                             \
            *status = false;                            \
            return res;                                 \
            break;                                      \
          }                                             \
        case REC_SEX_VAL_STR:                           \
          {                                             \
            if (!parse_datetime (&(DEST), (VAL).str_val, NULL))\
              {                                         \
                *status = false;                        \
                return res;                             \
              }                                         \
                                                        \
            break;                                      \
          }                                             \
        }                                               \
    }                                                   \
  while (0)
/* Recursively evaluate the subtree rooted at NODE against RECORD,
   returning a tagged value.  *STATUS is set to true on entry and to
   false on any type/parse error, in which case the returned value is
   meaningless and must be ignored by the caller. */
struct rec_sex_val_s
rec_sex_eval_node (rec_sex_t sex,
                   rec_record_t record,
                   rec_sex_ast_node_t node,
                   bool *status)
{
  struct rec_sex_val_s res = {0, 0, 0, NULL};
  struct rec_sex_val_s child_val1 = {0, 0, 0, NULL};
  struct rec_sex_val_s child_val2 = {0, 0, 0, NULL};
  struct rec_sex_val_s child_val3 = {0, 0, 0, NULL};

  *status = true;

  switch (rec_sex_ast_node_type (node))
    {
    case REC_SEX_NOVAL:
      {
        fprintf (stderr, "Application bug: REC_SEX_NOVAL node found.\nPlease report this!\n");
        exit (EXIT_FAILURE);
        break;
      }
      /* Operations. */
    case REC_SEX_OP_NEG:
      /* NOTE(review): unary negation shares the addition code path;
         presumably the parser arranges the operands so this is correct —
         confirm against the grammar. */
    case REC_SEX_OP_ADD:
      {
        int op1;
        int op2;
        double op1_real;
        double op2_real;

        GET_CHILD_VAL (child_val1, 0);
        GET_CHILD_VAL (child_val2, 1);

        if (rec_sex_op_real_p (child_val1, child_val2))
          {
            /* Real operation. */
            ATOD_VAL (op1_real, child_val1);
            ATOD_VAL (op2_real, child_val2);
            res.type = REC_SEX_VAL_REAL;
            res.real_val = op1_real + op2_real;
          }
        else
          {
            /* Integer operation. */
            ATOI_VAL (op1, child_val1);
            ATOI_VAL (op2, child_val2);
            res.type = REC_SEX_VAL_INT;
            res.int_val = op1 + op2;
          }

        break;
      }
    case REC_SEX_OP_SUB:
      {
        int op1;
        int op2;
        double op1_real;
        double op2_real;

        GET_CHILD_VAL (child_val1, 0);
        GET_CHILD_VAL (child_val2, 1);

        if (rec_sex_op_real_p (child_val1, child_val2))
          {
            /* Real operation. */
            ATOD_VAL (op1_real, child_val1);
            ATOD_VAL (op2_real, child_val2);
            res.type = REC_SEX_VAL_REAL;
            /* Fixed: this branch previously computed `op1 - op2` using
               the *uninitialized* integer operands (undefined behavior
               and a wrong result for real subtraction). */
            res.real_val = op1_real - op2_real;
          }
        else
          {
            /* Integer operation. */
            ATOI_VAL (op1, child_val1);
            ATOI_VAL (op2, child_val2);
            res.type = REC_SEX_VAL_INT;
            res.int_val = op1 - op2;
          }

        break;
      }
    case REC_SEX_OP_MUL:
      {
        int op1;
        int op2;
        double op1_real;
        double op2_real;

        GET_CHILD_VAL (child_val1, 0);
        GET_CHILD_VAL (child_val2, 1);

        if (rec_sex_op_real_p (child_val1, child_val2))
          {
            /* Real operation. */
            ATOD_VAL (op1_real, child_val1);
            ATOD_VAL (op2_real, child_val2);
            res.type = REC_SEX_VAL_REAL;
            res.real_val = op1_real * op2_real;
          }
        else
          {
            /* Integer operation. */
            ATOI_VAL (op1, child_val1);
            ATOI_VAL (op2, child_val2);
            res.type = REC_SEX_VAL_INT;
            res.int_val = op1 * op2;
          }

        break;
      }
    case REC_SEX_OP_DIV:
      {
        int op1;
        int op2;
        double op1_real;
        double op2_real;

        GET_CHILD_VAL (child_val1, 0);
        GET_CHILD_VAL (child_val2, 1);

        if (rec_sex_op_real_p (child_val1, child_val2))
          {
            /* Real operation.  (Division by zero yields an IEEE inf/nan
               rather than an error, matching the original behavior.) */
            ATOD_VAL (op1_real, child_val1);
            ATOD_VAL (op2_real, child_val2);
            res.type = REC_SEX_VAL_REAL;
            res.real_val = op1_real / op2_real;
          }
        else
          {
            /* Integer operation. */
            ATOI_VAL (op1, child_val1);
            ATOI_VAL (op2, child_val2);
            res.type = REC_SEX_VAL_INT;
            if (op2 != 0)
              {
                res.int_val = op1 / op2;
              }
            else
              {
                /* Error: division by zero */
                *status = false;
                return res;
              }
          }

        break;
      }
    case REC_SEX_OP_MOD:
      {
        int op1;
        int op2;

        GET_CHILD_VAL (child_val1, 0);
        GET_CHILD_VAL (child_val2, 1);

        /* Integer operation. */
        ATOI_VAL (op1, child_val1);
        ATOI_VAL (op2, child_val2);
        res.type = REC_SEX_VAL_INT;
        if (op2 != 0)
          {
            res.int_val = op1 % op2;
          }
        else
          {
            /* Error: division by zero */
            *status = false;
            return res;
          }

        break;
      }
    case REC_SEX_OP_EQL:
      {
        int op1;
        int op2;
        double op1_real;
        double op2_real;

        GET_CHILD_VAL (child_val1, 0);
        GET_CHILD_VAL (child_val2, 1);

        if ((child_val1.type == REC_SEX_VAL_STR)
            && (child_val2.type == REC_SEX_VAL_STR))
          {
            /* String comparison, honouring the parser's case mode. */
            res.type = REC_SEX_VAL_INT;
            if (rec_sex_parser_case_insensitive (sex->parser))
              {
                res.int_val = (strcasecmp (child_val1.str_val,
                                           child_val2.str_val) == 0);
              }
            else
              {
                res.int_val = (strcmp (child_val1.str_val,
                                       child_val2.str_val) == 0);
              }
          }
        else
          {
            if (rec_sex_op_real_p (child_val1, child_val2))
              {
                /* Real comparison. */
                ATOD_VAL (op1_real, child_val1);
                ATOD_VAL (op2_real, child_val2);
                res.type = REC_SEX_VAL_INT;
                res.int_val = op1_real == op2_real;
              }
            else
              {
                /* Integer comparison. */
                ATOI_VAL (op1, child_val1);
                ATOI_VAL (op2, child_val2);
                res.type = REC_SEX_VAL_INT;
                res.int_val = op1 == op2;
              }
          }

        break;
      }
    case REC_SEX_OP_NEQ:
      {
        int op1;
        int op2;
        double op1_real;
        double op2_real;

        GET_CHILD_VAL (child_val1, 0);
        GET_CHILD_VAL (child_val2, 1);

        if ((child_val1.type == REC_SEX_VAL_STR)
            && (child_val2.type == REC_SEX_VAL_STR))
          {
            /* String comparison. */
            res.type = REC_SEX_VAL_INT;
            if (rec_sex_parser_case_insensitive (sex->parser))
              {
                res.int_val = (strcasecmp (child_val1.str_val,
                                           child_val2.str_val) != 0);
              }
            else
              {
                res.int_val = (strcmp (child_val1.str_val,
                                       child_val2.str_val) != 0);
              }
          }
        else
          {
            if (rec_sex_op_real_p (child_val1, child_val2))
              {
                /* Real comparison. */
                ATOD_VAL (op1_real, child_val1);
                ATOD_VAL (op2_real, child_val2);
                res.type = REC_SEX_VAL_INT;
                res.int_val = op1_real != op2_real;
              }
            else
              {
                /* Integer comparison. */
                ATOI_VAL (op1, child_val1);
                ATOI_VAL (op2, child_val2);
                res.type = REC_SEX_VAL_INT;
                res.int_val = op1 != op2;
              }
          }

        break;
      }
    case REC_SEX_OP_MAT:
      {
        GET_CHILD_VAL (child_val1, 0);
        GET_CHILD_VAL (child_val2, 1);

        if ((child_val1.type == REC_SEX_VAL_STR)
            && (child_val2.type == REC_SEX_VAL_STR))
          {
            /* Regexp match: operand 2 is the pattern.  Both operands
               must be strings. */
            res.type = REC_SEX_VAL_INT;
            if (rec_sex_parser_case_insensitive (sex->parser))
              {
                res.int_val =
                  rec_match_insensitive (child_val1.str_val, child_val2.str_val);
              }
            else
              {
                res.int_val =
                  rec_match (child_val1.str_val, child_val2.str_val);
              }
          }
        else
          {
            /* Error. */
            *status = false;
            return res;
          }

        break;
      }
    case REC_SEX_OP_BEFORE:
      {
        struct timespec op1;
        struct timespec op2;
        struct timespec diff;

        GET_CHILD_VAL (child_val1, 0);
        GET_CHILD_VAL (child_val2, 1);
        ATOTS_VAL (op1, child_val1);
        ATOTS_VAL (op2, child_val2);

        /* op1 << op2 iff op1 - op2 is negative. */
        res.type = REC_SEX_VAL_INT;
        res.int_val = rec_timespec_subtract (&diff, &op1, &op2);

        break;
      }
    case REC_SEX_OP_AFTER:
      {
        struct timespec op1;
        struct timespec op2;
        struct timespec diff;

        GET_CHILD_VAL (child_val1, 0);
        GET_CHILD_VAL (child_val2, 1);
        ATOTS_VAL (op1, child_val1);
        ATOTS_VAL (op2, child_val2);

        /* op1 >> op2 iff op1 - op2 is non-negative and non-zero. */
        res.type = REC_SEX_VAL_INT;
        res.int_val = (!rec_timespec_subtract (&diff, &op1, &op2)
                       && ((diff.tv_sec != 0) || (diff.tv_nsec != 0)));

        break;
      }
    case REC_SEX_OP_SAMETIME:
      {
        struct timespec op1;
        struct timespec op2;
        struct timespec diff;

        GET_CHILD_VAL (child_val1, 0);
        GET_CHILD_VAL (child_val2, 1);
        ATOTS_VAL (op1, child_val1);
        ATOTS_VAL (op2, child_val2);

        rec_timespec_subtract (&diff, &op1, &op2);

        res.type = REC_SEX_VAL_INT;
        res.int_val = ((diff.tv_sec == 0) && (diff.tv_nsec == 0));
        break;
      }
    case REC_SEX_OP_IMPLIES:
      {
        int op1;
        int op2;

        GET_CHILD_VAL (child_val1, 0);
        GET_CHILD_VAL (child_val2, 1);

        /* Logical implication: !op1 || op2. */
        ATOI_VAL (op1, child_val1);
        ATOI_VAL (op2, child_val2);
        res.type = REC_SEX_VAL_INT;
        res.int_val = !op1 || (op1 && op2);

        break;
      }
    case REC_SEX_OP_LT:
    case REC_SEX_OP_LTE:
      {
        int op1;
        int op2;
        double op1_real;
        double op2_real;

        GET_CHILD_VAL (child_val1, 0);
        GET_CHILD_VAL (child_val2, 1);

        if (rec_sex_op_real_p (child_val1, child_val2))
          {
            /* Real comparison. */
            ATOD_VAL (op1_real, child_val1);
            ATOD_VAL (op2_real, child_val2);

            res.type = REC_SEX_VAL_INT;
            if (rec_sex_ast_node_type (node) == REC_SEX_OP_LT)
              {
                res.int_val = op1_real < op2_real;
              }
            else
              {
                res.int_val = op1_real <= op2_real;
              }
          }
        else
          {
            /* Integer comparison. */
            ATOI_VAL (op1, child_val1);
            ATOI_VAL (op2, child_val2);

            res.type = REC_SEX_VAL_INT;
            if (rec_sex_ast_node_type (node) == REC_SEX_OP_LT)
              {
                res.int_val = op1 < op2;
              }
            else
              {
                res.int_val = op1 <= op2;
              }
          }

        break;
      }
    case REC_SEX_OP_GT:
    case REC_SEX_OP_GTE:
      {
        int op1;
        int op2;
        double op1_real;
        double op2_real;

        GET_CHILD_VAL (child_val1, 0);
        GET_CHILD_VAL (child_val2, 1);

        if (rec_sex_op_real_p (child_val1, child_val2))
          {
            /* Real comparison. */
            ATOD_VAL (op1_real, child_val1);
            ATOD_VAL (op2_real, child_val2);

            res.type = REC_SEX_VAL_INT;
            if (rec_sex_ast_node_type (node) == REC_SEX_OP_GT)
              {
                res.int_val = op1_real > op2_real;
              }
            else
              {
                res.int_val = op1_real >= op2_real;
              }
          }
        else
          {
            /* Integer comparison. */
            ATOI_VAL (op1, child_val1);
            ATOI_VAL (op2, child_val2);

            res.type = REC_SEX_VAL_INT;
            if (rec_sex_ast_node_type (node) == REC_SEX_OP_GT)
              {
                res.int_val = op1 > op2;
              }
            else
              {
                res.int_val = op1 >= op2;
              }
          }

        break;
      }
    case REC_SEX_OP_AND:
      {
        int op1;
        int op2;

        GET_CHILD_VAL (child_val1, 0);
        GET_CHILD_VAL (child_val2, 1);

        ATOI_VAL (op1, child_val1);
        ATOI_VAL (op2, child_val2);
        res.type = REC_SEX_VAL_INT;
        res.int_val = op1 && op2;

        break;
      }
    case REC_SEX_OP_OR:
      {
        int op1;
        int op2;

        GET_CHILD_VAL (child_val1, 0);
        GET_CHILD_VAL (child_val2, 1);

        ATOI_VAL (op1, child_val1);
        ATOI_VAL (op2, child_val2);
        res.type = REC_SEX_VAL_INT;
        res.int_val = op1 || op2;

        break;
      }
    case REC_SEX_OP_CONCAT:
      {
        size_t str1_size;
        size_t str2_size;

        GET_CHILD_VAL (child_val1, 0);
        GET_CHILD_VAL (child_val2, 1);

        if ((child_val1.type == REC_SEX_VAL_STR)
            && (child_val2.type == REC_SEX_VAL_STR))
          {
            /* Concatenate into a newly malloc'ed string. */
            str1_size = strlen (child_val1.str_val);
            str2_size = strlen (child_val2.str_val);

            res.type = REC_SEX_VAL_STR;
            res.str_val = malloc (str1_size + str2_size + 1);
            memcpy (res.str_val, child_val1.str_val, str1_size);
            memcpy (res.str_val + str1_size, child_val2.str_val, str2_size);
            res.str_val[str1_size + str2_size] = '\0';
          }
        else
          {
            *status = false;
            return res;
          }

        break;
      }
    case REC_SEX_OP_NOT:
      {
        int op;

        GET_CHILD_VAL (child_val1, 0);
        ATOI_VAL (op, child_val1);
        res.type = REC_SEX_VAL_INT;
        res.int_val = !op;

        break;
      }
    case REC_SEX_OP_SHA:
      {
        int n;
        const char *field_name = NULL;
        const char *field_subname = NULL;
        rec_sex_ast_node_t child;

        /* The child should be a Name: #Field counts occurrences of the
           field in the record. */
        child = rec_sex_ast_node_child (node, 0);
        if (rec_sex_ast_node_type (rec_sex_ast_node_child(node, 0))
            != REC_SEX_NAME)
          {
            *status = false;
            return res;
          }

        field_name = rec_sex_ast_node_name (child);
        field_subname = rec_sex_ast_node_subname (child);

        if (field_subname)
          {
            /* Compound a field name from the name/subname pair in the
               AST node. */
            char *effective_name
              = rec_concat_strings (field_name, "_", field_subname);
            n = rec_record_get_num_fields_by_name (record,
                                                   effective_name);
            free (effective_name);
          }
        else
          {
            n = rec_record_get_num_fields_by_name (record, field_name);
          }

        res.type = REC_SEX_VAL_INT;
        res.int_val = n;
        break;
      }
    case REC_SEX_OP_COND:
      {
        int op1;

        /* Both branches are evaluated eagerly; only one is returned. */
        GET_CHILD_VAL (child_val1, 0);
        GET_CHILD_VAL (child_val2, 1);
        GET_CHILD_VAL (child_val3, 2);

        /* Get the boolean value of the first operand. */
        ATOI_VAL (op1, child_val1);

        /* Return the first or the second operand, depending on the
           value of op1. */
        if (op1)
          {
            res = child_val2;
          }
        else
          {
            res = child_val3;
          }

        break;
      }
      /* Values. */
    case REC_SEX_INT:
      {
        res.type = REC_SEX_VAL_INT;
        res.int_val = rec_sex_ast_node_int (node);
        break;
      }
    case REC_SEX_REAL:
      {
        res.type = REC_SEX_VAL_REAL;
        res.real_val = rec_sex_ast_node_real (node);
        break;
      }
    case REC_SEX_STR:
      {
        res.type = REC_SEX_VAL_STR;
        res.str_val = rec_sex_ast_node_str (node);
        break;
      }
    case REC_SEX_NAME:
      {
        rec_field_t field;
        const char *field_name;
        const char *field_subname;
        int index;
        bool tofix;

        if (rec_sex_ast_node_fixed (node))
          {
            /* Reuse the memoised value from a previous lookup with an
               explicit index. */
            res.type = REC_SEX_VAL_STR;
            res.str_val = rec_sex_ast_node_fixed_val (node);
          }
        else
          {
            field_name = rec_sex_ast_node_name (node);
            field_subname = rec_sex_ast_node_subname (node);
            index = rec_sex_ast_node_index (node);
            tofix = (index != -1);
            if (index == -1)
              {
                index = 0;
              }

            /* If there is a subname then the effective field name is
               the concatenation of the name and the subname separated
               by a '_' character.  Otherwise it is just the name. */
            {
              if (field_subname)
                {
                  char *effective_field_name = malloc (sizeof (char) *
                                                       (strlen (field_name) + strlen (field_subname) + 2));
                  memcpy (effective_field_name, field_name, strlen(field_name));
                  effective_field_name[strlen(field_name)] = '_';
                  memcpy (effective_field_name + strlen(field_name) + 1, field_subname, strlen(field_subname) + 1);

                  field = rec_record_get_field_by_name (record, effective_field_name, index);
                }
              else
                {
                  field = rec_record_get_field_by_name (record, field_name, index);
                }
            }

            res.type = REC_SEX_VAL_STR;
            if (field)
              {
                res.str_val = strdup (rec_field_value (field));
              }
            else
              {
                /* No field => "" */
                res.str_val = "";
              }

            if (tofix)
              {
                /* Make this node fixed. */
                rec_sex_ast_node_fix (node, res.str_val);
              }
          }

        break;
      }
    }

  return res;
}
/* Return true if a binary operation on OP1 and OP2 must be carried out
   in real (floating point) arithmetic rather than integer arithmetic.
   A string operand counts as an integer when it parses as one, and as a
   real when it parses as a real but not as an integer.  */
static bool
rec_sex_op_real_p (struct rec_sex_val_s op1,
                   struct rec_sex_val_s op2)
{
  bool promote = true;
  int int_scratch;
  double real_scratch;

  /* Classify the first operand.  The two classifications are mutually
     exclusive: a string that parses as an integer is never counted as
     a real.  */
  const bool op1_is_int =
    (op1.type == REC_SEX_VAL_INT)
    || ((op1.type == REC_SEX_VAL_STR)
        && rec_atoi (op1.str_val, &int_scratch));
  const bool op1_is_real =
    (op1.type == REC_SEX_VAL_REAL)
    || ((op1.type == REC_SEX_VAL_STR)
        && rec_atod (op1.str_val, &real_scratch)
        && (!rec_atoi (op1.str_val, &int_scratch)));

  if (op1_is_int)
    {
      /* An integral first operand forces real arithmetic only when the
         second operand is a real, or a string holding a real that is
         not also an integer.  */
      switch (op2.type)
        {
        case REC_SEX_VAL_REAL:
          promote = true;
          break;
        case REC_SEX_VAL_STR:
          promote = (rec_atod (op2.str_val, &real_scratch)
                     && (!rec_atoi (op2.str_val, &int_scratch)));
          break;
        default: /* REC_SEX_VAL_INT and any other type.  */
          promote = false;
          break;
        }
    }
  else if (op1_is_real)
    {
      /* A real first operand forces real arithmetic unless the second
         operand is a string that does not parse as a real.  */
      switch (op2.type)
        {
        case REC_SEX_VAL_INT:
        case REC_SEX_VAL_REAL:
          promote = true;
          break;
        case REC_SEX_VAL_STR:
          promote = rec_atod (op2.str_val, &real_scratch);
          break;
        default:
          promote = false;
          break;
        }
    }

  /* When OP1 is neither integral nor real (e.g. an unparseable
     string), the default (true) is returned.  */
  return promote;
}
/* End of rec-sex.c */
|
manuelperuzzi/scafi | simulator-gui/src/main/scala/it/unibo/scafi/simulation/gui/view/GuiNodeListeners.scala | /*
* Copyright (C) 2016-2017, <NAME>, <NAME>, and contributors.
* See the LICENCE.txt file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package it.unibo.scafi.simulation.gui.view
import it.unibo.scafi.simulation.gui.controller.Controller
import java.awt._
import java.awt.event.MouseAdapter
import java.awt.event.MouseEvent
/**
 * Mouse listener attached to a [[GuiNode]] (note: a MouseAdapter, not an
 * ActionListener).
 *
 * Handles right-button clicks (show/reposition the node's info panel) and
 * drag gestures (move the node, keeping its info panel alongside).
 */
class GuiNodeListeners private[view](val node: GuiNode) extends MouseAdapter {
  final private val controller: Controller = Controller.getInstance
  // Last mouse-press position inside the node, used as the drag anchor.
  final private val p: Point = new Point

  // Right button (BUTTON3) click: (re)display the info panel next to the node.
  override def mouseClicked(e: MouseEvent) {
    super.mouseClicked(e)
    if (e.getButton == MouseEvent.BUTTON3) {
      repositionsInfoPanel()
    }
  }

  // Remember where the drag started (non-meta presses only).
  override def mousePressed(e: MouseEvent) {
    super.mousePressed(e)
    if (!e.isMetaDown) {
      p.x = e.getX
      p.y = e.getY
    }
  }

  // Translate the node by the mouse delta relative to the press anchor.
  override def mouseDragged(e: MouseEvent) {
    super.mouseDragged(e)
    if (!e.isMetaDown) {
      val pos: Point = node.getLocation
      node.setNodeLocation(pos.x + e.getX - p.x, pos.y + e.getY - p.y)
      if (node.getInfoPanel != null) {
        repositionsInfoPanel()
      }
      // NOTE(review): `pos` is the location captured *before* setNodeLocation,
      // so the controller is notified with the pre-drag position — confirm
      // against Controller.moveNode whether the old or new position is expected.
      controller.moveNode(node, pos)
    }
  }

  // Places the node's info panel beside the node, flipping to the opposite
  // side near the right/bottom screen edges so the panel stays visible.
  private def repositionsInfoPanel() {
    controller.showInfoPanel(node, true)
    val infoP: NodeInfoPanel = node.getInfoPanel
    val screen: Dimension = Toolkit.getDefaultToolkit.getScreenSize
    if (node.getX > screen.getWidth / 2) {
      infoP.setLocation(node.getX - infoP.getSize().width, node.getY)
    }
    else {
      infoP.setLocation(node.getX + node.getSize().width, node.getY)
    }
    if (node.getY > (screen.getHeight / 1.5)) {
      infoP.setLocation(node.getX, node.getY - infoP.getHeight)
    }
  }
}
|
picatrix1899/barghos | barghos-core/src/main/java/org/barghos/core/api/event/CancellableEvent.java | package org.barghos.core.api.event;
/**
 * An {@link Event} whose processing can be aborted by a listener.
 *
 * @author picatrix1899
 */
public interface CancellableEvent extends Event
{
    /**
     * Sets the cancellation status of this event.
     *
     * @param isCancelled Setting this to true marks the event as cancelled.
     * Setting this to false on an already cancelled event might uncancel it;
     * whether that is honoured depends on the implementation.
     */
    public void setCancelled(boolean isCancelled);

    /**
     * Returns the cancellation status of this event.
     *
     * @return True if the event is marked as cancelled. Otherwise false.
     */
    public boolean isCancelled();
}
lixiaoyuner/bym | tyadmin/node_modules/@umijs/plugin-ui-blocks/src/sdk/flagBabelPlugin/fixtures/pro-table/expected.js | import { DownOutlined, PlusOutlined } from '@ant-design/icons';
import { Button, Divider, Dropdown, Menu, message } from 'antd';
import React, { useState, useRef } from 'react';
import { PageHeaderWrapper } from '@ant-design/pro-layout';
import ProTable, { ProColumns, ActionType } from '@ant-design/pro-table';
import { SorterResult } from 'antd/es/table/interface';
import CreateForm from './components/CreateForm';
import UpdateForm, { FormValueType } from './components/UpdateForm';
import { TableListItem } from './data.d';
import { queryRule, updateRule, addRule, removeRule } from './service';
/**
 * Creates a record via `addRule`, showing loading/success/error toasts.
 * @param fields - field values of the record to create
 * @returns true on success, false on failure
 */
const handleAdd = async (fields: TableListItem) => {
  const hide = message.loading('正在添加');
  try {
    await addRule({ ...fields });
    hide();
    message.success('添加成功');
    return true;
  } catch (error) {
    hide();
    message.error('添加失败请重试!');
    return false;
  }
};
/**
 * Updates a record via `updateRule` (only name/desc/key are sent),
 * showing loading/success/error toasts.
 * @param fields - form values of the record to update
 * @returns true on success, false on failure
 */
const handleUpdate = async (fields: FormValueType) => {
  const hide = message.loading('正在配置');
  try {
    await updateRule({
      name: fields.name,
      desc: fields.desc,
      key: fields.key,
    });
    hide();
    message.success('配置成功');
    return true;
  } catch (error) {
    hide();
    message.error('配置失败请重试!');
    return false;
  }
};
/**
 * Deletes the selected records via `removeRule` (batched by key),
 * showing loading/success/error toasts.
 * @param selectedRows - rows selected in the table; a falsy value is a no-op
 * @returns true on success (or nothing to do), false on failure
 */
const handleRemove = async (selectedRows: TableListItem[]) => {
  const hide = message.loading('正在删除');
  if (!selectedRows) return true;
  try {
    await removeRule({
      key: selectedRows.map((row) => row.key),
    });
    hide();
    message.success('删除成功,即将刷新');
    return true;
  } catch (error) {
    hide();
    message.error('删除失败,请重试');
    return false;
  }
};
/**
 * Rule-management table page (generated pro-table fixture: contains
 * GUmiUIFlag markers inserted by the umi block tooling).
 */
const TableList: React.FC<{}> = () => {
  // Current sort expression (e.g. "updatedAt_descend"), fed back as a query param.
  const [sorter, setSorter] = useState<string>('');
  const [createModalVisible, handleModalVisible] = useState<boolean>(false);
  const [updateModalVisible, handleUpdateModalVisible] = useState<boolean>(false);
  // Row currently being edited in the update form; {} means "none".
  const [stepFormValues, setStepFormValues] = useState({});
  const actionRef = useRef<ActionType>();
  const columns: ProColumns<TableListItem>[] = [
    {
      title: '规则名称',
      dataIndex: 'name',
      rules: [
        {
          required: true,
          message: '规则名称为必填项',
        },
      ],
    },
    {
      title: '描述',
      dataIndex: 'desc',
      valueType: 'textarea',
    },
    {
      title: '服务调用次数',
      dataIndex: 'callNo',
      sorter: true,
      hideInForm: true,
      renderText: (val: string) => `${val} 万`,
    },
    {
      title: '状态',
      dataIndex: 'status',
      hideInForm: true,
      valueEnum: {
        0: { text: '关闭', status: 'Default' },
        1: { text: '运行中', status: 'Processing' },
        2: { text: '已上线', status: 'Success' },
        3: { text: '异常', status: 'Error' },
      },
    },
    {
      title: '上次调度时间',
      dataIndex: 'updatedAt',
      sorter: true,
      valueType: 'dateTime',
      hideInForm: true,
    },
    {
      title: '操作',
      dataIndex: 'option',
      valueType: 'option',
      render: (_, record) => (
        <>
          <a
            onClick={() => {
              handleUpdateModalVisible(true);
              setStepFormValues(record);
            }}
          >
            配置
          </a>
          <Divider type="vertical" />
          <a href="">订阅警报</a>
        </>
      ),
    },
  ];
  return (
    <PageHeaderWrapper>
      <GUmiUIFlag filename="/tmp/pages/origin.tsx" index="0" />
      <ProTable<TableListItem>
        headerTitle="查询表格"
        actionRef={actionRef}
        rowKey="key"
        onChange={(_, _filter, _sorter) => {
          const sorterResult = _sorter as SorterResult<TableListItem>;
          if (sorterResult.field) {
            setSorter(`${sorterResult.field}_${sorterResult.order}`);
          }
        }}
        params={{
          sorter,
        }}
        toolBarRender={(action, { selectedRows }) => [
          <Button type="primary" onClick={() => handleModalVisible(true)}>
            <PlusOutlined /> 新建
          </Button>,
          selectedRows && selectedRows.length > 0 && (
            <Dropdown
              overlay={
                <Menu
                  onClick={async (e) => {
                    if (e.key === 'remove') {
                      await handleRemove(selectedRows);
                      action.reload();
                    }
                  }}
                  selectedKeys={[]}
                >
                  <Menu.Item key="remove">批量删除</Menu.Item>
                  <Menu.Item key="approval">批量审批</Menu.Item>
                </Menu>
              }
            >
              <Button>
                批量操作 <DownOutlined />
              </Button>
            </Dropdown>
          ),
        ]}
        tableAlertRender={(selectedRowKeys, selectedRows) => (
          <div>
            已选择 <a style={{ fontWeight: 600 }}>{selectedRowKeys.length}</a> 项
            <span>
              服务调用次数总计 {selectedRows.reduce((pre, item) => pre + item.callNo, 0)} 万
            </span>
          </div>
        )}
        request={(params) => queryRule(params)}
        columns={columns}
        rowSelection={{}}
      />
      <GUmiUIFlag filename="/tmp/pages/origin.tsx" index="1" />
      <CreateForm onCancel={() => handleModalVisible(false)} modalVisible={createModalVisible}>
        <ProTable<TableListItem, TableListItem>
          onSubmit={async (value) => {
            const success = await handleAdd(value);
            if (success) {
              handleModalVisible(false);
              if (actionRef.current) {
                actionRef.current.reload();
              }
            }
          }}
          rowKey="key"
          type="form"
          columns={columns}
          rowSelection={{}}
        />
      </CreateForm>
      <GUmiUIFlag filename="/tmp/pages/origin.tsx" index="2" />
      {stepFormValues && Object.keys(stepFormValues).length ? (
        <UpdateForm
          onSubmit={async (value) => {
            const success = await handleUpdate(value);
            if (success) {
              handleUpdateModalVisible(false);
              setStepFormValues({});
              if (actionRef.current) {
                actionRef.current.reload();
              }
            }
          }}
          onCancel={() => {
            handleUpdateModalVisible(false);
            setStepFormValues({});
          }}
          updateModalVisible={updateModalVisible}
          values={stepFormValues}
        />
      ) : null}
    </PageHeaderWrapper>
  );
};
export default TableList;
|
avdv/daml | ledger/ledger-api-tests/suites/src/main/scala/com/daml/ledger/api/testtool/suites/v1_8/CompletionDeduplicationInfoIT.scala | <filename>ledger/ledger-api-tests/suites/src/main/scala/com/daml/ledger/api/testtool/suites/v1_8/CompletionDeduplicationInfoIT.scala<gh_stars>0
// Copyright (c) 2022 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.testtool.suites.v1_8
import com.daml.ledger.api.SubmissionIdGenerator
import com.daml.ledger.api.testtool.infrastructure.Allocation._
import com.daml.ledger.api.testtool.infrastructure.Assertions.assertDefined
import com.daml.ledger.api.testtool.infrastructure.LedgerTestSuite
import com.daml.ledger.api.testtool.infrastructure.participant.ParticipantTestContext
import com.daml.ledger.api.testtool.suites.v1_8.CompletionDeduplicationInfoIT._
import com.daml.ledger.api.v1.command_service.SubmitAndWaitRequest
import com.daml.ledger.api.v1.command_submission_service.SubmitRequest
import com.daml.ledger.api.v1.commands.Command
import com.daml.ledger.api.v1.completion.Completion
import com.daml.ledger.api.v1.ledger_offset.LedgerOffset
import com.daml.ledger.client.binding
import com.daml.ledger.client.binding.Primitive
import com.daml.ledger.test.model.Test.Dummy
import com.daml.lf.data.Ref
import com.daml.lf.data.Ref.SubmissionId
import com.daml.platform.testing.WithTimeout
import io.grpc.Status
import scala.concurrent.duration.DurationInt
import scala.concurrent.{ExecutionContext, Future}
/**
 * Checks that deduplication-related information (submission id, deduplication
 * period, application id) is propagated to completions by the given service
 * (command service or command submission service).
 */
final class CompletionDeduplicationInfoIT[ServiceRequest](
    service: Service[ServiceRequest]
) extends LedgerTestSuite {

  private val serviceName: String = service.productPrefix

  // Make the suite name unique per tested service.
  override private[testtool] def name = super.name + serviceName

  test(
    shortIdentifier = s"CCDIIncludeDedupInfo$serviceName",
    description = s"Deduplication information is preserved in completions ($serviceName)",
    allocate(SingleParty),
  )(implicit ec => { case Participants(Participant(ledger, party)) =>
    // One submission without an explicit submission id, one with a random one.
    val requestWithoutSubmissionId = service.buildRequest(ledger, party)
    val requestWithSubmissionId = service.buildRequest(ledger, party, Some(RandomSubmissionId))
    for {
      optNoDeduplicationSubmittedCompletion <- service.submitRequest(
        ledger,
        party,
        requestWithoutSubmissionId,
      )
      optSubmissionIdSubmittedCompletion <- service
        .submitRequest(ledger, party, requestWithSubmissionId)
    } yield {
      assertApplicationIdIsPreserved(ledger.applicationId, optNoDeduplicationSubmittedCompletion)
      service.assertCompletion(optNoDeduplicationSubmittedCompletion)
      assertDeduplicationPeriodIsReported(optNoDeduplicationSubmittedCompletion)
      assertSubmissionIdIsPreserved(optSubmissionIdSubmittedCompletion, RandomSubmissionId)
    }
  })
}
private[testtool] object CompletionDeduplicationInfoIT {

  /**
   * Abstracts over the two gRPC services under test so the same scenario can
   * run against both the command service and the command submission service.
   */
  private[testtool] sealed trait Service[ProtoRequestType] extends Serializable with Product {

    // Builds a submission request for a simple Dummy create, optionally
    // carrying an explicit submission id.
    def buildRequest(
        ledger: ParticipantTestContext,
        party: Primitive.Party,
        optSubmissionId: Option[Ref.SubmissionId] = None,
    ): ProtoRequestType

    // Submits the request and returns the resulting completion, if any.
    def submitRequest(
        ledger: ParticipantTestContext,
        party: Primitive.Party,
        request: ProtoRequestType,
    )(implicit ec: ExecutionContext): Future[Option[Completion]]

    // Service-specific assertions on the completion of a submission that
    // carried no explicit submission id.
    def assertCompletion(optCompletion: Option[Completion]): Unit
  }

  case object CommandService extends Service[SubmitAndWaitRequest] {
    override def buildRequest(
        ledger: ParticipantTestContext,
        party: binding.Primitive.Party,
        optSubmissionId: Option[SubmissionId],
    ): SubmitAndWaitRequest = {
      val request = ledger.submitAndWaitRequest(party, simpleCreate(party))
      optSubmissionId
        .map { submissionId =>
          request.update(_.commands.submissionId := submissionId)
        }
        .getOrElse(request)
    }

    override def submitRequest(
        ledger: ParticipantTestContext,
        party: binding.Primitive.Party,
        request: SubmitAndWaitRequest,
    )(implicit ec: ExecutionContext): Future[Option[Completion]] =
      for {
        // Capture the ledger end first so only this submission's completion is read.
        offset <- ledger.currentEnd()
        _ <- ledger.submitAndWait(request)
        completion <- singleCompletionAfterOffset(ledger, party, offset)
      } yield completion

    override def assertCompletion(optCompletion: Option[Completion]): Unit = {
      val completion = assertDefined(optCompletion, "No completion has been produced")
      assert(completion.status.forall(_.code == Status.Code.OK.value()))
      // Even without a client-supplied submission id, the completion must
      // carry a valid one.
      assert(
        Ref.SubmissionId.fromString(completion.submissionId).isRight,
        "Missing or invalid submission ID in completion",
      )
    }
  }

  case object CommandSubmissionService extends Service[SubmitRequest] {
    override def buildRequest(
        ledger: ParticipantTestContext,
        party: binding.Primitive.Party,
        optSubmissionId: Option[SubmissionId],
    ): SubmitRequest = {
      val request = ledger.submitRequest(party, simpleCreate(party))
      optSubmissionId
        .map { submissionId =>
          request.update(_.commands.submissionId := submissionId)
        }
        .getOrElse(request)
    }

    override def submitRequest(
        ledger: ParticipantTestContext,
        party: binding.Primitive.Party,
        request: SubmitRequest,
    )(implicit ec: ExecutionContext): Future[Option[Completion]] =
      for {
        offset <- ledger.currentEnd()
        _ <- ledger.submit(request)
        completion <- singleCompletionAfterOffset(ledger, party, offset)
      } yield completion

    override def assertCompletion(optCompletion: Option[Completion]): Unit = {
      val completion = assertDefined(optCompletion, "No completion has been produced")
      assert(completion.status.forall(_.code == Status.Code.OK.value()))
    }
  }

  // Returns the first completion for `party` past `offset`, or None after a
  // 5 second timeout.
  private def singleCompletionAfterOffset(
      ledger: ParticipantTestContext,
      party: binding.Primitive.Party,
      offset: LedgerOffset,
  )(implicit ec: ExecutionContext): Future[Option[Completion]] =
    WithTimeout(5.seconds)(
      ledger
        .findCompletion(ledger.completionStreamRequest(offset)(party))(_ => true)
        .map(_.map(_.completion))
    )

  private def assertSubmissionIdIsPreserved(
      optCompletion: Option[Completion],
      requestedSubmissionId: Ref.SubmissionId,
  ): Unit = {
    val submissionIdCompletion = assertDefined(optCompletion, "No completion has been produced")
    val actualSubmissionId = submissionIdCompletion.submissionId
    assert(submissionIdCompletion.status.forall(_.code == Status.Code.OK.value()))
    assert(
      actualSubmissionId == requestedSubmissionId,
      "Wrong submission ID in completion, " +
        s"expected: $requestedSubmissionId, actual: $actualSubmissionId",
    )
  }

  private def assertDeduplicationPeriodIsReported(
      optCompletion: Option[Completion]
  ): Unit = {
    val completion = assertDefined(optCompletion, "No completion has been produced")
    assert(completion.status.forall(_.code == Status.Code.OK.value()))
    assert(completion.deduplicationPeriod.isDefined, "The deduplication period was not reported")
  }

  private def assertApplicationIdIsPreserved(
      requestedApplicationId: String,
      optCompletion: Option[Completion],
  ): Unit = {
    val expectedApplicationId = requestedApplicationId
    assertDefined(optCompletion, "No completion has been produced")
    val applicationIdCompletion = optCompletion.get
    assert(applicationIdCompletion.status.forall(_.code == Status.Code.OK.value()))
    val actualApplicationId = applicationIdCompletion.applicationId
    assert(
      Ref.ApplicationId.fromString(actualApplicationId).contains(expectedApplicationId),
      "Wrong application ID in completion, " +
        s"expected: $expectedApplicationId, actual: $actualApplicationId",
    )
  }

  // The simplest possible command: create a Dummy contract for the party.
  private def simpleCreate(party: Primitive.Party): Command = Dummy(party).create.command

  private val RandomSubmissionId =
    Ref.SubmissionId.assertFromString(SubmissionIdGenerator.Random.generate())
}
|
Schinzel/basic-utils | src/main/java/io/schinzel/basicutils/UTF8.java | <reponame>Schinzel/basic-utils<filename>src/main/java/io/schinzel/basicutils/UTF8.java
package io.schinzel.basicutils;
import java.nio.charset.StandardCharsets;
/**
 * Static helpers for converting strings to UTF-8 byte arrays and back.
 * Created by schinzel on 2017-04-30.
 */
public class UTF8 {

    /**
     * Package private constructor as this class should not be instantiated.
     */
    UTF8() {
        throw new RuntimeException(this.getClass().getSimpleName() + " should not be instantiated. " +
                "Use the static methods.");
    }

    /**
     * @param string The string to encode.
     * @return The argument string as a UTF-8 byte array, or null if the
     * argument was null.
     */
    public static byte[] getBytes(String string) {
        if (string == null) {
            return null;
        }
        return string.getBytes(StandardCharsets.UTF_8);
    }

    /**
     * @param bytes The bytes to decode.
     * @return The argument UTF-8 encoded bytes as a string, or null if the
     * argument was null.
     */
    public static String getString(byte[] bytes) {
        if (bytes == null) {
            return null;
        }
        return new String(bytes, StandardCharsets.UTF_8);
    }
}
|
lazycathome/study | collection/encrypt/src/main/java/cn/bigdb/dh/DHJDK.java | <reponame>lazycathome/study<filename>collection/encrypt/src/main/java/cn/bigdb/dh/DHJDK.java
package cn.bigdb.dh;
import java.io.UnsupportedEncodingException;
import java.security.InvalidAlgorithmParameterException;
import java.security.InvalidKeyException;
import java.security.Key;
import java.security.KeyFactory;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.PKCS8EncodedKeySpec;
import java.security.spec.X509EncodedKeySpec;
import javax.crypto.BadPaddingException;
import javax.crypto.Cipher;
import javax.crypto.IllegalBlockSizeException;
import javax.crypto.KeyAgreement;
import javax.crypto.NoSuchPaddingException;
import javax.crypto.interfaces.DHPublicKey;
import javax.crypto.spec.DHParameterSpec;
import javax.crypto.spec.SecretKeySpec;
import org.apache.commons.codec.binary.Base64;
/**
* 基于JDK的DH算法,工作模式采用ECB
*/
public class DHJDK {
private static final String ENCODING = "UTF-8";
private static final String FDC_KEY_ALGORITHM = "DH";//非对称加密密钥算法
private static final String DC_KEY_ALGORITHM = "AES";//产生本地密钥的算法(对称加密密钥算法)
private static final String CIPHER_ALGORITHM = "AES/ECB/PKCS5Padding";//加解密算法 格式:算法/工作模式/填充模式 注意:ECB不使用IV参数
private static final int FDC_KEY_SIZE = 512;//非对称密钥长度(512~1024之间的64的整数倍)
/**
* 生成甲方密钥对
*/
public static KeyPair initKey() throws NoSuchAlgorithmException{
KeyPairGenerator keyPairGenerator = KeyPairGenerator.getInstance(FDC_KEY_ALGORITHM);//密钥对生成器
keyPairGenerator.initialize(FDC_KEY_SIZE);//指定密钥长度
KeyPair keyPair = keyPairGenerator.generateKeyPair();//生成密钥对
return keyPair;
}
/**
* 生成乙方密钥对
* @param key 甲方公钥
*/
public static KeyPair initKey(byte[] key) throws NoSuchAlgorithmException,
InvalidKeySpecException,
InvalidAlgorithmParameterException{
KeyFactory keyFactory = KeyFactory.getInstance(FDC_KEY_ALGORITHM);//密钥工厂
PublicKey publicKey = keyFactory.generatePublic(new X509EncodedKeySpec(key));//还原甲方公钥
DHParameterSpec dHParameterSpec = ((DHPublicKey)publicKey).getParams();
KeyPairGenerator keyPairGenerator = KeyPairGenerator.getInstance(keyFactory.getAlgorithm());//乙方密钥对生成器
keyPairGenerator.initialize(dHParameterSpec);//使用甲方公钥参数初始化乙方密钥对生成器
KeyPair keyPair = keyPairGenerator.generateKeyPair();//生成密钥对
return keyPair;
}
/**
* DH加密
* @param data 带加密数据
* @param keyByte 本地密钥,由getSecretKey(byte[] publicKey, byte[] privateKey)产生
*/
public static byte[] encrypt(String data, byte[] keyByte) throws NoSuchAlgorithmException,
NoSuchPaddingException,
InvalidKeyException,
IllegalBlockSizeException,
BadPaddingException,
UnsupportedEncodingException {
Key key = new SecretKeySpec(keyByte, DC_KEY_ALGORITHM);//生成本地密钥
Cipher cipher = Cipher.getInstance(CIPHER_ALGORITHM);
cipher.init(Cipher.ENCRYPT_MODE, key);//设置加密模式并且初始化key
return cipher.doFinal(data.getBytes(ENCODING));
}
/**
* DH解密
* @param data 待解密数据为字节数组
* @param keyByte 本地密钥,由getSecretKey(byte[] publicKey, byte[] privateKey)产生
*/
public static byte[] decrypt(byte[] data, byte[] keyByte) throws NoSuchAlgorithmException,
NoSuchPaddingException,
InvalidKeyException,
IllegalBlockSizeException,
BadPaddingException {
Key key = new SecretKeySpec(keyByte, DC_KEY_ALGORITHM);//生成本地密钥
Cipher cipher = Cipher.getInstance(CIPHER_ALGORITHM);
cipher.init(Cipher.DECRYPT_MODE, key);
return cipher.doFinal(data);
}
/**
* 根据本方私钥与对方公钥构建本地密钥(即对称加密的密钥)
* @param publicKey 对方公钥
* @param privateKey 本方私钥
*/
public static byte[] getSecretKey(byte[] publicKey, byte[] privateKey) throws NoSuchAlgorithmException,
InvalidKeySpecException,
InvalidKeyException{
KeyFactory keyFactory = KeyFactory.getInstance(FDC_KEY_ALGORITHM);//密钥工厂
PublicKey pubkey = keyFactory.generatePublic(new X509EncodedKeySpec(publicKey));//还原公钥
PrivateKey prikey = keyFactory.generatePrivate(new PKCS8EncodedKeySpec(privateKey));//还原私钥
KeyAgreement keyAgreement = KeyAgreement.getInstance(keyFactory.getAlgorithm());
keyAgreement.init(prikey);
keyAgreement.doPhase(pubkey, true);
return keyAgreement.generateSecret(DC_KEY_ALGORITHM).getEncoded();//生成本地密钥(对称加密的密钥)
}
/**
* 获取公钥
*/
public static byte[] getPublicKey(KeyPair keyPair){
return keyPair.getPublic().getEncoded();
}
/**
* 获取私钥
*/
public static byte[] getPrivateKey(KeyPair keyPair){
return keyPair.getPrivate().getEncoded();
}
/**
* 测试
*/
public static void main(String[] args) throws NoSuchAlgorithmException,
InvalidKeySpecException,
InvalidAlgorithmParameterException,
InvalidKeyException,
NoSuchPaddingException,
IllegalBlockSizeException,
BadPaddingException,
UnsupportedEncodingException {
byte[] pubKey1;//甲方公钥
byte[] priKey1;//甲方私钥
byte[] key1;//甲方本地密钥
byte[] pubKey2;//乙方公钥
byte[] priKey2;//乙方私钥
byte[] key2;//乙方本地密钥
/*********************测试是否可以正确生成以上6个key,以及key1与key2是否相等*********************/
KeyPair keyPair1 = DHJDK.initKey();//生成甲方密钥对
pubKey1 = DHJDK.getPublicKey(keyPair1);
priKey1 = DHJDK.getPrivateKey(keyPair1);
KeyPair keyPair2 = DHJDK.initKey(pubKey1);//根据甲方公钥生成乙方密钥对
pubKey2 = DHJDK.getPublicKey(keyPair2);
priKey2 = DHJDK.getPrivateKey(keyPair2);
key1 = DHJDK.getSecretKey(pubKey2, priKey1);//使用对方公钥和自己私钥构建本地密钥
key2 = DHJDK.getSecretKey(pubKey1, priKey2);//使用对方公钥和自己私钥构建本地密钥
System.out.println("甲方公钥pubKey1-->"+Base64.encodeBase64String(pubKey1)+"@@pubKey1.length-->"+pubKey1.length);
System.out.println("甲方私钥priKey1-->"+Base64.encodeBase64String(priKey1)+"@@priKey1.length-->"+priKey1.length);
System.out.println("乙方公钥pubKey2-->"+Base64.encodeBase64String(pubKey2)+"@@pubKey2.length-->"+pubKey2.length);
System.out.println("乙方私钥priKey2-->"+Base64.encodeBase64String(priKey2)+"@@priKey2.length-->"+priKey2.length);
System.out.println("甲方密钥key1-->"+Base64.encodeBase64String(key1));
System.out.println("乙方密钥key2-->"+Base64.encodeBase64String(key2));
/*********************测试甲方使用本地密钥加密数据向乙方发送,乙方使用本地密钥解密数据*********************/
System.out.println("甲方-->乙方");
String data = "找一个好姑娘啊!";
byte[] encodeStr = DHJDK.encrypt(data, key1);
System.out.println("甲方加密后的数据-->"+Base64.encodeBase64String(encodeStr));
byte[] decodeStr = DHJDK.decrypt(encodeStr, key2);
System.out.println("乙方解密后的数据-->"+new String(decodeStr,"UTF-8"));
/*********************测试乙方使用本地密钥加密数据向甲方发送,甲方使用本地密钥解密数据*********************/
System.out.println("乙方-->甲方");
String data2 = "找一个好姑娘啊!";
byte[] encodeStr2 = DHJDK.encrypt(data2, key2);
System.out.println("乙方加密后的数据-->"+Base64.encodeBase64String(encodeStr2));
byte[] decodeStr2 = DHJDK.decrypt(encodeStr, key1);
System.out.println("甲方解密后的数据-->"+new String(decodeStr2,"UTF-8"));
}
} |
bonybody/2020_hew_app | front/plugins/my-auth/my-auth.js | <reponame>bonybody/2020_hew_app<gh_stars>1-10
/**
 * Nuxt plugin entry point: wraps the auth module in a MyAuth helper and
 * injects it application-wide under the `myAuth` key.
 */
export default function ({ $auth, redirect }, inject) {
  inject('myAuth', new MyAuth($auth, redirect))
}
/**
 * Thin convenience wrapper around the nuxt-auth module instance.
 */
class MyAuth {
  /**
   * @param {object} auth - the nuxt-auth instance ($auth)
   * @param {Function} redirect - Nuxt's redirect helper
   */
  constructor(auth, redirect) {
    this.auth = auth
    this.redirect = redirect
  }

  /**
   * Logs in with the `local` strategy.
   * Resolves with the login response on success; on failure it resolves
   * (not rejects) with the error object, preserving the previous handler
   * behaviour.
   * @returns {Promise<*>}
   */
  login(email, password) {
    // Fix: return the promise so callers can await the outcome
    // (previously the chain was dropped and login always returned undefined).
    return this.auth.loginWith('local', {
      data: {
        username: email,
        password: password
      }
    })
      .then(
        (response) => response,
        (error) => error
      )
  }

  /**
   * @returns the authenticated user, or a guest placeholder when logged out
   */
  user() {
    if (this.loggedIn()) {
      return this.auth.user
    }
    return { id: 0, name: 'guest' }
  }

  /** @returns {boolean} whether a user is currently authenticated */
  loggedIn() {
    return this.auth.loggedIn
  }

  /**
   * Logs the current user out.
   * @returns {Promise<*>} the auth module's logout result
   */
  async logout() {
    return await this.auth.logout()
  }

  /** @returns the current access token from the active auth strategy */
  getToken() {
    return this.auth.strategy.token.get()
  }
}
fscheung/drill | exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReader.java | <reponame>fscheung/drill
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.store.json;
import static org.apache.drill.test.TestBuilder.listOf;
import static org.apache.drill.test.TestBuilder.mapOf;
import static org.junit.Assert.assertEquals;
import java.io.BufferedOutputStream;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.nio.file.Paths;
import org.apache.drill.categories.RowSetTest;
import org.apache.drill.common.util.DrillFileUtils;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.proto.UserBitShared;
import org.apache.drill.exec.store.easy.json.JSONRecordReader;
import org.apache.drill.exec.util.JsonStringHashMap;
import org.apache.drill.exec.util.Text;
import org.apache.drill.shaded.guava.com.google.common.base.Charsets;
import org.apache.drill.shaded.guava.com.google.common.io.Files;
import org.apache.drill.test.BaseTestQuery;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Some tests previously here have moved, and been rewritten to use
* the newer test framework. Find them in
* <tt>org.apache.drill.exec.store.json</tt>:
* <ul>
* <li><tt>TestJsonReaderFns</tt></li>
* <li><tt>TestJsonReaderQuery</tt></li>
* </ul>
*/
@Category(RowSetTest.class)
public class TestJsonReader extends BaseTestQuery {
private static final Logger logger = LoggerFactory.getLogger(TestJsonReader.class);
@BeforeClass
public static void setupTestFiles() {
  // Copy the JSON fixtures used by this suite into the test workspace root.
  dirTestWatcher.copyResourceToRoot(Paths.get("store", "json"));
  dirTestWatcher.copyResourceToRoot(Paths.get("vector","complex", "writer"));
}
// Toggles the V2 JSON reader for the current session.
private void enableV2Reader(boolean enable) {
  alterSession(ExecConstants.ENABLE_V2_JSON_READER_KEY, enable);
}
// Restores the V2 JSON reader session option to its default.
private void resetV2Reader() {
  resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
}
/** A test body that can be executed under both JSON reader versions. */
public interface TestWrapper {
  void apply() throws Exception;
}
/**
 * Runs the given test body twice — once with the V1 JSON reader and once
 * with the V2 reader — restoring the session option afterwards.
 */
public void runBoth(TestWrapper wrapper) throws Exception {
  try {
    enableV2Reader(false);
    wrapper.apply();
    enableV2Reader(true);
    wrapper.apply();
  } finally {
    resetV2Reader();
  }
}
@Test
public void schemaChange() throws Exception {
  // Exercise the schema-change query under both JSON reader versions.
  runBoth(this::doSchemaChange);
}
// Selecting a column whose shape changes across files in the directory
// must not fail.
private void doSchemaChange() throws Exception {
  test("select b from dfs.`vector/complex/writer/schemaChange/`");
}
@Test
public void testSplitAndTransferFailure() throws Exception {
  // Exercise the flatten/split-and-transfer scenario under both reader versions.
  runBoth(this::doTestSplitAndTransferFailure);
}
// Flattening lists that start with null/empty entries must not fail the
// underlying splitAndTransfer; checked for plain lists, repeated varchar
// inside a map, and repeated maps.
private void doTestSplitAndTransferFailure() throws Exception {
  final String testVal = "a string";
  testBuilder()
      .sqlQuery("select flatten(config) as flat from cp.`store/json/null_list.json`")
      .ordered()
      .baselineColumns("flat")
      .baselineValues(listOf())
      .baselineValues(listOf(testVal))
      .go();
  test("select flatten(config) as flat from cp.`store/json/null_list_v2.json`");
  testBuilder()
      .sqlQuery("select flatten(config) as flat from cp.`store/json/null_list_v2.json`")
      .ordered()
      .baselineColumns("flat")
      .baselineValues(mapOf("repeated_varchar", listOf()))
      .baselineValues(mapOf("repeated_varchar", listOf(testVal)))
      .go();
  testBuilder()
      .sqlQuery("select flatten(config) as flat from cp.`store/json/null_list_v3.json`")
      .ordered()
      .baselineColumns("flat")
      .baselineValues(mapOf("repeated_map", listOf(mapOf("repeated_varchar", listOf()))))
      .baselineValues(mapOf("repeated_map", listOf(mapOf("repeated_varchar", listOf(testVal)))))
      .go();
}
@Test // DRILL-1824
public void schemaChangeValidate() throws Exception {
  // Validate the actual baseline values of the schema-change query.
  runBoth(this::doSchemaChangeValidate);
}
// Column `b` is absent in some files, an empty map in others, and a
// populated map in the rest; all three shapes must come back correctly.
private void doSchemaChangeValidate() throws Exception {
  testBuilder()
      .sqlQuery("select b from dfs.`vector/complex/writer/schemaChange/`")
      .unOrdered()
      .baselineColumns("b")
      .baselineValues(null)
      .baselineValues(null)
      .baselineValues(mapOf())
      .baselineValues(mapOf("x", 1L, "y", 2L))
      .build()
      .run();
}
/**
 * Runs each query against the given JSON resource file and asserts the
 * produced row count, logging the source data, each query, and its result.
 */
public void runTestsOnFile(String filename, UserBitShared.QueryType queryType, String[] queries, long[] rowCounts) throws Exception {
  logger.debug("===================");
  logger.debug("source data in json");
  logger.debug("===================");
  logger.debug(Files.asCharSource(DrillFileUtils.getResourceAsFile(filename), Charsets.UTF_8).read());
  for (int queryIndex = 0; queryIndex < queries.length; queryIndex++) {
    final String query = queries[queryIndex];
    logger.debug("=====");
    logger.debug("query");
    logger.debug("=====");
    logger.debug(query);
    logger.debug("======");
    logger.debug("result");
    logger.debug("======");
    final int actualRowCount = testRunAndPrint(queryType, query);
    assertEquals(rowCounts[queryIndex], actualRowCount);
    logger.debug("\n");
  }
}
// TODO: Union not yet supported in V2.
@Test
public void testSelectStarWithUnionType() throws Exception {
  // Mixed-type columns must come back as union values when the union type
  // is enabled; V1 reader is forced because V2 lacks union support.
  try {
    testBuilder()
        .sqlQuery("select * from cp.`jsoninput/union/a.json`")
        .ordered()
        .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
        .optionSettingQueriesForTestQuery("alter session set `store.json.enable_v2_reader` = false")
        .baselineColumns("field1", "field2")
        .baselineValues(
            1L, 1.2
        )
        .baselineValues(
            listOf(2L), 1.2
        )
        .baselineValues(
            mapOf("inner1", 3L, "inner2", 4L), listOf(3L, 4.0, "5")
        )
        .baselineValues(
            mapOf("inner1", 3L,
                "inner2", listOf(
                    mapOf(
                        "innerInner1", 1L,
                        "innerInner2",
                        listOf(
                            3L,
                            "a"
                        )
                    )
                )
            ),
            listOf(
                mapOf("inner3", 7L),
                4.0,
                "5",
                mapOf("inner4", 9L),
                listOf(
                    mapOf(
                        "inner5", 10L,
                        "inner6", 11L
                    ),
                    mapOf(
                        "inner5", 12L,
                        "inner7", 13L
                    )
                )
            )
        ).go();
  } finally {
    resetSessionOption(ExecConstants.ENABLE_UNION_TYPE_KEY);
    resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
  }
}
// TODO: Union not yet supported in V2.
@Test
public void testSelectFromListWithCase() throws Exception {
  // Indexing into a union-typed list inside a CASE expression must yield
  // the element with its concrete type (BIGINT here).
  try {
    testBuilder()
        .sqlQuery("select a, typeOf(a) `type` from " +
            "(select case when is_list(field2) then field2[4][1].inner7 end a " +
            "from cp.`jsoninput/union/a.json`) where a is not null")
        .ordered()
        .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
        .optionSettingQueriesForTestQuery("alter session set `store.json.enable_v2_reader` = false")
        .baselineColumns("a", "type")
        .baselineValues(13L, "BIGINT")
        .go();
  } finally {
    resetSessionOption(ExecConstants.ENABLE_UNION_TYPE_KEY);
    resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
  }
}
// TODO: Union not yet supported in V2.
@Test
public void testTypeCase() throws Exception {
  // Extracts a BIGINT from a union column regardless of whether a given row
  // holds a scalar, a list, or a map.
  String sql = "select case when is_bigint(field1) " +
      "then field1 when is_list(field1) then field1[0] " +
      "when is_map(field1) then t.field1.inner1 end f1 from cp.`jsoninput/union/a.json` t";
  try {
    testBuilder()
        .sqlQuery(sql)
        .ordered()
        .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
        .optionSettingQueriesForTestQuery("alter session set `store.json.enable_v2_reader` = false")
        .baselineColumns("f1")
        .baselineValues(1L)
        .baselineValues(2L)
        .baselineValues(3L)
        .baselineValues(3L)
        .go();
  } finally {
    resetSessionOption(ExecConstants.ENABLE_UNION_TYPE_KEY);
    resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
  }
}
// TODO: Union not yet supported in V2.
@Test
public void testSumWithTypeCase() throws Exception {
  // Aggregates over a type-dispatching CASE on a union column.
  String sql = "select sum(cast(f1 as bigint)) sum_f1 from " +
      "(select case when is_bigint(field1) then field1 " +
      "when is_list(field1) then field1[0] when is_map(field1) then t.field1.inner1 end f1 " +
      "from cp.`jsoninput/union/a.json` t)";
  try {
    testBuilder()
        .sqlQuery(sql)
        .ordered()
        .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
        .optionSettingQueriesForTestQuery("alter session set `store.json.enable_v2_reader` = false")
        .baselineColumns("sum_f1")
        .baselineValues(9L)
        .go();
  } finally {
    resetSessionOption(ExecConstants.ENABLE_UNION_TYPE_KEY);
    resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
  }
}
// TODO: Union not yet supported in V2.
@Test
public void testUnionExpressionMaterialization() throws Exception {
  // Adds two union-typed columns; the result type is materialized per row.
  try {
    testBuilder()
        .sqlQuery("select a + b c from cp.`jsoninput/union/b.json`")
        .ordered()
        .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
        .optionSettingQueriesForTestQuery("alter session set `store.json.enable_v2_reader` = false")
        .baselineColumns("c")
        // BIGINT + BIGINT stays BIGINT; any DOUBLE operand promotes the sum.
        .baselineValues(3L)
        .baselineValues(7.0)
        .baselineValues(11.0)
        .go();
  } finally {
    resetSessionOption(ExecConstants.ENABLE_UNION_TYPE_KEY);
    resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
  }
}
// TODO: Union not yet supported in V2.
@Test
public void testSumMultipleBatches() throws Exception {
  File table_dir = dirTestWatcher.makeTestTmpSubDir(Paths.get("multi_batch"));
  // try-with-resources: the original leaked the stream if a write threw
  // before the explicit close(); close() also flushes, so the manual flush
  // is no longer needed.
  try (BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(new File(table_dir, "a.json")))) {
    // Interleave map-typed and bigint-typed rows to force a union column
    // spanning multiple record batches.
    for (int i = 0; i < 10000; i++) {
      os.write("{ type : \"map\", data : { a : 1 } }\n".getBytes());
      os.write("{ type : \"bigint\", data : 1 }\n".getBytes());
    }
  }
  try {
    testBuilder()
        .sqlQuery("select sum(cast(case when `type` = 'map' then t.data.a else data end as bigint)) `sum` from dfs.tmp.multi_batch t")
        .ordered()
        .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
        .optionSettingQueriesForTestQuery("alter session set `store.json.enable_v2_reader` = false")
        .baselineColumns("sum")
        .baselineValues(20000L)
        .go();
  } finally {
    resetSessionOption(ExecConstants.ENABLE_UNION_TYPE_KEY);
    resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
  }
}
// TODO: Union not yet supported in V2.
@Test
public void testSumFilesWithDifferentSchema() throws Exception {
  File table_dir = dirTestWatcher.makeTestTmpSubDir(Paths.get("multi_file"));
  // try-with-resources: the original reused one stream variable and leaked
  // it if any write threw before the explicit close().
  try (BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(new File(table_dir, "a.json")))) {
    // One file where `data` is always a map...
    for (int i = 0; i < 10000; i++) {
      os.write("{ type : \"map\", data : { a : 1 } }\n".getBytes());
    }
  }
  try (BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(new File(table_dir, "b.json")))) {
    // ...and one where it is always a bigint, forcing a cross-file union.
    for (int i = 0; i < 10000; i++) {
      os.write("{ type : \"bigint\", data : 1 }\n".getBytes());
    }
  }
  try {
    testBuilder()
        .sqlQuery("select sum(cast(case when `type` = 'map' then t.data.a else data end as bigint)) `sum` from dfs.tmp.multi_file t")
        .ordered()
        .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
        .optionSettingQueriesForTestQuery("alter session set `store.json.enable_v2_reader` = false")
        .baselineColumns("sum")
        .baselineValues(20000L)
        .go();
  } finally {
    resetSessionOption(ExecConstants.ENABLE_UNION_TYPE_KEY);
    resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
  }
}
// V1 version of the test. See TestJsonReaderQueries for the V2 version.
@Test
public void drill_4032() throws Exception {
  File table_dir = dirTestWatcher.makeTestTmpSubDir(Paths.get("drill_4032"));
  table_dir.mkdir(); // presumably redundant after makeTestTmpSubDir — TODO confirm and drop
  // try-with-resources: the original leaked the streams if a write threw
  // before the explicit close().
  try (BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(new File(table_dir, "a.json")))) {
    // One file where col2 is sometimes a map...
    os.write("{\"col1\": \"val1\",\"col2\": null}".getBytes());
    os.write("{\"col1\": \"val1\",\"col2\": {\"col3\":\"abc\", \"col4\":\"xyz\"}}".getBytes());
  }
  try (BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(new File(table_dir, "b.json")))) {
    // ...and one where it is always null, forcing a schema change across files.
    os.write("{\"col1\": \"val1\",\"col2\": null}".getBytes());
    os.write("{\"col1\": \"val1\",\"col2\": null}".getBytes());
  }
  runBoth(() -> testNoResult("select t.col2.col3 from dfs.tmp.drill_4032 t"));
}
@Test // todo: place this logic to beforeClass. And divide doDrill_4479 into 3 tests
public void drill_4479() throws Exception {
  File table_dir = dirTestWatcher.makeTestTmpSubDir(Paths.get("drill_4479"));
  table_dir.mkdir(); // presumably redundant after makeTestTmpSubDir — TODO confirm and drop
  // try-with-resources: the original leaked the stream if a write threw
  // before the explicit close().
  try (BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(new File(table_dir, "mostlynulls.json")))) {
    // Create an entire batch of null values for 3 columns
    for (int i = 0; i < JSONRecordReader.DEFAULT_ROWS_PER_BATCH; i++) {
      os.write("{\"a\": null, \"b\": null, \"c\": null}".getBytes());
    }
    // Add a row with {bigint, float, string} values
    os.write("{\"a\": 123456789123, \"b\": 99.999, \"c\": \"Hello World\"}".getBytes());
  }
  runBoth(this::doDrill_4479);
}
// Validates all-text-mode behavior over a table that is almost entirely nulls
// (one full batch of them) followed by a single typed row: grouping and
// aggregation must still see the late-arriving values as text.
private void doDrill_4479() throws Exception {
  try {
    // Group by the mostly-null column alone.
    testBuilder()
        .sqlQuery("select c, count(*) as cnt from dfs.tmp.drill_4479 t group by c")
        .ordered()
        .optionSettingQueriesForTestQuery("alter session set `store.json.all_text_mode` = true")
        .baselineColumns("c", "cnt")
        .baselineValues(null, 4096L)
        .baselineValues("Hello World", 1L)
        .go();
    // Group by all three columns; all-text mode renders numbers as strings.
    testBuilder()
        .sqlQuery("select a, b, c, count(*) as cnt from dfs.tmp.drill_4479 t group by a, b, c")
        .ordered()
        .optionSettingQueriesForTestQuery("alter session set `store.json.all_text_mode` = true")
        .baselineColumns("a", "b", "c", "cnt")
        .baselineValues(null, null, null, 4096L)
        .baselineValues("123456789123", "99.999", "Hello World", 1L)
        .go();
    // max() over the text columns ignores the nulls.
    testBuilder()
        .sqlQuery("select max(a) as x, max(b) as y, max(c) as z from dfs.tmp.drill_4479 t")
        .ordered()
        .optionSettingQueriesForTestQuery("alter session set `store.json.all_text_mode` = true")
        .baselineColumns("x", "y", "z")
        .baselineValues("123456789123", "99.999", "Hello World")
        .go();
  } finally {
    resetSessionOption(ExecConstants.JSON_ALL_TEXT_MODE);
  }
}
@Test
public void testFlattenEmptyArrayWithAllTextMode() throws Exception {
  // Seed a document containing only empty arrays, then exercise flatten()
  // under both all_text_mode settings for both reader versions.
  File json = new File(dirTestWatcher.getRootDir(), "empty_array_all_text_mode.json");
  try (BufferedWriter writer = new BufferedWriter(new FileWriter(json))) {
    writer.write("{ \"a\": { \"b\": { \"c\": [] }, \"c\": [] } }");
  }
  runBoth(this::doTestFlattenEmptyArrayWithAllTextMode);
}
// flatten() over an empty array must produce an empty result set whether
// all_text_mode is on or off.
private void doTestFlattenEmptyArrayWithAllTextMode() throws Exception {
  String sql = "select flatten(t.a.b.c) as c from dfs.`empty_array_all_text_mode.json` t";
  try {
    for (String allTextMode : new String[] {"true", "false"}) {
      testBuilder()
          .sqlQuery(sql)
          .unOrdered()
          .optionSettingQueriesForTestQuery("alter session set `store.json.all_text_mode` = " + allTextMode)
          .expectsEmptyResultSet()
          .go();
    }
  } finally {
    resetSessionOption(ExecConstants.JSON_ALL_TEXT_MODE);
  }
}
@Test
public void testFlattenEmptyArrayWithUnionType() throws Exception {
  // Seed a document containing only empty arrays, then exercise flatten()
  // with the union type enabled, for both reader versions.
  File json = new File(dirTestWatcher.getRootDir(), "empty_array.json");
  try (BufferedWriter writer = new BufferedWriter(new FileWriter(json))) {
    writer.write("{ \"a\": { \"b\": { \"c\": [] }, \"c\": [] } }");
  }
  runBoth(this::doTestFlattenEmptyArrayWithUnionType);
}
// flatten() over an empty array must stay empty with the union type enabled,
// both with and without all_text_mode stacked on top.
private void doTestFlattenEmptyArrayWithUnionType() throws Exception {
  try {
    String query = "select flatten(t.a.b.c) as c from dfs.`empty_array.json` t";
    // Union type alone.
    testBuilder()
        .sqlQuery(query)
        .unOrdered()
        .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
        .expectsEmptyResultSet()
        .go();
    // Union type combined with all_text_mode.
    testBuilder()
        .sqlQuery(query)
        .unOrdered()
        .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
        .optionSettingQueriesForTestQuery("alter session set `store.json.all_text_mode` = true")
        .expectsEmptyResultSet()
        .go();
  } finally {
    resetSessionOption(ExecConstants.JSON_ALL_TEXT_MODE);
    resetSessionOption(ExecConstants.ENABLE_UNION_TYPE_KEY);
  }
}
@Test // DRILL-5521
public void testKvgenWithUnionAll() throws Exception {
  // Seed a single-row table with a map column for the kvgen/union-all check.
  final String fileName = "map.json";
  File target = new File(dirTestWatcher.getRootDir(), fileName);
  try (BufferedWriter writer = new BufferedWriter(new FileWriter(target))) {
    writer.write("{\"rk\": \"a\", \"m\": {\"a\":\"1\"}}");
  }
  runBoth(() -> doTestKvgenWithUnionAll(fileName));
}
// kvgen() over the union of a real map column and a JSON-converted null map
// must yield one row per input branch.
private void doTestKvgenWithUnionAll(String fileName) throws Exception {
  String query = String.format("select kvgen(m) as res from (select m from dfs.`%s` union all " +
      "select convert_from('{\"a\" : null}' ,'json') as m from (values(1)))", fileName);
  int rows = testSql(query);
  assertEquals("Row count should match", 2, rows);
}
@Test // DRILL-4264
public void testFieldWithDots() throws Exception {
  // Seed a document whose field names literally contain dots, alongside
  // genuinely nested fields with the same spelled-out path.
  final String fileName = "table.json";
  File target = new File(dirTestWatcher.getRootDir(), fileName);
  try (BufferedWriter writer = new BufferedWriter(new FileWriter(target))) {
    writer.write("{\"rk.q\": \"a\", \"m\": {\"a.b\":\"1\", \"a\":{\"b\":\"2\"}, \"c\":\"3\"}}");
  }
  runBoth(() -> doTestFieldWithDots(fileName));
}
// Backticked/bracketed names must address the literal dotted field, while
// plain dotted paths must address the nested structure.
private void doTestFieldWithDots(String fileName) throws Exception {
  testBuilder()
      .sqlQuery("select t.m.`a.b` as a,\n" +
          "t.m.a.b as b,\n" +
          "t.m['a.b'] as c,\n" +
          "t.rk.q as d,\n" +
          "t.`rk.q` as e\n" +
          "from dfs.`%s` t", fileName)
      .unOrdered()
      .baselineColumns("a", "b", "c", "d", "e")
      // d is null: rk is not a map, so t.rk.q resolves to nothing.
      .baselineValues("1", "2", "1", null, "a")
      .go();
}
// TODO: Union not yet supported in V2.
@Test // DRILL-6020
public void testUntypedPathWithUnion() throws Exception {
  // Column rk.a is a map in one row and a varchar in the next; with the
  // union type enabled both shapes must come back as-is.
  String fileName = "table.json";
  try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(dirTestWatcher.getRootDir(), fileName)))) {
    writer.write("{\"rk\": {\"a\": {\"b\": \"1\"}}}");
    writer.write("{\"rk\": {\"a\": \"2\"}}");
  }
  // Expected map value for the first row.
  JsonStringHashMap<String, Text> map = new JsonStringHashMap<>();
  map.put("b", new Text("1"));
  try {
    testBuilder()
        .sqlQuery("select t.rk.a as a from dfs.`%s` t", fileName)
        .ordered()
        .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type`=true")
        .optionSettingQueriesForTestQuery("alter session set `store.json.enable_v2_reader` = false")
        .baselineColumns("a")
        .baselineValues(map)
        .baselineValues("2")
        .go();
  } finally {
    resetSessionOption(ExecConstants.ENABLE_UNION_TYPE_KEY);
    resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
  }
}
@Test
public void testConvertFromJson() throws Exception {
  // convert_from(..., 'json') over a TSV column, where the interesting row
  // (with the extra `v` array) only appears after a full batch of plain rows.
  String fileName = "table.tsv";
  try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(dirTestWatcher.getRootDir(), fileName)))) {
    for (int i = 0; i < JSONRecordReader.DEFAULT_ROWS_PER_BATCH; i++) {
      writer.write("{\"id\":\"1\"}\n");
    }
    writer.write("{\"id\":\"2\",\"v\":[\"abc\"]}");
  }
  String sql = "SELECT t.m.id AS id, t.m.v[0] v FROM \n" +
      "(SELECT convert_from(columns[0], 'json') AS m FROM dfs.`%s`) t\n" +
      "where t.m.id='2'";
  testBuilder()
      .sqlQuery(sql, fileName)
      .unOrdered()
      .baselineColumns("id", "v")
      .baselineValues("2", "abc")
      .go();
}
@Test // DRILL-7821
public void testEmptyObjectInference() throws Exception {
  // `data` is an empty object in one element and an empty string in the
  // next; schema inference must settle on the map type for both.
  String fileName = "emptyObject.json";
  try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(dirTestWatcher.getRootDir(), fileName)))) {
    writer.write("{\"sample\": [{\"data\": {}},{\"data\": \"\"}]}");
  }
  String sql = "SELECT * from dfs.`%s` t";
  testBuilder()
      .sqlQuery(sql, fileName)
      .ordered()
      .baselineColumns("sample")
      .baselineValues(
          listOf(
              mapOf(
                  "data", mapOf()
              ),
              mapOf(
                  "data", mapOf()
              )
          )
      )
      .go();
}
@Test // DRILL-7821
public void testFilledObjectInference() throws Exception {
  // Like testEmptyObjectInference, but the first `data` is a populated map;
  // the empty-string sibling must still be read as an (empty) map.
  String fileName = "filledObject.json";
  try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(dirTestWatcher.getRootDir(), fileName)))) {
    writer.write("{\"sample\": [{\"data\": {\"foo\": \"bar\"}},{\"data\": \"\"}]}");
  }
  String sql = "SELECT * from dfs.`%s` t";
  testBuilder()
      .sqlQuery(sql, fileName)
      .ordered()
      .baselineColumns("sample")
      .baselineValues(
          listOf(
              mapOf(
                  "data", mapOf(
                      "foo", "bar"
                  )
              ),
              mapOf(
                  "data", mapOf()
              )
          )
      )
      .go();
}
}
|
besscroft/imeta-cloud | pisces-auth/src/main/java/com/besscroft/pisces/auth/exception/Oauth2ExceptionHandler.java | package com.besscroft.pisces.auth.exception;
import com.besscroft.pisces.framework.common.result.AjaxResult;
import org.springframework.security.oauth2.common.exceptions.OAuth2Exception;
import org.springframework.web.bind.annotation.ControllerAdvice;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.ResponseBody;
/**
* @Description Oauth2 全局异常处理
* @Author <NAME>
* @Date 2022/2/4 16:30
*/
@ControllerAdvice
public class Oauth2ExceptionHandler {
@ResponseBody
@ExceptionHandler(value = OAuth2Exception.class)
public AjaxResult handleOauth2(OAuth2Exception e) {
return AjaxResult.error(e.getMessage());
}
}
|
vskurikhin/daybook | src/main/java/su/svn/showcase/dao/jpa/TagDaoEjb.java | /*
* This file was last modified at 2020.03.04 23:17 by <NAME>.
* This is free and unencumbered software released into the public domain.
* For more information, please refer to <http://unlicense.org>
* TagDaoEjb.java
* $Id: 54f8ba6101b97fe2f32f9652a81be8c5583f57e3 $
*/
package su.svn.showcase.dao.jpa;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import su.svn.showcase.dao.TagDao;
import su.svn.showcase.domain.Tag;
import su.svn.showcase.utils.StubEntityManager;
import javax.ejb.Stateless;
import javax.persistence.*;
import static su.svn.shared.Constants.Db.PERSISTENCE_UNIT_NAME;
/**
 * The Tag DAO implementation, exposed as a stateless session bean. All query
 * logic lives in the {@code TagDaoJpa} superclass; this subclass only wires
 * in the container-managed {@link EntityManager} and logger.
 *
 * @author <NAME>
 */
@Stateless
public class TagDaoEjb extends TagDaoJpa implements TagDao {

    private static final Logger LOGGER = LoggerFactory.getLogger(TagDaoEjb.class);

    // Injected by the container for the configured persistence unit; handed
    // to the superclass lazily via getEntityManager().
    @PersistenceContext(unitName = PERSISTENCE_UNIT_NAME)
    private EntityManager entityManager;

    public TagDaoEjb() {
        // The superclass requires an EntityManager at construction time, but
        // injection happens later, so a stub fills the gap.
        super(new StubEntityManager()); // TODO create stub proxy for EntityManager
    }

    /**
     * {@inheritDoc }
     */
    @Override
    EntityManager getEntityManager() {
        return this.entityManager;
    }

    @Override
    Logger getLogger() {
        return LOGGER;
    }

    /**
     * {@inheritDoc }
     */
    @Override
    public Class<Tag> getEClass() {
        return Tag.class;
    }
}
//EOF
|
mylxsw/GoLangStudy | design/facade/system.go | package facade
import "fmt"
// SystemA is one of the subsystems hidden behind the facade.
type SystemA struct{}

// operationA performs SystemA's share of the work.
func (a *SystemA) operationA() {
	fmt.Println("SystemA operationA")
}

// SystemB is one of the subsystems hidden behind the facade.
type SystemB struct{}

// operationB performs SystemB's share of the work.
func (b *SystemB) operationB() {
	fmt.Println("SystemB operationB")
}

// SystemC is one of the subsystems hidden behind the facade.
type SystemC struct{}

// operationC performs SystemC's share of the work.
func (c *SystemC) operationC() {
	fmt.Println("SystemC operationC")
}
|
pacman616/hybrid-fenwick-tree | test/rankselect/like_dynamic.hpp | #ifndef __TEST_RANKSELECT_LIKE_DYNAMIC_HPP__
#define __TEST_RANKSELECT_LIKE_DYNAMIC_HPP__
#include "../utils.hpp"
#include <dynamic.hpp>
// Cross-checks rank/select (ones and zeroes) of hft::Word<FixedF> against the
// reference suc_bv implementation over random data.
TEST(rankselect_like_dynamic, random)
{
    using namespace std;
    using namespace hft;

    static std::mt19937 mte;
    constexpr size_t SIZE = 1000;

    // Automatic storage instead of raw new/delete (exception-safe, no leak on
    // early exit). The original also heap-allocated an `updates` buffer that
    // was filled but never read; that dead work is removed.
    std::uint64_t bitvect[SIZE];

    std::uniform_int_distribution<std::uint64_t> dist(0, UINT64_MAX);
    for (size_t i = 0; i < SIZE; i++)
        bitvect[i] = dist(mte);

    size_t ones = 0;
    for (size_t i = 0; i < SIZE; i++)
        ones += popcount(bitvect[i]);
    size_t zeroes = 64*SIZE - ones;

    Word<FixedF> internal(bitvect, SIZE);
    suc_bv dynamic;
    // Mirror every bit into the reference bitvector.
    for (uint64_t i = 0; i < SIZE; ++i) {
        for (uint64_t j = 0; j < 64; ++j)
            dynamic.insert(64*i + j, bitvect[i] & (1ULL << j));
    }

    for (uint64_t i = 0; i < SIZE; i++)
        EXPECT_EQ(dynamic.rank(i), internal.rank(i)) << "at index: " << i;

    for (uint64_t i = 0; i < SIZE; i++)
        EXPECT_EQ(dynamic.rank(i, 0), internal.rankZero(i)) << "at index: " << i;

    for (uint64_t i = 0; i < ones; i++)
        EXPECT_EQ(dynamic.select(i, 1), internal.select(i)) << "at index: " << i;

    for (uint64_t i = 0; i < zeroes; i++)
        EXPECT_EQ(dynamic.select(i, 0), internal.selectZero(i)) << "at index: " << i;
}
#endif // __TEST_RANKSELECT_LIKE_DYNAMIC_HPP__
|
liaoqingmiao/sfgantt-gantt-chart | BASE/gantt/control/SFGanttDragZoomControl.js | /**
这是一个甘特图功能控件,本控件实现在拖动甘特图的日历层的时候,进行甘特图图表的缩放操作
@private
@extends SFGanttControl
@class
*/
/**
Gantt feature control: zooms the chart while the user drags the calendar
(map head) layer horizontally.
@private
@extends SFGanttControl
@class
*/
function SFGanttDragZoomControl()
{
}
SFGanttDragZoomControl.prototype=new window.SFGanttControl();
/**
@private
Initializes the control for the given gantt instance; every control plugin overrides this method.
@param {SFGantt} gantt
@returns {Bool} true if initialization succeeded, false otherwise
*/
SFGanttDragZoomControl.prototype.initialize=function(gantt)
{
	if(gantt.disableDragZoom || !gantt.getLayout){return false;} // feature disabled or layout API unavailable
	var container=gantt.getLayout("mapHead");
	if(!container){return false;}
	SFGlobal.setCursor(container,"col-resize"); // hint that the layer is horizontally draggable
	this.gantt=gantt;
	this.container=container;
	// Throttled drag listener (32 ms interval) driving onMove.
	this.listeners=[
		SFDragObject.setup(container,SFEvent.getCallback(this,this.onMove),{interval:32})
	];
	return true;
}
/**
@private
Fired continuously while the calendar layer is dragged; rescales the chart.
@param {Number[]} sp drag start point
@param {Number[]} lp current drag point
@param {String} type event phase ("start" on drag begin)
*/
SFGanttDragZoomControl.prototype.onMove=function(sp,lp,type)
{
	if(type=="start"){this.startScale=this.gantt.getScale();} // remember the scale at drag start
	if(lp[0]>1) // guard against division by ~zero
	{
		// New scale is proportional to the horizontal start/current ratio.
		var scale=this.startScale*sp[0]/lp[0];
		this.gantt.setScale(scale);
	}
}
window.SFGanttDragZoomControl=SFGanttDragZoomControl;
oremj/zamboni | mkt/inapp_pay/utils.py | import logging
import urlparse
import requests
import amo
log = logging.getLogger('z.inapp_pay.utils')
def send_pay_notice(notice_type, signed_notice, config, contrib,
                    notifier_task):
    """
    Send app a notification about a payment or chargeback.

    Parameters:

    **notice_type**
        constant to indicate the type of notification being sent
    **signed_notice**
        encoded JWT with request and response
    **config**
        InappConfig object that specifies notification URLs
    **contrib**
        Contribution instance for the payment in question
    **notifier_task**
        celery task object

    The *signed_notice* will be sent to the URL found in the *config*.
    If there's an error in the app's response, *notifier_task* will be
    retried up to five times.

    A tuple of (url, success, last_error) is returned.

    **url**
        Absolute URL where notification was sent
    **success**
        True if notification was successful
    **last_error**
        String to indicate the last exception message in the case of failure.
    """
    # Map the notice type to the app-configured endpoint path.
    if notice_type == amo.INAPP_NOTICE_PAY:
        uri = config.postback_url
    elif notice_type == amo.INAPP_NOTICE_CHARGEBACK:
        uri = config.chargeback_url
    else:
        raise NotImplementedError('Unknown type: %s' % notice_type)
    # Build an absolute URL on the app's own protocol and domain.
    url = urlparse.urlunparse((config.app_protocol(),
                               config.addon.parsed_app_domain.netloc, uri, '',
                               '', ''))
    exception = None
    success = False
    try:
        res = requests.post(url, signed_notice, timeout=5)
        res.raise_for_status()  # raise exception for non-200s
        res_content = res.text
    except AssertionError:
        raise  # Raise test-related exceptions.
    except Exception, exception:
        # Connection/HTTP failure: log it and hand the task back to celery
        # for a retry (bounded by the task's own max-retries setting).
        log.error('Notice for contrib %s raised exception in URL %s'
                  % (contrib.pk, url), exc_info=True)
        try:
            notifier_task.retry(exc=exception)
        except:
            # retry() itself raises (e.g. when retries are exhausted); this
            # is deliberately swallowed after logging so the caller still
            # gets the (url, success, last_error) result.
            log.exception('while retrying contrib %s notice; '
                          'notification URL: %s' % (contrib.pk, url))
    else:
        # The app acknowledges the notice by echoing the contribution ID.
        if res_content == str(contrib.pk):
            success = True
            log.debug('app config %s responded OK for contrib %s notification'
                      % (config.pk, contrib.pk))
        else:
            log.error('app config %s did not respond with contribution ID %s '
                      'for notification' % (config.pk, contrib.pk))
    if exception:
        last_error = u'%s: %s' % (exception.__class__.__name__, exception)
    else:
        last_error = ''
    return url, success, last_error
|
frank1147/vr-sandbox | src/js/car.refactor.js | <gh_stars>0
import CarCameraControls from './a-car/car/CarCameraControls';
import {
findClosestEntity, getPosition, getWorldDirection, getWorldPosition, lookAtAndOrient, playSound, setPosition,
toast
} from './utils/aframe-utils';
import {getPlayer} from './game-utils';
import {FPSCtrl} from './utils/fps-utils';
/**
 * Wires a chase-camera onto the given vehicle and returns start/stop handles
 * used when the player enters/exits the car.
 * @param {Element} player - the player entity (first child of object3D is its camera)
 * @param {Element} vehicle - an entity carrying the `simple-car` component
 * @returns {{start: Function, stop: Function}}
 */
export function createAndAttachCarCameraControls (player, vehicle) {
  // TODO check how tquery camera controls better would work with other aframe controls
  const camera = player.object3D.children[0];
  const car = vehicle.components['simple-car'];
  const carCamera = new CarCameraControls({camera: camera, car: car});

  // Debug handle on the console. TODO remove for production builds.
  window.carcamcon = carCamera;

  // Drive the camera update at 60 FPS while the player is in the car.
  const carUpdateScript = new FPSCtrl(60, () => carCamera.update()).start();

  // Remember the player's physics body so it can be restored on exit;
  // physics between player and car is disabled while "driving".
  const staticBody = player.getAttribute('static-body');

  return {
    start: function () {
      player.removeAttribute('static-body');
      carUpdateScript.start();
    },
    stop: function () {
      carUpdateScript.stop();
      // Restoring the body immediately freezes the player, hence the delay.
      setTimeout(() => player.setAttribute('static-body', staticBody), 200);
    }
  };
}
var carCamControls;
/**
 * Moves the player out of the vehicle: re-enables walking controls, stops the
 * chase camera, and places the player beside the car at eye height.
 * @param {Element} player - the player entity
 * @param {Element} vehicle - the entity carrying the `simple-car` component
 */
export
function exitVehicle (player, vehicle) {
  player.setAttribute('customizable-wasd-controls', 'enabled', true);
  player.setAttribute('look-controls', true);

  if (carCamControls) {
    carCamControls.stop();
  }

  // FIXME the rotation is currently applied to the car model not to the container
  const car = vehicle.components['simple-car'].getCarInstance();
  const carModel = car.model();

  // TODO exit on the left side of the car facing in the driving direction
  // TODO use animation :>
  const pos = getWorldPosition(carModel);
  const dir = getWorldDirection(carModel).normalize();

  // Offset two units sideways: rotate the facing direction 90° around the
  // up axis, then scale.
  const offset = dir.clone();
  const up = new THREE.Vector3(0, 1, 0);
  offset.applyAxisAngle(up, Math.PI / 2).multiplyScalar(2);

  const exitPos = pos.clone().add(offset);
  exitPos.y = 1.8; // eye height

  setPosition(player, exitPos);
}
/**
 * Puts the player into the vehicle: disables walking/look controls and
 * activates the car chase camera (created lazily on first entry).
 * @param {Element} player - the player entity
 * @param {Element} vehicle - the entity carrying the `simple-car` component
 */
export
function enterVehicle (player, vehicle) {
  player.setAttribute('customizable-wasd-controls', 'enabled', false);
  player.removeAttribute('look-controls');

  if (!carCamControls) {
    carCamControls = createAndAttachCarCameraControls(player, vehicle);
  } else {
    carCamControls.start();
  }
}
|
NUARIG/redcap2omop | spec/models/redcap2omop/device_exposure_spec.rb | <reponame>NUARIG/redcap2omop<filename>spec/models/redcap2omop/device_exposure_spec.rb
require 'rails_helper'
require 'support/shared_examples/with_next_id'
module Redcap2omop
  # Model spec for DeviceExposure: association wiring, validations, and the
  # generic accessor methods that delegate to the device_exposure_* columns.
  RSpec.describe DeviceExposure, type: :model do
    let(:device_exposure) { FactoryBot.create(:device_exposure) }
    let(:subject) { device_exposure }

    describe 'associations' do
      it { is_expected.to have_one(:redcap_source_link) }
      it { is_expected.to belong_to(:person) }
      it { is_expected.to belong_to(:provider).optional }
      it { is_expected.to belong_to(:concept) }
      it { is_expected.to belong_to(:type_concept) }
    end

    describe 'validations' do
      it { is_expected.to validate_presence_of(:device_exposure_start_date) }
    end

    describe 'methods' do
      # Each writer below is a generic alias; setting it must be reflected in
      # the underlying OMOP column.
      it 'allows to set instance_id' do
        new_value = device_exposure.device_exposure_id + 100
        device_exposure.instance_id = new_value
        expect(device_exposure.device_exposure_id).to eq new_value
      end

      it 'allows to set concept_id' do
        new_value = device_exposure.device_concept_id + 100
        device_exposure.concept_id = new_value
        expect(device_exposure.device_concept_id).to eq new_value
      end

      it 'allows to set type_concept_id' do
        new_value = device_exposure.device_type_concept_id + 100
        device_exposure.type_concept_id = new_value
        expect(device_exposure.device_type_concept_id).to eq new_value
      end

      it 'allows to set source_value' do
        device_exposure.device_source_value = '100'
        new_value = 'hello'
        device_exposure.source_value = new_value
        expect(device_exposure.device_source_value).to eq new_value
      end
    end

    include_examples 'with next_id'
  end
end
|
eryjus/century-os | modules/kernel/src/interrupts/IsrUnregister.cc | <gh_stars>10-100
//===================================================================================================================
//
// IsrUnregister.cc -- Unregister an ISR Handler
//
// Copyright (c) 2017-2020 -- <NAME>
// Licensed under "THE BEER-WARE LICENSE"
// See License.md for details.
//
// ------------------------------------------------------------------------------------------------------------------
//
// Date Tracker Version Pgmr Description
// ----------- ------- ------- ---- ---------------------------------------------------------------------------
// 2018-Jul-06 Initial 0.1.0 ADCL Initial version
// 2019-Feb-08 Initial 0.3.0 ADCL Relocated
//
//===================================================================================================================
#include "types.h"
#include "cpu.h"
#include "printf.h"
#include "interrupt.h"
//
// -- Remove an ISR handler from the handlers table; logs a warning when the
//    slot was already empty. Runs with interrupts disabled.
//    -----------------------------------------------------------------------
void IsrUnregister(uint8_t interrupt)
{
    archsize_t savedFlags = DisableInterrupts();

    if (isrHandlers[interrupt] != NULL_ISR) {
        isrHandlers[interrupt] = NULL_ISR;
    } else {
        kprintf("When unregistering interrupt %d, no handler is registered\n", interrupt);
    }

    RestoreInterrupts(savedFlags);
}
|
NextJS-projects/simple-site | src/store/app/reducers/AppReducer.js | <reponame>NextJS-projects/simple-site<gh_stars>1-10
//constants
import { updateObject } from '../../../constants/Helpers';
//action
import { SET_TEST_ACTION } from '../AppActionTypes';
// Initial slice state for the app reducer.
const initialState = {
  testString: 'Initial test',
};

// App reducer: swaps in the final test string on SET_TEST_ACTION and returns
// the current state untouched for every other action type.
const reducer = (state = initialState, action) =>
  action.type === SET_TEST_ACTION
    ? updateObject(state, { testString: 'Final test' })
    : state;

export default reducer;
|
dbo1001/Gestor-TFG-2021 | src/main/java/ubu/digit/security/LoginUbuVirtual.java | package ubu.digit.security;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import okhttp3.FormBody;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;
import org.json.JSONObject;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Clase donde se realiza el inicio de seción en el moodle de UbuVirtual con
* el usuario y contraseña obtenido en la vista del Login (LoginView)
*
* @author <NAME>
*/
public class LoginUbuVirtual {
private static final Logger LOGGER = LoggerFactory.getLogger(LoginUbuVirtual.class);
/**
* Ruta desde la cual se realizará el login al moodle
*/
private static final String HOST_LOGIN_PATH = "/login/index.php";
private static final String HTTP = "http://";
private static final String HTTPS = "https://";
private WebService webService;
private String username;
private String password;
private String host;
/**
* Constructor donde se inicializan las variables: host, username y password.
*
* @param host
* @param username
* @param password
*/
public LoginUbuVirtual(String host, String username, String password) {
webService = new WebService();
this.host = host;
this.username = username;
this.password = password;
}
/**
* Inicia sesión en el moodle de UbuVirtual con el usuario y contraseña especificados.
*
* @throws IOException si no ha podido conectarse o la contraseña es erronea
*/
public void normalLogin() throws IOException {
webService = new WebService(host, username, password);
String hostLogin = host + HOST_LOGIN_PATH;
LOGGER.info("Realizando login en el moodle");
try (Response response = Connection.getResponse(hostLogin)) {
String redirectedUrl = response.request().url().toString();
Document loginDoc = Jsoup.parse(response.body().byteStream(), null, hostLogin);
Element e = loginDoc.selectFirst("input[name=logintoken]");
String logintoken = (e == null) ? "" : e.attr("value");
RequestBody formBody = new FormBody.Builder().add("username", username)
.add("password", password)
.add("logintoken", logintoken)
.build();
String html = Connection.getResponse(new Request.Builder().url(redirectedUrl)
.post(formBody)
.build())
.body()
.string();
String sesskey = findSesskey(html);
if (sesskey != null) {
webService.setSesskey(sesskey);
}
}
}
/**
* Comprueba que la Url sea correcta.
*
* @param host
* @return url
* @throws MalformedURLException
*/
public String checkUrlServer(String host) throws MalformedURLException {
String url = convertToHttps(host);
URL httpsUrl = new URL(url);
if (checkWebsService(httpsUrl)) {
return httpsUrl.toString();
}
url = url.replaceFirst(HTTPS, HTTP);
URL httpUrl = new URL(url);
if (checkWebsService(httpUrl)) {
return httpUrl.toString();
}
throw new IllegalArgumentException("Error en checkUrlServer " + host);
}
/**
* Verifica si la respuesta obtenida al conectar con el Servicio contiene un mensaje de error.
*
* @param url
* @return boolean
* true si el mensaje obtenido contiene un error, falso si no.
*/
private boolean checkWebsService(URL url) {
try (Response response = Connection.getResponse(url + "/login/token.php")) {
JSONObject jsonObject = new JSONObject(response.body()
.string());
return jsonObject.has("error");
} catch (IOException e) {
LOGGER.info("Error checkWebsService al intentar obtener una respuesta del Servicio Web", e);
} catch (IllegalArgumentException e) {
throw new IllegalArgumentException("Error en checkWebsService", e);
}
return false;
}
/**
* Convertir la ruta en https.
*
* @param host
* @return url con el host en un determinado formato
*/
private String convertToHttps(String host) {
String url;
if (!host.matches("^(?i)https?://.*$")) {
url = HTTPS + host;
} else if (host.matches("^(?i)http://.*$")) {
url = host.replaceFirst("(?i)http://", HTTPS);
} else {
url = host;
}
return url;
}
/**
* Obtiene la Sesskey.
*
* @param html
* @return Sesskey
*/
public String findSesskey(String html) {
Pattern pattern = Pattern.compile("sesskey=(\\w+)");
Matcher m = pattern.matcher(html);
if (m.find()) {
LOGGER.info("Obtenida Sesskey");
return m.group(1);
}
LOGGER.warn("No se pudo encontrar la clave Sesskey: ", html);
return null;
}
/**
* Obtiene el servicio web.
*
* @return WebService
*/
public WebService getWebService() {
return webService;
}
/**
* Establece el servicio web.
*
* @param webService
*/
public void setWebService(WebService webService) {
this.webService = webService;
}
/**
* Obtiene el username.
*
* @return username
*/
public String getUsername() {
return username;
}
/**
* Establece el username.
*
* @param username
*/
public void setUsername(String username) {
this.username = username;
}
/**
* Obtiene el password.
*
* @return password
*/
public String getPassword() {
return password;
}
/**
 * Sets the login password.
 *
 * @param password password to use for authentication
 */
public void setPassword(String password) {
    this.password = password;
}
/**
 * Returns the server host.
 *
 * @return host
 */
public String getHost() {
    return host;
}
/**
 * Sets the server host.
 *
 * @param host host name or URL of the server
 */
public void setHost(String host) {
    this.host = host;
}
}
|
tangweikun/leetcode | src/268-missing-number/pro.js | export function missingNumber(nums) {
let missing = nums.length;
for (let i = 0; i < nums.length; i++) {
missing ^= i ^ nums[i];
}
return missing;
}
|
jonfisik/ScriptsPython | Scripts5/Script44.py | '''Elabore um programa que calcule o valr a ser pago por um porduto, considerando o seu preço normal e condição de pagamento:
- à vista dinheiro/cheque: 10% de desconto;
- à vista no cartão: 5% de desconto;
- em até 2x no cartão: preço normal;
- 3x ou mais no cartão: 20% de juros.'''
print('=+=+=+=+=+=+=+=+='*3)
titulo = str('LOJA BARATEIRA')
print('{:^51}'.format(titulo))
print('-----------------'*3)
valor = float(input('QUal o valor da comprar? R$ '))
print('-----------------'*3)
print('Formas de PAGAMENTO')
print('[1] à vista dinheiro/cheque')
print('[2] à vista cartão')
print('[3] 2x no cartão')
print('[4] 3x ou mais no cartão')
print('-----------------'*3)
pag = int(input('Qual é a opção? '))
parc = int(input('Quantas parcelas? '))
#-------------------------------------------------
if pag !=1 and pag !=2 and pag !=3 and pag !=4:
nPreco = 0
print('Opção ERRADA!')
if pag == 1:
nPreco = valor - (valor * 0.1)
print('Sua comprar será paga à vista.')
elif pag == 2:
nPreco = valor - (valor * 0.05)
print('Sua compra será paga à vista com acréscimo de R$ {}.'.format(valor * 0.05))
elif pag == 3:
nPreco = valor
print('Sua compra será parcelada em 2x de {} sem juros.'.format(nPreco/2))
elif pag == 4:
nPreco = valor + (valor * 0.2)
print('Sua compra será parcelada em {}x de {} sem juros.'.format(parc, nPreco/parc))
print('Valor de compra R${}. Valor total compra R${}.'.format(valor,nPreco))
print('=+=+=+=+=+=+=+=+='*3) |
18380460383/eshare | app/src/main/java/com/kzmen/sczxjf/imagepicker/util/BitmapUtil.java | <filename>app/src/main/java/com/kzmen/sczxjf/imagepicker/util/BitmapUtil.java
package com.kzmen.sczxjf.imagepicker.util;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.media.ExifInterface;
import android.net.Uri;
import android.provider.MediaStore;
import java.io.File;
import java.io.IOException;
/**
*
* Bitmap工具类,主要是解决拍照旋转的适配
*
* Author: nanchen
* Email: <EMAIL>
* Date: 2017-03-20 13:27
*/
public class BitmapUtil {
    private BitmapUtil() {
        // Static utility class: constructing an instance is a programming error.
        throw new UnsupportedOperationException("u can't instantiate me...");
    }
    /**
     * Reads the rotation angle recorded in a picture's EXIF metadata.
     *
     * @param path absolute path of the image file
     * @return rotation angle in degrees (0, 90, 180 or 270); 0 when the EXIF
     *         data is missing or cannot be read
     */
    public static int getBitmapDegree(String path) {
        int degree = 0;
        try {
            // Read the image at the given path and obtain its EXIF information.
            ExifInterface exifInterface = new ExifInterface(path);
            // Query the stored orientation tag.
            int orientation = exifInterface.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
            switch (orientation) {
                case ExifInterface.ORIENTATION_ROTATE_90:
                    degree = 90;
                    break;
                case ExifInterface.ORIENTATION_ROTATE_180:
                    degree = 180;
                    break;
                case ExifInterface.ORIENTATION_ROTATE_270:
                    degree = 270;
                    break;
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        return degree;
    }
    /**
     * Rotates a bitmap by the given angle.
     *
     * @param bitmap bitmap to rotate; it is recycled after the new one is built
     * @param degree rotation angle in degrees
     * @return the rotated bitmap
     */
    public static Bitmap rotateBitmapByDegree(Bitmap bitmap, int degree) {
        // Build a rotation matrix for the requested angle.
        Matrix matrix = new Matrix();
        matrix.postRotate(degree);
        // Apply the matrix to the original bitmap to produce the rotated copy.
        Bitmap newBitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
        if (!bitmap.isRecycled()) {
            bitmap.recycle();
        }
        return newBitmap;
    }
    /**
     * Returns a Uri for the image with any EXIF rotation already applied.
     *
     * @param activity context used to access the content resolver
     * @param path image file path
     * @return Uri of the upright image (a new MediaStore entry when a rotation
     *         was needed, otherwise a file Uri for the original path)
     */
    public static Uri getRotatedUri(Activity activity, String path){
        int degree = BitmapUtil.getBitmapDegree(path);
        if (degree != 0){
            Bitmap bitmap = BitmapFactory.decodeFile(path);
            Bitmap newBitmap = BitmapUtil.rotateBitmapByDegree(bitmap,degree);
            return Uri.parse(MediaStore.Images.Media.insertImage(activity.getContentResolver(),newBitmap,null,null));
        }else{
            return Uri.fromFile(new File(path));
        }
    }
    /**
     * Rotates the image stored at the given path by the given angle.
     *
     * @param path path of the image to rotate
     * @param degree rotation angle in degrees
     * @return the rotated bitmap
     */
    public static Bitmap rotateBitmapByDegree(String path, int degree) {
        Bitmap bitmap = BitmapFactory.decodeFile(path);
        return rotateBitmapByDegree(bitmap,degree);
    }
}
|
timgates42/netcdf4-python | test/tst_compoundvar.py | <filename>test/tst_compoundvar.py
import sys
import unittest
import os
import tempfile
from netCDF4 import Dataset, CompoundType
import numpy as np
from numpy.testing import assert_array_equal, assert_array_almost_equal
# test compound data types.
FILE_NAME = tempfile.NamedTemporaryFile(suffix='.nc', delete=False).name
DIM_NAME = 'phony_dim'
GROUP_NAME = 'phony_group'
VAR_NAME = 'phony_compound_var'
TYPE_NAME1 = 'cmp1'
TYPE_NAME2 = 'cmp2'
TYPE_NAME3 = 'cmp3'
TYPE_NAME4 = 'cmp4'
TYPE_NAME5 = 'cmp5'
DIM_SIZE=3
# unaligned data types (note they are nested)
dtype1=np.dtype([('i', 'i2'), ('j', 'i8')])
dtype2=np.dtype([('x', 'f4',), ('y', 'f8',(3,2))])
dtype3=np.dtype([('xx', dtype1), ('yy', dtype2)])
dtype4=np.dtype([('xxx',dtype3),('yyy','f8', (4,))])
dtype5=np.dtype([('x1', dtype1), ('y1', dtype2)])
# aligned data types -- the library is expected to hand back aligned
# equivalents of the unaligned dtypes above after a write/read round trip.
dtype1a = np.dtype({'names':['i','j'],'formats':['<i2','<i8']},align=True)
dtype2a = np.dtype({'names':['x','y'],'formats':['<f4',('<f8', (3, 2))]},align=True)
dtype3a = np.dtype({'names':['xx','yy'],'formats':[dtype1a,dtype2a]},align=True)
dtype4a = np.dtype({'names':['xxx','yyy'],'formats':[dtype3a,('f8', (4,))]},align=True)
dtype5a = np.dtype({'names':['x1','y1'],'formats':[dtype1a,dtype2a]},align=True)
# Sample record arrays written to the root group (data) and subgroup (datag).
data = np.zeros(DIM_SIZE,dtype4)
data['xxx']['xx']['i']=1
data['xxx']['xx']['j']=2
data['xxx']['yy']['x']=3
data['xxx']['yy']['y']=4
data['yyy'] = 5
datag = np.zeros(DIM_SIZE,dtype5)
datag['x1']['i']=10
datag['x1']['j']=20
datag['y1']['x']=30
datag['y1']['y']=40
class VariablesTestCase(unittest.TestCase):
    """Round-trip tests for nested compound variable types."""
    def setUp(self):
        """Create a netCDF file with nested compound types in the root group
        and a subgroup, write sample data, and sanity-check it before close."""
        self.file = FILE_NAME
        f = Dataset(self.file, 'w')
        d = f.createDimension(DIM_NAME,DIM_SIZE)
        g = f.createGroup(GROUP_NAME)
        # simple compound types.
        cmptype1 = f.createCompoundType(dtype1, TYPE_NAME1)
        cmptype2 = f.createCompoundType(dtype2, TYPE_NAME2)
        # close and reopen the file to make sure compound
        # type info read back in correctly.
        f.close()
        f = Dataset(self.file,'r+')
        g = f.groups[GROUP_NAME]
        # multiply nested compound types
        cmptype3 = f.createCompoundType(dtype3, TYPE_NAME3)
        cmptype4 = f.createCompoundType(dtype4, TYPE_NAME4)
        cmptype5 = f.createCompoundType(dtype5, TYPE_NAME5)
        v = f.createVariable(VAR_NAME,cmptype4, DIM_NAME)
        vv = g.createVariable(VAR_NAME,cmptype5, DIM_NAME)
        v[:] = data
        vv[:] = datag
        # try reading the data back before the file is closed
        dataout = v[:]
        dataoutg = vv[:]
        assert (cmptype4 == dtype4a) # data type should be aligned
        assert (dataout.dtype == dtype4a) # data type should be aligned
        assert(list(f.cmptypes.keys()) ==\
               [TYPE_NAME1,TYPE_NAME2,TYPE_NAME3,TYPE_NAME4,TYPE_NAME5])
        assert_array_equal(dataout['xxx']['xx']['i'],data['xxx']['xx']['i'])
        assert_array_equal(dataout['xxx']['xx']['j'],data['xxx']['xx']['j'])
        assert_array_almost_equal(dataout['xxx']['yy']['x'],data['xxx']['yy']['x'])
        assert_array_almost_equal(dataout['xxx']['yy']['y'],data['xxx']['yy']['y'])
        assert_array_almost_equal(dataout['yyy'],data['yyy'])
        assert_array_equal(dataoutg['x1']['i'],datag['x1']['i'])
        assert_array_equal(dataoutg['x1']['j'],datag['x1']['j'])
        assert_array_almost_equal(dataoutg['y1']['x'],datag['y1']['x'])
        assert_array_almost_equal(dataoutg['y1']['y'],datag['y1']['y'])
        f.close()
    def tearDown(self):
        # Remove the temporary files
        os.remove(self.file)
        #pass
    def runTest(self):
        """testing compound variables"""
        # Reopen read-only and verify the data written in setUp survived.
        f = Dataset(self.file, 'r')
        v = f.variables[VAR_NAME]
        g = f.groups[GROUP_NAME]
        vv = g.variables[VAR_NAME]
        dataout = v[:]
        dataoutg = vv[:]
        # make sure data type is aligned
        assert (f.cmptypes['cmp4'] == dtype4a)
        assert(list(f.cmptypes.keys()) ==\
               [TYPE_NAME1,TYPE_NAME2,TYPE_NAME3,TYPE_NAME4,TYPE_NAME5])
        assert_array_equal(dataout['xxx']['xx']['i'],data['xxx']['xx']['i'])
        assert_array_equal(dataout['xxx']['xx']['j'],data['xxx']['xx']['j'])
        assert_array_almost_equal(dataout['xxx']['yy']['x'],data['xxx']['yy']['x'])
        assert_array_almost_equal(dataout['xxx']['yy']['y'],data['xxx']['yy']['y'])
        assert_array_almost_equal(dataout['yyy'],data['yyy'])
        assert_array_equal(dataoutg['x1']['i'],datag['x1']['i'])
        assert_array_equal(dataoutg['x1']['j'],datag['x1']['j'])
        assert_array_almost_equal(dataoutg['y1']['x'],datag['y1']['x'])
        assert_array_almost_equal(dataoutg['y1']['y'],datag['y1']['y'])
        f.close()
        # issue 773: nested compound types with string members.
        f = Dataset(self.file,'w')
        dtype = np.dtype([('observation', 'i4'),
                          ('station_name','S80')])
        dtype_nest = np.dtype([('observation', 'i4'),
                               ('station_name','S80'),
                               ('nested_observation',dtype)])
        station_data_t1 = f.createCompoundType(dtype,'station_data1')
        station_data_t2 = f.createCompoundType(dtype_nest,'station_data')
        f.createDimension('station',None)
        statdat = f.createVariable('station_obs', station_data_t2, ('station',))
        assert(statdat.dtype == station_data_t2.dtype)
        datain = np.empty(2,station_data_t2.dtype_view)
        datain['observation'][:] = (123,314)
        datain['station_name'][:] = ('Boulder','New York')
        datain['nested_observation']['observation'][:] = (-999,999)
        datain['nested_observation']['station_name'][:] = ('Boston','Chicago')
        statdat[:] = datain
        f.close()
        f = Dataset(self.file)
        dataout = f['station_obs'][:]
        assert(dataout.dtype == station_data_t2.dtype_view)
        assert_array_equal(datain, dataout)
        f.close()
if __name__ == '__main__':
    from netCDF4 import getlibversion
    # NOTE(review): "version" is computed but never used; presumably kept for
    # debugging -- confirm before removing.
    version = getlibversion().split()[0]
    unittest.main()
|
shiruka/network | src/main/java/io/github/shiruka/network/packets/ConnectedPong.java | package io.github.shiruka.network.packets;
import io.github.shiruka.network.PacketBuffer;
import lombok.Getter;
import lombok.Setter;
import lombok.experimental.Accessors;
import org.jetbrains.annotations.NotNull;
/**
 * a class that represents connected pong packets.
 */
@Getter
@Setter
@Accessors(fluent = true)
public final class ConnectedPong extends FramedPacket.Base {

  /**
   * the timestamp of the sender of the ping.
   */
  public long timestamp;

  /**
   * the timestamp of the sender of the pong.
   */
  public long timestampPong;

  /**
   * ctor.
   *
   * @param timestamp the timestamp.
   * @param timestampPong the timestampPong.
   * @param reliability the reliability.
   */
  public ConnectedPong(final long timestamp, final long timestampPong, @NotNull final Reliability reliability) {
    super(reliability);
    this.timestamp = timestamp;
    this.timestampPong = timestampPong;
  }

  /**
   * ctor.
   *
   * @param timestamp the timestamp.
   * @param reliability the reliability.
   */
  public ConnectedPong(final long timestamp, @NotNull final Reliability reliability) {
    this(timestamp, System.nanoTime(), reliability);
  }

  /**
   * ctor.
   *
   * @param timestamp the timestamp.
   * @param timestampPong the timestampPong.
   */
  public ConnectedPong(final long timestamp, final long timestampPong) {
    this(timestamp, timestampPong, Reliability.UNRELIABLE);
  }

  /**
   * ctor.
   *
   * @param timestamp the timestamp.
   */
  public ConnectedPong(final long timestamp) {
    this(timestamp, Reliability.UNRELIABLE);
  }

  /**
   * ctor.
   */
  public ConnectedPong() {
    super(Reliability.UNRELIABLE);
  }

  @Override
  public void decode(@NotNull final PacketBuffer buffer) {
    this.timestamp = buffer.readLong();
    this.timestampPong = buffer.readLong();
  }

  @Override
  public void encode(@NotNull final PacketBuffer buffer) {
    // Bug fix: encode must WRITE this packet's fields to the buffer. The
    // original implementation read from the buffer (a copy of decode logic),
    // which both corrupted the packet fields and produced an empty payload.
    buffer.writeLong(this.timestamp);
    buffer.writeLong(this.timestampPong);
  }

  /**
   * obtains the round-trip time measured against this pong's ping timestamp.
   *
   * @return rtt in nanoseconds.
   */
  public long rtt() {
    return System.nanoTime() - this.timestamp;
  }
}
|
renfei-net/WinterEE | WinterEE-Core-Serve/src/main/java/com/winteree/core/dao/FilesDOMapper.java | <filename>WinterEE-Core-Serve/src/main/java/com/winteree/core/dao/FilesDOMapper.java
package com.winteree.core.dao;
import com.winteree.core.dao.entity.FilesDO;
import com.winteree.core.dao.entity.FilesDOExample;
import org.apache.ibatis.annotations.Param;
import org.springframework.stereotype.Repository;
import java.util.List;
@Repository
public interface FilesDOMapper {
    /** Counts rows matching the given example criteria. */
    long countByExample(FilesDOExample example);
    /** Deletes rows matching the given example criteria; returns rows affected. */
    int deleteByExample(FilesDOExample example);
    /** Deletes the row with the given primary key; returns rows affected. */
    int deleteByPrimaryKey(Long id);
    /** Inserts a full record (all columns); returns rows affected. */
    int insert(FilesDO record);
    /** Inserts only the non-null fields of the record; returns rows affected. */
    int insertSelective(FilesDO record);
    /** Returns all rows matching the given example criteria. */
    List<FilesDO> selectByExample(FilesDOExample example);
    /** Returns the row with the given primary key, or null if absent. */
    FilesDO selectByPrimaryKey(Long id);
    /** Updates non-null fields of rows matching the example; returns rows affected. */
    int updateByExampleSelective(@Param("record") FilesDO record, @Param("example") FilesDOExample example);
    /** Updates all columns of rows matching the example; returns rows affected. */
    int updateByExample(@Param("record") FilesDO record, @Param("example") FilesDOExample example);
    /** Updates non-null fields of the row with the record's key; returns rows affected. */
    int updateByPrimaryKeySelective(FilesDO record);
    /** Updates all columns of the row with the record's key; returns rows affected. */
    int updateByPrimaryKey(FilesDO record);
}
Mastersnes/YouLose | core/src/com/bebel/youlose/screens/menu/vitre/MenuVitre.java | package com.bebel.youlose.screens.menu.vitre;
import com.badlogic.gdx.math.MathUtils;
import com.badlogic.gdx.scenes.scene2d.actions.Actions;
import com.bebel.youlose.components.actions.FinishRunnableAction;
import com.bebel.youlose.components.refound.actors.ui.ImageActor;
import com.bebel.youlose.components.runnable.FinishRunnable;
import com.bebel.youlose.screens.menu.MenuScreen;
import com.bebel.youlose.screens.menu.MenuSubscreen;
import static com.badlogic.gdx.math.Interpolation.elastic;
import static com.badlogic.gdx.math.Interpolation.linear;
import static com.badlogic.gdx.scenes.scene2d.actions.Actions.*;
import static com.bebel.youlose.components.actions.Actions.finishRun;
/**
 * Title screen composed of a glass pane, the game's name and a scan effect.
 */
public class MenuVitre extends MenuSubscreen {
    private ImageActor led;
    private ImageActor vitre;
    private ImageActor texte;
    private MenuScan carre;
    // Shared blink animation; null until first built by clignotte().
    private FinishRunnable clignotteAction;
    // Countdown (in act() ticks) before another random blink may trigger.
    private int delayClignottement;
    private boolean stopClignotte;
    public MenuVitre(final MenuScreen parent) {
        super(parent);
    }
    @Override
    public void create() {
        putActor(led = new ImageActor("vitre/atlas:led"));
        putActor(vitre = new ImageActor("vitre/atlas:vitre"));
        putActor(texte = new ImageActor("vitre/atlas:youlose"));
        putActor(carre = new MenuScan(screen));
    }
    @Override
    public void startSubscreen() {
        // Reset all actors to their hidden starting layout above the screen.
        setVisible(false);
        led.move(led.centerX(), 206);
        led.setAlpha(0);
        vitre.move(vitre.centerX(), 0);
        texte.setPosition(led.getX(), led.getY());
        texte.setAlpha(0);
        carre.setAlpha(0);
        carre.move(588, 427);
        setY(getHeight());
        refresh(getColor());
    }
    @Override
    public void act(float delta) {
        super.act(delta);
        checkClignotte();
    }
    /**
     * Checks whether the blink animation is running
     * and randomly restarts it otherwise.
     */
    private void checkClignotte() {
        if (stopClignotte) return;
        if (clignotteAction == null) return;
        if (!isVisible()) return;
        if (led.getAlpha() < 1) return;
        if (delayClignottement-- > 0) return;
        // Roughly a 1-in-501 chance per eligible tick to blink again.
        final float randNumber = MathUtils.random(0, 500);
        if (randNumber == 0 && clignotteAction.isFinish()) {
            delayClignottement = 5000;
            addActions(finishRun(clignotte()));
        }
    }
    /**
     * Slides the pane in, then makes it blink on.
     *
     * @return action that completes once the pane is fully shown
     */
    public FinishRunnableAction appair() {
        stop();
        return finishRun(new FinishRunnable() {
            @Override
            public void run() {
                addBlockedActions(
                        Actions.moveBy(0, -getHeight(), 3, linear),
                        finishRun(clignotte()),
                        finish()
                );
            }
        });
    }
    /**
     * Makes the screen flicker before switching it on.
     *
     * @return the (lazily created, reusable) blink animation
     */
    private FinishRunnable clignotte() {
        if (clignotteAction == null) {
            clignotteAction = new FinishRunnable() {
                @Override
                public void run() {
                    led.addActions(
                            alpha(0),
                            fadeIn(1, elastic),
                            alpha(0),
                            alpha(1)
                    );
                    texte.addActions(
                            alpha(0),
                            fadeIn(1, elastic),
                            alpha(0),
                            alpha(1),
                            finish()
                    );
                    carre.addActions(
                            alpha(0),
                            fadeIn(1, elastic),
                            alpha(0),
                            alpha(1),
                            finish()
                    );
                }
            };
        } else if (clignotteAction.isFinish()) clignotteAction.restart();
        return clignotteAction;
    }
    /**
     * Switches the screen off with a flicker.
     *
     * @return the fade-out animation
     */
    private FinishRunnable extinction() {
        return new FinishRunnable() {
            @Override
            public void run() {
                led.addActions(
                        fadeOut(1, elastic)
                );
                texte.addActions(
                        fadeOut(1, elastic)
                );
                carre.addActions(
                        fadeOut(1, elastic),
                        finish()
                );
            }
        };
    }
    /**
     * Switches the screen off and slides the pane away.
     *
     * @return action that completes once the pane is fully hidden
     */
    public FinishRunnableAction disappair() {
        stop();
        stopClignotte = true;
        return finishRun(new FinishRunnable() {
            @Override
            public void run() {
                addBlockedActions(
                        finishRun(extinction()),
                        Actions.moveBy(0, getHeight(), 3, linear),
                        finish()
                );
            }
        });
    }
    @Override
    public void makeSpecificEvents() {
    }
}
|
dmareddx/client-sdk | lib/prot/to2/msg46.c | <gh_stars>1-10
/*
* Copyright 2020 Intel Corporation
* SPDX-License-Identifier: Apache 2.0
*/
/*!
* \file
* \brief This file implements msg46 of TO2 state machine.
*/
#include "sdoprot.h"
#include "sdokeyexchange.h"
#include "util.h"
/**
 * msg46() - TO2.Next_device_service_info
 * --- Message Format Begins ---
 * {
 *     "nn" : UInt8,        # index of this message, from zero upwards.
 *     "dsi": Service_info  # service info entries to add or
 *                          # append to previous ones.
 * }
 * --- Message Format Ends ---
 *
 * ps: TO2 protocol state machine context.
 * Returns 0 on success, -1 on failure.
 */
int32_t msg46(sdo_prot_t *ps)
{
	int ret = -1;

	/* Send all the key value sets in the Service Info list */
	sdow_next_block(&ps->sdow, SDO_TO2_NEXT_DEVICE_SERVICE_INFO);
	sdow_begin_object(&ps->sdow);

	/* Write the index of this message ("nn") */
	sdo_write_tag(&ps->sdow, "nn");
	sdo_writeUInt(&ps->sdow, ps->serv_req_info_num);

	/* Write the Device Service Info ("dsi") */
	sdo_write_tag(&ps->sdow, "dsi");
	sdow_begin_object(&ps->sdow);
	if (!ps->service_info)
		goto err;

	/*
	 * DSI's that need to be sent:
	 * 1. Platform DSI's (1st iteration, when nn=0)
	 * 2. Sv_info external module(s) DSI's (remaining iterations)
	 */
	if (ps->serv_req_info_num == 0) {
		/* Construct and write platform DSI's into a single json msg */
		if (!sdo_combine_platform_dsis(&ps->sdow, ps->service_info)) {
			LOG(LOG_ERROR, "Error in combining platform DSI's!\n");
			goto err;
		}
	} else {
		int mod_ret_val = 0;

		/* Sv_info external module(s) DSI's */
		sdo_sdk_si_key_value *sv_kv =
		    sdo_alloc(sizeof(sdo_sdk_si_key_value));
		if (!sv_kv)
			goto err;
		if (!sdo_construct_module_dsi(ps->dsi_info, sv_kv,
					      &mod_ret_val)) {
			LOG(LOG_DEBUG, "Sv_info: module DSI "
				       "Construction Failed\n");
			sdo_sv_key_value_free(sv_kv);
			goto err;
		}
		if (!sdo_mod_kv_write(&ps->sdow, sv_kv)) {
			sdo_sv_key_value_free(sv_kv);
			goto err;
		}
		/* Free allocated memory */
		sdo_sv_key_value_free(sv_kv);
	}
	sdow_end_object(&ps->sdow);
	sdow_end_object(&ps->sdow);

	/* Encrypt the packet */
	if (!sdo_encrypted_packet_windup(
		&ps->sdow, SDO_TO2_NEXT_DEVICE_SERVICE_INFO, ps->iv)) {
		goto err;
	}

	/* Check for DSI rounds */
	if (ps->serv_req_info_num < ps->total_dsi_rounds - 1) {
		/* Back to msg45 */
		ps->state = SDO_STATE_TO2_RCV_GET_NEXT_DEVICE_SERVICE_INFO;
	} else {
		/* Move to msg47 */
		ps->state = SDO_STATE_TO2_RCV_SETUP_DEVICE;
	}
	ret = 0; /* Mark as success */
err:
	return ret;
}
|
orion-lab/orion-server-side-starter | shared/hooks/check-permission/__tests__/index.js | <filename>shared/hooks/check-permission/__tests__/index.js
import checkPermission from '../';
describe('checkPermission', () => {
  const emptyHook = { data: {}, result: {} };
  const appComplete = {
    service: jest.fn(() => ({
      async get() {
        throw new Error('Cannot find user with that ID');
      },
    })),
    passport: {
      async getJWT() {
        return '<PASSWORD>yhak&';
      },
      async verifyJWT() {
        return {
          userId: 'iRtsfs',
          iss: 'feathers',
        };
      },
    },
  };

  // Bug fix for the whole suite: the previous try/catch pattern passed
  // vacuously whenever checkPermission did NOT throw (the catch block, and
  // therefore the expectation, was simply skipped). Using `rejects.toThrow`
  // makes each test fail when the expected rejection does not happen.

  it('throw error if hook object is empty', async () => {
    await expect(checkPermission(null, null)).rejects.toThrow('No reference to hook found');
  });

  it('throw error if permission is undefined', async () => {
    await expect(checkPermission(emptyHook, null)).rejects.toThrow('Permission is not defined');
  });

  it('throw error if reference to app in hook is not found', async () => {
    await expect(checkPermission(emptyHook, 'UPLOADING_ARTICLE_IMAGE'))
      .rejects.toThrow('No reference to app found in the hook');
  });

  it('throw error if reference to userId in hook is not found', async () => {
    await expect(checkPermission({ data: { app: appComplete, userId: null }, result: {} }, 'UPLOADING_ARTICLE_IMAGE'))
      .rejects.toThrow('UserId cannot be found on the hook data');
  });

  it('throw error if user record cannot be found on the database', async () => {
    await expect(checkPermission({ data: { app: appComplete, userId: 'diruuu' }, result: {} }, 'UPLOADING_ARTICLE_IMAGE'))
      .rejects.toThrow('Cannot find user with that ID');
  });

  it('throw error if user role level cannot be found', async () => {
    const appWithNoUserRoleKey = {
      ...appComplete,
      service: jest.fn(() => ({
        async get() {
          return {
            name: '<NAME>',
            roleLevel: null,
          };
        },
      })),
    };
    await expect(checkPermission({
      data: {
        app: appWithNoUserRoleKey,
        userId: 'diruuu',
      },
      result: {},
    }, 'UPLOADING_ARTICLE_IMAGE')).rejects.toThrow('User role level cannot be found');
  });

  it('throw error if permission cannot be found on the database', async () => {
    const appWithNoPermissionFound = {
      ...appComplete,
      service: jest.fn(name => ({
        async get() {
          if (name === 'users') {
            return {
              name: '<NAME>',
              roleLevel: 5,
            };
          }
          throw new Error('Cannot find permission with that ID');
        },
      })),
    };
    await expect(checkPermission({
      data: {
        app: appWithNoPermissionFound,
        userId: 'diruuu',
      },
      result: {},
    }, 'UPLOADING_ARTICLE_IMAGE')).rejects.toThrow('Cannot find permission with that ID');
  });

  it('throw error if permission minimum role level cannot be found', async () => {
    const appWithNoRoleLevelOnPermissionFound = {
      ...appComplete,
      service: jest.fn(name => ({
        async get() {
          if (name === 'users') {
            return {
              name: '<NAME>',
              roleLevel: 5,
            };
          }
          return {
            name: 'UPLOADING_ARTICLE_IMAGE',
          };
        },
      })),
    };
    await expect(checkPermission({
      data: {
        app: appWithNoRoleLevelOnPermissionFound,
        userId: 'diruuu',
      },
      result: {},
    }, 'UPLOADING_ARTICLE_IMAGE')).rejects.toThrow('Minimum role level for the permission cannot be found');
  });

  it('throw error if user role level higher than minimum role level required', async () => {
    const appWithInsufficientRoleLevel = {
      ...appComplete,
      service: jest.fn(name => ({
        async get() {
          if (name === 'users') {
            return {
              name: '<NAME>',
              roleLevel: 6,
            };
          }
          return {
            name: 'UPLOADING_ARTICLE_IMAGE',
            minRoleLevel: 5,
          };
        },
      })),
    };
    await expect(checkPermission({
      data: {
        app: appWithInsufficientRoleLevel,
        userId: 'diruuu',
      },
      result: {},
    }, 'UPLOADING_ARTICLE_IMAGE')).rejects.toThrow("You don't have access to this service");
  });

  it('return the same hook if permission is granted', async () => {
    const appWithPermissionGranted = {
      ...appComplete,
      service: jest.fn(name => ({
        async get() {
          if (name === 'users') {
            return {
              name: '<NAME>',
              roleLevel: 6,
            };
          }
          return {
            name: 'UPLOADING_ARTICLE_IMAGE',
            minRoleLevel: 7,
          };
        },
      })),
    };
    const hookObject = {
      data: {
        app: appWithPermissionGranted,
        userId: 'diruuu',
      },
      result: {},
    };
    // Bug fix: the original wrapped this in try/catch and asserted
    // `e.message` is null inside catch, which could never meaningfully run;
    // a granted permission must simply resolve with the same hook.
    const result = await checkPermission(hookObject, 'UPLOADING_ARTICLE_IMAGE');
    expect(result).toMatchObject(hookObject);
  });
});
|
eirTony/BelaRust | brCommon/src/libs/data/brStone/brStone.h | <gh_stars>0
#ifndef BRSTONE_H
#define BRSTONE_H
#include "brstone_global.h"
#include <brBase/ModuleInfo.h>
#include <brType/Singleton.h>
// Module-information singleton for the brStone data library.
class BRSTONESHARED_EXPORT brStone: public ModuleInfo
{
    // Presumably expands to the private ctor and instance-accessor plumbing of
    // the project singleton pattern (see brType/Singleton.h) -- confirm there.
    DECLARE_SINGLETON(brStone)
};
#endif // BRSTONE_H
|
syngenta-digital/dta-python | syngenta_digital_dta/common/publisher.py | import boto3
import simplejson as json
from syngenta_digital_dta.common import logger
def publish(**kwargs):
    """Publish ``data`` as JSON to the SNS topic ``arn``.

    Keyword Args:
        arn: SNS topic ARN (required; silently skipped when falsy).
        data: payload to JSON-serialise (required; silently skipped when falsy).
        region: optional AWS region for the SNS client.
        endpoint: optional endpoint URL (e.g. for localstack).
        attributes: optional SNS MessageAttributes mapping.

    Failures are logged as warnings and never raised (best-effort publish).
    """
    topic_arn = kwargs.get('arn')
    payload = kwargs.get('data')
    if not topic_arn or not payload:
        return
    try:
        sns_client = boto3.client(
            'sns',
            region_name=kwargs.get('region'),
            endpoint_url=kwargs.get('endpoint'),
        )
        sns_client.publish(
            TopicArn=topic_arn,
            Message=json.dumps(payload),
            MessageAttributes=kwargs.get('attributes', {}),
        )
    except Exception as e:  # best-effort: swallow and log, matching original behaviour
        logger.log(level='WARN', log={'error': 'publish_sns_error: {}'.format(e)})
|
svdpuranik/BLE_SDK6_examples | connectivity/BLE2IR/src/custom_profile/user_custs1_def.h | /**
****************************************************************************************
*
* @file user_custs1_def.h
*
* @brief Custom Server 1 (CUSTS1) profile database definitions.
*
* Copyright (c) 2016-2018 Dialog Semiconductor. All rights reserved.
*
* This software ("Software") is owned by Dialog Semiconductor.
*
* By using this Software you agree that Dialog Semiconductor retains all
* intellectual property and proprietary rights in and to this Software and any
* use, reproduction, disclosure or distribution of the Software without express
* written permission or a license agreement from Dialog Semiconductor is
* strictly prohibited. This Software is solely for use on or in conjunction
* with Dialog Semiconductor products.
*
* EXCEPT AS OTHERWISE PROVIDED IN A LICENSE AGREEMENT BETWEEN THE PARTIES, THE
* SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. EXCEPT AS OTHERWISE
* PROVIDED IN A LICENSE AGREEMENT BETWEEN THE PARTIES, IN NO EVENT SHALL
* DIALOG SEMICONDUCTOR BE LIABLE FOR ANY DIRECT, SPECIAL, INDIRECT, INCIDENTAL,
* OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF
* USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
* TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
* OF THE SOFTWARE.
*
****************************************************************************************
*/
#ifndef _USER_CUSTS1_DEF_H_
#define _USER_CUSTS1_DEF_H_

/**
 ****************************************************************************************
 * @defgroup USER_CONFIG
 * @ingroup USER
 * @brief Custom Server 1 (CUSTS1) profile database definitions.
 *
 * @{
 ****************************************************************************************
 */

/*
 * INCLUDE FILES
 ****************************************************************************************
 */

#include "attm_db_128.h"

/*
 * DEFINES
 ****************************************************************************************
 */

// Service 1 of the custom server 1
/* 128-bit UUID of the custom service. */
#define DEF_SVC1_UUID_128                  {0x2F, 0x2A, 0x93, 0xA6, 0xBD, 0xD8, 0x41, 0x52, 0xAC, 0x0B, 0x10, 0x99, 0x2E, 0xC6, 0xFE, 0xED}
/* 128-bit UUID of the keyboard-data characteristic. */
#define DEF_SVC1_KBD_DATA_UUID_128         {0x4F, 0x43, 0x31, 0x3C, 0x93, 0x92, 0x42, 0xE6, 0xA8, 0x76, 0xFA, 0x3B, 0xEF, 0xB4, 0x87, 0x5A}
/* Length (bytes) of the keyboard-data characteristic value. */
#define DEF_SVC1_KBD_DATA_CHAR_LEN         4
/* User description string for the keyboard-data characteristic. */
#define DEF_SVC1_KBD_DATA_USER_DESC        "Keyboard data"

/// Custom1 Service Data Base Characteristic enum
/// (attribute-database indices for the service's handles).
enum
{
    // Custom Service 1
    SVC1_IDX_SVC = 0,                  // service declaration
    SVC1_IDX_KBD_DATA_STATE_CHAR,      // characteristic declaration
    SVC1_IDX_KBD_DATA_STATE_VAL,       // characteristic value
    SVC1_IDX_KBD_DATA_STATE_USER_DESC, // user description descriptor
    SVC1_IDX_KBD_DATA_DATA_NTF_CFG,    // client characteristic configuration (notifications)

    CUSTS1_IDX_NB,                     // total number of attributes
};

/// @} USER_CONFIG

#endif // _USER_CUSTS1_DEF_H_
|
j-benson/advent-of-code | 2020/solvers/advent_11_test.py | import solvers.advent_11_1 as advent_11_1
import solvers.advent_11_2 as advent_11_2
from solvers.advent_11_1 import WaitingArea
# 10x10 example seat layout from the Advent of Code 2020 day 11 puzzle
# ('L' = empty seat, '.' = floor).
input = [
    'L.LL.LL.LL',
    'LLLLLLL.LL',
    'L.L.L..L..',
    'LLLL.LL.LL',
    'L.LL.LL.LL',
    'L.LLLLL.LL',
    '..L.L.....',
    'LLLLLLLLLL',
    'L.LLLLLL.L',
    'L.LLLLL.LL',
]
def test_1():
    # Part one: 37 seats end up occupied once the layout stabilises.
    assert advent_11_1.solve(input) == 37
def test_round_1():
    # After one round every empty seat with no occupied neighbours fills up.
    waiting_area = WaitingArea(input)
    waiting_area.next_round()
    assert waiting_area.layout == WaitingArea([
        '#.##.##.##',
        '#######.##',
        '#.#.#..#..',
        '####.##.##',
        '#.##.##.##',
        '#.#####.##',
        '..#.#.....',
        '##########',
        '#.######.#',
        '#.#####.##',
    ]).layout
def test_round_2():
    # Expected layout after two rounds (part one example).
    waiting_area = WaitingArea(input)
    waiting_area.next_round()
    waiting_area.next_round()
    # Bug fix: the original expected list was missing a comma after
    # '#LLL.LL.L#', so implicit string concatenation merged two rows into one
    # and the expected grid only had 9 rows. Also compare `.layout` like
    # test_round_1 does, instead of relying on WaitingArea object equality.
    assert waiting_area.layout == WaitingArea([
        '#.LL.L#.##',
        '#LLLLLL.L#',
        'L.L.L..L..',
        '#LLL.LL.L#',
        '#.LL.LL.LL',
        '#.LLLL#.##',
        '..L.L.....',
        '#LLLLLLLL#',
        '#.LLLLLL.L',
        '#.#LLLL.##',
    ]).layout
def test_2():
    # TODO: part-two solver (advent_11_2) is imported but not exercised yet.
    pass
|
akiezun/hampi | src/hampi/utils/Histogram.java | <reponame>akiezun/hampi
package hampi.utils;
import java.io.Serializable;
import java.util.*;
import java.util.Map.Entry;
/**
 * Keeps track of how many objects of different kinds are inserted into it.
 * Elements are remembered in insertion order.
 */
public final class Histogram<T> implements Serializable{
  private static final long serialVersionUID = 7401396200080635154L;

  /** Insertion-ordered mapping from element to accumulated count. */
  private final Map<T, Integer> map = new LinkedHashMap<T, Integer>();

  /** Optional display name printed as a header by the toString variants. */
  private String name;

  public Histogram(String name){
    this.name = name;
  }

  public Histogram(){
    this(null);
  }

  /** Records a single occurrence of {@code t}. */
  public void put(T t){
    put(t, 1);
  }

  public void setName(String name){
    this.name = name;
  }

  public String getName(){
    return name;
  }

  /**
   * Records {@code k} occurrences of {@code t}.
   *
   * @throws IllegalArgumentException if {@code k} is negative
   */
  public void put(T t, int k){
    if (k < 0)
      throw new IllegalArgumentException("k:" + k);
    if (!map.containsKey(t)){
      map.put(t, k);
    }else{
      map.put(t, getCount(t) + k);
    }
  }

  @Override
  public String toString(){
    return toStringSortedByKey();
  }

  /** Renders the histogram with entries ordered by descending count. */
  public String toStringSortedByNumbers(){
    return entriesToString(true, new Comparator<Map.Entry<T, Integer>>(){
      public int compare(Map.Entry<T, Integer> e1, Map.Entry<T, Integer> e2){
        Integer value1 = e1.getValue();
        Integer value2 = e2.getValue();
        return value2.intValue() - value1.intValue();
      }
    });
  }

  /**
   * Renders the histogram with entries ordered by key.
   * Note: This works only when keys are comparable.
   */
  public String toStringSortedByKey(){
    return entriesToString(true, new Comparator<Map.Entry<T, Integer>>(){
      @SuppressWarnings("unchecked")
      public int compare(Map.Entry<T, Integer> e1, Map.Entry<T, Integer> e2){
        Comparable key1 = (Comparable) e1.getKey();
        Comparable key2 = (Comparable) e2.getKey();
        return key1.compareTo(key2);
      }
    });
  }

  /** Formats all entries (count, optional percentage, key) sorted by {@code c}. */
  private String entriesToString(boolean append_percent, Comparator<Map.Entry<T, Integer>> c){
    List<Map.Entry<T, Integer>> entries = new ArrayList<Map.Entry<T, Integer>>(map.entrySet());
    Collections.sort(entries, c);
    StringBuilder sb = new StringBuilder();
    if (this.name != null){
      sb.append("Histogram:").append(this.name).append("\n");
    }
    int size = totalCount();
    sb.append("Total size:" + size).append("\n");
    for (Entry<T, Integer> e : entries){
      sb.append(Utils.rpad(e.getValue(), 9));
      if (append_percent){
        sb.append(Utils.rpad(createPercentString(size, e), 8));
      }
      sb.append(e.getKey() + "\n");
    }
    return sb.toString();
  }

  /** Formats the entry's share of {@code size} as " [NN%]". */
  private String createPercentString(int size, Map.Entry<T, Integer> e){
    float size_f = size;//this assignment for conversion
    float count = getCount(e.getKey());//this assignment for conversion
    int percent = Math.round((count / size_f) * 100);
    return " [" + percent + "%]";
  }

  /** Returns the recorded count for {@code t}, or 0 if never inserted. */
  public int getCount(T t){
    if (map.containsKey(t))
      return map.get(t);
    else
      return 0;
  }

  /** Removes all recorded elements. */
  public void clear(){
    map.clear();
  }

  /**
   * Returns the total count of all elements. May overflow for very large
   * histograms.
   */
  public int totalCount(){
    // Consistency fix: this loop previously existed twice (here and in a
    // private getSize()); totalCount() is now the single implementation.
    int total = 0;
    for (T key : keySet()){
      total += getCount(key);
    }
    return total;
  }

  public Set<T> keySet(){
    return map.keySet();
  }

  /**
   * Returns the maximum count, or Integer.MIN_VALUE for an empty histogram.
   */
  public int getMaxCount(){
    int max = Integer.MIN_VALUE;
    for (T key : keySet()){
      max = Math.max(max, getCount(key));
    }
    return max;
  }

  /**
   * Returns the mean count per distinct key of {@code h}
   * (NaN when the histogram is empty).
   */
  public static double mean(Histogram<Integer> h){
    int sum = sum(h);
    int size = h.totalCount();
    return (sum * 1.0) / size;
  }

  /**
   * Returns the sum of the numbers. May overflow.
   */
  private static int sum(Histogram<Integer> h){
    int res = 0;
    for (int elem : h.keySet()){
      res += elem * h.getCount(elem);
    }
    return res;
  }
}
|
spoke-d/thermionic | internal/cluster/membership/package_test.go | <filename>internal/cluster/membership/package_test.go
package membership_test
import (
"context"
"fmt"
"net"
"reflect"
"runtime"
"testing"
"time"
dqlite "github.com/CanonicalLtd/go-dqlite"
rafthttp "github.com/CanonicalLtd/raft-http"
"github.com/golang/mock/gomock"
"github.com/spoke-d/thermionic/internal/cert"
"github.com/spoke-d/thermionic/internal/cluster"
"github.com/spoke-d/thermionic/internal/cluster/membership"
"github.com/spoke-d/thermionic/internal/db"
querycluster "github.com/spoke-d/thermionic/internal/db/cluster"
"github.com/spoke-d/thermionic/internal/state"
)
//go:generate mockgen -package mocks -destination mocks/db_mock.go github.com/spoke-d/thermionic/internal/db/database DB,Tx,Rows
//go:generate mockgen -package mocks -destination mocks/filesystem_mock.go github.com/spoke-d/thermionic/internal/fsys FileSystem
//go:generate mockgen -package mocks -destination mocks/config_mock.go github.com/spoke-d/thermionic/internal/cluster/membership NodeConfigProvider,ClusterConfigProvider
//go:generate mockgen -package mocks -destination mocks/gateway_mock.go github.com/spoke-d/thermionic/internal/cluster/membership Gateway
//go:generate mockgen -package mocks -destination mocks/state_mock.go github.com/spoke-d/thermionic/internal/cluster/membership State
//go:generate mockgen -package mocks -destination mocks/cluster_mock.go github.com/spoke-d/thermionic/internal/cluster/membership Cluster
//go:generate mockgen -package mocks -destination mocks/raft_mock.go github.com/spoke-d/thermionic/internal/cluster/membership RaftInstance
//go:generate mockgen -package mocks -destination mocks/node_mock.go github.com/spoke-d/thermionic/internal/cluster/membership Node
//go:generate mockgen -package mocks -destination mocks/dialer_mock.go github.com/spoke-d/thermionic/internal/cluster/membership DialerProvider
//go:generate mockgen -package mocks -destination mocks/membership_mock.go github.com/spoke-d/thermionic/internal/cluster/membership Membership
//go:generate mockgen -package mocks -destination mocks/os_mock.go github.com/spoke-d/thermionic/internal/cluster/membership OS
//go:generate mockgen -package mocks -destination mocks/query_mock.go github.com/spoke-d/thermionic/internal/db Query,QueryCluster,QueryNode,Transaction
//go:generate mockgen -package mocks -destination mocks/clock_mock.go github.com/spoke-d/thermionic/internal/clock Clock
//go:generate mockgen -package mocks -destination mocks/sleeper_mock.go github.com/spoke-d/thermionic/internal/clock Sleeper
//go:generate mockgen -package mocks -destination mocks/raftmembership_mock.go github.com/CanonicalLtd/raft-membership Changer
//go:generate mockgen -package mocks -destination mocks/result_mock.go database/sql Result
// setup creates two gomock controllers, runs fn inside a "run" subtest,
// and guarantees both controllers are Finish()ed when the test returns.
func setup(t *testing.T, fn func(*testing.T, *gomock.Controller, *gomock.Controller)) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()
	ctrl2 := gomock.NewController(t)
	defer ctrl2.Finish()
	t.Run("run", func(t *testing.T) {
		fn(t, ctrl, ctrl2)
	})
}
// clusterTransactionMatcher matches gomock arguments of type
// func(*db.ClusterTx) error. It invokes the callback with the held
// transaction and records the error the callback returned.
type clusterTransactionMatcher struct {
	tx  *db.ClusterTx
	err error
}

// ClusterTransactionMatcher builds a matcher around the given cluster
// transaction.
func ClusterTransactionMatcher(tx *db.ClusterTx) *clusterTransactionMatcher {
	return &clusterTransactionMatcher{
		tx: tx,
	}
}

// Matches runs the candidate callback against the stored transaction;
// it reports true only when the argument has the expected type.
func (m *clusterTransactionMatcher) Matches(x interface{}) bool {
	fn, ok := x.(func(*db.ClusterTx) error)
	if ok {
		m.err = fn(m.tx)
		return true
	}
	return false
}

// Err returns the error captured by the most recent Matches call.
func (m *clusterTransactionMatcher) Err() error {
	return m.err
}

func (*clusterTransactionMatcher) String() string {
	return "ClusterTransaction"
}
// nodeTransactionMatcher matches gomock arguments of type
// func(*db.NodeTx) error. It invokes the callback with the held
// transaction and records the error the callback returned.
type nodeTransactionMatcher struct {
	tx  *db.NodeTx
	err error
}

// NodeTransactionMatcher builds a matcher around the given node
// transaction.
func NodeTransactionMatcher(tx *db.NodeTx) *nodeTransactionMatcher {
	return &nodeTransactionMatcher{
		tx: tx,
	}
}

// Matches runs the candidate callback against the stored transaction;
// it reports true only when the argument has the expected type.
func (m *nodeTransactionMatcher) Matches(x interface{}) bool {
	fn, ok := x.(func(*db.NodeTx) error)
	if ok {
		m.err = fn(m.tx)
		return true
	}
	return false
}

// Err returns the error captured by the most recent Matches call.
func (m *nodeTransactionMatcher) Err() error {
	return m.err
}

func (*nodeTransactionMatcher) String() string {
	return "NodeTransaction"
}
// nodeInfoDestSelectObjectsMatcher matches the destination callback
// handed to query select helpers and, via reflection, fills the
// pointers it yields with the expected []db.NodeInfo values.
type nodeInfoDestSelectObjectsMatcher struct {
	x []db.NodeInfo
}

// NodeInfoDestSelectObjectsMatcher builds a matcher that injects v into
// the select destination callback.
func NodeInfoDestSelectObjectsMatcher(v []db.NodeInfo) gomock.Matcher {
	return nodeInfoDestSelectObjectsMatcher{
		x: v,
	}
}

// Matches calls the candidate (expected to behave like a row-index ->
// destination-slice function) once per expected row and writes each
// NodeInfo field into the 7 destination slots, in the order set below.
// Shape mismatches panic so the test fails loudly.
func (m nodeInfoDestSelectObjectsMatcher) Matches(x interface{}) bool {
	ref := reflect.ValueOf(x)
	i := 0
	for _, v := range m.x {
		values := ref.Call([]reflect.Value{
			reflect.ValueOf(i),
		})
		if num := len(values); num != 1 {
			panic(fmt.Sprintf("expected 1 values got %d", num))
		}
		slice := values[0]
		if num := slice.Len(); num != 7 {
			panic(fmt.Sprintf("expected 7 values got %d", num))
		}
		slice.Index(0).Elem().Elem().SetInt(v.ID)
		slice.Index(1).Elem().Elem().SetString(v.Name)
		slice.Index(2).Elem().Elem().SetString(v.Address)
		slice.Index(3).Elem().Elem().SetString(v.Description)
		slice.Index(4).Elem().Elem().SetInt(int64(v.Schema))
		slice.Index(5).Elem().Elem().SetInt(int64(v.APIExtensions))
		slice.Index(6).Elem().Elem().Set(reflect.ValueOf(v.Heartbeat))
		i++
	}
	return true
}

func (m nodeInfoDestSelectObjectsMatcher) String() string {
	return fmt.Sprintf("%v", m.x)
}
// nodeDestSelectObjectsMatcher matches the destination callback handed
// to query select helpers and, via reflection, fills the pointers it
// yields with the expected []db.RaftNode values.
type nodeDestSelectObjectsMatcher struct {
	x []db.RaftNode
}

// NodeDestSelectObjectsMatcher builds a matcher that injects v into the
// select destination callback.
func NodeDestSelectObjectsMatcher(v []db.RaftNode) gomock.Matcher {
	return nodeDestSelectObjectsMatcher{
		x: v,
	}
}

// Matches calls the candidate once per expected row and writes the two
// RaftNode fields (ID, Address) into the destination slots. Shape
// mismatches panic so the test fails loudly.
func (m nodeDestSelectObjectsMatcher) Matches(x interface{}) bool {
	ref := reflect.ValueOf(x)
	i := 0
	for _, v := range m.x {
		values := ref.Call([]reflect.Value{
			reflect.ValueOf(i),
		})
		if num := len(values); num != 1 {
			panic(fmt.Sprintf("expected 1 values got %d", num))
		}
		slice := values[0]
		if num := slice.Len(); num != 2 {
			panic(fmt.Sprintf("expected 2 values got %d", num))
		}
		slice.Index(0).Elem().Elem().SetInt(v.ID)
		slice.Index(1).Elem().Elem().SetString(v.Address)
		i++
	}
	return true
}

func (m nodeDestSelectObjectsMatcher) String() string {
	return fmt.Sprintf("%v", m.x)
}
// dialMatcher compares raft HTTP dial functions by the symbol name of
// the underlying function, since func values cannot be compared with ==.
type dialMatcher struct {
	x rafthttp.Dial
}

// DialMatcher wraps the expected dial function in a gomock matcher.
func DialMatcher(x rafthttp.Dial) gomock.Matcher {
	return dialMatcher{x}
}

// Matches reports whether the candidate is a dial function (either the
// rafthttp.Dial named type or the equivalent raw func signature) whose
// runtime symbol name equals that of the expected function.
func (m dialMatcher) Matches(x interface{}) bool {
	check := func(x, y rafthttp.Dial) bool {
		a := runtime.FuncForPC(reflect.ValueOf(x).Pointer()).Name()
		b := runtime.FuncForPC(reflect.ValueOf(y).Pointer()).Name()
		return a == b
	}
	if y, ok := x.(rafthttp.Dial); ok {
		return check(m.x, y)
	} else if y, ok := x.(func(string, time.Duration) (net.Conn, error)); ok {
		return check(m.x, y)
	}
	return false
}

func (m dialMatcher) String() string {
	return fmt.Sprintf("%v", m.x)
}
// operationDestSelectObjectsMatcher matches the destination callback
// handed to query select helpers and, via reflection, fills the
// pointers it yields with the expected []db.Operation values.
type operationDestSelectObjectsMatcher struct {
	x []db.Operation
}

// OperationDestSelectObjectsMatcher builds a matcher that injects v
// into the select destination callback.
func OperationDestSelectObjectsMatcher(v []db.Operation) gomock.Matcher {
	return operationDestSelectObjectsMatcher{
		x: v,
	}
}

// Matches calls the candidate once per expected row and writes the four
// Operation fields (ID, UUID, NodeAddress, Type) into the destination
// slots. Shape mismatches panic so the test fails loudly.
func (m operationDestSelectObjectsMatcher) Matches(x interface{}) bool {
	ref := reflect.ValueOf(x)
	i := 0
	for _, v := range m.x {
		values := ref.Call([]reflect.Value{
			reflect.ValueOf(i),
		})
		if num := len(values); num != 1 {
			panic(fmt.Sprintf("expected 1 values got %d", num))
		}
		slice := values[0]
		if num := slice.Len(); num != 4 {
			panic(fmt.Sprintf("expected 4 values got %d", num))
		}
		slice.Index(0).Elem().Elem().SetInt(v.ID)
		slice.Index(1).Elem().Elem().SetString(v.UUID)
		slice.Index(2).Elem().Elem().SetString(v.NodeAddress)
		slice.Index(3).Elem().Elem().SetString(string(v.Type))
		i++
	}
	return true
}

func (m operationDestSelectObjectsMatcher) String() string {
	return fmt.Sprintf("%v", m.x)
}
// membershipStateShim adapts *state.State to the interface the
// membership package expects; every method delegates directly.
type membershipStateShim struct {
	state *state.State
}

// makeMembershipStateShim wraps the given state in the shim.
func makeMembershipStateShim(state *state.State) membershipStateShim {
	return membershipStateShim{
		state: state,
	}
}

func (s membershipStateShim) Node() membership.Node {
	return s.state.Node()
}

func (s membershipStateShim) Cluster() membership.Cluster {
	return s.state.Cluster()
}

func (s membershipStateShim) OS() membership.OS {
	return s.state.OS()
}
// membershipGatewayShim adapts *cluster.Gateway to the interface the
// membership package expects; every method delegates directly.
type membershipGatewayShim struct {
	gateway *cluster.Gateway
}

// makeMembershipGatewayShim wraps the given gateway in the shim.
func makeMembershipGatewayShim(gateway *cluster.Gateway) membershipGatewayShim {
	return membershipGatewayShim{
		gateway: gateway,
	}
}

func (s membershipGatewayShim) Init(certInfo *cert.Info) error {
	return s.gateway.Init(certInfo)
}

func (s membershipGatewayShim) Shutdown() error {
	return s.gateway.Shutdown()
}

func (s membershipGatewayShim) WaitLeadership() error {
	return s.gateway.WaitLeadership()
}

func (s membershipGatewayShim) RaftNodes() ([]db.RaftNode, error) {
	return s.gateway.RaftNodes()
}

func (s membershipGatewayShim) Raft() membership.RaftInstance {
	return s.gateway.Raft()
}

func (s membershipGatewayShim) DB() membership.Node {
	return s.gateway.DB()
}

func (s membershipGatewayShim) IsDatabaseNode() bool {
	return s.gateway.IsDatabaseNode()
}

func (s membershipGatewayShim) Cert() *cert.Info {
	return s.gateway.Cert()
}

func (s membershipGatewayShim) Reset(certInfo *cert.Info) error {
	return s.gateway.Reset(certInfo)
}

func (s membershipGatewayShim) DialFunc() dqlite.DialFunc {
	return s.gateway.DialFunc()
}

func (s membershipGatewayShim) ServerStore() querycluster.ServerStore {
	return s.gateway.ServerStore()
}

func (s membershipGatewayShim) Context() context.Context {
	return s.gateway.Context()
}
|
maachang/quina | project/src/main/java/quina/component/annotation/ResponseSwitch.java | package quina.component.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import quina.annotation.Switch;
/**
 * Annotation configuring how an HttpResponse is returned.
 *
 * Applies response-handling settings to a component that implements
 * the quina.component.Component interface.
 * Example:
 *
 * @ResponseSwitch(gzip=Switch.On, cache=Switch.Off, cors=Switch.On)
 * public class JsonGetSync implements RESTfulGetSync {
 *   public Object get(Request req, SyncResponse res, Params params) {
 *     return new ResultJson("params", params);
 *   }
 * }
 *
 * With this configuration the response is gzip-compressed, sent with
 * caching disabled, and allows cross-domain (CORS) access.
 */
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
public @interface ResponseSwitch {
    /**
     * Gzip compression mode.
     */
    public Switch gzip() default Switch.None;

    /**
     * Response cache mode.
     */
    public Switch cache() default Switch.None;

    /**
     * Cross-domain (CORS) permission mode.
     */
    public Switch cors() default Switch.None;
}
|
litlpoet/beliefbox | src/algorithms/DiscreteBanditPolicy.h | /* -*- Mode: C++; -*- */
/* VER: $Id: Policy.h,v 1.8 2006/10/23 08:33:24 olethros Exp cdimitrakakis $*/
// copyright (c) 2006-2007 by <NAME>
// <<EMAIL>>
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
#ifndef DISCRETE_BANDIT_POLICY_H
#define DISCRETE_BANDIT_POLICY_H
#include <cmath>
#include <cstdio>
#include <cstdlib>
#include <vector>
#include "ActionValueEstimate.h"
#include "PFActionValueEstimate.h"
#include "SampleEstimator.h"
/** A general policy.

    Abstract base class for discrete bandit policies: a policy selects
    actions and is fed back (action, reward) observations.
 */
class DiscreteBanditPolicy {
 public:
  /// Destructor
  virtual ~DiscreteBanditPolicy() {}
  /// Select an action according to this policy
  virtual int SelectAction() = 0;
  /// Reset the policy to its initial state.
  virtual void Reset() = 0;
  /// Record that action a produced reward r.
  virtual void Observe(int a, real r) = 0;
};

/** An epsilon-greedy policy.
 */
class EpsilonGreedyPolicy : public DiscreteBanditPolicy {
 public:
  int n_actions;                   ///< number of available actions
  real epsilon;                    ///< exploration parameter
  ActionValueEstimate* estimator;  ///< action-value estimator
  EpsilonGreedyPolicy(int n_actions, real epsilon,
                      ActionValueEstimate* estimator);
  virtual void Reset();
  virtual void Observe(int a, real r);
  virtual ~EpsilonGreedyPolicy();
  virtual int SelectAction();
};
/** An optimal policy.
    This policy is optimistically optimal.
 */
class PopulationOptimalPolicy : public DiscreteBanditPolicy {
 public:
  int n_actions;                   ///< number of available actions
  ActionValueEstimate* estimator;  ///< action-value estimator
  real gamma;                      ///< discount factor
  int n_samples;                   ///< number of samples drawn
  PopulationOptimalPolicy(int n_actions, ActionValueEstimate* estimator,
                          real gamma, int n_samples);
  virtual void Reset();
  virtual void Observe(int a, real r);
  virtual ~PopulationOptimalPolicy();
  virtual int SelectAction();
};

/** A sampling optimal policy.
    This policy is asymptotically optimal.
 */
class PopulationSamplePolicy : public DiscreteBanditPolicy {
 public:
  int n_actions;                   ///< number of available actions
  ActionValueEstimate* estimator;  ///< action-value estimator
  real gamma;                      ///< discount factor
  PopulationSamplePolicy(int n_actions, ActionValueEstimate* estimator,
                         real gamma);
  virtual void Reset();
  virtual void Observe(int a, real r);
  virtual ~PopulationSamplePolicy();
  virtual int SelectAction();
};
/** A naive version of the E3 algorithm.

    We have an MDP \f$M\f$ with states \f$\{1, .., N\}\f$ and actions
    \f$\{a_i\}_{i=1}^k\f$.

    - \f$P_M^a(ij) \geq 0\f$, transitions from \f$i\f$ to \f$j\f$.
    - \f$R_(i), R_{max} \geq R_M(i) \geq 0, Var_M(i) \leq Var_{max}\f$

    I disagree with the discussion that there is no 'unambiguously' better policy.

    - T-path in M: a sequence p of T+1 states,
    \f$p = \{i_1, i_2, \cdots, i_{T+1}\}\f$
    - Return along p \f$U_M(p) = (1/T)(R_{i_1}+ \ldots +R_{i_T})\f$
    - T-step return from state i \f$U_M^\pi(p) = \sum_p P_M^\pi[p]U_m(p)\f$,
    where the sum is over all T-paths p in M that start at i.
    - \f$U_M^\pi(i) = \lim_{T \to \infty} U_M^\pi(i,T)\f$. Since we
    are in the unichain, we are independent of i, so we just write
    \f$U_M^\pi\f$ (but this is not the case for discounting).
    - Optimal T-step return \f$U_M^*(i,T) = \max_\pi \{U_M^\pi(i,T)\}\f$.
    Also \f$U_M^*(i) = \lim_{T \to \infty} U_M^*(i,T)\f$.
    - The maximum possible T-step return is \f$R_{max}\f$.
    - The \f$\epsilon\f$-return mixing time of \f$\pi\f$ is the smallest \f$T\f$
    such that for all \f$T' \geq T, |U_M^\pi(i,T') - U_M^\pi| \leq \epsilon\f$ for
    all \f$i\f$.

    Theorem 1: There exists an algorithm \f$A\f$, taking inputs
    \f$\epsilon, \delta, N, T, opt(\Pi_M^{T,\epsilon})\f$, such that if
    the total number of actions and computation time taken by A exceeds a
    polynomial in \f$1/\epsilon, 1/\delta, N, T, R_{max}\f$, then with
    probability at least \f$1-\delta\f$, the total undiscounted return of \f$A\f$
    will exceed \f$opt(\Pi_M^{T,\epsilon}) - \epsilon\f$.
 */
class NaiveE3Policy : public DiscreteBanditPolicy {
 public:
  int n_actions;                     ///< number of available actions
  real epsilon;                      ///< accuracy parameter
  real gamma;                        ///< discount factor
  real T;                            ///< horizon
  ActionValueE3Estimate* estimator;  ///< E3-specific estimator
  /// Create a new e-greedy policy
  NaiveE3Policy(int n_actions, real epsilon, real gamma);
  virtual void Reset();
  virtual void Observe(int a, real r);
  virtual ~NaiveE3Policy();
  virtual int SelectAction();
};
/** A VPI policy for population estimates.
    This policy is optimal according to the VPI criterion.
    It is specialised to a population estimate.
 */
class PopulationVPIPolicy : public DiscreteBanditPolicy {
 public:
  int n_actions;                  ///< number of available actions
  PopulationEstimate* estimator;  ///< population action-value estimator
  real gamma;                     ///< discount factor
  PopulationVPIPolicy(int n_actions, PopulationEstimate* estimator, real gamma);
  virtual void Reset();
  virtual void Observe(int a, real r);
  virtual ~PopulationVPIPolicy();
  virtual int SelectAction();
};

/** A VPI policy.
    This policy is optimal according to the VPI criterion.
    It is not specialised for a particular estimate.
 */
class VPIPolicy : public DiscreteBanditPolicy {
 public:
  int n_actions;                   ///< number of available actions
  ActionValueEstimate* estimator;  ///< generic action-value estimator
  real gamma;                      ///< discount factor
  int n_samples;                   ///< number of samples drawn
  VPIPolicy(int n_actions, ActionValueEstimate* estimator, real gamma,
            int n_samples);
  virtual void Reset();
  virtual void Observe(int a, real r);
  virtual ~VPIPolicy();
  virtual int SelectAction();
};
/** A VPI policy for particle filter estimates.
    This policy is optimal according to the VPI criterion.
    It is specialised for a particle filter estimate.
 */
class PFVPIPolicy : public DiscreteBanditPolicy {
 public:
  int n_actions;                     ///< number of available actions
  PFActionValueEstimate* estimator;  ///< particle-filter estimator
  real gamma;                        ///< discount factor
  int n_samples;                     ///< number of samples drawn
  PFVPIPolicy(int n_actions, PFActionValueEstimate* estimator, real gamma,
              int n_samples);
  /// Reset
  virtual void Reset();
  virtual void Observe(int a, real r);
  virtual ~PFVPIPolicy();
  virtual int SelectAction();
};

/** A policy backed by a counting Bernoulli estimate.
 */
class OptimalInfinitePolicy : public DiscreteBanditPolicy {
 public:
  int n_actions;                      ///< number of available actions
  CountingBernoulliEstimate* estimate;  ///< Bernoulli count estimator
  OptimalInfinitePolicy(int n_actions);
  virtual void Reset();
  virtual void Observe(int a, real r);
  virtual ~OptimalInfinitePolicy();
  virtual int SelectAction();
};
#endif
|
joaotux/pdv | src/main/java/net/originmobi/pdv/model/NotaFiscal.java | <filename>src/main/java/net/originmobi/pdv/model/NotaFiscal.java
package net.originmobi.pdv.model;
import java.io.Serializable;
import java.sql.Date;
import java.sql.Time;
import java.util.List;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
import javax.persistence.Table;
import org.springframework.format.annotation.DateTimeFormat;
import net.originmobi.pdv.enumerado.notafiscal.NotaFiscalTipo;
@Entity
@Table(name = "nota_fiscal")
public class NotaFiscal implements Serializable {

    private static final long serialVersionUID = 1L;

    // Surrogate primary key.
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Long codigo;

    // Invoice identification.
    private Long numero;
    private int modelo;
    private int serie;

    @Enumerated(EnumType.ORDINAL)
    private NotaFiscalTipo tipo;

    private String chave_acesso;
    private String natureza_operacao;
    private String situacao;
    private int tipo_emissao;
    private int tipo_impressao;

    // Check digit of the access key.
    @Column(name = "cdv")
    private Long dv;

    // value 0 - NF-e issued with the taxpayer's own application
    @Column(name = "procemi")
    private int procEmis;

    @Column(name = "verproc")
    private String verProc;

    // Issue / dispatch / registration timestamps.
    @DateTimeFormat(pattern = "dd/MM/yyyy")
    private Date data_emissao;

    @DateTimeFormat(pattern = "dd/MM/yyyy")
    private Date data_saida;

    @DateTimeFormat(pattern = "hh:mm:ss")
    private Time hora_saida;

    @DateTimeFormat(pattern = "dd/MM/yyyy")
    private Date data_cadastro;

    // Relationships: issuer, recipient, freight, purpose, totals, items.
    @ManyToOne
    @JoinColumn(name = "emissor_codigo")
    private Empresa emissor;

    @ManyToOne
    @JoinColumn(name = "destinatario_codigo")
    private Pessoa destinatario;

    @ManyToOne
    @JoinColumn(name = "frete_tipo_codigo")
    private FreteTipo freteTipo;

    @ManyToOne
    @JoinColumn(name = "finalidade_codigo")
    private NotaFiscalFinalidade finalidade;

    @OneToOne
    @JoinColumn(name = "totais_codigo")
    private NotaFiscalTotais totais;

    @OneToMany(mappedBy = "notaFiscal")
    private List<NotaFiscalItem> itens;

    private int tipo_ambiente;

    /** No-arg constructor required by JPA. */
    public NotaFiscal() {
        super();
    }

    /** Convenience constructor populating the main invoice fields. */
    public NotaFiscal(Long numeroNota, int modelo, NotaFiscalTipo tipoNota, String natureza, int serie, Empresa emissor,
            Pessoa destinatario, int tipoEmissao, String verProc, FreteTipo tipoFrete, NotaFiscalFinalidade finalidade,
            NotaFiscalTotais totais, int tipo_ambiente, Date cadastro) {
        this.numero = numeroNota;
        this.modelo = modelo;
        this.tipo = tipoNota;
        this.natureza_operacao = natureza;
        this.serie = serie;
        this.emissor = emissor;
        this.destinatario = destinatario;
        this.tipo_emissao = tipoEmissao;
        this.verProc = verProc;
        this.freteTipo = tipoFrete;
        this.finalidade = finalidade;
        this.totais = totais;
        this.tipo_ambiente = tipo_ambiente;
        this.data_cadastro = cadastro;
    }

    // --- Plain accessors below; no additional logic. ---

    public Long getCodigo() {
        return codigo;
    }

    public void setCodigo(Long codigo) {
        this.codigo = codigo;
    }

    public Long getNumero() {
        return numero;
    }

    public void setNumero(Long numero) {
        this.numero = numero;
    }

    public int getModelo() {
        return modelo;
    }

    public void setModelo(int modelo) {
        this.modelo = modelo;
    }

    public int getSerie() {
        return serie;
    }

    public void setSerie(int serie) {
        this.serie = serie;
    }

    public NotaFiscalTipo getTipo() {
        return tipo;
    }

    public void setTipo(NotaFiscalTipo tipo) {
        this.tipo = tipo;
    }

    public String getChave_acesso() {
        return chave_acesso;
    }

    public void setChave_acesso(String chave_acesso) {
        this.chave_acesso = chave_acesso;
    }

    public String getNatureza_operacao() {
        return natureza_operacao;
    }

    public void setNatureza_operacao(String natureza_operacao) {
        this.natureza_operacao = natureza_operacao;
    }

    public String getSituacao() {
        return situacao;
    }

    public void setSituacao(String situacao) {
        this.situacao = situacao;
    }

    public int getTipo_emissao() {
        return tipo_emissao;
    }

    public void setTipo_emissao(int tipo_emissao) {
        this.tipo_emissao = tipo_emissao;
    }

    public int getTipo_impressao() {
        return tipo_impressao;
    }

    public void setTipo_impressao(int tipo_impressao) {
        this.tipo_impressao = tipo_impressao;
    }

    public Long getDv() {
        return dv;
    }

    public void setDv(Long dv) {
        this.dv = dv;
    }

    public int getProcEmis() {
        return procEmis;
    }

    public void setProcEmis(int procEmis) {
        this.procEmis = procEmis;
    }

    public String getVerProc() {
        return verProc;
    }

    public void setVerProc(String verProc) {
        this.verProc = verProc;
    }

    public Date getData_emissao() {
        return data_emissao;
    }

    public void setData_emissao(Date data_emissao) {
        this.data_emissao = data_emissao;
    }

    public Date getData_saida() {
        return data_saida;
    }

    public void setData_saida(Date data_saida) {
        this.data_saida = data_saida;
    }

    public Time getHora_saida() {
        return hora_saida;
    }

    public void setHora_saida(Time hora_saida) {
        this.hora_saida = hora_saida;
    }

    public Date getData_cadastro() {
        return data_cadastro;
    }

    public void setData_cadastro(Date data_cadastro) {
        this.data_cadastro = data_cadastro;
    }

    public Empresa getEmissor() {
        return emissor;
    }

    public void setEmissor(Empresa emissor) {
        this.emissor = emissor;
    }

    public Pessoa getDestinatario() {
        return destinatario;
    }

    public void setDestinatario(Pessoa destinatario) {
        this.destinatario = destinatario;
    }

    public FreteTipo getFreteTipo() {
        return freteTipo;
    }

    public void setFreteTipo(FreteTipo freteTipo) {
        this.freteTipo = freteTipo;
    }

    public NotaFiscalFinalidade getFinalidade() {
        return finalidade;
    }

    public void setFinalidade(NotaFiscalFinalidade finalidade) {
        this.finalidade = finalidade;
    }

    public NotaFiscalTotais getTotais() {
        return totais;
    }

    public void setTotais(NotaFiscalTotais totais) {
        this.totais = totais;
    }

    public List<NotaFiscalItem> getItens() {
        return itens;
    }

    public void setItens(List<NotaFiscalItem> itens) {
        this.itens = itens;
    }

    public int getTipo_ambiente() {
        return tipo_ambiente;
    }

    public void setTipo_ambiente(int tipo_ambiente) {
        this.tipo_ambiente = tipo_ambiente;
    }
}
|
jasonLiu001/invest-service | src/main/java/cn/lands/liuwang/investservice/model/query/QueryListBeforeTime.java | package cn.lands.liuwang.investservice.model.query;
/**
 * List-query model carrying a "before time" boundary in addition to the
 * base list parameters.
 */
public class QueryListBeforeTime extends QueryListBase {
    // Time boundary as a string; format is decided by callers.
    private String beforeTimeStr;

    public String getBeforeTimeStr() {
        return beforeTimeStr;
    }

    public void setBeforeTimeStr(String beforeTimeStr) {
        this.beforeTimeStr = beforeTimeStr;
    }
}
|
sushovande/quizdrum | controller/commonapi_test.go | <reponame>sushovande/quizdrum
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package controller
import (
"bytes"
"net/http/httptest"
"quizdrum/model"
"quizdrum/view"
"testing"
)
// TestGuestLogin exercises the guest-login handler against an
// in-memory database and verifies that the response body is "1"
// (the id assigned to the first guest user).
func TestGuestLogin(t *testing.T) {
	var p model.Persistence
	p.Initialize(":memory:", "oauth_client_fake_id")
	var v view.View
	v.Initialize()
	c := Controller{
		P: &p, V: &v,
	}

	// POST with an empty body.
	var by []byte
	a := bytes.NewReader(by)
	req := httptest.NewRequest("POST", "/api/common/guest-login", a)
	resp := httptest.NewRecorder()
	c.HandleGuestLogin(resp, req)
	if r := resp.Body.String(); r != "1" {
		t.Errorf("unexpected user id response. want %v, got %v", "1", r)
	}
}
|
la-moore/scarlab-icons | react/outline/git-fork.js | <gh_stars>1-10
import * as React from "react"
function SvgComponent(props) {
return <svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="scarlab scarlab-git-fork" {...props}>
<path d="M6 3C7.65685 3 9 4.34315 9 6C9 7.65685 7.65685 9 6 9C4.34315 9 3 7.65685 3 6C3 4.34315 4.34315 3 6 3Z" />
<path d="M18 3C19.6569 3 21 4.34315 21 6C21 7.65685 19.6569 9 18 9C16.3431 9 15 7.65685 15 6C15 4.34315 16.3431 3 18 3Z" />
<path d="M12 15C13.6569 15 15 16.3431 15 18C15 19.6569 13.6569 21 12 21C10.3431 21 9 19.6569 9 18C9 16.3431 10.3431 15 12 15Z" />
<path d="M6.01221 9C6.11299 11.4506 6.87561 12 9.65202 12H14.348C17.1244 12 17.887 11.4506 17.9878 9" />
<path d="M12 15V12" />
</svg>
}
export default SvgComponent
|
faturita/mne-python | mne/include/qt/QtQuick/qquickitemgrabresult.h | <gh_stars>1-10
/****************************************************************************
**
** Copyright (C) 2014 Jolla Ltd, author: <<EMAIL>>
** Contact: http://www.qt.io/licensing/
**
** This file is part of the QtQuick module of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see http://www.qt.io/terms-conditions. For further
** information use the contact form at http://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** As a special exception, The Qt Company gives you certain additional
** rights. These rights are described in The Qt Company LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef QQUICKITEMGRABRESULT_H
#define QQUICKITEMGRABRESULT_H
#include <QtCore/QObject>
#include <QtCore/QSize>
#include <QtCore/QUrl>
#include <QtGui/QImage>
#include <QtQml/QJSValue>
#include <QtQuick/qtquickglobal.h>
QT_BEGIN_NAMESPACE
class QImage;
class QQuickItemGrabResultPrivate;

/*
 * Holds the result of grabbing a QQuickItem into an image. Only
 * QQuickItem can construct it (private constructor, friend class);
 * the grabbed frame is exposed as a QImage and a URL, and the ready()
 * signal fires once the internal setup()/render() slots have produced
 * the result.
 */
class Q_QUICK_EXPORT QQuickItemGrabResult : public QObject
{
    Q_OBJECT
    Q_DECLARE_PRIVATE(QQuickItemGrabResult)
    Q_PROPERTY(QImage image READ image CONSTANT)
    Q_PROPERTY(QUrl url READ url CONSTANT)
public:
    QImage image() const;
    QUrl url() const;

    // Writes the grabbed image to the given file; returns success.
    Q_INVOKABLE bool saveToFile(const QString &fileName);

protected:
    bool event(QEvent *);

Q_SIGNALS:
    void ready();

private Q_SLOTS:
    void setup();
    void render();

private:
    friend class QQuickItem;
    QQuickItemGrabResult(QObject *parent = Q_NULLPTR);
};
QT_END_NAMESPACE
#endif
|
ver13/ava | pkg/registry/endpoint.go | package registry
// Endpoint describes a single service endpoint: its name, the request
// and response value descriptions, and arbitrary string metadata.
type Endpoint struct {
	Name     string            `json:"name"`
	Request  *Value            `json:"request"`
	Response *Value            `json:"response"`
	Metadata map[string]string `json:"metadata"`
}
|
madanagopaltcomcast/pxCore | examples/pxScene2d/external/libnode-v0.12.7/deps/v8/src/conversions.cc | <reponame>madanagopaltcomcast/pxCore
// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <limits.h>
#include <stdarg.h>
#include <cmath>
#include "src/v8.h"
#include "src/assert-scope.h"
#include "src/conversions-inl.h"
#include "src/conversions.h"
#include "src/dtoa.h"
#include "src/factory.h"
#include "src/list-inl.h"
#include "src/strtod.h"
#include "src/utils.h"
#ifndef _STLP_VENDOR_CSTD
// STLPort doesn't import fpclassify into the std namespace.
using std::fpclassify;
#endif
namespace v8 {
namespace internal {
namespace {

// C++-style iterator adaptor for StringCharacterStream
// (unlike C++ iterators the end-marker has different type).
class StringCharacterStreamIterator {
 public:
  class EndMarker {};

  explicit StringCharacterStreamIterator(StringCharacterStream* stream);

  uint16_t operator*() const;
  void operator++();
  bool operator==(EndMarker const&) const { return end_; }
  bool operator!=(EndMarker const& m) const { return !end_; }

 private:
  StringCharacterStream* const stream_;
  uint16_t current_;  // code unit under the cursor (valid unless end_)
  bool end_;          // true once the stream is exhausted
};

// The constructor primes current_ by advancing once.
StringCharacterStreamIterator::StringCharacterStreamIterator(
    StringCharacterStream* stream) : stream_(stream) {
  ++(*this);
}

uint16_t StringCharacterStreamIterator::operator*() const {
  return current_;
}

// Advance to the next code unit; set end_ when no more input remains.
void StringCharacterStreamIterator::operator++() {
  end_ = !stream_->HasMore();
  if (!end_) {
    current_ = stream_->GetNext();
  }
}
}  // End anonymous namespace.
// Converts a NUL-terminated C string to a double; empty_string_val is
// returned for the empty input.
double StringToDouble(UnicodeCache* unicode_cache,
                      const char* str, int flags, double empty_string_val) {
  // We cast to const uint8_t* here to avoid instantiating the
  // InternalStringToDouble() template for const char* as well.
  const uint8_t* start = reinterpret_cast<const uint8_t*>(str);
  const uint8_t* end = start + StrLength(str);
  return InternalStringToDouble(unicode_cache, start, end, flags,
                                empty_string_val);
}


// One-byte (Latin-1) vector overload.
double StringToDouble(UnicodeCache* unicode_cache,
                      Vector<const uint8_t> str,
                      int flags,
                      double empty_string_val) {
  // We cast to const uint8_t* here to avoid instantiating the
  // InternalStringToDouble() template for const char* as well.
  const uint8_t* start = reinterpret_cast<const uint8_t*>(str.start());
  const uint8_t* end = start + str.length();
  return InternalStringToDouble(unicode_cache, start, end, flags,
                                empty_string_val);
}


// Two-byte (UTF-16) vector overload.
double StringToDouble(UnicodeCache* unicode_cache,
                      Vector<const uc16> str,
                      int flags,
                      double empty_string_val) {
  const uc16* end = str.start() + str.length();
  return InternalStringToDouble(unicode_cache, str.start(), end, flags,
                                empty_string_val);
}
// Converts a string into an integer, interpreting digits in the given
// radix. One-byte (Latin-1) overload.
double StringToInt(UnicodeCache* unicode_cache,
                   Vector<const uint8_t> vector,
                   int radix) {
  return InternalStringToInt(
      unicode_cache, vector.start(), vector.start() + vector.length(), radix);
}


// Two-byte (UTF-16) overload.
double StringToInt(UnicodeCache* unicode_cache,
                   Vector<const uc16> vector,
                   int radix) {
  return InternalStringToInt(
      unicode_cache, vector.start(), vector.start() + vector.length(), radix);
}
// Converts v to its decimal string form following the formatting rules
// of ECMA-262 section 9.8.1, writing into the caller-supplied buffer.
// NaN/Infinity/0 yield static strings; otherwise the shortest digit
// string from DoubleToAscii is laid out as fixed, fractional, or
// exponential notation depending on the decimal point position.
const char* DoubleToCString(double v, Vector<char> buffer) {
  switch (fpclassify(v)) {
    case FP_NAN: return "NaN";
    case FP_INFINITE: return (v < 0.0 ? "-Infinity" : "Infinity");
    case FP_ZERO: return "0";
    default: {
      SimpleStringBuilder builder(buffer.start(), buffer.length());
      int decimal_point;
      int sign;
      const int kV8DtoaBufferCapacity = kBase10MaximalLength + 1;
      char decimal_rep[kV8DtoaBufferCapacity];
      int length;

      DoubleToAscii(v, DTOA_SHORTEST, 0,
                    Vector<char>(decimal_rep, kV8DtoaBufferCapacity),
                    &sign, &length, &decimal_point);

      if (sign) builder.AddCharacter('-');

      if (length <= decimal_point && decimal_point <= 21) {
        // ECMA-262 section 9.8.1 step 6.
        builder.AddString(decimal_rep);
        builder.AddPadding('0', decimal_point - length);

      } else if (0 < decimal_point && decimal_point <= 21) {
        // ECMA-262 section 9.8.1 step 7.
        builder.AddSubstring(decimal_rep, decimal_point);
        builder.AddCharacter('.');
        builder.AddString(decimal_rep + decimal_point);

      } else if (decimal_point <= 0 && decimal_point > -6) {
        // ECMA-262 section 9.8.1 step 8.
        builder.AddString("0.");
        builder.AddPadding('0', -decimal_point);
        builder.AddString(decimal_rep);

      } else {
        // ECMA-262 section 9.8.1 step 9 and 10 combined.
        builder.AddCharacter(decimal_rep[0]);
        if (length != 1) {
          builder.AddCharacter('.');
          builder.AddString(decimal_rep + 1);
        }
        builder.AddCharacter('e');
        builder.AddCharacter((decimal_point >= 0) ? '+' : '-');
        int exponent = decimal_point - 1;
        if (exponent < 0) exponent = -exponent;
        builder.AddDecimalInteger(exponent);
      }
      return builder.Finalize();
    }
  }
}
// Converts n to decimal, writing into the tail of buffer and returning
// a pointer to the first character of the result.
const char* IntToCString(int n, Vector<char> buffer) {
  bool negative = false;
  if (n < 0) {
    // We must not negate the most negative int.
    if (n == kMinInt) return DoubleToCString(n, buffer);
    negative = true;
    n = -n;
  }
  // Build the string backwards from the least significant digit.
  int i = buffer.length();
  buffer[--i] = '\0';
  do {
    buffer[--i] = '0' + (n % 10);
    n /= 10;
  } while (n);
  if (negative) buffer[--i] = '-';
  return buffer.start() + i;
}
// Converts value to a fixed-notation string with f digits after the
// decimal point (Number.prototype.toFixed semantics). Values with more
// than 21 digits before the point fall back to DoubleToCString. The
// returned buffer is heap-allocated; the caller owns it.
char* DoubleToFixedCString(double value, int f) {
  const int kMaxDigitsBeforePoint = 21;
  const double kFirstNonFixed = 1e21;
  const int kMaxDigitsAfterPoint = 20;
  DCHECK(f >= 0);
  DCHECK(f <= kMaxDigitsAfterPoint);

  bool negative = false;
  double abs_value = value;
  if (value < 0) {
    abs_value = -value;
    negative = true;
  }

  // If abs_value has more than kMaxDigitsBeforePoint digits before the point
  // use the non-fixed conversion routine.
  if (abs_value >= kFirstNonFixed) {
    char arr[100];
    Vector<char> buffer(arr, ARRAY_SIZE(arr));
    return StrDup(DoubleToCString(value, buffer));
  }

  // Find a sufficiently precise decimal representation of n.
  int decimal_point;
  int sign;
  // Add space for the '\0' byte.
  const int kDecimalRepCapacity =
      kMaxDigitsBeforePoint + kMaxDigitsAfterPoint + 1;
  char decimal_rep[kDecimalRepCapacity];
  int decimal_rep_length;
  DoubleToAscii(value, DTOA_FIXED, f,
                Vector<char>(decimal_rep, kDecimalRepCapacity),
                &sign, &decimal_rep_length, &decimal_point);

  // Create a representation that is padded with zeros if needed.
  int zero_prefix_length = 0;
  int zero_postfix_length = 0;

  if (decimal_point <= 0) {
    zero_prefix_length = -decimal_point + 1;
    decimal_point = 1;
  }

  if (zero_prefix_length + decimal_rep_length < decimal_point + f) {
    zero_postfix_length = decimal_point + f - decimal_rep_length -
                          zero_prefix_length;
  }

  unsigned rep_length =
      zero_prefix_length + decimal_rep_length + zero_postfix_length;
  SimpleStringBuilder rep_builder(rep_length + 1);
  rep_builder.AddPadding('0', zero_prefix_length);
  rep_builder.AddString(decimal_rep);
  rep_builder.AddPadding('0', zero_postfix_length);
  char* rep = rep_builder.Finalize();

  // Create the result string by appending a minus and putting in a
  // decimal point if needed.
  unsigned result_size = decimal_point + f + 2;
  SimpleStringBuilder builder(result_size + 1);
  if (negative) builder.AddCharacter('-');
  builder.AddSubstring(rep, decimal_point);
  if (f > 0) {
    builder.AddCharacter('.');
    builder.AddSubstring(rep + decimal_point, f);
  }
  DeleteArray(rep);
  return builder.Finalize();
}
// Builds a string of the form "[-]d[.ddd]e±e" from the significant digits
// in |decimal_rep| and the decimal |exponent|. |significant_digits| is the
// total digit count to emit; shorter digit strings are zero-padded. The
// caller owns the returned heap string.
static char* CreateExponentialRepresentation(char* decimal_rep,
                                             int exponent,
                                             bool negative,
                                             int significant_digits) {
  bool negative_exponent = false;
  if (exponent < 0) {
    negative_exponent = true;
    exponent = -exponent;
  }

  // Leave room in the result for appending a minus, for a period, the
  // letter 'e', a minus or a plus depending on the exponent, and a
  // three digit exponent.
  unsigned result_size = significant_digits + 7;
  SimpleStringBuilder builder(result_size + 1);

  if (negative) builder.AddCharacter('-');
  // First digit, then (if more than one digit was requested) the period,
  // the remaining digits, and zero padding up to significant_digits.
  builder.AddCharacter(decimal_rep[0]);
  if (significant_digits != 1) {
    builder.AddCharacter('.');
    builder.AddString(decimal_rep + 1);
    int rep_length = StrLength(decimal_rep);
    builder.AddPadding('0', significant_digits - rep_length);
  }

  builder.AddCharacter('e');
  builder.AddCharacter(negative_exponent ? '-' : '+');
  builder.AddDecimalInteger(exponent);
  return builder.Finalize();
}
// Converts |value| to exponential notation with |f| digits after the decimal
// point (ECMA-262 Number.prototype.toExponential). f == -1 signals that the
// JavaScript argument was undefined, in which case the shortest digit string
// that round-trips is used and f is derived from its length. The caller owns
// the returned heap string.
char* DoubleToExponentialCString(double value, int f) {
  const int kMaxDigitsAfterPoint = 20;
  // f might be -1 to signal that f was undefined in JavaScript.
  DCHECK(f >= -1 && f <= kMaxDigitsAfterPoint);

  bool negative = false;
  if (value < 0) {
    value = -value;
    negative = true;
  }

  // Find a sufficiently precise decimal representation of n.
  int decimal_point;
  int sign;
  // f corresponds to the digits after the point. There is always one digit
  // before the point. The number of requested_digits equals hence f + 1.
  // And we have to add one character for the null-terminator.
  const int kV8DtoaBufferCapacity = kMaxDigitsAfterPoint + 1 + 1;
  // Make sure that the buffer is big enough, even if we fall back to the
  // shortest representation (which happens when f equals -1).
  DCHECK(kBase10MaximalLength <= kMaxDigitsAfterPoint + 1);
  char decimal_rep[kV8DtoaBufferCapacity];
  int decimal_rep_length;

  if (f == -1) {
    DoubleToAscii(value, DTOA_SHORTEST, 0,
                  Vector<char>(decimal_rep, kV8DtoaBufferCapacity),
                  &sign, &decimal_rep_length, &decimal_point);
    // Derive the fraction-digit count from the shortest representation.
    f = decimal_rep_length - 1;
  } else {
    DoubleToAscii(value, DTOA_PRECISION, f + 1,
                  Vector<char>(decimal_rep, kV8DtoaBufferCapacity),
                  &sign, &decimal_rep_length, &decimal_point);
  }
  DCHECK(decimal_rep_length > 0);
  DCHECK(decimal_rep_length <= f + 1);

  int exponent = decimal_point - 1;
  char* result =
      CreateExponentialRepresentation(decimal_rep, exponent, negative, f+1);
  return result;
}
// Converts |value| to a string with |p| significant digits (ECMA-262
// Number.prototype.toPrecision). Exponential notation is used when the
// exponent lies outside [-6, p); otherwise fixed notation. The caller owns
// the returned heap string.
char* DoubleToPrecisionCString(double value, int p) {
  const int kMinimalDigits = 1;
  const int kMaximalDigits = 21;
  DCHECK(p >= kMinimalDigits && p <= kMaximalDigits);
  USE(kMinimalDigits);

  bool negative = false;
  if (value < 0) {
    value = -value;
    negative = true;
  }

  // Find a sufficiently precise decimal representation of n.
  int decimal_point;
  int sign;
  // Add one for the terminating null character.
  const int kV8DtoaBufferCapacity = kMaximalDigits + 1;
  char decimal_rep[kV8DtoaBufferCapacity];
  int decimal_rep_length;

  DoubleToAscii(value, DTOA_PRECISION, p,
                Vector<char>(decimal_rep, kV8DtoaBufferCapacity),
                &sign, &decimal_rep_length, &decimal_point);
  DCHECK(decimal_rep_length <= p);

  int exponent = decimal_point - 1;
  char* result = NULL;

  if (exponent < -6 || exponent >= p) {
    result =
        CreateExponentialRepresentation(decimal_rep, exponent, negative, p);
  } else {
    // Use fixed notation.
    //
    // Leave room in the result for appending a minus, a period and in
    // the case where decimal_point is not positive for a zero in
    // front of the period.
    unsigned result_size = (decimal_point <= 0)
        ? -decimal_point + p + 3
        : p + 2;
    SimpleStringBuilder builder(result_size + 1);
    if (negative) builder.AddCharacter('-');
    if (decimal_point <= 0) {
      // "0.00ddd" form: a leading zero, |-decimal_point| zeros, the digits,
      // then trailing zeros up to p significant digits.
      builder.AddString("0.");
      builder.AddPadding('0', -decimal_point);
      builder.AddString(decimal_rep);
      builder.AddPadding('0', p - decimal_rep_length);
    } else {
      // "ddd.ddd" form: integer digits (zero-padded if the digit string is
      // shorter than decimal_point), then fractional digits padded so the
      // total significant-digit count is p.
      const int m = Min(decimal_rep_length, decimal_point);
      builder.AddSubstring(decimal_rep, m);
      builder.AddPadding('0', decimal_point - decimal_rep_length);
      if (decimal_point < p) {
        builder.AddCharacter('.');
        // |extra| counts the non-digit characters already in the builder
        // (the optional minus sign and the period), so that
        // builder.position() - extra is the number of digits emitted.
        const int extra = negative ? 2 : 1;
        if (decimal_rep_length > decimal_point) {
          const int len = StrLength(decimal_rep + decimal_point);
          const int n = Min(len, p - (builder.position() - extra));
          builder.AddSubstring(decimal_rep + decimal_point, n);
        }
        builder.AddPadding('0', extra + (p - builder.position()));
      }
    }
    result = builder.Finalize();
  }
  return result;
}
// Converts |value| to a string in the given |radix| (2..36), as used by
// Number.prototype.toString(radix). Digits come from "0-9a-z". The caller
// owns the returned heap string.
char* DoubleToRadixCString(double value, int radix) {
  DCHECK(radix >= 2 && radix <= 36);

  // Character array used for conversion.
  static const char chars[] = "0123456789abcdefghijklmnopqrstuvwxyz";

  // Buffer for the integer part of the result. 1024 chars is enough
  // for max integer value in radix 2. We need room for a sign too.
  static const int kBufferSize = 1100;
  char integer_buffer[kBufferSize];
  integer_buffer[kBufferSize - 1] = '\0';

  // Buffer for the decimal part of the result. We only generate up
  // to kBufferSize - 1 chars for the decimal part.
  char decimal_buffer[kBufferSize];
  decimal_buffer[kBufferSize - 1] = '\0';

  // Make sure the value is positive.
  bool is_negative = value < 0.0;
  if (is_negative) value = -value;

  // Get the integer part and the decimal part.
  double integer_part = std::floor(value);
  double decimal_part = value - integer_part;

  // Convert the integer part starting from the back. Always generate
  // at least one digit.
  int integer_pos = kBufferSize - 2;
  do {
    double remainder = std::fmod(integer_part, radix);
    integer_buffer[integer_pos--] = chars[static_cast<int>(remainder)];
    integer_part -= remainder;
    integer_part /= radix;
  } while (integer_part >= 1.0);

  // Sanity check.
  DCHECK(integer_pos > 0);
  // Add sign if needed.
  if (is_negative) integer_buffer[integer_pos--] = '-';

  // Convert the decimal part. Repeatedly multiply by the radix to
  // generate the next char. Never generate more than kBufferSize - 1
  // chars.
  //
  // TODO(1093998): We will often generate a full decimal_buffer of
  // chars because hitting zero will often not happen. The right
  // solution would be to continue until the string representation can
  // be read back and yield the original value. To implement this
  // efficiently, we probably have to modify dtoa.
  int decimal_pos = 0;
  while ((decimal_part > 0.0) && (decimal_pos < kBufferSize - 1)) {
    decimal_part *= radix;
    decimal_buffer[decimal_pos++] =
        chars[static_cast<int>(std::floor(decimal_part))];
    decimal_part -= std::floor(decimal_part);
  }
  decimal_buffer[decimal_pos] = '\0';

  // Compute the result size.
  int integer_part_size = kBufferSize - 2 - integer_pos;
  // Make room for zero termination.
  unsigned result_size = integer_part_size + decimal_pos;
  // If the number has a decimal part, leave room for the period.
  if (decimal_pos > 0) result_size++;
  // Allocate result and fill in the parts.
  SimpleStringBuilder builder(result_size + 1);
  builder.AddSubstring(integer_buffer + integer_pos + 1, integer_part_size);
  if (decimal_pos > 0) builder.AddCharacter('.');
  builder.AddSubstring(decimal_buffer, decimal_pos);
  return builder.Finalize();
}
// Flattens |string| and dispatches to the StringToDouble overload matching
// its physical encoding (one-byte vs. UC16). ECMA-262 section 15.1.2.3: the
// empty string converts to |empty_string_val|, which both overloads honor.
double StringToDouble(UnicodeCache* unicode_cache,
                      String* string,
                      int flags,
                      double empty_string_val) {
  DisallowHeapAllocation no_gc;
  String::FlatContent content = string->GetFlatContent();
  // Two-byte strings take the UC16 path; everything else is one-byte.
  if (!content.IsAscii()) {
    return StringToDouble(
        unicode_cache, content.ToUC16Vector(), flags, empty_string_val);
  }
  return StringToDouble(
      unicode_cache, content.ToOneByteVector(), flags, empty_string_val);
}
} } // namespace v8::internal
|
mtorromeo/vue-patternfly4 | packages/core/src/components/EmptyState/EmptyStateIcon.js | <reponame>mtorromeo/vue-patternfly4
import styles from '@patternfly/react-styles/css/components/EmptyState/empty-state';
import { h, mergeProps } from 'vue';
import { findChildrenVNodes } from '../../util.ts';
/**
 * PatternFly empty-state icon.
 *
 * Without the `container` prop, decorates the slotted vnodes in place with
 * the empty-state-icon class and `aria-hidden`; with it, wraps the slots in
 * a styled <div> instead.
 */
export default {
  name: 'PfEmptyStateIcon',
  props: {
    // When true, render a wrapping <div> rather than decorating slot vnodes.
    container: Boolean,
  },
  render() {
    if (!this.container) {
      if (!this.$slots.default) {
        return [];
      }
      const children = findChildrenVNodes(this.$slots.default());
      // Merge the icon class/aria attributes onto each child vnode.
      // (Fix: the original callback declared an unused `index` parameter.)
      return children.map((vnode) => {
        vnode.props = mergeProps({
          class: styles.emptyStateIcon,
          'aria-hidden': 'true',
        }, vnode.props);
        return vnode;
      });
    }
    return h('div', { class: styles.emptyStateIcon }, this.$slots);
  },
};
|
FizzCorp/fizz-platform | components/chat-group/src/main/java/io/fizz/chat/group/application/query/UserGroupDTO.java | package io.fizz.chat.group.application.query;
import io.fizz.chat.group.domain.group.GroupId;
import io.fizz.chat.group.domain.group.GroupMember;
import io.fizz.chataccess.domain.role.RoleName;
import io.fizz.common.domain.UserId;
import java.util.Date;
/**
 * Immutable read-model DTO describing a single user's membership in a chat
 * group: the membership state, the user's role, the id of the last message
 * the user has read (may be {@code null}), and when the membership was
 * created.
 */
public class UserGroupDTO {
    private final UserId userId;
    private final GroupId groupId;
    private final GroupMember.State state;
    private final RoleName role;
    private final Long lastReadMessageId;
    private final Date createdOn;

    /**
     * @param userId            member's user id
     * @param groupId           id of the group the user belongs to
     * @param state             membership state of the user in the group
     * @param role              role assigned to the user within the group
     * @param lastReadMessageId id of the last message read, or {@code null}
     * @param createdOn         membership creation timestamp
     */
    public UserGroupDTO(final UserId userId,
                        final GroupId groupId,
                        final GroupMember.State state,
                        final RoleName role,
                        final Long lastReadMessageId,
                        final Date createdOn) {
        this.userId = userId;
        this.groupId = groupId;
        this.state = state;
        this.role = role;
        this.lastReadMessageId = lastReadMessageId;
        this.createdOn = createdOn;
    }

    public UserId userId() {
        return userId;
    }

    public GroupId groupId() {
        return groupId;
    }

    public GroupMember.State state() {
        return state;
    }

    public RoleName role() {
        return role;
    }

    public Long lastReadMessageId() {
        return lastReadMessageId;
    }

    public Date createdOn() {
        return createdOn;
    }
}
|
pmacik/fabric8-test | booster_bdd/features/environment.py | """Module with code to be run before and after certain events during the testing."""
import os
from src.support import helpers
from subprocess import check_output, CalledProcessError, STDOUT
def before_all(_context):
    """Perform the setup before the first event.

    If no user session exists yet, reads OSIO_USERNAME and OSIO_PASSWORD
    from the environment, logs the user in via the helpers module, and
    stores the credentials on the behave context for later steps.
    """
    if not helpers.is_user_logged_in():
        username = os.getenv("OSIO_USERNAME")
        # Fix: the original read os.getenv("<PASSWORD>"), a redaction
        # placeholder that can never name a real environment variable;
        # OSIO_PASSWORD matches the OSIO_USERNAME convention above.
        password = os.getenv("OSIO_PASSWORD")
        assert username is not None
        assert password is not None
        assert username != ""
        assert password != ""
        # Fix: "Loggin" -> "Logging" in the progress message.
        print("Logging user {} in...".format(username))
        helpers.login_user(username, password)
        _context.username = username
        _context.password = password
|
givedirectly/Google-Partnership | cypress/integration/unit_tests/update_test.js | import * as ErrorLib from '../../../docs/error.js';
import * as LayerUtil from '../../../docs/layer_util.js';
import * as Run from '../../../docs/run.js';
import {setUpScoreComputationParameters} from '../../../docs/update.js';
import {loadScriptsBeforeForUnitTests} from '../../support/script_loader.js';
// Cypress unit tests for the score-computation parameter form built by
// setUpScoreComputationParameters: input rendering with/without a damage
// asset, weight-label syncing, and validation behavior on "update".
describe('Unit test for updates.js', () => {
  loadScriptsBeforeForUnitTests('firebase', 'jquery');
  // Minimal google.maps stub so code under test can clear map listeners.
  before(() => global.google = {maps: {event: {clearListeners: () => {}}}});
  // creates the form div and stubs the relevant document methods.
  beforeEach(() => {
    // Stub and alias the collaborators asserted on below.
    cy.wrap(cy.stub(Run, 'createAndDisplayJoinedData')).as(
        'createAndDisplayJoinedDataStub');
    cy.wrap(cy.stub(LayerUtil, 'removeScoreLayer')).as('removeScoreLayerStub');
    cy.visit('test_utils/empty.html');
    cy.document().then((doc) => {
      const formDiv = doc.createElement('div');
      formDiv.id = 'form-div';
      doc.body.appendChild(formDiv);
      // Redirect global-document lookups to the visited page's document.
      cy.stub(document, 'getElementById')
          .callsFake((id) => doc.getElementById(id));
    });
  });

  it('does not have a damage asset', () => {
    cy.wrap(setUpScoreComputationParameters(
        Promise.resolve({damageAssetPath: null}), {}));
    // Only the poverty inputs are rendered when there is no damage asset.
    cy.get('input').should('have.length', 2);
    cy.get('[id="poverty threshold"]').clear().type('0.05');
    // NOTE(review): '0.05' is typed but 0.3 is asserted below — confirm
    // whether the typed value is intentionally discarded here.
    cy.get('#update').click().then(() => assertDisplayCalledWith(1, 0.3, 0));
    cy.get('#error').should('have.text', '');
  });

  it('does have a damage asset', () => {
    setUpDamageAsset();
    // Damage asset adds the damage threshold/weight inputs.
    cy.get('input').should('have.length', 4);
  });

  it('updates weight labels', () => {
    setUpDamageAsset();
    // Moving the poverty-weight slider should keep the two labels
    // complementary (they sum to 1).
    cy.get('[id="poverty weight"]').invoke('val', 0.01).trigger('input');
    cy.get('#poverty-weight-value').should('have.text', '0.01');
    cy.get('#damage-weight-value').should('have.text', '0.99');
  });

  it('updates toggles', () => {
    setUpDamageAsset();
    cy.get('[id="poverty weight"]').invoke('val', 0.01).trigger('input');
    cy.get('[id="damage threshold"]').invoke('val', 0.24).trigger('input');
    cy.get('#update').click().then(
        () => assertDisplayCalledWith(0.01, 0.3, 0.24));
    cy.get('#error').should('have.text', '');
  });

  it('updates toggles with errors', () => {
    // Out-of-range threshold must surface an error and skip recomputation.
    const errorStub =
        cy.stub(ErrorLib, 'showError')
            .withArgs('poverty threshold must be between 0.00 and 1.00');
    setUpDamageAsset();
    cy.get('[id="poverty threshold"]').clear().type('-0.01').blur();
    cy.get('#update').click();
    cy.get('@createAndDisplayJoinedDataStub')
        .then(
            (createAndDisplayJoinedDataStub) =>
                expect(createAndDisplayJoinedDataStub).to.not.be.called);
    cy.get('@removeScoreLayerStub')
        .then(
            (removeScoreLayerStub) =>
                expect(removeScoreLayerStub).to.not.be.called);
    cy.get('#error')
        .should(
            'have.text',
            'ERROR: poverty threshold must be between 0.00 and 1.00')
        .then(() => expect(errorStub).to.be.calledOnce);
    // Correcting the value should clear the error and trigger an update.
    cy.get('[id="poverty threshold"]').clear().type('0.0').blur();
    cy.get('#update').click().then(
        () => assertDisplayCalledWith(0.5, 0.0, 0.5));
    cy.get('#error').should('have.text', '');
  });

  /**
   * Checks that stub was called with the expected toggles values.
   * @param {number} povertyWeight
   * @param {number} povertyThreshold
   * @param {number} damageThreshold
   */
  function assertDisplayCalledWith(
      povertyWeight, povertyThreshold, damageThreshold) {
    cy.get('@createAndDisplayJoinedDataStub')
        .then((createAndDisplayJoinedDataStub) => {
          expect(createAndDisplayJoinedDataStub).to.be.calledOnce;
          // NOTE(review): calledWith is given a freshly constructed
          // Promise.resolve(...); verify the matcher actually compares
          // resolved values rather than promise identity here.
          expect(createAndDisplayJoinedDataStub)
              .to.be.calledWith(
                  {},
                  Promise.resolve(
                      {povertyWeight, povertyThreshold, damageThreshold}));
        });
    cy.get('@removeScoreLayerStub')
        .then(
            (removeScoreLayerStub) =>
                expect(removeScoreLayerStub).to.be.calledOnce);
  }
});
/**
 * Simulates the presence of a damage asset by seeding the score-computation
 * parameters with a non-null damageAssetPath.
 * @return {Cypress.Chainable<Array<number>>}
 */
function setUpDamageAsset() {
  const disasterMetadata = Promise.resolve({damageAssetPath: 'foo'});
  return cy.wrap(setUpScoreComputationParameters(disasterMetadata, {}));
}
jamesanto/scala | test/scalacheck/scala/reflect/quasiquotes/ForProps.scala | package scala.reflect.quasiquotes
import org.scalacheck._, Prop._, Gen._, Arbitrary._
import scala.reflect.runtime.universe._, Flag._, internal.reificationSupport._
// Property- and example-based tests for the for-comprehension quasiquote
// interpolators (q"for(..$enums) $body" and the fq"..." enumerator forms),
// checking that syntactic construction and deconstruction round-trip.
object ForProps extends QuasiquoteProperties("for") {
  // Wrapper so ScalaCheck generates enumerator lists whose head is always a
  // generator (`<-`), as Scala's for-comprehension syntax requires.
  case class ForEnums(val value: List[Tree])

  // Generates a simple variable pattern of the form `name @ _`.
  def genSimpleBind: Gen[Bind] =
    for(name <- genTermName)
      yield pq"$name @ _"

  // Generates a guard enumerator: `if cond`.
  def genForFilter: Gen[Tree] =
    for(cond <- genIdent(genTermName))
      yield fq"if $cond"

  // Generates a generator enumerator: `pat <- expr`.
  def genForFrom: Gen[Tree] =
    for(lhs <- genSimpleBind; rhs <- genIdent(genTermName))
      yield fq"$lhs <- $rhs"

  // Generates a value-definition enumerator: `pat = expr`.
  def genForEq: Gen[Tree] =
    for(lhs <- genSimpleBind; rhs <- genIdent(genTermName))
      yield fq"$lhs = $rhs"

  // A generator first, then `size` enumerators of any of the three kinds.
  def genForEnums(size: Int): Gen[ForEnums] =
    for(first <- genForFrom; rest <- listOfN(size, oneOf(genForFrom, genForFilter, genForEq)))
      yield new ForEnums(first :: rest)

  implicit val arbForEnums: Arbitrary[ForEnums] = arbitrarySized(genForEnums)

  // SyntacticFor(enums, body) must deconstruct back to equivalent trees (≈).
  property("construct-reconstruct for") = forAll { (enums: ForEnums, body: Tree) =>
    val SyntacticFor(recoveredEnums, recoveredBody) = SyntacticFor(enums.value, body)
    recoveredEnums ≈ enums.value && recoveredBody ≈ body
  }

  // Same round-trip for the `for ... yield` variant.
  property("construct-reconstruct for-yield") = forAll { (enums: ForEnums, body: Tree) =>
    val SyntacticForYield(recoveredEnums, recoveredBody) = SyntacticForYield(enums.value, body)
    recoveredEnums ≈ enums.value && recoveredBody ≈ body
  }

  // Fixed example trees used by the deterministic tests below.
  val abcde = List(fq"a <-b", fq"if c", fq"d = e")
  val foobarbaz = pq"foo @ Bar(baz)"
  val fv = q"f(v)"

  property("construct/deconstruct for loop with fq") = test {
    val for0 = q"for(..$abcde) $fv"
    assertEqAst(for0, "for(a <- b; if c; d = e) f(v)")
    val q"for(..$enums) $body" = for0
    assert(enums ≈ abcde)
    assert(body ≈ fv)
  }

  property("construct/deconstruct valfrom with fq") = test {
    assert(fq"$foobarbaz <- $fv" ≈ fq"foo @ Bar(baz) <- f(v)")
    val fq"$lhs <- $rhs" = fq"$foobarbaz <- $fv"
    assert(lhs ≈ foobarbaz)
    assert(rhs ≈ fv)
  }

  property("construct/deconstruct valeq with fq") = test {
    assert(fq"$foobarbaz = $fv" ≈ fq"foo @ Bar(baz) = f(v)")
    val fq"$lhs = $rhs" = fq"$foobarbaz = $fv"
    assert(lhs ≈ foobarbaz)
    assert(rhs ≈ fv)
  }

  property("construct/deconstruct filter with fq") = test {
    assert(fq"if $fv" ≈ fq"if f(v)")
    val fq"if $cond" = fq"if $fv"
    assert(cond ≈ fv)
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.