repo_name stringlengths 6 101 | path stringlengths 4 300 | text stringlengths 7 1.31M |
|---|---|---|
gatarelib/webiny-js | packages/webiny-app-cms/src/editor/plugins/elementSettings/delete/DeleteAction.js | <reponame>gatarelib/webiny-js
// @flow
import * as React from "react";
import { connect } from "webiny-app-cms/editor/redux";
import { compose, withHandlers } from "recompose";
import { getPlugin } from "webiny-plugins";
import { deleteElement } from "webiny-app-cms/editor/actions";
import { getActiveElement } from "webiny-app-cms/editor/selectors";
const DeleteAction = ({ element, children, deleteElement }: Object) => {
const plugin = getPlugin(element.type);
if (!plugin) {
return null;
}
if (typeof plugin.canDelete === "function") {
if (!plugin.canDelete({ element })) {
return null;
}
}
return React.cloneElement(children, { onClick: deleteElement });
};
// Wire DeleteAction to the editor store:
//  - `element` is the currently active (selected) element from redux state,
//  - the `deleteElement` action creator is re-wrapped by `withHandlers` so the
//    component receives a zero-argument callback already bound to `element`.
export default compose(
    connect(
        state => ({ element: getActiveElement(state) }),
        { deleteElement }
    ),
    withHandlers({
        deleteElement: ({ deleteElement, element }) => () => {
            deleteElement({ element });
        }
    })
)(DeleteAction);
|
LiuJiaBaiDing/spring-boot-modules | spring-boot-modules-common/src/main/java/com/bai/ding/common/constant/CommonConstant.java | package com.bai.ding.common.constant;
import com.google.common.collect.Maps;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.StringUtils;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
/**
 * Reads shared configuration key/value pairs from {@code constant.properties}
 * (plus an optional profile overlay such as {@code constant-dev.properties},
 * selected by the {@code constant.profile.active} key) found anywhere on the
 * classpath, then overlays JVM system properties on top. Values are exposed
 * via {@link #getStringConstant(String, String)}.
 *
 * @author BaiDing
 * @date 2020/3/10 15:43
 */
public final class CommonConstant {

    private static final Logger log = LoggerFactory.getLogger(CommonConstant.class);

    /** Base properties file name looked up on the classpath. */
    private static final String CONSTANT_PROPERTIES = "constant.properties";

    /** Loaded constants; a concurrent map so lookups are thread-safe. */
    private static final Map<String, String> CONSTANTS = Maps.newConcurrentMap();

    static {
        // Populate CONSTANTS once, when the class is first used.
        loadConstant();
    }

    public CommonConstant() {
        // Kept public for backward compatibility; the class is meant to be
        // used statically and holds no instance state.
    }

    private static String getConstantProperties() {
        return CONSTANT_PROPERTIES;
    }

    /**
     * Loads the base properties file, then (if {@code constant.profile.active}
     * is set) the profile-specific file, and finally overlays JVM system
     * properties. Failures are logged rather than propagated.
     */
    public static void loadConstant() {
        Properties properties = new Properties();
        try {
            String constantFileName = getConstantProperties();
            loadConstant(properties, constantFileName);
            String profile = properties.getProperty("constant.profile.active");
            if (profile != null && !"".equals(profile)) {
                // Derive e.g. "constant-dev.properties" from "constant.properties".
                int dotIdx = constantFileName.lastIndexOf(".");
                if (dotIdx > 0) {
                    constantFileName = constantFileName.substring(0, dotIdx) + "-" + profile
                            + constantFileName.substring(dotIdx);
                } else {
                    constantFileName = constantFileName + "-" + profile;
                }
                loadConstant(properties, constantFileName);
            }
            // System properties take precedence over file contents.
            properties.putAll(System.getProperties());
            setProperties(properties);
        } catch (Exception e) {
            log.error("CommonConstant.loadConstant.exception: ", e);
        }
    }

    /**
     * Merges every classpath resource named {@code propsFile} into
     * {@code properties}. Later resources override earlier ones.
     */
    private static void loadConstant(Properties properties, String propsFile) {
        try {
            log.debug("CommonConstant.loadConstant:constantFileName:{}", propsFile);
            Enumeration<URL> enumeration = CommonConstant.class.getClassLoader().getResources(propsFile);
            while (enumeration.hasMoreElements()) {
                URL url = enumeration.nextElement();
                log.debug("CommonConstant.loadConstant:path:{}", url);
                properties.putAll(loadProperties(url.openStream()));
            }
        } catch (Exception e) {
            log.error("CommonConstant.loadConstant.exception: ", e);
        }
    }

    /**
     * Parses a properties stream into a fresh {@link Properties} instance.
     *
     * <p>Fix: the stream is now closed via try-with-resources; previously it
     * was leaked on every call.
     */
    private static Properties loadProperties(InputStream in) throws IOException {
        Properties props = new Properties();
        try (InputStream stream = in) {
            props.load(stream);
        }
        return props;
    }

    /** Copies every entry of {@code props} into the constants map. */
    public static void setProperties(Properties props) {
        for (Map.Entry<Object, Object> objectEntry : props.entrySet()) {
            CONSTANTS.put((String) objectEntry.getKey(), (String) objectEntry.getValue());
            log.debug("CommonConstant.setProperty:" + objectEntry);
        }
    }

    /**
     * Returns the constant for {@code key}, or {@code defaultVal} when the
     * value is missing or empty.
     */
    public static String getStringConstant(String key, String defaultVal) {
        String val = CONSTANTS.get(key);
        return StringUtils.isEmpty(val) ? defaultVal : val;
    }
}
|
bovlb/the-blue-alliance | src/backend/common/queries/database_query.py | from __future__ import annotations
import abc
import logging
from typing import Any, Dict, Generator, Generic, List, Optional, Set, Type, Union
from google.appengine.ext import ndb
from pyre_extensions import none_throws
from backend.common.consts.api_version import ApiMajorVersion
from backend.common.futures import TypedFuture
from backend.common.models.cached_query_result import CachedQueryResult
from backend.common.profiler import Span
from backend.common.queries.dict_converters.converter_base import ConverterBase
from backend.common.queries.types import DictQueryReturn, QueryReturn
class DatabaseQuery(abc.ABC, Generic[QueryReturn, DictQueryReturn]):
    """Abstract base for typed ndb datastore queries.

    Subclasses implement ``_query_async`` and may set ``DICT_CONVERTER`` to
    support API-dict shaped results via the ``fetch_dict*`` methods.
    """

    # Keyword arguments captured at construction and forwarded to _query_async.
    _query_args: Dict[str, Any]
    # Optional converter used by the fetch_dict* paths; None means this query
    # cannot produce dict-shaped results.
    DICT_CONVERTER: Optional[Type[ConverterBase[QueryReturn, DictQueryReturn]]]

    def __init__(self, *args, **kwargs) -> None:
        self._query_args = kwargs

    @abc.abstractmethod
    def _query_async(self) -> TypedFuture[QueryReturn]:
        ...

    @ndb.tasklet
    def _do_query(self, *args, **kwargs) -> Generator[Any, Any, QueryReturn]:
        # This gives CachedDatabaseQuery a place to hook into
        res = yield self._query_async(*args, **kwargs)
        return res

    @ndb.tasklet
    def _do_dict_query(
        self, _dict_version: ApiMajorVersion, *args, **kwargs
    ) -> Generator[Any, Any, Union[None, DictQueryReturn, List[DictQueryReturn]]]:
        # This gives CachedDatabaseQuery a place to hook into
        res = yield self._query_async(*args, **kwargs)
        if self.DICT_CONVERTER is None:
            raise Exception(
                f"{self.__class__.__name__} does not provide a Dict converter!"
            )
        # See https://github.com/facebook/pyre-check/issues/267
        return self.DICT_CONVERTER(res).convert(_dict_version)  # pyre-ignore[45]

    def fetch(self) -> QueryReturn:
        """Synchronously run the query and return the model-shaped result."""
        return self.fetch_async().get_result()

    @ndb.tasklet
    def fetch_async(self) -> Generator[Any, Any, QueryReturn]:
        """Asynchronously run the query (traced via a profiler Span)."""
        with Span("{}.fetch_async".format(self.__class__.__name__)):
            query_result = yield self._do_query(**self._query_args)
            return query_result

    def fetch_dict(self, version: ApiMajorVersion) -> DictQueryReturn:
        """Synchronously run the query and return the dict-converted result."""
        return self.fetch_dict_async(version).get_result()

    @ndb.tasklet
    def fetch_dict_async(
        self, version: ApiMajorVersion
    ) -> Generator[Any, Any, DictQueryReturn]:
        """Asynchronously run the query, converting to the given API version."""
        with Span("{}.fetch_dict_async".format(self.__class__.__name__)):
            query_result = yield self._do_dict_query(version, **self._query_args)
            return query_result
class CachedDatabaseQuery(
    DatabaseQuery, Generic[QueryReturn, DictQueryReturn], metaclass=abc.ABCMeta
):
    """A DatabaseQuery whose results are cached in CachedQueryResult entities.

    Model-shaped results are cached under ``cache_key``; dict (API) results
    are cached per API major version under ``dict_cache_key``. Each caching
    flavor, and cache writes, can be toggled via the class flags below.
    """

    # Bump to invalidate every previously-written cached query result.
    DATABASE_QUERY_VERSION = 4
    BASE_CACHE_KEY_FORMAT: str = (
        "{}:{}:{}"  # (partial_cache_key, cache_version, database_query_version)
    )
    CACHE_KEY_FORMAT: str = ""
    CACHE_VERSION: int = 0
    DICT_CACHING_ENABLED: bool = True
    MODEL_CACHING_ENABLED: bool = True
    CACHE_WRITES_ENABLED: bool = True
    # Memoized cache key, computed lazily by the cache_key property.
    _cache_key: Optional[str] = None

    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)

    @property
    def cache_key(self) -> str:
        """The cache key for model-shaped results (memoized)."""
        if not self._cache_key:
            self._cache_key = self.BASE_CACHE_KEY_FORMAT.format(
                self.CACHE_KEY_FORMAT.format(**self._query_args),
                self.CACHE_VERSION,
                self.DATABASE_QUERY_VERSION,
            )
        return none_throws(self._cache_key)

    def dict_cache_key(self, dict_version: ApiMajorVersion) -> str:
        """The cache key for dict-shaped results at the given API version."""
        return self._dict_cache_key(self.cache_key, dict_version)

    @classmethod
    def _dict_cache_key(cls, cache_key: str, dict_version: ApiMajorVersion) -> str:
        # Fix: local variable was previously misspelled "subvserion".
        subversion = none_throws(cls.DICT_CONVERTER).SUBVERSIONS[dict_version]
        return f"{cache_key}~dictv{dict_version}.{subversion}"

    @classmethod
    def delete_cache_multi(cls, cache_keys: Set[str]) -> None:
        """Deletes cached results (model + every dict version) for each key."""
        all_cache_keys = []
        for cache_key in cache_keys:
            all_cache_keys.append(cache_key)
            if cls.DICT_CONVERTER is not None:
                all_cache_keys += [
                    cls._dict_cache_key(cache_key, valid_dict_version)
                    for valid_dict_version in set(ApiMajorVersion)
                ]
        logging.info("Deleting db query cache keys: {}".format(all_cache_keys))
        ndb.delete_multi(
            [ndb.Key(CachedQueryResult, cache_key) for cache_key in all_cache_keys]
        )

    @ndb.tasklet
    def _do_query(self, *args, **kwargs) -> Generator[Any, Any, QueryReturn]:
        """Model query with read-through caching (write-on-miss)."""
        if not self.MODEL_CACHING_ENABLED:
            result = yield self._query_async(*args, **kwargs)
            return result

        cache_key = self.cache_key
        cached_query_result = yield CachedQueryResult.get_by_id_async(cache_key)
        if cached_query_result is None:
            query_result = yield self._query_async(*args, **kwargs)
            if self.CACHE_WRITES_ENABLED:
                yield CachedQueryResult(id=cache_key, result=query_result).put_async()
            return query_result
        return cached_query_result.result

    @ndb.tasklet
    def _do_dict_query(
        self, _dict_version: ApiMajorVersion, *args, **kwargs
    ) -> Generator[Any, Any, Union[None, DictQueryReturn, List[DictQueryReturn]]]:
        """Dict query with read-through caching, keyed per API version."""
        if not self.DICT_CACHING_ENABLED:
            result = yield self._query_async(*args, **kwargs)
            return result

        cache_key = self.dict_cache_key(_dict_version)
        cached_query_result = yield CachedQueryResult.get_by_id_async(cache_key)
        if cached_query_result is None:
            query_result = yield self._query_async(*args, **kwargs)
            # See https://github.com/facebook/pyre-check/issues/267
            converted_result = none_throws(self.DICT_CONVERTER)(  # pyre-ignore[45]
                query_result
            ).convert(_dict_version)
            if self.CACHE_WRITES_ENABLED:
                yield CachedQueryResult(
                    id=cache_key, result_dict=converted_result
                ).put_async()
            return converted_result
        return cached_query_result.result_dict
|
khartig/assimilator | rio-lib/src/main/java/org/rioproject/jmx/JMXConnectionUtil.java | /*
* Copyright 2008 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.rioproject.jmx;
import com.sun.tools.attach.VirtualMachine;
import org.rioproject.config.Constants;
import org.rioproject.net.HostUtil;
import org.rioproject.rmi.RegistryUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.management.MBeanServer;
import javax.management.MBeanServerConnection;
import javax.management.ObjectName;
import javax.management.remote.*;
import java.io.File;
import java.lang.management.ManagementFactory;
/**
 * Provides JMX connection utilities.
 *
 * @author <NAME>
 */
public class JMXConnectionUtil {
    static final Logger logger = LoggerFactory.getLogger(JMXConnectionUtil.class);

    /**
     * Create a {@link javax.management.remote.JMXConnectorServer}, bound to
     * the RMI Registry created by an infrastructure service (Cybernode or
     * Monitor).
     *
     * <p>If <tt>JMXConnectorServer</tt> has been created, return immediately.
     * If the <tt>JMXConnectorServer</tt> needs to be created, it will be
     * bound to the RMI Registry, and set the
     * <tt>org.rioproject.jmxServiceURL</tt> system property.
     *
     * <p>This utility uses the {@link org.rioproject.rmi.RegistryUtil} class to
     * obtain the port to access the RMI Registry.
     *
     * @throws Exception If there are errors reading the configuration, or
     * creating the {@link javax.management.remote.JMXConnectorServer}
     */
    public static void createJMXConnection() throws Exception {
        // The service-URL system property doubles as the "already created" flag.
        if(System.getProperty(Constants.JMX_SERVICE_URL)!=null)
            return;
        RegistryUtil.checkRegistry();
        String sPort = System.getProperty(Constants.REGISTRY_PORT, "0");
        int registryPort = Integer.parseInt(sPort);
        if(registryPort==0) {
            logger.error("RMI Registry property [{}] not found, unable to create MBeanServer", Constants.REGISTRY_PORT);
            throw new Exception("Unable to create the JMXConnectorServer");
        }
        MBeanServer mbs = MBeanServerFactory.getMBeanServer();
        String hostAddress = HostUtil.getHostAddressFromProperty(Constants.RMI_HOST_ADDRESS);
        // RMI connector URL whose stub is looked up via JNDI in the local
        // registry under the "jmxrmi" name.
        JMXServiceURL jmxServiceURL = new JMXServiceURL("service:jmx:rmi://"+hostAddress+":"+registryPort+
                                                        "/jndi/rmi://"+hostAddress+":"+registryPort+"/jmxrmi");
        System.setProperty(Constants.JMX_SERVICE_URL, jmxServiceURL.toString());
        if(logger.isInfoEnabled())
            logger.info("JMXServiceURL={}", jmxServiceURL);
        JMXConnectorServer jmxConn = JMXConnectorServerFactory.newJMXConnectorServer(jmxServiceURL, null, mbs);
        jmxConn.start();
        if(logger.isDebugEnabled())
            logger.debug("JMX Platform MBeanServer exported with RMI Connector");
    }

    /**
     * Get the agentID of the Platform MBeanServer
     *
     * @return The agentID of the Platform MBeanServer
     *
     * @throws Exception if the agent ID cannot be found
     */
    public static String getPlatformMBeanServerAgentId() throws Exception {
        MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
        // The MBeanServerDelegate exposes the server's unique id as an attribute.
        final String SERVER_DELEGATE = "JMImplementation:type=MBeanServerDelegate";
        final String MBEAN_SERVER_ID_KEY = "MBeanServerId";
        ObjectName delegateObjName = new ObjectName(SERVER_DELEGATE);
        return (String) mbs.getAttribute(delegateObjName, MBEAN_SERVER_ID_KEY );
    }

    /**
     * Using the <a
     * href="http://java.sun.com/javase/6/docs/technotes/guides/attach/index.html">
     * JMX Attach API </a>, connect to a local Java Virtual Machine.
     *
     * <p>This utility requires Java 6 or greater.
     *
     * @param id The identifier used to connect to the Java Virtual Machine
     * @return An MBeanServerConnection to the platform MBeanServer of the
     * Java Virtual Machine identified, or null if the connection cannot be
     * created.
     *
     * @throws Exception if the following occurs:
     * <ul>
     * <li>MBeanServerConnection cannot be created</li>
     * <li>If the underlying provider either does not exist, or if the provider attempts to attach to a
     * Java virtual machine with which it is not compatible.</li>
     * <li>If an agent fails to initialize in the target Java virtual machine.</li>
     * <li>If an agent cannot be loaded into the target Java virtual machine.</li>
     * </ul>
     */
    public static MBeanServerConnection attach(final String id) throws Exception {
        // NOTE(review): contains("1.5") would also match versions such as
        // "11.5"; written for the Java 5/6 era version-string format — confirm.
        String jvmVersion = System.getProperty("java.version");
        if(jvmVersion.contains("1.5")) {
            logger.info("The JMX Attach APIs require Java 6 or above. You are running Java {}", jvmVersion);
            return null;
        }
        VirtualMachine vm = VirtualMachine.attach(id);
        String connectorAddr = vm.getAgentProperties().getProperty("com.sun.management.jmxremote.localConnectorAddress");
        if (connectorAddr == null) {
            // The target JVM has no local connector yet: load the management
            // agent shipped with its own JRE to start one.
            String agent = vm.getSystemProperties().getProperty("java.home")+File.separator+"lib"+File.separator+
                           "management-agent.jar";
            vm.loadAgent(agent);
            connectorAddr = vm.getAgentProperties().getProperty("com.sun.management.jmxremote.localConnectorAddress");
        }
        MBeanServerConnection mbs = null;
        if(connectorAddr!=null) {
            JMXServiceURL serviceURL = new JMXServiceURL(connectorAddr);
            JMXConnector connector = JMXConnectorFactory.connect(serviceURL);
            mbs = connector.getMBeanServerConnection();
        }
        return mbs;
    }
}
|
rubicon/node-red-contrib-home-assistant-websocket | ui/js/common/nodeversion.js | <filename>ui/js/common/nodeversion.js
/* global ha: false */
// eslint-disable-next-line no-unused-vars
const nodeVersion = (function ($, RED, haMigrations) {
    /**
     * Ensures a node is on the current schema version, migrating it when it
     * is not.
     */
    function check(node) {
        if (isCurrentVersion(node)) {
            return;
        }
        migrateNode(node);
    }

    /**
     * Migrates a single node's properties to the latest schema, in place.
     * Editor bookkeeping fields are excluded from the migration payload,
     * deprecated properties are removed, and the node is flagged
     * dirty/changed so the editor knows a redeploy is needed.
     */
    function migrate(node) {
        const data = { type: node.type };
        // Editor-internal fields that must never be passed to the migrator.
        const ignoreProperties = [
            'x',
            'y',
            'z',
            'd',
            'g',
            'l',
            'id',
            'type',
            'wires',
            'inputs',
            'outputs',
        ];
        for (const property in node._config) {
            if (
                !ignoreProperties.includes(property) &&
                Object.prototype.hasOwnProperty.call(node._config, property)
            ) {
                data[property] = node[property];
            }
        }
        const migratedData = haMigrations.migrate(data);
        delete migratedData.type;
        for (const property in migratedData) {
            if (
                node._def.defaults[property] &&
                node._def.defaults[property].value === undefined
            ) {
                // delete deprecated properties
                delete node[property];
            } else {
                node[property] = migratedData[property];
            }
        }
        node.dirty = true;
        node.changed = true;
        // Hide the "upgrade all" prompt once the last outdated node is fixed.
        const $upgradeHaNode = $('#upgrade-ha-node');
        if ($upgradeHaNode.is(':visible') && getOldNodeCount() === 0) {
            $upgradeHaNode.hide();
        }
    }

    /** Migrates every outdated Home Assistant flow node and config node. */
    function migrateAllNodes() {
        RED.nodes.eachNode((node) => {
            if (isHomeAssistantNode(node) && !isCurrentVersion(node)) {
                migrate(node);
            }
        });
        RED.nodes.eachConfig((node) => {
            if (isHomeAssistantNode(node) && !isCurrentVersion(node)) {
                migrate(node);
            }
        });
        RED.nodes.dirty(true);
        RED.notify(ha.i18n('home-assistant.ui.migrations.all_nodes_updated'));
        RED.view.redraw();
    }

    /** Shows a localized confirmation dialog before migrating all nodes. */
    function migrateAllNodesConfirm() {
        const namespace = 'home-assistant.ui.migrations';
        const ok = ha.i18n(`${namespace}.button_ok`);
        const cancel = ha.i18n(`${namespace}.button_cancel`);
        $('#ha-dialog-confirm').dialog({
            resizable: false,
            height: 'auto',
            width: 400,
            modal: true,
            buttons: {
                [ok]: function () {
                    $(this).dialog('close');
                    migrateAllNodes();
                },
                [cancel]: function () {
                    $(this).dialog('close');
                },
            },
        });
    }

    /**
     * Migrates one node, then closes and reopens its edit tray so the form
     * reflects the migrated values.
     */
    function migrateNode(node) {
        migrate(node);
        RED.nodes.dirty(true);
        // One-shot handler: re-open the editor after the current tray closes.
        RED.events.on('editor:close', function reopen() {
            RED.events.off('editor:close', reopen);
            RED.editor.edit(node);
        });
        RED.tray.close();
        RED.notify(ha.i18n('home-assistant.ui.migrations.node_schema_updated'));
    }

    /** True when the node's stored version is at least the current default. */
    function isCurrentVersion(node) {
        return (
            node.version !== undefined &&
            node.version >= node._def.defaults.version.value
        );
    }

    /** True when the node type is provided by this package's module. */
    function isHomeAssistantNode(node) {
        return (
            node._def.set.module === 'node-red-contrib-home-assistant-websocket'
        );
    }

    /** Counts Home Assistant nodes (flow + config) still on an old schema. */
    function getOldNodeCount() {
        let count = 0;
        RED.nodes.eachNode((n) => {
            if (isHomeAssistantNode(n) && !isCurrentVersion(n)) {
                count++;
            }
        });
        RED.nodes.eachConfig((n) => {
            if (isHomeAssistantNode(n) && !isCurrentVersion(n)) {
                count++;
            }
        });
        return count;
    }

    // Public API of the module.
    return {
        check,
        isCurrentVersion,
        isHomeAssistantNode,
        migrateAllNodesConfirm,
        getOldNodeCount,
    };
    // eslint-disable-next-line no-undef
})(jQuery, RED, haMigrations);
|
YuJaeSeok/Leetcode-Practice | src/Leetcode101_200/Leetcode108_ConvertSortedArraytoBinarySearchTree.java | <filename>src/Leetcode101_200/Leetcode108_ConvertSortedArraytoBinarySearchTree.java<gh_stars>1-10
package Leetcode101_200;
/**
 * LeetCode 108: Convert Sorted Array to Binary Search Tree.
 *
 * Builds a height-balanced BST by repeatedly taking the middle element of a
 * range as the subtree root and recursing on the two halves.
 *
 * @author DeLL
 */
public class Leetcode108_ConvertSortedArraytoBinarySearchTree {

    public TreeNode sortedArrayToBST(int[] nums) {
        if (nums == null) {
            return null;
        }
        return build(nums, 0, nums.length - 1);
    }

    /** Builds the subtree for nums[lo..hi]; an empty range yields null. */
    private TreeNode build(int[] nums, int lo, int hi) {
        if (lo > hi) {
            return null;
        }
        // Overflow-safe midpoint; middle element becomes the subtree root.
        int mid = lo + (hi - lo) / 2;
        TreeNode node = new TreeNode(nums[mid]);
        node.left = build(nums, lo, mid - 1);
        node.right = build(nums, mid + 1, hi);
        return node;
    }
}
|
Scottx86-64/dotfiles-1 | source/gomuks-0.2.3/ui/messages/html/list.go | // gomuks - A terminal Matrix client written in Go.
// Copyright (C) 2020 <NAME>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
package html
import (
"fmt"
"math"
"strings"
"maunium.net/go/gomuks/ui/widget"
"maunium.net/go/mauview"
)
// ListEntity is the renderable representation of an HTML list
// (<ul> or <ol>).
type ListEntity struct {
	*ContainerEntity
	// Ordered is true for <ol>; items are numbered starting at Start.
	Ordered bool
	// Start is the number assigned to the first item of an ordered list.
	Start   int
}
// digits returns the number of decimal digits in num; zero and negative
// values have no digits for indentation purposes.
//
// Fix: math.Log10 can return just under an integer for exact powers of ten
// (e.g. Log10(1000) may evaluate to 2.999...), which made the result one too
// small and mis-sized list indentation. The floating-point estimate is now
// verified and corrected with exact integer arithmetic.
func digits(num int) int {
	if num <= 0 {
		return 0
	}
	d := int(math.Floor(math.Log10(float64(num)))) + 1
	// low tracks 10^(d-1); adjust d until 10^(d-1) <= num < 10^d.
	low := 1
	for i := 1; i < d; i++ {
		low *= 10
	}
	for low > num {
		low /= 10
		d--
	}
	for low <= num/10 {
		low *= 10
		d++
	}
	return d
}
// NewListEntity builds a list entity wrapping the given children.
// The base indent of 2 covers the "● " bullet prefix; for ordered lists the
// tag becomes "ol" and the indent grows by the digit count of the largest
// item number so numbered prefixes line up.
func NewListEntity(ordered bool, start int, children []Entity) *ListEntity {
	entity := &ListEntity{
		ContainerEntity: &ContainerEntity{
			BaseEntity: &BaseEntity{
				Tag:   "ul",
				Block: true,
			},
			Indent:   2,
			Children: children,
		},
		Ordered: ordered,
		Start:   start,
	}
	if ordered {
		entity.Tag = "ol"
		entity.Indent += digits(start + len(children) - 1)
	}
	return entity
}
// AdjustStyle applies fn to the embedded BaseEntity and returns the list
// itself so calls can be chained.
func (le *ListEntity) AdjustStyle(fn AdjustStyleFunc) Entity {
	le.BaseEntity = le.BaseEntity.AdjustStyle(fn).(*BaseEntity)
	return le
}
// Clone returns a copy of the list entity: the container (and therefore the
// children) is deep-cloned, while the ordering fields are copied by value.
func (le *ListEntity) Clone() Entity {
	container := le.ContainerEntity.Clone().(*ContainerEntity)
	clone := &ListEntity{
		ContainerEntity: container,
		Ordered:         le.Ordered,
		Start:           le.Start,
	}
	return clone
}
// Draw renders each child into a proxy screen shifted right by the list
// indent, prefixing each child's first row with its item number (ordered)
// or a bullet (unordered).
func (le *ListEntity) Draw(screen mauview.Screen) {
	width, _ := screen.Size()
	proxyScreen := &mauview.ProxyScreen{Parent: screen, OffsetX: le.Indent, Width: width - le.Indent, Style: le.Style}
	for i, entity := range le.Children {
		proxyScreen.Height = entity.Height()
		if le.Ordered {
			number := le.Start + i
			// Pad after "N. " so every child's content starts at column Indent.
			line := fmt.Sprintf("%d. %s", number, strings.Repeat(" ", le.Indent-2-digits(number)))
			widget.WriteLine(screen, mauview.AlignLeft, line, 0, proxyScreen.OffsetY, le.Indent, le.Style)
		} else {
			screen.SetContent(0, proxyScreen.OffsetY, '●', nil, le.Style)
		}
		entity.Draw(proxyScreen)
		proxyScreen.SetStyle(le.Style)
		proxyScreen.OffsetY += entity.Height()
	}
}
// PlainText renders the list as plain text: one "N. " or "● " prefix per
// child, with each child's continuation lines indented to align under its
// first line. The trailing newline is trimmed from the final result.
func (le *ListEntity) PlainText() string {
	if len(le.Children) == 0 {
		return ""
	}
	var buf strings.Builder
	for i, child := range le.Children {
		indent := strings.Repeat(" ", le.Indent)
		if le.Ordered {
			number := le.Start + i
			_, _ = fmt.Fprintf(&buf, "%d. %s", number, strings.Repeat(" ", le.Indent-2-digits(number)))
		} else {
			buf.WriteString("● ")
		}
		for j, row := range strings.Split(child.PlainText(), "\n") {
			if j != 0 {
				buf.WriteRune('\n')
				buf.WriteString(indent)
			}
			buf.WriteString(row)
		}
		buf.WriteRune('\n')
	}
	return strings.TrimSpace(buf.String())
}
// String implements fmt.Stringer for debugging output.
func (le *ListEntity) String() string {
	return fmt.Sprintf("&html.ListEntity{Ordered=%t, Start=%d, Base=%s},\n", le.Ordered, le.Start, le.BaseEntity)
}
|
mskoenz/arduino_crash_course | program/03_actual_progs/glove_preparation/atmega_src/keyop.cpp | <filename>program/03_actual_progs/glove_preparation/atmega_src/keyop.cpp<gh_stars>0
// Author: <NAME> <<EMAIL>>
// Date: 24.07.2013 18:58:59 CEST
// File: key_operator.cpp
#define COLOR_ON
#define DEBUG_ON
#define ADVANCED_INTERRUPTS
#include <Arduino.h>
#include <ustd.hpp>
#include <diag.hpp>
#include <tool.hpp>
#include <com/eeprom.hpp>
#include <com/uart.hpp>
#include <com/i2c.hpp>
#include <device/bluesmirf_hid.hpp>
#include <device/MMA8452.hpp>
#include "position.hpp"
#include "bridge.hpp"
#include "depreller.hpp"
#include "keyop_protocol.hpp"
// Scratch byte reused for every single-character UART protocol exchange.
char in;
// Fixed 9-byte payload exchanged with the sensor core over I2C for
// core::raw_value requests.
typedef ustd::array<uint8_t, 9> array_type;
// Glove-side main program: loads key definitions, polls the gesture core over
// I2C, bridges configuration commands between UART and the core, and emits
// keyboard HID events via the BlueSMiRF module.
class program {
public:
    program(): keyboard_(SOFTWARE_SERIAL_RX, SOFTWARE_SERIAL_TX) {
        setup();
    }
    // One-time initialization: UART, key-table masks, I2C, bluetooth keyboard.
    void setup() {
        ustd::cout.init(keyop::speed);
        //~ com::eeprom.clear();
        //~ DEBUG_MSG("start")
        // NOTE(review): presumably loads the stored key table into data_ via a
        // project-defined operator& on the eeprom object — confirm; as plain
        // C++ this line would otherwise be a no-op expression.
        com::eeprom & data_;
        for(uint8_t i = 0; i < keyop::max_keys; ++i) {
            data_[i].calc_mask();
            //~ ustd::cout << data_[i] << ustd::endl;
        }
        com::i2c_begin();
        //~ acc_meter_.init();
        keyboard_.connect();
        delay(50);
    }
    // Per-iteration sensing: find the dominant acceleration axis, fetch the
    // current gesture from the core, and debounce every configured key.
    void update() {
        tool::clock.update();
        //~ acc_meter_.update();
        // Dominant axis encoded as 2*i+1 (positive) / 2*i+2 (negative);
        // 0 means no dominant axis.
        double max = 0;
        uint8_t max_ind = 0;
        for(uint8_t i = 0; i < 3; ++i) {
            //~ g[i] << acc_meter_[i];
            if(g[i] > max) {
                max = g[i];
                max_ind = 2*i + 1;
            } else if(-g[i] > max) {
                max = -g[i];
                max_ind = 2*i + 2;
            }
        }
        uint8_t curr_gest;
        com::i2cout(core::i2c_adress) << core::current_gesture << ustd::endl;
        delayMicroseconds(100);
        com::i2cin(core::i2c_adress) >> curr_gest;
        // Combined state word: orientation in the top 3 bits, gesture id in
        // the low 5 bits; keys trigger on (state, previous-state) mask matches.
        depreller_.update((max_ind << 5) + (31 & curr_gest));
        for(uint8_t i = 0; i < data_.size(); ++i) {
            btn_[i].update((depreller_.state() & (data_[i].mask)) == data_[i].trigger
                       and (depreller_.old_state() & (data_[i].pre_mask)) == data_[i].pre_trigger);
        }
        //~ DEBUG_VAR(curr_gest)
        //~ diag::speed_report();
    }
    // Main loop: run the sensing update, service one UART protocol command if
    // pending (forwarding most of them to the core over I2C), then translate
    // key edges into HID press/release events.
    void loop() {
        update();
        if(com::uart) {
            com::uart >> in;
            if(in == core::raw_value) {
                com::i2cout(core::i2c_adress) << core::raw_value << ustd::endl;
                delayMicroseconds(150);
                com::i2cin(core::i2c_adress) >> pos_;
                com::uart << pos_;
            } else if(in == core::read_gesture) {
                //~ while(!com::uart.available());
                com::uart >> in;
                com::i2cout(core::i2c_adress) << core::read_gesture << in << ustd::endl;
                delayMicroseconds(300);
                com::i2cin(core::i2c_adress) >> gest_;
                com::uart << gest_;
            } else if(in == core::write_gesture) {
                com::uart >> in;
                com::uart >> gest_;
                com::i2cout(core::i2c_adress) << core::write_gesture << in << gest_ << ustd::endl;
            } else if(in == core::read_n_gestures) {
                com::i2cout(core::i2c_adress) << core::read_n_gestures << ustd::endl;
                com::i2cin(core::i2c_adress) >> in;
                com::uart << in;
            } else if(in == core::write_n_gestures) {
                com::uart >> in;
                com::i2cout(core::i2c_adress) << core::write_n_gestures << in << ustd::endl;
            } else if(in == core::write_to_eeprom) {
                for(uint8_t i = 0; i < keyop::max_keys; ++i) { //bc of the checksum :-/
                    data_[i].mask = 0;
                    data_[i].pre_mask = 0;
                }
                com::eeprom << data_;
                for(uint8_t i = 0; i < keyop::max_keys; ++i) //bc of the checksum :-/
                    data_[i].calc_mask();
                com::i2cout(core::i2c_adress) << core::write_to_eeprom << ustd::endl;
            } else if(in == core::remove_all_gestures) {
                com::i2cout(core::i2c_adress) << core::remove_all_gestures << ustd::endl;
            } else if(in == core::current_gesture) {
                com::i2cout(core::i2c_adress) << core::current_gesture << ustd::endl;
                com::i2cin(core::i2c_adress) >> in;
                com::uart << in;
            } else if(in == keyop::read_key) {
                com::uart >> in;
                com::uart << data_[in];
            } else if(in == keyop::write_key) {
                com::uart >> in;
                com::uart >> data_[in];
                data_[in].calc_mask();
            } else if(in == keyop::read_n_keys) {
                in = data_.size();
                com::uart << in;
            } else if(in == keyop::write_n_keys) {
                com::uart >> in;
                data_.set_size(in);
            }
        }
        // Edge-triggered HID output for every configured key.
        for(uint8_t i = 0; i < data_.size(); ++i) {
            if(btn_[i] == state::falling)
                keyboard_.press(data_[i].key, data_[i].mod);
            else if(btn_[i] == state::rising)
                keyboard_.release(data_[i].key, data_[i].mod);
        }
    }
    // I2C slave callbacks (unused on this side of the link).
    void receive(int n){}
    void request(){}
private:
    array_type pos_;                                                    // last raw position payload from the core
    gesture_class gest_;                                                // gesture buffer for read/write commands
    tool::depreller_class depreller_;                                   // debouncer over the combined state word
    ustd::array<tool::button_class<tool::fake>, keyop::max_keys> btn_;  // per-key edge detectors
    ustd::static_vector<key_data_struct, keyop::max_keys> data_;        // configured key bindings
    //~ device::MMA8452_class acc_meter_;
    ustd::lowpass_filter<double, 40> g[3];                              // filtered acceleration per axis
    device::bluesmirf_hid_class keyboard_;                              // bluetooth HID keyboard
};
#include <main.hpp>
|
binodthapachhetry/JustInTimeAdaptiveIntervention | wocketslib/src/main/java/edu/neu/android/wocketslib/audio/record/RecordAmplitudeTask.java | <gh_stars>1-10
/*
* Copyright 2012 <NAME> and <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.neu.android.wocketslib.audio.record;
import java.io.File;
import java.io.IOException;
import android.content.Context;
import android.os.AsyncTask;
import android.util.Log;
/**
 * Background task that runs a {@link MaxAmplitudeRecorder} off the UI thread
 * and reports whether it detected a loud clip (e.g. a clap).
 *
 * @author <NAME> <<a href="mailto:<EMAIL>"><EMAIL></a>>
 *
 */
public class RecordAmplitudeTask extends
    AsyncTask<AmplitudeClipListener, Void, Boolean>
{
    private static final String TAG = "RecordAmplitudeTask";

    private Context context;
    // Used only for logging on cancellation.
    private String taskName;

    // Subdirectory of external app storage holding the throwaway recording.
    private static final String TEMP_AUDIO_DIR_NAME = "temp_audio";

    /**
     * time between amplitude checks
     */
    private static final int CLIP_TIME = 1000;

    public RecordAmplitudeTask(Context context,
        String taskName)
    {
        this.context = context;
        this.taskName = taskName;
    }

    @Override
    protected void onPreExecute()
    {
        // tell UI recording is starting
        super.onPreExecute();
    }

    /**
     * note: only uses the first listener passed in
     *
     * @return true if the recorder detected something; false when it was
     *         canceled, stopped, or failed with an exception (all failures
     *         are logged and swallowed so the task completes normally).
     */
    @Override
    protected Boolean doInBackground(AmplitudeClipListener... listeners)
    {
        Log.d(TAG, "recording amplitude");
        // // construct recorder, using only the first listener passed in
        // AmplitudeClipListener listener = listeners[0];

        String appStorageLocation =
            context.getExternalFilesDir(TEMP_AUDIO_DIR_NAME).getAbsolutePath()
                + File.separator + "audio.3gp";
        MaxAmplitudeRecorder recorder =
            new MaxAmplitudeRecorder(CLIP_TIME, appStorageLocation, this);

        //set to true if the recorder successfully detected something
        //false if it was canceled or otherwise stopped
        boolean heard = false;
        try
        {
            // start recording
            heard = recorder.startRecording(context);
        } catch (IOException io)
        {
            Log.e(TAG, "failed to record", io);
            heard = false;
        } catch (IllegalStateException se)
        {
            // Must precede RuntimeException: IllegalStateException is a
            // RuntimeException subclass, and it gets a distinct log message.
            Log.e(TAG, "failed to record, recorder not setup properly", se);
            heard = false;
        } catch (RuntimeException se)
        {
            Log.e(TAG, "failed to record, recorder already being used", se);
            heard = false;
        }
        return heard;
    }

    @Override
    protected void onPostExecute(Boolean result)
    {
        // update UI
        if (result)
        {
            Log.e(TAG, "Heard clap at "
                + AudioTaskUtil.getNow());
        }
        else
        {
            Log.e(TAG, "heard no claps");
        }
        super.onPostExecute(result);
    }

    @Override
    protected void onCancelled()
    {
        Log.d(TAG, "cancelled " + taskName);
        super.onCancelled();
    }
}
//@Override
//public void heardClip(int maxAmplitude)
//{
// AudioTaskUtil.appendToStartOfLog(log, "heard no claps: " + maxAmplitude);
//if (xyPlot != null)
//{
// xyPlot.addSeries(audio,
// LineAndPointRenderer.class,
// new LineAndPointFormatter(Color.RED, null, null));
// long uptime = SystemClock.uptimeMillis();
// audio.addLast(increment, maxAmplitude);
// xyPlot.redraw();
// Log.d(TAG, "heard: " + maxAmplitude);
// increment++;
////
//// if ((uptime - lastChartRefresh) >= CHART_REFRESH)
//// {
//// long timestamp = (event.timestamp / 1000000) - startTime;
////
//// }
//}
//}
|
kagemeka/atcoder-submissions | jp.atcoder/abc121/abc121_c/9508639.py | import sys
n, m, *ab = map(int, sys.stdin.read().split())
ab = sorted(zip(*[iter(ab)] * 2))
def main(budget=None, offers=None):
    """Return the minimum cost of buying ``budget`` cans, cheapest-first.

    Generalized (backward-compatibly) to accept explicit inputs; without
    arguments it uses the module-level ``m`` and ``ab`` parsed from stdin.

    Args:
        budget: number of cans to buy; defaults to the module-level ``m``.
        offers: iterable of (price, stock) pairs; defaults to the
            module-level ``ab``. Explicit input is sorted by price here,
            so it need not be pre-sorted.

    Returns:
        Total cost after greedily buying from the cheapest offers. If the
        combined stock is smaller than ``budget`` the cost of everything
        available is returned (problem constraints guarantee enough stock).
    """
    if budget is None:
        budget = m
    offers = ab if offers is None else sorted(offers)
    cost = 0
    remaining = budget
    for price, stock in offers:
        if stock <= remaining:
            # Buy out this offer entirely and move to the next-cheapest.
            cost += price * stock
            remaining -= stock
        else:
            # This offer covers the rest of the demand.
            cost += price * remaining
            break
    return cost
if __name__ == '__main__':
    # Script entry point: compute and print the minimum total cost.
    ans = main()
    print(ans)
|
spocino/brutal | sources/kernel/x86_64/interrupts.c | <gh_stars>0
#include <brutal/debug.h>
#include <brutal/sync.h>
#include <brutal/time.h>
#include <brutal/types.h>
#include "kernel/context.h"
#include "kernel/cpu.h"
#include "kernel/global.h"
#include "kernel/interrupts.h"
#include "kernel/sched.h"
#include "kernel/x86_64/apic.h"
#include "kernel/x86_64/asm.h"
#include "kernel/x86_64/cmos.h"
#include "kernel/x86_64/interrupts.h"
#include "kernel/x86_64/pic.h"
#include "kernel/x86_64/simd.h"
#include "kernel/x86_64/smp.h"
#include "kernel/x86_64/syscall.h"
/* Serializes the panic dump when multiple CPUs fault at the same time. */
Lock error_lock;

/* Human-readable names for the 32 architecture-defined exception vectors. */
static char *_exception_messages[32] = {
    "DivisionByZero",
    "Debug",
    "NonMaskableInterrupt",
    "Breakpoint",
    "DetectedOverflow",
    "OutOfBounds",
    "InvalidOpcode",
    "NoCoprocessor",
    "DoubleFault",
    "CoprocessorSegmentOverrun",
    "BadTss",
    "SegmentNotPresent",
    "StackFault",
    "GeneralProtectionFault",
    "PageFault",
    "UnknownInterrupt",
    "CoprocessorFault",
    "AlignmentCheck",
    "MachineCheck",
    "Reserved",
    "Reserved",
    "Reserved",
    "Reserved",
    "Reserved",
    "Reserved",
    "Reserved",
    "Reserved",
    "Reserved",
    "Reserved",
    "Reserved",
    "Reserved",
    "Reserved",
};
/* Prints the saved register frame plus the current CR2/CR3 (page-fault
 * address and page-table root) for the panic report. */
static void dump_register(Regs const *regs)
{
    log_unlock("RIP: {#016p} | RSP: {#016p}", regs->rip, regs->rsp);
    log_unlock("CR2: {#016p} | CR3: {#016p} ", asm_read_cr2(), asm_read_cr3());
    log_unlock("CS : {#02p} | SS : {#02p} | RFlags: {#p}", regs->cs, regs->ss, regs->rflags);
    log_unlock("");
    log_unlock("RAX: {#016p} | RBX: {#016p}", regs->rax, regs->rbx);
    log_unlock("RCX: {#016p} | RDX: {#016p}", regs->rcx, regs->rdx);
    log_unlock("RSI: {#016p} | RDI: {#016p}", regs->rsi, regs->rdi);
    log_unlock("RBP: {#016p} | R8 : {#016p}", regs->rbp, regs->r8);
    log_unlock("R9 : {#016p} | R10: {#016p}", regs->r9, regs->r10);
    log_unlock("R11: {#016p} | R12: {#016p}", regs->r11, regs->r12);
    log_unlock("R13: {#016p} | R14: {#016p}", regs->r13, regs->r14);
    log_unlock("R15: {#016p}", regs->r15);
}
/* Terminal handler for CPU exceptions (vectors 0-31): stops all other
 * cores, dumps diagnostics, and halts this CPU forever.
 *
 * error_lock is acquired but never released -- intentional, since this
 * function does not return.
 *
 * regs: saved trap frame; rsp: raw stack pointer of that same frame. */
static void interrupt_error_handler(Regs *regs, uintptr_t rsp)
{
    lock_acquire(&error_lock);
    smp_stop_all();

    log_unlock("");
    log_unlock("------------------------------------------------------------");
    log_unlock("");
    /* NOTE(review): the format string has a single placeholder but four
       arguments; the trailing rip/rbp/rsp look like leftovers -- confirm
       the fmt implementation ignores surplus arguments. */
    log_unlock("KERNEL PANIC ON CPU N°{}", cpu_self_id(), regs->rip, regs->rbp, rsp);
    log_unlock("");
    log_unlock("{}({}) with error_code={}!", _exception_messages[regs->int_no], regs->int_no, regs->error_code);
    log_unlock("");

    /* task_self() may be null if the fault happens before scheduling starts. */
    if (task_self() != nullptr)
    {
        log_unlock("Running task is {}({})", str$(&task_self()->name), task_self()->id);
        log_unlock("");
    }

    dump_register(regs);

    log_unlock("------------------------------------------------------------");
    log_unlock("");

    /* Park this CPU: interrupts off, halt loop. */
    while (true)
    {
        asm_cli();
        asm_hlt();
    }
}
/*
 * Common C entry point for every interrupt/exception, called from the
 * assembly stubs with the stack pointer of the saved trap frame.
 *
 * rsp: address of the Regs frame pushed by the stub.
 * Returns the stack pointer to resume from (same frame, possibly with a
 * different task's context loaded into it).
 */
uint64_t interrupt_handler(uint64_t rsp)
{
    Regs *regs = (Regs *)rsp;

    cpu_begin_interrupt();

    /* External IRQs live in the remapped 32..48 range; forward them to the
       generic dispatcher (vector 32, the timer, is also handled below). */
    if (regs->int_no >= 32 && regs->int_no <= 48)
    {
        irq_dispatch(regs->int_no - 32);
    }

    if (regs->int_no < 32)
    {
        /* CPU exception: panic. Never returns. */
        interrupt_error_handler(regs, rsp);
    }
    else if (regs->int_no == 32)
    {
        /* Timer tick: account time, then preempt the current task. */
        global()->tick++;

        /* Refresh wall-clock time from the CMOS RTC once every 500 ticks.
           BUGFIX: was `if (global()->tick % 500)`, which is true on 499 of
           every 500 ticks -- the condition was inverted, re-reading the
           slow RTC ports almost every tick and skipping the intended one. */
        if (global()->tick % 500 == 0)
        {
            global()->time = datetime_to_timestamp(cmos_read_rtc());
        }

        context_save(task_self()->context, regs);
        sched_schedule();
        sched_switch();
        context_load(task_self()->context, regs);
        space_switch(task_self()->space);
    }
    else if (regs->int_no == IPI_RESCHED)
    {
        /* Another core requested a reschedule: switch tasks, no timekeeping. */
        context_save(task_self()->context, regs);
        sched_switch();
        context_load(task_self()->context, regs);
        space_switch(task_self()->space);
    }
    else if (regs->int_no == IPI_STOP)
    {
        /* Stop request (e.g. from a panicking core): park this CPU forever. */
        while (true)
        {
            asm_cli();
            asm_hlt();
        }
    }
    else if (regs->int_no == 0xf0)
    {
        log_unlock("Non maskable interrupt from APIC (Possible hardware error).");
    }

    cpu_end_interrupt();

    /* Acknowledge the interrupt to the local APIC. */
    lapic_eoi();

    return rsp;
}
|
daboyuka/PIQUE | include/pique/indexing/impl/index-builder-impl.hpp | /*
* Copyright 2015 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* index-build-impl.hpp
*
* Created on: Jan 28, 2014
* Author: drew
*/
#ifndef _INDEX_BUILD_IMPL_HPP
#define _INDEX_BUILD_IMPL_HPP
#include <iostream>
#include <cassert>
#include <vector>
#include <boost/unordered_map.hpp>
#include <boost/make_shared.hpp>
//#include "pique/util/zo-iter2.hpp"
#include "pique/util/datatypes.hpp"
#include "pique/data/dataset.hpp"
#include "pique/indexing/binned-index.hpp"
#include "pique/region/region-encoding.hpp"
#include "pique/encoding/index-encoding.hpp"
// Template definitions and specializations
// Convenience overload: builds an index over the entire dataset by
// delegating with a subset covering the full grid.
template<typename datatype_t, typename RegionEncoderT, typename BinningSpecificationT>
boost::shared_ptr< BinnedIndex >
IndexBuilder< datatype_t, RegionEncoderT, BinningSpecificationT >::build_index(const Dataset &data) {
    return this->build_index(data, GridSubset(data.get_grid()));
}
template<typename datatype_t>
static boost::shared_ptr< BufferedDatasetStream< datatype_t > > open_buffered_dataset_stream(const Dataset &data, GridSubset subset) {
static constexpr const Datatypes::IndexableDatatypeID DTID = Datatypes::CTypeToDatatypeID< datatype_t >::value;
using TypedDatasetStream = DatasetStream< datatype_t >;
using TypedBufferedDatasetStream = BufferedDatasetStream< datatype_t >;
boost::shared_ptr< AbstractDatasetStream > abstract_datastream = data.open_stream(std::move(subset));
assert(abstract_datastream->get_datatype() == DTID);
// Cast it to the datatype of this index build (check validity of cast)
boost::shared_ptr< TypedDatasetStream > datastream = boost::dynamic_pointer_cast< TypedDatasetStream >(abstract_datastream);
assert(datastream);
// Get a buffered dataset stream (use the current one if it's already buffered, otherwise wrap it in a buffer); this is to amortize virtual function call cost
boost::shared_ptr< TypedBufferedDatasetStream > buffered_datastream = boost::dynamic_pointer_cast< TypedBufferedDatasetStream >(datastream);
if (!buffered_datastream)
buffered_datastream = boost::make_shared< BlockBufferingDatasetStream< datatype_t > >(datastream);
return buffered_datastream;
}
// Builds a flat equality-encoded binned index over a subset of the dataset:
// streams the data, quantizes each value to a bin key, run-length feeds each
// run into that bin's region encoder, then emits the bins in sorted-key order.
template<typename datatype_t, typename RegionEncoderT, typename BinningSpecificationT>
boost::shared_ptr< BinnedIndex >
IndexBuilder< datatype_t, RegionEncoderT, BinningSpecificationT >::build_index(const Dataset &data, GridSubset subset) {
    using IndexableDatatypeID = Datatypes::IndexableDatatypeID;
    //static constexpr const IndexableDatatypeID DTID = Datatypes::CTypeToDatatypeID< datatype_t >::value;
    using bin_id_t = BinnedIndexTypes::bin_id_t;
    using bin_count_t = BinnedIndexTypes::bin_count_t;
    using bin_size_t = BinnedIndexTypes::bin_size_t;
    using region_id_t = BinnedIndexTypes::region_id_t;
    using region_count_t = BinnedIndexTypes::region_count_t;
    using bin_qkey_t = typename BinningSpecificationType::QKeyType;
    using bin_qkey_to_encoder_map_t = boost::unordered_map< bin_qkey_t, RegionEncoderType >; // Because for a flat index, bin ID == region ID
    using TypedDatasetStream = DatasetStream< datatype_t >;
    using TypedBufferedDatasetStream = BufferedDatasetStream< datatype_t >;

    TIME_STATS_TIME_BEGIN(stats.totaltime) // Time everything

    // Builder data structures
    bin_qkey_to_encoder_map_t encoders_by_qkey;

    // Duplicate the binning spec so populate() below does not mutate the builder's own copy
    boost::shared_ptr< BinningSpecificationType > binning_spec_dup = boost::make_shared< BinningSpecificationType >(*this->binning_spec);
    const QuantizationType &quant = binning_spec_dup->get_quantization();
    const QuantizedKeyCompareType &qcompare = binning_spec_dup->get_quantized_key_compare();

    // Open a dataset stream
    boost::shared_ptr< TypedBufferedDatasetStream > buffered_datastream = open_buffered_dataset_stream< datatype_t >(data, std::move(subset));

    // Extract constants for ease of use
    const uint64_t nelem = buffered_datastream->get_element_count();

    // Iterate over all runs of bin-equal values in the dataset
    uint64_t value_pos = 0;

    while (buffered_datastream->has_next()) {
        using buffer_iterator_t = typename TypedBufferedDatasetStream::buffer_iterator_t;
        buffer_iterator_t data_it, data_end_it;
        buffered_datastream->get_buffered_data(data_it, data_end_it);
        ++stats.num_read_buffer_blocks;

        TIME_STATS_TIME_BEGIN(stats.indexingtime)
        while (data_it != data_end_it) {
            // Retrieve the current value, which will begin a new run
            const datatype_t run_val = *data_it;
            const uint64_t run_start_pos = value_pos;

            // Move past this element in preparation to continue scanning
            ++data_it;
            ++value_pos;

            // Compute the corresponding bin header for this run value
            const bin_qkey_t run_qkey = quant.quantize(run_val);
            auto encoder_for_qkey_it = encoders_by_qkey.find(run_qkey);

            // If this is a new bin, also allocate a new bin builder
            if (encoder_for_qkey_it == encoders_by_qkey.end())
                encoder_for_qkey_it = encoders_by_qkey.emplace(std::make_pair(run_qkey, RegionEncoderType(this->encoder_conf, nelem))).first;

            // Move it to the first value != run_val (or the end of the array)
            // NOTE: runs do not extend across buffer boundaries; a run split
            // by a refill is simply inserted as two adjacent bit runs.
            while (data_it != data_end_it) {
                const datatype_t cur_val = *data_it;
                const bin_qkey_t cur_qkey = quant.quantize(cur_val);
                if (cur_qkey != run_qkey)
                    break;

                ++data_it;
                ++value_pos;
            }

            // Compute the run length, then append bits to the run's bin's region encoder
            const uint64_t run_length = value_pos - run_start_pos;
            encoder_for_qkey_it->second.insert_bits(run_start_pos, run_length);
        }
        TIME_STATS_TIME_END()
    }

    stats.iostats += buffered_datastream->get_stats();
    stats.num_bins += encoders_by_qkey.size();

    std::vector< boost::shared_ptr< RegionEncoding > > sorted_regions;

    TIME_STATS_TIME_BEGIN(stats.indexingtime)
    // Construct a vector of all qkeys seen
    std::vector< bin_qkey_t > sorted_bin_qkeys;
    sorted_bin_qkeys.reserve(encoders_by_qkey.size());
    for (auto it = encoders_by_qkey.begin(); it != encoders_by_qkey.end(); ++it)
        sorted_bin_qkeys.push_back(it->first);

    // Sort qkeys to form the final bin list
    std::sort(sorted_bin_qkeys.begin(), sorted_bin_qkeys.end(), [&qcompare](const bin_qkey_t &k1, const bin_qkey_t &k2)->bool { return qcompare.compare(k1, k2); });

    // Load the sorted qkeys into the binning specification as the final bin list
    binning_spec_dup->populate(sorted_bin_qkeys);

    // Collect the region encodings in sorted-bin order
    for (bin_qkey_t bin_qkey : sorted_bin_qkeys) {
        RegionEncoderType &encoder = encoders_by_qkey.find(bin_qkey)->second;

        encoder.finalize();
        boost::shared_ptr< RegionEncoding > bin_region = encoder.to_region_encoding();

        sorted_regions.push_back(bin_region);
    }
    TIME_STATS_TIME_END()

    // Finally, compose the output index to be returned
    boost::shared_ptr< BinnedIndex > index_out =
            boost::make_shared< BinnedIndex >(
                    typeid(datatype_t),
                    nelem,
                    IndexEncoding::get_equality_encoding_instance(),
                    RegionEncodingType::TYPE,
                    binning_spec_dup,
                    sorted_regions);

    return index_out;

    TIME_STATS_TIME_END()
}
/*
template<typename datatype_t, typename RegionEncoderT>
boost::shared_ptr< AbstractBinnedIndex >
IndexBuilder<datatype_t, RegionEncoderT>::build_index_zo(const Dataset<datatype_t> &data, int sigbits) {
typedef typename AbstractBinnedIndex::bin_header_t bin_header_t;
typedef BinnedIndexTypes::bin_count_t bin_count_t;
typedef BinnedIndexTypes::bin_id_t bin_id_t;
typedef BinnedIndexTypes::bin_size_t bin_size_t;
typedef boost::unordered_map<bin_header_t, bin_id_t> bin_header_to_id_map_t;
// Extract constants for ease of use
const int insigbits = (sizeof(datatype_t) << 3) - sigbits;
const int ndim = data.get_grid()->get_ndim();
const uint64_t *dims = data.get_grid()->get_grid()->dims;
const size_t nelem = data.get_grid()->get_npts();
// Builder data structures
// The bin header -> bin ID mapping table for use during construction
bin_header_to_id_map_t bin_header_to_bin_id;
// Bin index CBLQ builders
std::vector<RegionEncoderT> bin_encoders;
std::vector<bin_header_t> bin_headers;
// Iterate over all runs of bin-equal values in the dataset
const std::vector<datatype_t> &data_arr = *data.get_data();
bool first_run = true;
bin_header_t run_bin_header = (type_convert<datatype_t, bin_header_t>(data_arr[0]) >> insigbits) + 1; // != first bin header, will trigger the first run
bin_id_t run_bin;
uint64_t run_start_pos;
uint64_t last_zid;
ZOIterLoopBody loop = [&](uint64_t zid, uint64_t rmoid, uint64_t coords[]) {
// Retrieve the current value, which will begin a new run
const datatype_t cur_val = data_arr[rmoid];
// Compute the corresponding bin header for this run value
const bin_header_t cur_bin_header = type_convert<datatype_t, bin_header_t>(cur_val) >> insigbits;
// If we changed bins OR skipped elements due to out-of-bounds...
if (cur_bin_header != run_bin_header || zid != last_zid + 1) {
// Insert the current run as a bit run (unless this is the first
if (!first_run) {
const bin_size_t run_length_smear = zid - run_start_pos;
const bin_size_t run_length_nosmear = last_zid - run_start_pos + 1;
const bin_size_t run_length = (IndexBuilderModeSmearOutOfBounds< RegionEncodingT >() == true) ? run_length_smear : run_length_nosmear;
bin_encoders[run_bin].insert_bits(run_start_pos, run_length);
} else {
first_run = false;
}
// Start a new run
run_bin_header = cur_bin_header;
run_start_pos = zid;
// Find the bin ID for this bin header, allocating a new bin ID
// if this is its first appearance
const bin_id_t next_bin_id = bin_encoders.size();
std::pair<typename bin_header_to_id_map_t::iterator, bool> bin_id_register =
bin_header_to_bin_id.emplace(run_bin_header, next_bin_id);
// If this is a new bin, also allocate a new bin builder
if (bin_id_register.second) {
bin_encoders.emplace_back(this->encoder_conf, nelem); // emplace = push new region encoder object constructed with an encoder conf and the total elements
bin_headers.push_back(run_bin_header);
}
run_bin = bin_id_register.first->second; // emplace_result.iterator->key(bin_id)
}
last_zid = zid;
};
zo_loop_iterate(ndim, dims, dims, true, loop);
// The last run is always unfinished
const bin_size_t last_run_length = last_zid - run_start_pos + 1;
bin_encoders[run_bin].insert_bits(run_start_pos, last_run_length);
// The output index to be returned
boost::shared_ptr< AbstractBinnedIndex > index_out = boost::make_shared< AbstractBinnedIndex >(sigbits, typeid(datatype_t), AbstractBinnedIndex::BinCompositionType::FLAT);
// All data has been partitioned into index bins. The bins are out of order, and are
// also "unfinalized" (need trailing 0's appended).
auto bin_encoder_it = bin_encoders.begin();
auto bin_header_it = bin_headers.begin();
while (bin_encoder_it != bin_encoders.end()) {
// Finalize and retrieve the bin index
bin_encoder_it->finalize();
boost::shared_ptr< RegionEncoding > bin_index = bin_encoder_it->to_region_encoding();
// Append the bin index to the aggregate output index
index_out->append_bin(*bin_header_it, bin_index);
// Increment to the next bin
bin_header_it++;
bin_encoder_it++;
}
index_out->sort_bins();
return index_out;
}
*/
#endif /* _INDEX_BUILD_IMPL_HPP */
|
ronnyacacio/Comercial-RND | web/src/contexts/auth.js | import React, { useState, useEffect, useContext, createContext } from 'react';
import { toast } from 'react-toastify';
import api from '~/services/api';
const AuthContext = createContext({});
export function AuthProvider({ children }) {
const [provider, setProvider] = useState(null);
useEffect(() => {
async function loadStorageData() {
const storageProvider = localStorage.getItem('@MarketFy:provider');
const storageToken = localStorage.getItem('@MarketFy:token');
if (storageProvider && storageToken) {
api.defaults.headers.Authorization = `Bearer ${storageToken}`;
setProvider(JSON.parse(storageProvider));
}
}
loadStorageData();
}, []);
async function signIn(email, password) {
try {
const response = await api.post('providers/sessions', {
email,
password,
});
const { token } = response.data;
setProvider(response.data.provider);
api.defaults.headers.Authorization = `Bearer ${token}`;
localStorage.setItem(
'@MarketFy:provider',
JSON.stringify(response.data.provider)
);
localStorage.setItem('@MarketFy:token', token);
} catch (err) {
toast.error('Falha na autenticação, verifique seus dados');
}
}
function signOut() {
localStorage.clear();
setProvider(null);
}
return (
<AuthContext.Provider
value={{ signed: !!provider, provider, signIn, signOut }}
>
{children}
</AuthContext.Provider>
);
}
// Convenience hook exposing the auth context value.
export function useAuth() {
  return useContext(AuthContext);
}
|
specialattack/DiscoTek | src/main/java/net/specialattack/forge/discotek/client/model/ModelLightMover.java | package net.specialattack.forge.discotek.client.model;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import net.minecraft.client.model.ModelBase;
import net.minecraft.client.model.ModelRenderer;
@SideOnly(Side.CLIENT)
public class ModelLightMover extends ModelBase {

    /** Housing of the moving-head light: four side walls plus the back panel. */
    public ModelRenderer base;
    /** Front lens plate, rendered separately from the housing. */
    public ModelRenderer lens;

    // Box coordinates are in model units (1/16 block per unit at the default
    // 0.0625F render scale); texture offsets address a 64x64 sheet.
    public ModelLightMover() {
        this.base = new ModelRenderer(this, 0, 0).setTextureSize(64, 64);
        //Four sides. + Back
        //Left
        this.base.setTextureOffset(0, 0);
        this.base.addBox(-4.0F, -3.0F, -3.0F, 1, 6, 5, 0.0F);
        //Right
        this.base.setTextureOffset(12, 0);
        this.base.addBox(3.0F, -3.0F, -3.0F, 1, 6, 5, 0.0F);
        //Top
        this.base.setTextureOffset(24, 0);
        this.base.addBox(-3.0F, -4.0F, -3.0F, 6, 1, 5, 0.0F);
        //Bottom
        this.base.setTextureOffset(24, 6);
        this.base.addBox(-3.0F, 3.0F, -3.0F, 6, 1, 5, 0.0F);
        //Back
        this.base.setTextureOffset(46, 0);
        this.base.addBox(-3.0F, -3.0F, 2.0F, 6, 6, 1);
        this.base.setTextureOffset(46, 7);
        this.base.addBox(-2.0F, -2.0F, 3.0F, 4, 4, 1);

        this.lens = new ModelRenderer(this, 0, 0).setTextureSize(64, 64);
        this.lens.setTextureOffset(0, 11);
        this.lens.addBox(-3.0F, -3.0F, -3.0F, 6, 6, 1);
    }

    /** Renders the housing at the standard 1/16 model scale. */
    public void render() {
        this.base.renderWithRotation(0.0625F);
    }

    /** Renders only the lens plate (e.g. for a separate colored pass). */
    public void renderLens() {
        this.lens.renderWithRotation(0.0625F);
    }

    /** Points both housing and lens at the given pitch/yaw (radians). */
    public void setRotations(float pitch, float yaw) {
        this.base.rotateAngleY = yaw;
        this.base.rotateAngleX = pitch;
        this.lens.rotateAngleY = yaw;
        this.lens.rotateAngleX = pitch;
    }
}
|
camertron/antlr4-ruby-runtime | lib/antlr4/runtime/prediction_mode.rb | require 'antlr4/runtime/flexible_hash_map'
require 'antlr4/runtime/bit_set'
module Antlr4::Runtime
class PredictionMode
SLL = 0
LL = 1
LL_EXACT_AMBIG_DETECTION = 2
class AltAndContextMap < FlexibleHashMap
def initialize
super(AltAndContextConfigEqualityComparator.instance)
end
end
class AltAndContextConfigEqualityComparator
include Singleton
def hash(o)
MurmurHash.hash_int_obj(o.state.state_number, o.context)
end
def equals(a, b)
return true if a == b
return false if a.nil? || b.nil?
a.state.state_number == b.state.state_number && a.context.eql?(b.context)
end
end
def self.has_sll_conflict_terminating_prediction(mode, configs)
return true if all_configs_in_rule_stop_states?(configs)
# pure SLL mode parsing
if mode == PredictionMode::SLL
# Don't bother with combining configs from different semantic
# contexts if we can fail over to full LL costs more time
# since we'll often fail over anyway.
if configs.has_semantic_context
# dup configs, tossing out semantic predicates
dup = ATNConfigSet.new
i = 0
while i < configs.configs.length
cfg = configs.configs[i]
c = ATNConfig.new
c.atn_config5(cfg, SemanticContext::NONE)
dup.add(c)
i += 1
end
configs = dup
end
# now we have combined contexts for configs with dissimilar preds
end
# pure SLL or combined SLL+LL mode parsing
alt_sets = conflicting_alt_subsets(configs)
heuristic = has_conflicting_alt_set?(alt_sets) && !has_state_associated_with_one_alt?(configs)
heuristic
end
def self.has_config_in_rule_stop_state?(configs)
i = 0
while i < configs.configs.length
return true if configs.configs[i].state.is_a? RuleStopState
i += 1
end
false
end
def self.all_configs_in_rule_stop_states?(configs)
i = 0
while i < configs.configs.length
return false unless configs.configs[i].state.is_a? RuleStopState
i += 1
end
true
end
def self.resolves_to_just_one_viable_alt?(altsets)
single_viable_alt(altsets)
end
def self.all_subsets_conflict?(altsets)
!has_non_conflicting_alt_set?(altsets)
end
def self.has_non_conflicting_alt_set?(altsets)
i = 0
while i < altsets.length
alts = altsets[i]
return true if alts.cardinality == 1
i += 1
end
false
end
def self.has_conflicting_alt_set?(altsets)
i = 0
while i < altsets.length
alts = altsets[i]
return true if alts.cardinality > 1
i += 1
end
false
end
def all_subsets_equal?(altsets)
first = nil
i = 0
while i < altsets.length
alt = altsets[i]
if i == 0
first = altsets[0]
else
return false unless alt.eql?(first)
end
i += 1
end
true
end
def self.unique_alt(altsets)
all = get_alts1(altsets)
return all.next_set_bit(0) if all.cardinality == 1
ATN::INVALID_ALT_NUMBER
end
def self.get_alts1(altsets)
all = BitSet.new
i = 0
while i < altsets.length
alts = altsets[i]
all.or(alts)
i += 1
end
all
end
def get_alts2(configs)
alts = BitSet.new
i = 0
while i < configs.length
config = configs[i]
alts.set(config.alt)
i += 1
end
alts
end
def self.conflicting_alt_subsets(configs)
config_to_alts = AltAndContextMap.new
i = 0
while i < configs.configs.length
c = configs.configs[i]
alts = config_to_alts.get(c)
if alts.nil?
alts = BitSet.new
config_to_alts.put(c, alts)
end
alts.set(c.alt)
i += 1
end
config_to_alts.values
end
def self.state_to_alt_map(configs)
m = {}
i = 0
while i < configs.configs.length
c = configs.configs[i]
alts = m[c.state]
if alts.nil?
alts = BitSet.new
m[c.state] = alts
end
alts.set(c.alt)
i += 1
end
m
end
def self.has_state_associated_with_one_alt?(configs)
x = state_to_alt_map(configs)
i = 0
while i < x.values.length
alts = x.values[i]
return true if alts.cardinality == 1
i += 1
end
false
end
def self.single_viable_alt(altsets)
viable_alts = BitSet.new
i = 0
while i < altsets.length
alts = altsets[i]
min_alt = alts.next_set_bit(0)
viable_alts.set(min_alt)
return ATN::INVALID_ALT_NUMBER if viable_alts.cardinality > 1 # more than 1 viable alt
i += 1
end
viable_alts.next_set_bit(0)
end
end
end |
UniconLabs/grouper | grouper-ui/java/src/edu/internet2/middleware/grouper/grouperUi/serviceLogic/UiV2GroupImport.java | <reponame>UniconLabs/grouper<gh_stars>0
/*******************************************************************************
* Copyright 2014 Internet2
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package edu.internet2.middleware.grouper.grouperUi.serviceLogic;
import java.io.InputStreamReader;
import java.io.Reader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.csv.CSVRecord;
import org.apache.commons.fileupload.FileItem;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import edu.internet2.middleware.grouper.Group;
import edu.internet2.middleware.grouper.GroupFinder;
import edu.internet2.middleware.grouper.GrouperSession;
import edu.internet2.middleware.grouper.GrouperSourceAdapter;
import edu.internet2.middleware.grouper.Member;
import edu.internet2.middleware.grouper.SubjectFinder;
import edu.internet2.middleware.grouper.audit.AuditEntry;
import edu.internet2.middleware.grouper.audit.AuditTypeBuiltin;
import edu.internet2.middleware.grouper.exception.GrouperSessionException;
import edu.internet2.middleware.grouper.grouperUi.beans.api.GuiGroup;
import edu.internet2.middleware.grouper.grouperUi.beans.api.GuiSubject;
import edu.internet2.middleware.grouper.grouperUi.beans.json.GuiPaging;
import edu.internet2.middleware.grouper.grouperUi.beans.json.GuiResponseJs;
import edu.internet2.middleware.grouper.grouperUi.beans.json.GuiScreenAction;
import edu.internet2.middleware.grouper.grouperUi.beans.json.GuiScreenAction.GuiMessageType;
import edu.internet2.middleware.grouper.grouperUi.beans.simpleMembershipUpdate.ImportSubjectWrapper;
import edu.internet2.middleware.grouper.grouperUi.beans.ui.GroupContainer;
import edu.internet2.middleware.grouper.grouperUi.beans.ui.GroupImportContainer;
import edu.internet2.middleware.grouper.grouperUi.beans.ui.GroupImportError;
import edu.internet2.middleware.grouper.grouperUi.beans.ui.GroupImportGroupSummary;
import edu.internet2.middleware.grouper.grouperUi.beans.ui.GrouperRequestContainer;
import edu.internet2.middleware.grouper.grouperUi.beans.ui.TextContainer;
import edu.internet2.middleware.grouper.grouperUi.serviceLogic.SimpleMembershipUpdateImportExport.GrouperImportException;
import edu.internet2.middleware.grouper.hibernate.AuditControl;
import edu.internet2.middleware.grouper.hibernate.GrouperTransactionType;
import edu.internet2.middleware.grouper.hibernate.HibernateHandler;
import edu.internet2.middleware.grouper.hibernate.HibernateHandlerBean;
import edu.internet2.middleware.grouper.hibernate.HibernateSession;
import edu.internet2.middleware.grouper.internal.dao.GrouperDAOException;
import edu.internet2.middleware.grouper.internal.dao.QueryOptions;
import edu.internet2.middleware.grouper.internal.util.GrouperUuid;
import edu.internet2.middleware.grouper.j2ee.GrouperRequestWrapper;
import edu.internet2.middleware.grouper.j2ee.GrouperUiRestServlet;
import edu.internet2.middleware.grouper.misc.GrouperSessionHandler;
import edu.internet2.middleware.grouper.privs.AccessPrivilege;
import edu.internet2.middleware.grouper.ui.GrouperUiFilter;
import edu.internet2.middleware.grouper.ui.exceptions.ControllerDone;
import edu.internet2.middleware.grouper.ui.exceptions.NoSessionException;
import edu.internet2.middleware.grouper.ui.tags.GrouperPagingTag2;
import edu.internet2.middleware.grouper.ui.util.GrouperUiConfig;
import edu.internet2.middleware.grouper.ui.util.GrouperUiUserData;
import edu.internet2.middleware.grouper.ui.util.GrouperUiUtils;
import edu.internet2.middleware.grouper.ui.util.ProgressBean;
import edu.internet2.middleware.grouper.userData.GrouperUserDataApi;
import edu.internet2.middleware.grouper.util.GrouperCallable;
import edu.internet2.middleware.grouper.util.GrouperFuture;
import edu.internet2.middleware.grouper.util.GrouperUtil;
import edu.internet2.middleware.grouperClient.collections.MultiKey;
import edu.internet2.middleware.grouperClient.util.ExpirableCache;
import edu.internet2.middleware.subject.Subject;
import edu.internet2.middleware.subject.SubjectNotUniqueException;
import edu.internet2.middleware.subject.SubjectUtils;
/**
* operations in the group screen
* @author mchyzer
*
*/
public class UiV2GroupImport {
/** logger */
protected static final Log LOG = LogFactory.getLog(UiV2GroupImport.class);
  /**
   * Ajax handler: validate a free-form list of entity ids/identifiers pasted
   * into the group import screen. Resolves each entry to a subject (in one
   * source or all sources), stashes the resolved subjects in the request
   * container for the JSP, and reports any entries that could not be found.
   * At most 100 entries are validated per call.
   * @param request
   * @param response
   */
  public void groupImportValidateList(HttpServletRequest request, HttpServletResponse response) {

    final Subject loggedInSubject = GrouperUiFilter.retrieveSubjectLoggedIn();

    GrouperSession grouperSession = null;

    try {

      grouperSession = GrouperSession.start(loggedInSubject);

      GuiResponseJs guiResponseJs = GuiResponseJs.retrieveGuiResponseJs();

      String entityList = StringUtils.defaultString(request.getParameter("entityList"));

      //split trim by comma, semi, or whitespace
      entityList = StringUtils.replace(entityList, ",", " ");
      entityList = StringUtils.replace(entityList, ";", " ");

      String[] entityIdOrIdentifiers = GrouperUtil.splitTrim(entityList, null, true);

      // nothing entered: validation error on the textarea
      if (GrouperUtil.length(entityIdOrIdentifiers) == 0) {
        guiResponseJs.addAction(GuiScreenAction.newValidationMessage(GuiMessageType.error,
            "#entityListId",
            TextContainer.retrieveFromRequest().getText().get("groupImportNoEntitiesSpecified")));
        return;
      }

      // hard cap to keep subject resolution fast enough for an ajax call
      if (GrouperUtil.length(entityIdOrIdentifiers) > 100) {

        guiResponseJs.addAction(GuiScreenAction.newMessage(GuiMessageType.error,
            TextContainer.retrieveFromRequest().getText().get("groupImportTooManyEntitiesToValidate")));
        return;

      }

      //extra source ids and subjects ids
      Set<GuiSubject> extraGuiSubjects = new LinkedHashSet<GuiSubject>();
      GrouperRequestContainer.retrieveFromRequestOrCreate().getGroupImportContainer().setGroupImportExtraGuiSubjects(extraGuiSubjects);

      String source = request.getParameter("searchEntitySourceName");

      List<String> entityIdOrIdentifiersList = new ArrayList<String>(Arrays.asList(GrouperUtil.nonNull(
          entityIdOrIdentifiers, String.class)));

      // resolve in one source, or in all sources when "all" was selected
      Map<String, Subject> entityIdOrIdentifierMap = null;

      if (StringUtils.equals("all", source)) {
        entityIdOrIdentifierMap = SubjectFinder.findByIdsOrIdentifiers(entityIdOrIdentifiersList);
      } else {
        entityIdOrIdentifierMap = SubjectFinder.findByIdsOrIdentifiers(entityIdOrIdentifiersList, source);
      }

      //lets add all the subjects
      for (Subject subject : GrouperUtil.nonNull(entityIdOrIdentifierMap).values()) {
        extraGuiSubjects.add(new GuiSubject(subject));
      }

      //lets see which are missing
      entityIdOrIdentifiersList.removeAll(GrouperUtil.nonNull(entityIdOrIdentifierMap).keySet());

      if (entityIdOrIdentifiersList.size() > 0) {

        GrouperRequestContainer.retrieveFromRequestOrCreate().getGroupImportContainer().setEntityIdsNotFound(GrouperUtil.join(entityIdOrIdentifiersList.iterator(), ", "));

        guiResponseJs.addAction(GuiScreenAction.newMessage(GuiMessageType.error,
            TextContainer.retrieveFromRequest().getText().get("groupImportEntityIdsNotFound")));

      }

      //clear out combobox
      guiResponseJs.addAction(GuiScreenAction.newScript(
          "dijit.byId('groupAddMemberComboId').set('displayedValue', ''); " +
          "dijit.byId('groupAddMemberComboId').set('value', '');"));

      //select the option for enter in list
      guiResponseJs.addAction(GuiScreenAction.newFormFieldValue("bulkAddOptions", "input"));

      //fill in the extra subjects
      guiResponseJs.addAction(GuiScreenAction.newInnerHtmlFromJsp("#groupImportExtraMembersDivId",
          "/WEB-INF/grouperUi2/groupImport/groupImportExtraSubjects.jsp"));

    } finally {
      GrouperSession.stopQuietly(grouperSession);
    }
  }
  /**
   * Stream a group membership export (CSV) to the browser. The group id and
   * the export mode ("all" fields or subject "ids" only) are parsed from the
   * URL path segments rather than request parameters.
   * @param request
   * @param response
   */
  public void groupExportSubmit(HttpServletRequest request, HttpServletResponse response) {

    final Subject loggedInSubject = GrouperUiFilter.retrieveSubjectLoggedIn();

    GrouperSession grouperSession = null;

    Group group = null;

    try {

      grouperSession = GrouperSession.start(loggedInSubject);

      List<String> urlStrings = GrouperUiRestServlet.extractUrlStrings(request);

      //groupId=721e4e8ae6e54c4087db092f0a6372f7
      String groupIdString = urlStrings.get(2);

      String groupId = GrouperUtil.prefixOrSuffix(groupIdString, "=", false);

      group = GroupFinder.findByUuid(grouperSession, groupId, false);

      if (group == null) {
        throw new RuntimeException("Cant find group by id: " + groupId);
      }

      GroupContainer groupContainer = GrouperRequestContainer.retrieveFromRequestOrCreate().getGroupContainer();

      GroupImportContainer groupImportContainer = GrouperRequestContainer.retrieveFromRequestOrCreate().getGroupImportContainer();

      groupContainer.setGuiGroup(new GuiGroup(group));

      // exporting memberships requires READ on the group
      if (!groupContainer.isCanRead()) {
        throw new RuntimeException("Cant read group: " + group.getName());
      }

      //ids
      String groupExportOptions = urlStrings.get(3);

      boolean exportAll = false;

      if (StringUtils.equals("all", groupExportOptions)) {
        groupImportContainer.setExportAll(true);
        exportAll = true;
      } else if (StringUtils.equals("ids", groupExportOptions)) {
        groupImportContainer.setExportAll(false);
      } else {
        throw new RuntimeException("Not expecting group-export-options value: '" + groupExportOptions + "'");
      }

      //groupExportSubjectIds_removeAllMembers.csv
      // the trailing file name segment only shapes the browser's download name
      @SuppressWarnings("unused")
      String fileName = urlStrings.get(4);

      if (exportAll) {
        String headersCommaSeparated = GrouperUiConfig.retrieveConfig().propertyValueString(
            "uiV2.group.exportAllSubjectFields");

        String exportAllSortField = GrouperUiConfig.retrieveConfig().propertyValueString(
            "uiV2.group.exportAllSortField");

        SimpleMembershipUpdateImportExport.exportGroupAllFieldsToBrowser(group, headersCommaSeparated, exportAllSortField, false);
      } else {
        SimpleMembershipUpdateImportExport.exportGroupSubjectIdsCsv(group, false);
      }

      // remember this group in the user's "recently used" list
      GrouperUserDataApi.recentlyUsedGroupAdd(GrouperUiUserData.grouperUiGroupNameForUserData(),
          loggedInSubject, group);

    } finally {
      GrouperSession.stopQuietly(grouperSession);
    }
  }
/**
* export group members screen
* @param request
* @param response
*/
public void groupExport(HttpServletRequest request, HttpServletResponse response) {
final Subject loggedInSubject = GrouperUiFilter.retrieveSubjectLoggedIn();
GrouperSession grouperSession = null;
Group group = null;
try {
grouperSession = GrouperSession.start(loggedInSubject);
group = UiV2Group.retrieveGroupHelper(request, AccessPrivilege.READ).getGroup();
if (group == null) {
return;
}
GuiResponseJs guiResponseJs = GuiResponseJs.retrieveGuiResponseJs();
guiResponseJs.addAction(GuiScreenAction.newInnerHtmlFromJsp("#grouperMainContentDivId",
"/WEB-INF/grouperUi2/groupImport/groupExport.jsp"));
} finally {
GrouperSession.stopQuietly(grouperSession);
}
}
  /**
   * Ajax handler for toggling the export type on the export screen: records
   * whether to export all fields or subject ids only, then re-renders the
   * form buttons (which embed the chosen mode in the download link).
   * @param request
   * @param response
   */
  public void groupExportTypeChange(HttpServletRequest request, HttpServletResponse response) {

    final Subject loggedInSubject = GrouperUiFilter.retrieveSubjectLoggedIn();

    GrouperSession grouperSession = null;

    Group group = null;

    try {

      grouperSession = GrouperSession.start(loggedInSubject);

      group = UiV2Group.retrieveGroupHelper(request, AccessPrivilege.READ).getGroup();

      if (group == null) {
        return;
      }

      // checkbox-style parameter name from the screen form
      String groupExportOptions = request.getParameter("group-export-options[]");

      GroupImportContainer groupImportContainer = GrouperRequestContainer.retrieveFromRequestOrCreate().getGroupImportContainer();

      if (StringUtils.equals("all", groupExportOptions)) {
        groupImportContainer.setExportAll(true);
      } else if (StringUtils.equals("ids", groupExportOptions)) {
        groupImportContainer.setExportAll(false);
      } else {
        throw new RuntimeException("Not expecting group-export-options value: '" + groupExportOptions + "'");
      }

      GuiResponseJs guiResponseJs = GuiResponseJs.retrieveGuiResponseJs();

      guiResponseJs.addAction(GuiScreenAction.newInnerHtmlFromJsp("#formActionsDivId",
          "/WEB-INF/grouperUi2/groupImport/groupExportButtons.jsp"));

    } finally {
      GrouperSession.stopQuietly(grouperSession);
    }
  }
/**
 * setup the extra groups (other than combobox), and maybe move the combobox down
 * @param loggedInSubject subject logged in; group lookups are privilege-filtered for this subject
 * @param request
 * @param guiResponseJs response to append validation messages to
 * @param considerRemoveGroupId if removing one (reads the "removeGroupId" request parameter)
 * @param includeCombobox if the combobox selection should be moved into the extra-groups list
 * @param allGroups, pass in a blank linked hash set, and all groups will be populated including combobox
 * @param errorOnNullCombobox true if an error should appear if there is nothing in the combobox
 * @return true if ok, false if not
 */
private boolean groupImportSetupExtraGroups(Subject loggedInSubject,
    HttpServletRequest request, GuiResponseJs guiResponseJs, boolean considerRemoveGroupId, boolean includeCombobox,
    Set<Group> allGroups, boolean errorOnNullCombobox) {

  // list rendered by the "extra groups" JSP; stored on the request-scoped container
  Set<GuiGroup> extraGuiGroups = new LinkedHashSet<GuiGroup>();
  GrouperRequestContainer.retrieveFromRequestOrCreate().getGroupImportContainer().setGroupImportExtraGuiGroups(extraGuiGroups);

  String removeGroupId = null;

  //if removing a group id
  if (considerRemoveGroupId) {
    removeGroupId = request.getParameter("removeGroupId");
    if (StringUtils.isBlank(removeGroupId)) {
      throw new RuntimeException("Why would removeGroupId be empty????");
    }
  }

  //if moving combobox down to extra list or getting all groups
  String comboValue = request.getParameter("groupImportGroupComboName");

  if (StringUtils.isBlank(comboValue)) {
    //if didnt pick one from results
    comboValue = request.getParameter("groupImportGroupComboNameDisplay");
  }

  // lookup by uuid or name; requires UPDATE privilege for the logged-in subject
  Group theGroup = StringUtils.isBlank(comboValue) ? null : new GroupFinder()
      .assignPrivileges(AccessPrivilege.UPDATE_PRIVILEGES)
      .assignSubject(loggedInSubject)
      .assignFindByUuidOrName(true).assignScope(comboValue).findGroup();

  boolean success = true;

  if (theGroup == null) {
    if (includeCombobox && errorOnNullCombobox) {
      guiResponseJs.addAction(GuiScreenAction.newValidationMessage(GuiMessageType.error,
          "#groupImportGroupComboErrorId",
          TextContainer.retrieveFromRequest().getText().get("groupImportGroupNotFound")));
      success = false;
    }
  } else {
    if (includeCombobox) {
      extraGuiGroups.add(new GuiGroup(theGroup));
    }
    //always add to all groups
    allGroups.add(theGroup);
  }

  //loop through all the hidden fields (max 100)
  //TODO cant this loop and the above logic be collapsed?
  for (int i=0;i<100;i++) {

    String extraGroupId = request.getParameter("extraGroupId_" + i);

    //we are at the end
    if (StringUtils.isBlank(extraGroupId)) {
      break;
    }

    //might be removing this one
    if (considerRemoveGroupId && StringUtils.equals(removeGroupId, extraGroupId)) {
      continue;
    }

    // NOTE(review): findGroup() can return null here (group deleted or UPDATE privilege lost
    // since the page was rendered); a null would be wrapped in GuiGroup and added to
    // allGroups — confirm downstream code tolerates that
    theGroup = new GroupFinder()
        .assignPrivileges(AccessPrivilege.UPDATE_PRIVILEGES)
        .assignSubject(loggedInSubject)
        .assignFindByUuidOrName(true).assignScope(extraGroupId).findGroup();
    extraGuiGroups.add(new GuiGroup(theGroup));

    //always add to all groups
    allGroups.add(theGroup);
  }
  return success;
}
/**
 * keep an expirable cache of import progress for 5 hours (longest an import is expected);
 * the constructor argument 300 is presumably in minutes (300 min = 5 h) — matches the
 * ExpirableCache convention, confirm against its javadoc.
 * This has multikey of session id and some random uuid
 * uniquely identifies this import as opposed to other imports in other tabs. This cannot have any request objects or j2ee objects
 */
private static ExpirableCache<MultiKey, GroupImportContainer> importThreadProgress = new ExpirableCache<MultiKey, GroupImportContainer>(300);
/**
 * submit a group import: bulk add (or remove) members for one or more groups.
 * Gathers target groups and subjects from the request (CSV file upload, combobox
 * entry, or free-form id list), validates privileges, then runs
 * groupImportSubmitHelper — optionally on a background thread whose progress the
 * browser polls via groupImportReportStatus.
 * @param request
 * @param response
 */
public void groupImportSubmit(HttpServletRequest request, HttpServletResponse response) {

  Map<String, Object> debugMap = new LinkedHashMap<String, Object>();

  long startNanos = System.nanoTime();

  debugMap.put("method", "groupImportSubmit");

  try {

    final Subject loggedInSubject = GrouperUiFilter.retrieveSubjectLoggedIn();

    GrouperRequestContainer grouperRequestContainer = GrouperRequestContainer.retrieveFromRequestOrCreate();

    final GroupImportContainer groupImportContainer = grouperRequestContainer.getGroupImportContainer();

    String sessionId = request.getSession().getId();

    debugMap.put("sessionId", GrouperUtil.abbreviate(sessionId, 8));

    // uniquely identifies this import as opposed to other imports in other tabs
    String uniqueImportId = GrouperUuid.getUuid();

    debugMap.put("uniqueImportId", GrouperUtil.abbreviate(uniqueImportId, 8));

    groupImportContainer.setUniqueImportId(uniqueImportId);

    // register the container so later status-polling requests can find this import's progress
    MultiKey reportMultiKey = new MultiKey(sessionId, uniqueImportId);

    importThreadProgress.put(reportMultiKey, groupImportContainer);

    GrouperSession grouperSession = null;

    GuiResponseJs guiResponseJs = GuiResponseJs.retrieveGuiResponseJs();

    final String bulkAddOption = request.getParameter("bulkAddOptions");

    //TODO should this be called "groupsTheUserCanUpdate" ?
    final Set<Group> groups = new LinkedHashSet<Group>();

    final Set<Subject> subjectSet = new LinkedHashSet<Subject>();

    // unresolvable subject id/identifier -> row index (the index value may be null, see "list" branch)
    final Map<String, Integer> listInvalidSubjectIdsAndRow = new LinkedHashMap<String, Integer>();

    final boolean importReplaceMembers = GrouperUtil.booleanValue(request.getParameter("replaceExistingMembers"), false);

    final boolean removeMembers = GrouperUtil.booleanValue(request.getParameter("removeMembers"), false);

    // single-element array holders so the anonymous callable below can write/read them
    final Object[] csvEntriesObject = new Object[1];

    final String[] fileName = new String[1];

    try {

      grouperSession = GrouperSession.start(loggedInSubject);

      boolean success = groupImportSetupExtraGroups(loggedInSubject, request, guiResponseJs, false, true, groups, false);

      if (!success) {
        //error message already shown
        return;
      }

      debugMap.put("groups", groups.size());

      if (groups.size() == 0) {
        guiResponseJs.addAction(GuiScreenAction.newValidationMessage(GuiMessageType.error,
            "#groupImportGroupComboErrorId",
            TextContainer.retrieveFromRequest().getText().get("groupImportGroupNotFound")));
        return;
      }

      // can be import, input, list
      debugMap.put("bulkAddOption", bulkAddOption);

      if (StringUtils.equals(bulkAddOption, "import")) {

        // CSV file upload; parse it now, resolve subjects later in the helper
        GrouperRequestWrapper grouperRequestWrapper = (GrouperRequestWrapper)request;

        FileItem importCsvFile = grouperRequestWrapper.getParameterFileItem("importCsvFile");

        if (importCsvFile == null) {
          guiResponseJs.addAction(GuiScreenAction.newValidationMessage(GuiMessageType.error,
              "#importCsvFileId",
              TextContainer.retrieveFromRequest().getText().get("groupImportUploadFile")));
          return;
        }

        Reader reader = null;

        reader = new InputStreamReader(importCsvFile.getInputStream());

        fileName[0] = StringUtils.defaultString(importCsvFile == null ? "" : importCsvFile.getName());

        try {
          List<CSVRecord> csvEntries = SimpleMembershipUpdateImportExport.parseCsvImportFileToCsv(reader, fileName[0]);
          debugMap.put("csvEntries", GrouperUtil.length(csvEntries));
          csvEntriesObject[0] = csvEntries;
        } catch (GrouperImportException gie) {
          if (LOG.isDebugEnabled()) {
            LOG.debug("error in import", gie);
          }
          guiResponseJs.addAction(GuiScreenAction.newValidationMessage(GuiMessageType.error,
              "#importCsvFileId", GrouperUtil.xmlEscape(gie.getMessage())));
          return;
        }
      } else if (StringUtils.equals(bulkAddOption, "input")) {

        //combobox
        success = groupImportSetupExtraSubjects(loggedInSubject, request, guiResponseJs, false, true, subjectSet, false);

        if (!success) {
          //error message already shown
          return;
        }

        if (subjectSet.size() == 0) {
          guiResponseJs.addAction(GuiScreenAction.newValidationMessage(GuiMessageType.error,
              "#groupAddMemberComboErrorId",
              TextContainer.retrieveFromRequest().getText().get("groupImportSubjectNotFound")));
          return;
        }
      } else if (StringUtils.equals(bulkAddOption, "list")) {

        String entityList = StringUtils.defaultString(request.getParameter("entityList"));

        //split trim by comma, semi, or whitespace
        entityList = StringUtils.replace(entityList, ",", " ");
        entityList = StringUtils.replace(entityList, ";", " ");

        String[] entityIdOrIdentifiers = GrouperUtil.splitTrim(entityList, null, true);

        debugMap.put("entityIdOrIdentifiers", GrouperUtil.length(entityIdOrIdentifiers));

        if (GrouperUtil.length(entityIdOrIdentifiers) == 0) {
          guiResponseJs.addAction(GuiScreenAction.newValidationMessage(GuiMessageType.error,
              "#entityListId",
              TextContainer.retrieveFromRequest().getText().get("groupImportNoEntitiesSpecified")));
          return;
        }

        String source = request.getParameter("searchEntitySourceName");

        List<String> entityIdOrIdentifiersList = new ArrayList<String>(Arrays.asList(GrouperUtil.nonNull(
            entityIdOrIdentifiers, String.class)));

        Map<String, Subject> entityIdOrIdentifierMap = null;

        // "all" searches every subject source; otherwise restrict to the selected source
        if (StringUtils.equals("all", source)) {
          entityIdOrIdentifierMap = SubjectFinder.findByIdsOrIdentifiers(entityIdOrIdentifiersList);
        } else {
          entityIdOrIdentifierMap = SubjectFinder.findByIdsOrIdentifiers(entityIdOrIdentifiersList, source);
        }

        //lets add all the subjects
        subjectSet.addAll(GrouperUtil.nonNull(entityIdOrIdentifierMap).values());

        //lets see which are missing
        List<String> originalIdList = new ArrayList<String>(entityIdOrIdentifiersList);

        //lets see which are missing
        entityIdOrIdentifiersList.removeAll(GrouperUtil.nonNull(entityIdOrIdentifierMap).keySet());

        //keep trac of the index of the invalid ids
        for (String invalidId : entityIdOrIdentifiersList) {
          int index = originalIdList.indexOf(invalidId);
          listInvalidSubjectIdsAndRow.put(invalidId, index == -1 ? null : index);
        }

      } else {
        throw new RuntimeException("Not expecting bulk add option: " + bulkAddOption);
      }

      {
        // if launched from a group screen, remember it so the report can link back
        Group group = UiV2Group.retrieveGroupHelper(request, AccessPrivilege.UPDATE, false).getGroup();
        if (group != null) {
          groupImportContainer.setImportFromGroup(true);
          groupImportContainer.setGroupId(group.getId());
        }
      }

      {
        // if launched from a subject screen, remember it so the report can link back
        Subject subject = UiV2Subject.retrieveSubjectHelper(request, false);
        if (subject != null) {
          groupImportContainer.setImportFromSubject(true);
          groupImportContainer.setSubjectId(subject.getId());
          groupImportContainer.setSourceId(subject.getSourceId());
        }
      }

      // replace-existing and remove-members are mutually exclusive options
      if (importReplaceMembers && removeMembers) {
        guiResponseJs.addAction(GuiScreenAction.newValidationMessage(GuiMessageType.error,
            "#replaceExistingMembersId",
            TextContainer.retrieveFromRequest().getText().get("groupImportCantReplaceAndRemove")));
        return;
      }

      Iterator<Group> groupIterator = groups.iterator();

      //TODO first off, why checking VIEW? should it be READ? or just UPDATE?
      //TODO second, are groups not checked for UPDATE above in groupImportSetupExtraGroups()? or is it just groups added from gruop screen?
      //lets go through the groups that were submitted
      while (groupIterator.hasNext()) {

        final Group group = groupIterator.next();

        {
          //remove groups that cannot be viewed
          boolean canView = (Boolean)GrouperSession.callbackGrouperSession(
              GrouperSession.staticGrouperSession().internal_getRootSession(), new GrouperSessionHandler() {

                @Override
                public Object callback(GrouperSession grouperSession) throws GrouperSessionException {
                  return group.canHavePrivilege(loggedInSubject, AccessPrivilege.VIEW.getName(), false);
                }
              });
          if (!canView) {
            guiResponseJs.addAction(GuiScreenAction.newMessage(GuiMessageType.error,
                TextContainer.retrieveFromRequest().getText().get("groupImportGroupCantView")));
            groupIterator.remove();
            continue;
          }
        }

        {
          //give error if cant update
          boolean canUpdate = (Boolean)GrouperSession.callbackGrouperSession(
              GrouperSession.staticGrouperSession().internal_getRootSession(), new GrouperSessionHandler() {

                @Override
                public Object callback(GrouperSession grouperSession) throws GrouperSessionException {
                  return group.canHavePrivilege(loggedInSubject, AccessPrivilege.UPDATE.getName(), false);
                }
              });
          if (!canUpdate) {
            // NOTE(review): unlike the canView branch above, the group is NOT removed from the
            // iterator here, so the import below will still attempt it (each addMember would
            // then fail per-subject) — confirm this is intended
            guiResponseJs.addAction(GuiScreenAction.newMessage(GuiMessageType.error,
                TextContainer.retrieveFromRequest().getText().get("groupImportGroupCantUpdate")));
            continue;
          }
        }
      }

    } catch (Exception e) {
      throw new RuntimeException("error", e);
    } finally {
      GrouperSession.stopQuietly(grouperSession);
    }

    // the actual import work, runnable inline or on a worker thread
    GrouperCallable<Void> grouperCallable = new GrouperCallable<Void>("groupImportMembers") {

      @Override
      public Void callLogic() {
        try {
          groupImportContainer.getProgressBean().setStartedMillis(System.currentTimeMillis());
          UiV2GroupImport.this.groupImportSubmitHelper(loggedInSubject, groupImportContainer, groups, subjectSet,
              listInvalidSubjectIdsAndRow, removeMembers, importReplaceMembers, bulkAddOption, fileName[0], (List<CSVRecord>)csvEntriesObject[0]);
        } catch (RuntimeException re) {
          groupImportContainer.getProgressBean().setHasException(true);
          // log this since the thread will just end and will never get logged
          LOG.error("error", re);
        } finally {
          // we done
          groupImportContainer.getProgressBean().setComplete(true);
        }
        return null;
      }
    };

    // see if running in thread
    boolean useThreads = GrouperUiConfig.retrieveConfig().propertyValueBooleanRequired("grouperUi.import.useThread");

    debugMap.put("useThreads", useThreads);

    if (useThreads) {

      GrouperFuture<Void> grouperFuture = GrouperUtil.executorServiceSubmit(GrouperUtil.retrieveExecutorService(), grouperCallable);

      // give short imports a chance to finish before falling back to progress polling
      Integer waitForCompleteForSeconds = GrouperUiConfig.retrieveConfig().propertyValueInt("grouperUi.import.progressStartsInSeconds");

      debugMap.put("waitForCompleteForSeconds", waitForCompleteForSeconds);

      GrouperFuture.waitForJob(grouperFuture, waitForCompleteForSeconds);

      debugMap.put("threadAlive", !grouperFuture.isDone());

    } else {
      grouperCallable.callLogic();
    }

    guiResponseJs.addAction(GuiScreenAction.newInnerHtmlFromJsp("#grouperMainContentDivId",
        "/WEB-INF/grouperUi2/groupImport/groupImportReportWrapper.jsp"));

    groupImportReportStatusHelper(sessionId, uniqueImportId);

  } catch (RuntimeException re) {
    debugMap.put("exception", GrouperUtil.getFullStackTrace(re));
    throw re;
  } finally {
    if (LOG.isDebugEnabled()) {
      debugMap.put("tookMillis", (System.nanoTime()-startNanos)/1000000);
      LOG.debug(GrouperUtil.mapToString(debugMap));
    }
  }
}
/**
 * ajax endpoint polled by the report screen: get the status of a report
 * @param request
 * @param response
 */
public void groupImportReportStatus(HttpServletRequest request, HttpServletResponse response) {
  // delegate to the helper with the two keys that identify this import's progress entry
  groupImportReportStatusHelper(request.getSession().getId(), request.getParameter("uniqueImportId"));
}
/**
 * get the status of a report and re-render the report section; keeps re-scheduling
 * itself via javascript until the background import completes or throws.
 * @param sessionId http session id (half of the progress cache multikey)
 * @param uniqueImportId identifies this import as opposed to other imports in other tabs
 */
private void groupImportReportStatusHelper(String sessionId, String uniqueImportId) {

  Map<String, Object> debugMap = new LinkedHashMap<String, Object>();

  debugMap.put("method", "groupImportReportStatus");
  debugMap.put("sessionId", GrouperUtil.abbreviate(sessionId, 8));
  debugMap.put("uniqueImportId", GrouperUtil.abbreviate(uniqueImportId, 8));

  long startNanos = System.nanoTime();

  try {
    GuiResponseJs guiResponseJs = GuiResponseJs.retrieveGuiResponseJs();

    MultiKey reportMultiKey = new MultiKey(sessionId, uniqueImportId);

    GroupImportContainer groupImportContainer = importThreadProgress.get(reportMultiKey);

    GrouperRequestContainer.retrieveFromRequestOrCreate().setGroupImportContainer(groupImportContainer);

    //show the report screen
    guiResponseJs.addAction(GuiScreenAction.newInnerHtmlFromJsp("#id_"+uniqueImportId,
        "/WEB-INF/grouperUi2/groupImport/groupImportReport.jsp"));

    //    guiResponseJs.addAction(GuiScreenAction.newScript("guiScrollTop()"));

    // container is null when the cache entry expired or was cleared after completion;
    // BUGFIX: the progress debug lines used to run before this null check and could NPE
    if (groupImportContainer != null) {

      debugMap.put("percentComplete", groupImportContainer.getProgressBean().getPercentComplete());
      debugMap.put("progressCompleteRecords", groupImportContainer.getProgressBean().getProgressCompleteRecords());
      debugMap.put("progressTotalRecords", groupImportContainer.getProgressBean().getProgressTotalRecords());

      // endless loop?
      if (groupImportContainer.getProgressBean().isThisLastStatus()) {
        return;
      }

      if (groupImportContainer.getProgressBean().isHasException()) {
        guiResponseJs.addAction(GuiScreenAction.newMessage(GuiMessageType.error,
            TextContainer.retrieveFromRequest().getText().get("groupImportException")));
        // it has an exception, leave it be
        importThreadProgress.put(reportMultiKey, null);
        return;
      }

      // kick it off again?
      debugMap.put("complete", groupImportContainer.getProgressBean().isComplete());

      if (!groupImportContainer.getProgressBean().isComplete()) {
        // schedule another poll after the configured delay (at least 1 second)
        int progressRefreshSeconds = GrouperUiConfig.retrieveConfig().propertyValueInt("grouperUi.import.progressRefreshSeconds");
        progressRefreshSeconds = Math.max(progressRefreshSeconds, 1);
        progressRefreshSeconds *= 1000;
        guiResponseJs.addAction(GuiScreenAction.newScript("setTimeout(function() {ajax('../app/UiV2GroupImport.groupImportReportStatus?uniqueImportId=" + uniqueImportId + "')}, " + progressRefreshSeconds + ")"));
      } else {
        // it is complete, leave it be
        importThreadProgress.put(reportMultiKey, null);
      }
    }
  } catch (RuntimeException re) {
    debugMap.put("exception", GrouperUtil.getFullStackTrace(re));
    throw re;
  } finally {
    if (LOG.isDebugEnabled()) {
      debugMap.put("tookMillis", (System.nanoTime()-startNanos)/1000000);
      LOG.debug(GrouperUtil.mapToString(debugMap));
    }
  }
}
/**
 * method to do logic for import submit (note, dont use anything related to session here —
 * this may run on a background worker thread, see groupImportSubmit)
 * @param loggedInSubject subject performing the import; a GrouperSession is started for them
 * @param groupImportContainer holds the progress bean and per-group summaries for the report screen
 * @param groups target groups
 * @param subjectSet subjects to add (or remove, when removeMembers); populated from CSV here if empty
 * @param listInvalidSubjectIdsAndRow unresolvable subject label -> row index (value may be null)
 * @param removeMembers true to remove the subjects instead of adding them
 * @param importReplaceMembers true to also delete pre-existing members not in the import
 * @param bulkAddOption import|input|list; audit entry is only written for "import"
 * @param fileName uploaded CSV file name, for the audit entry
 * @param csvEntries parsed CSV records, or null when not a file import
 */
private void groupImportSubmitHelper(final Subject loggedInSubject, final GroupImportContainer groupImportContainer,
    final Set<Group> groups, final Set<Subject> subjectSet, Map<String, Integer> listInvalidSubjectIdsAndRow,
    boolean removeMembers, boolean importReplaceMembers, String bulkAddOption, String fileName, List<CSVRecord> csvEntries) {

  Map<String, Object> debugMap = new LinkedHashMap<String, Object>();

  debugMap.put("method", "groupImportSubmit");

  GrouperSession grouperSession = null;

  // throttle between membership operations (sleep after each add/delete below)
  int pauseBetweenRecordsMillis = GrouperUiConfig.retrieveConfig().propertyValueIntRequired("grouperUi.import.pauseInBetweenRecordsMillis");

  try {
    grouperSession = GrouperSession.start(loggedInSubject);

    ProgressBean progressBean = groupImportContainer.getProgressBean();

    // file-import path: resolve the subjects from the parsed CSV now
    if (GrouperUtil.length(subjectSet) == 0 && csvEntries != null) {
      subjectSet.addAll(SimpleMembershipUpdateImportExport.parseCsvImportFile(csvEntries, new ArrayList<String>(),
          listInvalidSubjectIdsAndRow, true));
    }

    Iterator<Group> groupIterator = groups.iterator();

    Set<GuiGroup> guiGroups = new LinkedHashSet<GuiGroup>();

    groupImportContainer.setGuiGroups(guiGroups);

    // total work = one record per (group, subject) pair
    progressBean.setProgressTotalRecords(GrouperUtil.length(groups) * GrouperUtil.length(subjectSet));

    //lets go through the groups that were submitted
    while (groupIterator.hasNext()) {

      final Group group = groupIterator.next();

      guiGroups.add(new GuiGroup(group));

      GroupImportGroupSummary groupImportGroupSummary = new GroupImportGroupSummary();

      groupImportContainer.getGroupImportGroupSummaryForGroupMap().put(group, groupImportGroupSummary);

      List<Member> existingMembers = new ArrayList<Member>(GrouperUtil.nonNull(group.getImmediateMembers()));

      List<Subject> subjectList = new ArrayList<Subject>(GrouperUtil.nonNull(subjectSet));

      groupImportGroupSummary.setGroupCountOriginal(GrouperUtil.length(existingMembers));

      // members already in the group; presumably also pruned from subjectList by the helper
      // (the progress math below relies on that) — confirm against GrouperUiUtils
      List<Member> overlappingMembers = new ArrayList<Member>(GrouperUtil.nonNull(GrouperUiUtils.removeOverlappingSubjects(existingMembers, subjectList)));

      // figure out subject not founds
      if (listInvalidSubjectIdsAndRow.size() > 0) {

        for (String subjectLabel : listInvalidSubjectIdsAndRow.keySet()) {

          // NOTE(review): the map value can be null (the caller stores null when the id was
          // not found in the original list) — unboxing to int here would NPE; confirm
          int rowNumber = listInvalidSubjectIdsAndRow.get(subjectLabel);

          GroupImportError groupImportError = new GroupImportError(subjectLabel, TextContainer.retrieveFromRequest().getText().get(
              "groupImportProblemFindingSubjectError"), rowNumber);
          groupImportGroupSummary.getGroupImportErrors().add(groupImportError);
          groupImportGroupSummary.groupCountErrorsIncrement();
        }
      }

      if (!removeMembers) {

        // subjects removed from subjectList (already members) count as done immediately
        progressBean.addProgressCompleteRecords(GrouperUtil.length(subjectSet) - GrouperUtil.length(subjectList));

        //first lets add some members
        for (int i=0;i<subjectList.size();i++) {

          Subject subject = subjectList.get(i);

          boolean hasError = false;

          // CSV rows are wrapped; unwrap to the real subject, recording a row-level error on failure
          if (subject instanceof ImportSubjectWrapper) {
            try {
              subject = ((ImportSubjectWrapper)subject).wrappedSubject();
            } catch (Exception e) {
              int rowNumber = ((ImportSubjectWrapper)subject).getRow();
              String label = ImportSubjectWrapper.errorLabelForRowStatic(rowNumber, ((ImportSubjectWrapper)subject).getRowData());
              GroupImportError groupImportError = new GroupImportError(label, TextContainer.retrieveFromRequest().getText().get(
                  "groupImportProblemFindingSubjectError"), rowNumber);
              groupImportGroupSummary.getGroupImportErrors().add(groupImportError);
              groupImportGroupSummary.groupCountErrorsIncrement();
              hasError = true;
            }
          }

          try {
            // try this even if we have an error
            group.addMember(subject, false);
            GrouperUtil.sleep(pauseBetweenRecordsMillis);
            groupImportGroupSummary.groupCountAddedIncrement();
          } catch (Exception e) {
            if (!hasError) {
              // if not already logged
              String subjectString = SubjectUtils.subjectToString(subject);
              GroupImportError groupImportError = new GroupImportError(subjectString, GrouperUtil.xmlEscape(e.getMessage()));
              groupImportGroupSummary.getGroupImportErrors().add(groupImportError);
              groupImportGroupSummary.groupCountErrorsIncrement();
              LOG.warn("error with " + subjectString, e);
            }
          }

          progressBean.addProgressCompleteRecords(1);
        }
      } else {

        // only the overlapping subjects are actually members; the rest count as done immediately
        progressBean.addProgressCompleteRecords(GrouperUtil.length(subjectSet) - GrouperUtil.length(overlappingMembers));

        //first lets remove some members
        for (int i=0;i<overlappingMembers.size();i++) {

          Member member = overlappingMembers.get(i);

          try {
            group.deleteMember(member, false);
            GrouperUtil.sleep(pauseBetweenRecordsMillis);
            groupImportGroupSummary.groupCountDeletedIncrement();
          } catch (Exception e) {
            String subjectString = SubjectUtils.subjectToString(member.getSubject());
            GroupImportError groupImportError = new GroupImportError(subjectString, GrouperUtil.xmlEscape(e.getMessage()));
            groupImportGroupSummary.getGroupImportErrors().add(groupImportError);
            groupImportGroupSummary.groupCountErrorsIncrement();
            LOG.warn("error with " + subjectString, e);
          }

          progressBean.addProgressCompleteRecords(1);
        }
      }

      // skip the replace-deletion when any subject had an error (presumably so a bad input
      // file cannot mass-delete a group's membership)
      boolean didntImportDueToSubjects = groupImportGroupSummary.getGroupCountErrors() > 0;

      //remove the ones which are already there
      if (importReplaceMembers && !didntImportDueToSubjects && !removeMembers) {

        progressBean.addProgressCompleteRecords(GrouperUtil.length(subjectSet) - GrouperUtil.length(existingMembers));

        for (Member existingMember : existingMembers) {

          try {
            group.deleteMember(existingMember, false);
            GrouperUtil.sleep(pauseBetweenRecordsMillis);
            groupImportGroupSummary.groupCountDeletedIncrement();
          } catch (Exception e) {
            String subjectString = SubjectUtils.subjectToString(existingMember.getSubject());
            GroupImportError groupImportError = new GroupImportError(subjectString, GrouperUtil.xmlEscape(e.getMessage()));
            groupImportGroupSummary.getGroupImportErrors().add(groupImportError);
            groupImportGroupSummary.groupCountErrorsIncrement();
            LOG.warn("error with " + subjectString, e);
          }

          progressBean.addProgressCompleteRecords(1);
        }
      }

      //this might be a little wasteful, but I think it is a good sanity check
      int newSize = group.getImmediateMembers().size();

      groupImportGroupSummary.setGroupCountNew(newSize);

      try {
        GrouperUserDataApi.recentlyUsedGroupAdd(GrouperUiUserData.grouperUiGroupNameForUserData(),
            loggedInSubject, group);
      } catch (Exception e) {
        LOG.warn("Cant add recently used group: " + group.getName() + ", for subject: " + SubjectUtils.subjectToString(loggedInSubject) + ", maybe a priv was lost after import started???", e);
      }

      if (StringUtils.equals(bulkAddOption, "import")) {
        auditImport(group.getUuid(), group.getName(), fileName, groupImportGroupSummary.getGroupCountAdded(), groupImportGroupSummary.getGroupCountDeleted());
      }

      groupImportGroupSummary.setComplete(true);
    }

    // done
    progressBean.setProgressCompleteRecords(progressBean.getProgressTotalRecords());

  } catch (NoSessionException se) {
    throw se;
  } catch (ControllerDone cd) {
    throw cd;
  } finally {
    GrouperSession.stopQuietly(grouperSession);
  }
}
/**
 * write an audit entry for a membership file import against one group
 * @param groupId uuid of the group imported into
 * @param groupName name of the group imported into
 * @param fileName name of the uploaded CSV file
 * @param countAdded number of memberships added
 * @param countDeleted number of memberships deleted
 */
private void auditImport(final String groupId, final String groupName, final String fileName,
    final int countAdded, final int countDeleted) {

  HibernateSession.callbackHibernateSession(
      GrouperTransactionType.READ_WRITE_OR_USE_EXISTING, AuditControl.WILL_AUDIT,
      new HibernateHandler() {

        public Object callback(HibernateHandlerBean hibernateHandlerBean)
            throws GrouperDAOException {
          AuditEntry auditEntry = new AuditEntry(AuditTypeBuiltin.MEMBERSHIP_GROUP_IMPORT, "file", fileName, "totalAdded",
              String.valueOf(countAdded), "groupId", groupId, "groupName", groupName, "totalDeleted", String.valueOf(countDeleted));
          // fixed malformed description (was: "Added : N subjects  and deleted M subjects in group ." + groupName)
          String description = "Added: " + countAdded + " subjects"
              + " and deleted " + countDeleted + " subjects in group: " + groupName;
          auditEntry.setDescription(description);
          auditEntry.saveOrUpdate(true);
          return null;
        }
      });
}
/**
 * modal search form results for add group to import
 * @param request
 * @param response
 */
public void groupImportGroupSearch(HttpServletRequest request, HttpServletResponse response) {

  final Subject loggedInSubject = GrouperUiFilter.retrieveSubjectLoggedIn();

  GroupContainer groupContainer = GrouperRequestContainer.retrieveFromRequestOrCreate().getGroupContainer();

  GuiResponseJs guiResponseJs = GuiResponseJs.retrieveGuiResponseJs();

  GrouperSession session = null;

  try {
    session = GrouperSession.start(loggedInSubject);

    String searchString = request.getParameter("addGroupSearch");

    // require a minimally long search string before hitting the database
    if (!GrouperUiUtils.searchStringValid(searchString)) {
      guiResponseJs.addAction(GuiScreenAction.newInnerHtml("#addGroupResults",
          TextContainer.retrieveFromRequest().getText().get("groupImportAddToGroupNotEnoughChars")));
      return;
    }

    boolean matchExactId = GrouperUtil.booleanValue(request.getParameter("matchExactId[]"), false);

    GuiPaging guiPaging = groupContainer.getGuiPaging();
    QueryOptions queryOptions = new QueryOptions();
    GrouperPagingTag2.processRequest(request, guiPaging, queryOptions);

    // only groups the user can update are candidates for importing into
    GroupFinder groupFinder = new GroupFinder().assignPrivileges(AccessPrivilege.UPDATE_PRIVILEGES)
        .assignScope(searchString).assignSplitScope(true).assignQueryOptions(queryOptions);

    if (matchExactId) {
      groupFinder.assignFindByUuidOrName(true);
    }

    Set<Group> groups = groupFinder.findGroups();

    guiPaging.setTotalRecordCount(queryOptions.getQueryPaging().getTotalRecordCount());

    if (GrouperUtil.length(groups) == 0) {
      guiResponseJs.addAction(GuiScreenAction.newInnerHtml("#addGroupResults",
          TextContainer.retrieveFromRequest().getText().get("groupImportAddGroupNotFound")));
      return;
    }

    groupContainer.setGuiGroups(GuiGroup.convertFromGroups(groups));

    guiResponseJs.addAction(GuiScreenAction.newInnerHtmlFromJsp("#addGroupResults",
        "/WEB-INF/grouperUi2/groupImport/groupImportAddGroupResults.jsp"));

  } finally {
    GrouperSession.stopQuietly(session);
  }
}
/**
 * import group members screen remove group from list
 * @param request
 * @param response
 */
public void groupImportRemoveGroup(HttpServletRequest request, HttpServletResponse response) {

  final Subject loggedInSubject = GrouperUiFilter.retrieveSubjectLoggedIn();

  GrouperSession session = null;

  try {
    session = GrouperSession.start(loggedInSubject);

    GuiResponseJs guiResponseJs = GuiResponseJs.retrieveGuiResponseJs();

    // considerRemoveGroupId=true drops the group named by the removeGroupId parameter;
    // combobox contents are not pulled into the list here
    groupImportSetupExtraGroups(loggedInSubject, request, guiResponseJs, true, false,
        new LinkedHashSet<Group>(), false);

    guiResponseJs.addAction(GuiScreenAction.newInnerHtmlFromJsp("#groupImportExtraGroupsDivId",
        "/WEB-INF/grouperUi2/groupImport/groupImportExtraGroups.jsp"));

  } finally {
    GrouperSession.stopQuietly(session);
  }
}
/**
 * import group members screen remove subject from list
 * @param request
 * @param response
 */
public void groupImportRemoveSubject(HttpServletRequest request, HttpServletResponse response) {

  final Subject loggedInSubject = GrouperUiFilter.retrieveSubjectLoggedIn();

  GrouperSession session = null;

  try {
    session = GrouperSession.start(loggedInSubject);

    GuiResponseJs guiResponseJs = GuiResponseJs.retrieveGuiResponseJs();

    // considerRemoveSubjectSourceAndId=true drops the subject named by the request parameter;
    // combobox contents are not pulled into the list here
    groupImportSetupExtraSubjects(loggedInSubject, request, guiResponseJs, true, false,
        new LinkedHashSet<Subject>(), false);

    guiResponseJs.addAction(GuiScreenAction.newInnerHtmlFromJsp("#groupImportExtraMembersDivId",
        "/WEB-INF/grouperUi2/groupImport/groupImportExtraSubjects.jsp"));

  } finally {
    GrouperSession.stopQuietly(session);
  }
}
/**
 * import group members screen add group to list
 * @param request
 * @param response
 */
public void groupImportAddGroup(HttpServletRequest request, HttpServletResponse response) {

  final Subject loggedInSubject = GrouperUiFilter.retrieveSubjectLoggedIn();

  GrouperSession session = null;

  try {
    session = GrouperSession.start(loggedInSubject);

    GuiResponseJs guiResponseJs = GuiResponseJs.retrieveGuiResponseJs();

    // move the combobox selection into the extra-groups list (validation error if nothing selected)
    groupImportSetupExtraGroups(loggedInSubject, request, guiResponseJs, false, true,
        new LinkedHashSet<Group>(), true);

    //clear out combobox
    guiResponseJs.addAction(GuiScreenAction.newScript(
        "dijit.byId('groupImportGroupComboId').set('displayedValue', ''); " +
        "dijit.byId('groupImportGroupComboId').set('value', '');"));

    guiResponseJs.addAction(GuiScreenAction.newInnerHtmlFromJsp("#groupImportExtraGroupsDivId",
        "/WEB-INF/grouperUi2/groupImport/groupImportExtraGroups.jsp"));

  } finally {
    GrouperSession.stopQuietly(session);
  }
}
/**
 * import group members screen
 * @param request
 * @param response
 */
public void groupImport(HttpServletRequest request, HttpServletResponse response) {

  final Subject loggedInSubject = GrouperUiFilter.retrieveSubjectLoggedIn();

  GrouperSession grouperSession = null;

  try {
    grouperSession = GrouperSession.start(loggedInSubject);

    // BUGFIX: use the request-scoped container like every other action in this class;
    // previously a detached "new GrouperRequestContainer()" was used here, so the
    // importFromGroup/importFromSubject flags set below were never visible to the JSP
    GrouperRequestContainer grouperRequestContainer = GrouperRequestContainer.retrieveFromRequestOrCreate();

    GroupImportContainer groupImportContainer = grouperRequestContainer.getGroupImportContainer();

    String backTo = request.getParameter("backTo");

    {
      //this will also put the group in the group container so it can populate the combobox
      Group group = UiV2Group.retrieveGroupHelper(request, AccessPrivilege.UPDATE, false).getGroup();

      if (group != null && StringUtils.equals("group", backTo)) {
        groupImportContainer.setImportFromGroup(true);
      }
    }

    {
      Subject subject = UiV2Subject.retrieveSubjectHelper(request, false);

      if (subject != null && StringUtils.equals("subject", backTo)) {
        groupImportContainer.setImportFromSubject(true);
      }
    }

    GuiResponseJs guiResponseJs = GuiResponseJs.retrieveGuiResponseJs();

    guiResponseJs.addAction(GuiScreenAction.newInnerHtmlFromJsp("#grouperMainContentDivId",
        "/WEB-INF/grouperUi2/groupImport/groupImport.jsp"));

  } finally {
    GrouperSession.stopQuietly(grouperSession);
  }
}
/**
 * import group members screen add member to list
 * @param request
 * @param response
 */
public void groupImportAddMember(HttpServletRequest request, HttpServletResponse response) {

  final Subject loggedInSubject = GrouperUiFilter.retrieveSubjectLoggedIn();

  GrouperSession session = null;

  try {
    session = GrouperSession.start(loggedInSubject);

    GuiResponseJs guiResponseJs = GuiResponseJs.retrieveGuiResponseJs();

    // move the combobox selection into the extra-subjects list (validation error if nothing selected)
    groupImportSetupExtraSubjects(loggedInSubject, request, guiResponseJs, false, true,
        new LinkedHashSet<Subject>(), true);

    //clear out combobox
    guiResponseJs.addAction(GuiScreenAction.newScript(
        "dijit.byId('groupAddMemberComboId').set('displayedValue', ''); " +
        "dijit.byId('groupAddMemberComboId').set('value', '');"));

    guiResponseJs.addAction(GuiScreenAction.newInnerHtmlFromJsp("#groupImportExtraMembersDivId",
        "/WEB-INF/grouperUi2/groupImport/groupImportExtraSubjects.jsp"));

  } finally {
    GrouperSession.stopQuietly(session);
  }
}
/**
 * Setup the extra members (other than combobox), and maybe move the combobox
 * selection down into that list. Reads the combobox value and up to 100
 * hidden "extraSourceIdSubjectId_N" fields from the request, resolves each to
 * a Subject, de-duplicates by (sourceId, subjectId), and publishes the
 * resulting GuiSubject list on the GroupImportContainer for re-rendering.
 * @param loggedInSubject the authenticated user (currently unused in the body)
 * @param request HTTP request carrying combobox and hidden-field values
 * @param guiResponseJs receives a validation error if the combobox is empty
 * @param considerRemoveSubjectSourceAndId if removing one; the
 * "removeSubjectSourceAndId" parameter must then be present
 * @param includeCombobox true to add the combobox selection to the extra list
 * @param allSubjects is a LinkedHashSet of subjects; accumulates every
 * resolved subject (combobox plus hidden fields), de-duplicated
 * @param errorOnNullCombobox is true if an error should appear if there is nothing in the combobox
 * @return true if ok, false if not (combobox required but unresolvable)
 */
private boolean groupImportSetupExtraSubjects(Subject loggedInSubject,
    HttpServletRequest request, GuiResponseJs guiResponseJs, boolean considerRemoveSubjectSourceAndId,
    boolean includeCombobox, Set<Subject> allSubjects, boolean errorOnNullCombobox) {

  //extra source ids and subjects ids (de-dup keys for the visible "extra" list)
  Set<MultiKey> extraSubjectSourceAndIds = new HashSet<MultiKey>();

  Set<GuiSubject> extraGuiSubjects = new LinkedHashSet<GuiSubject>();
  GrouperRequestContainer.retrieveFromRequestOrCreate().getGroupImportContainer().setGroupImportExtraGuiSubjects(extraGuiSubjects);

  // de-dup keys for the full accumulated set (extra list plus combobox)
  Set<MultiKey> allSubjectsSourceAndIds = new HashSet<MultiKey>();

  String removeSubjectSourceAndId = null;

  //if removing a group id
  if (considerRemoveSubjectSourceAndId) {
    removeSubjectSourceAndId = request.getParameter("removeSubjectSourceAndId");
    if (StringUtils.isBlank(removeSubjectSourceAndId)) {
      throw new RuntimeException("Why would removeSubjectSourceAndId be empty????");
    }
  }

  Subject theSubject = null;

  {
    //if moving combobox down to extra list or getting all groups
    String comboValue = request.getParameter("groupAddMemberComboName");

    if (StringUtils.isBlank(comboValue)) {
      //if didnt pick one from results
      comboValue = request.getParameter("groupAddMemberComboDisplay");
    }

    try {
      // widen subject search to groups the user can READ while resolving
      GrouperSourceAdapter.searchForGroupsWithReadPrivilege(true);
      // "sourceId||subjectId" form means the user picked a concrete result
      if (comboValue != null && comboValue.contains("||")) {
        String sourceId = GrouperUtil.prefixOrSuffix(comboValue, "||", true);
        String subjectId = GrouperUtil.prefixOrSuffix(comboValue, "||", false);
        theSubject = SubjectFinder.findByIdOrIdentifierAndSource(subjectId, sourceId, false);
      } else {
        try {
          theSubject = StringUtils.isBlank(comboValue) ? null : SubjectFinder.findByIdOrIdentifier(comboValue, false);
        } catch (SubjectNotUniqueException snue) {
          //ignore: ambiguous free-text is treated the same as "not found"
        }
      }
    } finally {
      GrouperSourceAdapter.clearSearchForGroupsWithReadPrivilege();
    }
  }

  boolean success = true;

  if (theSubject == null) {
    if (includeCombobox && errorOnNullCombobox) {
      guiResponseJs.addAction(GuiScreenAction.newValidationMessage(GuiMessageType.error,
          "#groupAddMemberComboErrorId",
          TextContainer.retrieveFromRequest().getText().get("groupImportSubjectNotFound")));
      success = false;
    }
  } else {
    MultiKey multiKey = new MultiKey(theSubject.getSourceId(), theSubject.getId());
    if (includeCombobox) {
      if (!extraSubjectSourceAndIds.contains(multiKey)) {
        extraGuiSubjects.add(new GuiSubject(theSubject));
        extraSubjectSourceAndIds.add(multiKey);
      }
    }
    //always add to all groups
    if (!allSubjectsSourceAndIds.contains(multiKey)) {
      allSubjects.add(theSubject);
      allSubjectsSourceAndIds.add(multiKey);
    }
  }

  //loop through all the hidden fields (max 100)
  for (int i=0;i<100;i++) {
    String extraSourceIdSubjectId = request.getParameter("extraSourceIdSubjectId_" + i);

    //we are at the end (fields are assumed contiguous — first blank stops the scan)
    if (StringUtils.isBlank(extraSourceIdSubjectId)) {
      break;
    }

    //might be removing this one
    if (considerRemoveSubjectSourceAndId && StringUtils.equals(removeSubjectSourceAndId, extraSourceIdSubjectId)) {
      continue;
    }

    theSubject = null;
    try {
      GrouperSourceAdapter.searchForGroupsWithReadPrivilege(true);
      // hidden fields always use the "sourceId||subjectId" form
      if (extraSourceIdSubjectId != null && extraSourceIdSubjectId.contains("||")) {
        String sourceId = GrouperUtil.prefixOrSuffix(extraSourceIdSubjectId, "||", true);
        String subjectId = GrouperUtil.prefixOrSuffix(extraSourceIdSubjectId, "||", false);
        theSubject = SubjectFinder.findByIdOrIdentifierAndSource(subjectId, sourceId, false);
      }
    } finally {
      GrouperSourceAdapter.clearSearchForGroupsWithReadPrivilege();
    }

    if (theSubject != null) {
      MultiKey multiKey = new MultiKey(theSubject.getSourceId(), theSubject.getId());
      if (!extraSubjectSourceAndIds.contains(multiKey)) {
        extraGuiSubjects.add(new GuiSubject(theSubject));
        extraSubjectSourceAndIds.add(multiKey);
      }
      if (!allSubjectsSourceAndIds.contains(multiKey)) {
        allSubjects.add(theSubject);
        allSubjectsSourceAndIds.add(multiKey);
      }
    }
  }
  return success;
}
}
|
JoseTomasTocino/AdventOfCode2020 | day03/code/main.py | import functools
import logging
import operator
logger = logging.getLogger(__name__)
def get_map_cell(map_template, row, column):
    """Return the character at (row, column), wrapping horizontally.

    The map pattern repeats infinitely to the right, so the column index
    is reduced modulo the row width before lookup.
    """
    width = len(map_template[0])
    return map_template[row][column % width]
def traverse_map(map_string, slope=(1, 3)):
    """Count trees ('#') hit while descending the map along *slope*.

    The map repeats infinitely to the right, so columns wrap modulo the
    row width.

    Args:
        map_string: Newline-separated grid of '.' (open) and '#' (tree).
        slope: (row_step, col_step) applied after each cell is checked.
            Defaults to down 1, right 3. Any two-element sequence works,
            so existing callers passing lists are unaffected.
            (Fixed: the original used a mutable list default, [1, 3].)

    Returns:
        Number of tree cells visited before running off the bottom.
    """
    rows = map_string.split("\n")
    width = len(rows[0])  # hoisted: constant for the whole traversal
    row_step, col_step = slope

    row = col = 0
    num_trees = 0
    while row < len(rows):
        if rows[row][col % width] == "#":
            num_trees += 1
        row += row_step
        col += col_step

    return num_trees
def traverse_map_multiple_slopes(map_string, slopes):
    """Traverse the map once per slope and return the product of tree counts.

    Each traversal is logged at INFO level with its slope and count.
    """
    def count_and_log(slope):
        trees = traverse_map(map_string, slope)
        logger.info(f"Traversing with slope: {slope}, found trees: {trees}")
        return trees

    return functools.reduce(operator.mul, [count_and_log(s) for s in slopes])
|
vrutkovs/pulumi-gcp | sdk/python/pulumi_gcp/organizations/folder.py | <filename>sdk/python/pulumi_gcp/organizations/folder.py<gh_stars>0
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import pulumi
import pulumi.runtime
from .. import utilities, tables
class Folder(pulumi.CustomResource):
    """
    Allows management of a Google Cloud Platform folder. For more information see
    [the official documentation](https://cloud.google.com/resource-manager/docs/creating-managing-folders)
    and
    [API](https://cloud.google.com/resource-manager/reference/rest/v2/folders).

    A folder can contain projects, other folders, or a combination of both. You can use folders to group projects under an organization in a hierarchy. For example, your organization might contain multiple departments, each with its own set of Cloud Platform resources. Folders allows you to group these resources on a per-department basis. Folders are used to group resources that share common IAM policies.

    Folders created live inside an Organization. See the [Organization documentation](https://cloud.google.com/resource-manager/docs/quickstarts) for more details.

    The service account used to run Terraform when creating a `google_folder`
    resource must have `roles/resourcemanager.folderCreator`. See the
    [Access Control for Folders Using IAM](https://cloud.google.com/resource-manager/docs/access-control-folders)
    doc for more information.
    """
    def __init__(__self__, __name__, __opts__=None, display_name=None, parent=None):
        """Create a Folder resource with the given unique name, props, and options.

        :param __name__: Unique resource name, used for URN creation.
        :param __opts__: Optional ``pulumi.ResourceOptions`` for this resource.
        :param display_name: The folder's display name (required).
        :param parent: The parent of the folder (required) -- presumably of the
            form ``organizations/{id}`` or ``folders/{id}``; confirm against
            the Resource Manager API docs.
        """
        if not __name__:
            raise TypeError('Missing resource name argument (for URN creation)')
        if not isinstance(__name__, str):
            raise TypeError('Expected resource name to be a string')
        if __opts__ and not isinstance(__opts__, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')

        __props__ = dict()

        # NOTE: generated check -- falsy values such as '' are rejected
        # the same as None.
        if not display_name:
            raise TypeError('Missing required property display_name')
        __props__['display_name'] = display_name

        if not parent:
            raise TypeError('Missing required property parent')
        __props__['parent'] = parent

        # Output-only properties; resolved by the Pulumi engine after creation.
        __props__['create_time'] = None
        __props__['lifecycle_state'] = None
        __props__['name'] = None

        super(Folder, __self__).__init__(
            'gcp:organizations/folder:Folder',
            __name__,
            __props__,
            __opts__)

    def translate_output_property(self, prop):
        # Map a camelCase provider property name to its snake_case Python name.
        return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop

    def translate_input_property(self, prop):
        # Map a snake_case Python attribute name to its camelCase provider name.
        return tables._SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
|
AliFrank608-TMW/RacingReact | src/components/syndicate/SyndicateNew/index.js | import React, { Component } from 'react'
import TextButton from 'components/buttons/TextButton'
import { Link } from 'react-router-dom'
class SyndicateNew extends Component {
constructor (props) {
super(props)
}
render () {
return (
<div className="new-syndicate">
<h2 className="uppercase">
Create a new syndicate
</h2>
<div className="underline" />
<div className="small-group">
<p className="small">
Interested in creating and managing a new syndicate? In
partnership with the <span>British Horseracing Authority</span> and
<span> Wetherbys, The Racing Manager</span> makes the whole registration
process very simple.
</p>
</div>
<div className="new-syndicate-btn">
<Link to='/create-new-syndicate'>
<TextButton
text="CREATE A NEW SYNDICATE"
className="syndicate__button"
onClick={() => {}}/>
</Link>
</div>
</div>
)
}
}
export default SyndicateNew
|
sammck/secret-kv | secret_kv/config/passphrase.py | <reponame>sammck/secret-kv
# Copyright (c) 2022 <NAME>
#
# MIT License - See LICENSE file accompanying this package.
#
"""Configuration support for retrieving a secret passphrase."""
from typing import Optional, Dict, TextIO
from ..internal_types import JsonableDict
import keyring
from ..util import full_name_of_type, full_type
from .base import Config
class PassphraseConfig(Config):
    """Base Config for passphrase providers.

    Concrete subclasses override get/set/delete_passphrase; the base
    implementations raise NotImplementedError. Also resolves an optional
    fallback passphrase config from the 'default_passphrase_cfg' template
    property during bake().
    """

    # Optional fallback passphrase config, resolved in bake().
    _default_passphrase_cfg: Optional['PassphraseConfig'] = None

    def bake(self):
        """Resolve the optional default passphrase config from template data."""
        cfg_data = self.get_template_cfg_property('default_passphrase_cfg', None)
        if cfg_data is None:
            return
        self._default_passphrase_cfg = self._context.load_json_data(cfg_data)

    def get_passphrase(self) -> str:
        """Return the passphrase. Subclasses must override."""
        raise NotImplementedError(f"{full_type(self)} does not implement get_passphrase")

    def set_passphrase(self, s: str):
        """Store the passphrase. Subclasses must override."""
        raise NotImplementedError(f"{full_type(self)} does not implement set_passphrase")

    def delete_passphrase(self):
        """Delete the stored passphrase. Subclasses must override."""
        raise NotImplementedError(f"{full_type(self)} does not implement delete_passphrase")

    def passphrase_exists(self) -> bool:
        """Return True if get_passphrase() succeeds, False on KeyError."""
        try:
            self.get_passphrase()
        except KeyError:
            return False
        else:
            return True
|
futur/usergrid-stack | core/src/main/java/org/usergrid/persistence/cassandra/Setup.java | /*******************************************************************************
* Copyright 2012 Apigee Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package org.usergrid.persistence.cassandra;
import static me.prettyprint.hector.api.factory.HFactory.createColumnFamilyDefinition;
import static org.usergrid.persistence.cassandra.CassandraPersistenceUtils.getCfDefs;
import static org.usergrid.persistence.cassandra.CassandraService.APPLICATIONS_CF;
import static org.usergrid.persistence.cassandra.CassandraService.DEFAULT_APPLICATION;
import static org.usergrid.persistence.cassandra.CassandraService.DEFAULT_APPLICATION_ID;
import static org.usergrid.persistence.cassandra.CassandraService.DEFAULT_ORGANIZATION;
import static org.usergrid.persistence.cassandra.CassandraService.MANAGEMENT_APPLICATION;
import static org.usergrid.persistence.cassandra.CassandraService.MANAGEMENT_APPLICATION_ID;
import static org.usergrid.persistence.cassandra.CassandraService.PROPERTIES_CF;
import static org.usergrid.persistence.cassandra.CassandraService.STATIC_APPLICATION_KEYSPACE;
import static org.usergrid.persistence.cassandra.CassandraService.SYSTEM_KEYSPACE;
import static org.usergrid.persistence.cassandra.CassandraService.TOKENS_CF;
import static org.usergrid.persistence.cassandra.CassandraService.USE_VIRTUAL_KEYSPACES;
import static org.usergrid.persistence.cassandra.CassandraService.keyspaceForApplication;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import me.prettyprint.hector.api.ddl.ColumnFamilyDefinition;
import me.prettyprint.hector.api.ddl.ComparatorType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.usergrid.mq.cassandra.QueuesCF;
// TODO: Auto-generated Javadoc
/**
* Cassandra-specific setup utilities.
*
* @author edanuff
*/
public class Setup {

    private static final Logger logger = LoggerFactory.getLogger(Setup.class);

    /** Entity manager factory used to bootstrap the built-in applications. */
    private final org.usergrid.persistence.EntityManagerFactory emf;

    /** Low-level Cassandra access used to create keyspaces and column families. */
    private final CassandraService cass;

    /**
     * Instantiates a new setup object.
     *
     * @param emf the entity manager factory
     * @param cass the Cassandra service
     */
    Setup(EntityManagerFactoryImpl emf, CassandraService cass) {
        this.emf = emf;
        this.cass = cass;
    }

    /**
     * Initializes the Cassandra schema: connects, creates the system and
     * static application keyspaces, then bootstraps the default and
     * management applications.
     *
     * @throws Exception if keyspace creation or application bootstrap fails
     */
    public synchronized void setup() throws Exception {
        cass.init();
        setupSystemKeyspace();
        setupStaticKeyspace();

        ((EntityManagerFactoryImpl) emf).initializeApplication(
                DEFAULT_ORGANIZATION, DEFAULT_APPLICATION_ID,
                DEFAULT_APPLICATION, null);

        ((EntityManagerFactoryImpl) emf).initializeApplication(
                DEFAULT_ORGANIZATION, MANAGEMENT_APPLICATION_ID,
                MANAGEMENT_APPLICATION, null);
    }

    /**
     * Creates the system keyspace and its column families
     * (applications, properties, tokens).
     *
     * @throws Exception if the keyspace cannot be created
     */
    public void setupSystemKeyspace() throws Exception {

        logger.info("Initialize system keyspace");

        List<ColumnFamilyDefinition> cf_defs = new ArrayList<ColumnFamilyDefinition>();

        cf_defs.add(createColumnFamilyDefinition(SYSTEM_KEYSPACE,
                APPLICATIONS_CF, ComparatorType.BYTESTYPE));

        cf_defs.add(createColumnFamilyDefinition(SYSTEM_KEYSPACE,
                PROPERTIES_CF, ComparatorType.BYTESTYPE));

        cf_defs.add(createColumnFamilyDefinition(SYSTEM_KEYSPACE, TOKENS_CF,
                ComparatorType.BYTESTYPE));

        cass.createKeyspace(SYSTEM_KEYSPACE, cf_defs);

        logger.info("System keyspace initialized");
    }

    /**
     * Creates a dedicated keyspace for one application. Only relevant when
     * virtual keyspaces are disabled; otherwise all applications share the
     * static keyspace created by {@link #setupStaticKeyspace()}.
     *
     * @param applicationId the application id
     * @param applicationName the application name (used for logging only)
     * @throws Exception if the keyspace cannot be created
     */
    public void setupApplicationKeyspace(final UUID applicationId,
            String applicationName) throws Exception {

        if (!USE_VIRTUAL_KEYSPACES) {
            String app_keyspace = keyspaceForApplication(applicationId);

            // SLF4J parameterized logging instead of string concatenation
            logger.info("Creating application keyspace {} for {} application",
                    app_keyspace, applicationName);

            cass.createKeyspace(
                    app_keyspace,
                    getCfDefs(ApplicationCF.class,
                            getCfDefs(QueuesCF.class, app_keyspace),
                            app_keyspace));

            /*
             * String messages_keyspace = app_keyspace +
             * APPLICATION_MESSAGES_KEYSPACE_SUFFIX;
             * cass.createKeyspace(messages_keyspace, getCfDefs(QueuesCF.class,
             * messages_keyspace));
             */
        }
    }

    /**
     * Creates the shared (static) application keyspace used when virtual
     * keyspaces are enabled; no-op otherwise.
     *
     * @throws Exception if the keyspace cannot be created
     */
    public void setupStaticKeyspace() throws Exception {

        if (USE_VIRTUAL_KEYSPACES) {

            logger.info("Creating static application keyspace {}",
                    STATIC_APPLICATION_KEYSPACE);

            cass.createKeyspace(
                    STATIC_APPLICATION_KEYSPACE,
                    getCfDefs(
                            ApplicationCF.class,
                            getCfDefs(QueuesCF.class,
                                    STATIC_APPLICATION_KEYSPACE),
                            STATIC_APPLICATION_KEYSPACE));

            /*
             * cass.createKeyspace(STATIC_MESSAGES_KEYSPACE,
             * getCfDefs(QueuesCF.class, STATIC_MESSAGES_KEYSPACE));
             */
        }
    }

    /** Verifies that the expected keyspaces exist; delegates to the service. */
    public void checkKeyspaces() {
        cass.checkKeyspaces();
    }

    /**
     * Prints the Cassandra access-control lines granting the "usergrid" user
     * read/write on every column family. Deliberately uses System.out rather
     * than the logger -- presumably so the output can be redirected into a
     * config file (NOTE(review): confirm intent).
     */
    public static void logCFPermissions() {
        System.out.println(SYSTEM_KEYSPACE + "." + APPLICATIONS_CF
                + ".<rw>=usergrid");
        System.out.println(SYSTEM_KEYSPACE + "." + PROPERTIES_CF
                + ".<rw>=usergrid");
        for (CFEnum cf : ApplicationCF.values()) {
            System.out.println(STATIC_APPLICATION_KEYSPACE + "." + cf
                    + ".<rw>=usergrid");
        }
        for (CFEnum cf : QueuesCF.values()) {
            System.out.println(STATIC_APPLICATION_KEYSPACE + "." + cf
                    + ".<rw>=usergrid");
        }
    }

}
|
FrankKwok/Oreo | android/view/PixelCopy.java | /*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.view;
import android.annotation.IntDef;
import android.annotation.NonNull;
import android.annotation.Nullable;
import android.graphics.Bitmap;
import android.graphics.Rect;
import android.os.Handler;
import android.view.ViewTreeObserver.OnDrawListener;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
 * Provides a mechanism to issue pixel copy requests to allow for copy
 * operations from {@link Surface} to {@link Bitmap}.
 */
public final class PixelCopy {
    /** @hide */
    @Retention(RetentionPolicy.SOURCE)
    @IntDef({SUCCESS, ERROR_UNKNOWN, ERROR_TIMEOUT, ERROR_SOURCE_NO_DATA,
        ERROR_SOURCE_INVALID, ERROR_DESTINATION_INVALID})
    public @interface CopyResultStatus {}

    /** The pixel copy request succeeded */
    public static final int SUCCESS = 0;

    /** The pixel copy request failed with an unknown error. */
    public static final int ERROR_UNKNOWN = 1;

    /**
     * A timeout occurred while trying to acquire a buffer from the source to
     * copy from.
     */
    public static final int ERROR_TIMEOUT = 2;

    /**
     * The source has nothing to copy from. When the source is a {@link Surface}
     * this means that no buffers have been queued yet. Wait for the source
     * to produce a frame and try again.
     */
    public static final int ERROR_SOURCE_NO_DATA = 3;

    /**
     * It is not possible to copy from the source. This can happen if the source
     * is hardware-protected or destroyed.
     */
    public static final int ERROR_SOURCE_INVALID = 4;

    /**
     * The destination isn't a valid copy target. If the destination is a bitmap
     * this can occur if the bitmap is too large for the hardware to copy to.
     * It can also occur if the destination has been destroyed.
     */
    public static final int ERROR_DESTINATION_INVALID = 5;

    /**
     * Listener for observing the completion of a PixelCopy request.
     */
    public interface OnPixelCopyFinishedListener {
        /**
         * Callback for when a pixel copy request has completed. This will be called
         * regardless of whether the copy succeeded or failed.
         *
         * @param copyResult Contains the resulting status of the copy request.
         * This will either be {@link PixelCopy#SUCCESS} or one of the
         * <code>PixelCopy.ERROR_*</code> values.
         */
        void onPixelCopyFinished(@CopyResultStatus int copyResult);
    }

    /**
     * Requests for the display content of a {@link SurfaceView} to be copied
     * into a provided {@link Bitmap}.
     *
     * The contents of the source will be scaled to fit exactly inside the bitmap.
     * The pixel format of the source buffer will be converted, as part of the copy,
     * to fit the bitmap's {@link Bitmap.Config}. The most recently queued buffer
     * in the SurfaceView's Surface will be used as the source of the copy.
     *
     * @param source The source from which to copy
     * @param dest The destination of the copy. The source will be scaled to
     * match the width, height, and format of this bitmap.
     * @param listener Callback for when the pixel copy request completes
     * @param listenerThread The callback will be invoked on this Handler when
     * the copy is finished.
     */
    public static void request(@NonNull SurfaceView source, @NonNull Bitmap dest,
            @NonNull OnPixelCopyFinishedListener listener, @NonNull Handler listenerThread) {
        request(source.getHolder().getSurface(), dest, listener, listenerThread);
    }

    /**
     * Requests for the display content of a {@link SurfaceView} to be copied
     * into a provided {@link Bitmap}.
     *
     * The contents of the source will be scaled to fit exactly inside the bitmap.
     * The pixel format of the source buffer will be converted, as part of the copy,
     * to fit the bitmap's {@link Bitmap.Config}. The most recently queued buffer
     * in the SurfaceView's Surface will be used as the source of the copy.
     *
     * @param source The source from which to copy
     * @param srcRect The area of the source to copy from. If this is null
     * the copy area will be the entire surface. The rect will be clamped to
     * the bounds of the Surface.
     * @param dest The destination of the copy. The source will be scaled to
     * match the width, height, and format of this bitmap.
     * @param listener Callback for when the pixel copy request completes
     * @param listenerThread The callback will be invoked on this Handler when
     * the copy is finished.
     */
    public static void request(@NonNull SurfaceView source, @Nullable Rect srcRect,
            @NonNull Bitmap dest, @NonNull OnPixelCopyFinishedListener listener,
            @NonNull Handler listenerThread) {
        request(source.getHolder().getSurface(), srcRect,
                dest, listener, listenerThread);
    }

    /**
     * Requests a copy of the pixels from a {@link Surface} to be copied into
     * a provided {@link Bitmap}.
     *
     * The contents of the source will be scaled to fit exactly inside the bitmap.
     * The pixel format of the source buffer will be converted, as part of the copy,
     * to fit the bitmap's {@link Bitmap.Config}. The most recently queued buffer
     * in the Surface will be used as the source of the copy.
     *
     * @param source The source from which to copy
     * @param dest The destination of the copy. The source will be scaled to
     * match the width, height, and format of this bitmap.
     * @param listener Callback for when the pixel copy request completes
     * @param listenerThread The callback will be invoked on this Handler when
     * the copy is finished.
     */
    public static void request(@NonNull Surface source, @NonNull Bitmap dest,
            @NonNull OnPixelCopyFinishedListener listener, @NonNull Handler listenerThread) {
        request(source, null, dest, listener, listenerThread);
    }

    /**
     * Requests a copy of the pixels at the provided {@link Rect} from
     * a {@link Surface} to be copied into a provided {@link Bitmap}.
     *
     * The contents of the source rect will be scaled to fit exactly inside the bitmap.
     * The pixel format of the source buffer will be converted, as part of the copy,
     * to fit the bitmap's {@link Bitmap.Config}. The most recently queued buffer
     * in the Surface will be used as the source of the copy.
     *
     * @param source The source from which to copy
     * @param srcRect The area of the source to copy from. If this is null
     * the copy area will be the entire surface. The rect will be clamped to
     * the bounds of the Surface.
     * @param dest The destination of the copy. The source will be scaled to
     * match the width, height, and format of this bitmap.
     * @param listener Callback for when the pixel copy request completes
     * @param listenerThread The callback will be invoked on this Handler when
     * the copy is finished.
     */
    public static void request(@NonNull Surface source, @Nullable Rect srcRect,
            @NonNull Bitmap dest, @NonNull OnPixelCopyFinishedListener listener,
            @NonNull Handler listenerThread) {
        validateBitmapDest(dest);
        if (!source.isValid()) {
            throw new IllegalArgumentException("Surface isn't valid, source.isValid() == false");
        }
        if (srcRect != null && srcRect.isEmpty()) {
            throw new IllegalArgumentException("sourceRect is empty");
        }
        // TODO: Make this actually async and fast and cool and stuff
        int result = ThreadedRenderer.copySurfaceInto(source, srcRect, dest);
        listenerThread.post(new Runnable() {
            @Override
            public void run() {
                listener.onPixelCopyFinished(result);
            }
        });
    }

    /**
     * Requests a copy of the pixels from a {@link Window} to be copied into
     * a provided {@link Bitmap}.
     *
     * The contents of the source will be scaled to fit exactly inside the bitmap.
     * The pixel format of the source buffer will be converted, as part of the copy,
     * to fit the bitmap's {@link Bitmap.Config}. The most recently queued buffer
     * in the Window's Surface will be used as the source of the copy.
     *
     * Note: This is limited to being able to copy from Window's with a non-null
     * DecorView. If {@link Window#peekDecorView()} is null this throws an
     * {@link IllegalArgumentException}. It will similarly throw an exception
     * if the DecorView has not yet acquired a backing surface. It is recommended
     * that {@link OnDrawListener} is used to ensure that at least one draw
     * has happened before trying to copy from the window, otherwise either
     * an {@link IllegalArgumentException} will be thrown or an error will
     * be returned to the {@link OnPixelCopyFinishedListener}.
     *
     * @param source The source from which to copy
     * @param dest The destination of the copy. The source will be scaled to
     * match the width, height, and format of this bitmap.
     * @param listener Callback for when the pixel copy request completes
     * @param listenerThread The callback will be invoked on this Handler when
     * the copy is finished.
     */
    public static void request(@NonNull Window source, @NonNull Bitmap dest,
            @NonNull OnPixelCopyFinishedListener listener, @NonNull Handler listenerThread) {
        request(source, null, dest, listener, listenerThread);
    }

    /**
     * Requests a copy of the pixels at the provided {@link Rect} from
     * a {@link Window} to be copied into a provided {@link Bitmap}.
     *
     * The contents of the source rect will be scaled to fit exactly inside the bitmap.
     * The pixel format of the source buffer will be converted, as part of the copy,
     * to fit the bitmap's {@link Bitmap.Config}. The most recently queued buffer
     * in the Window's Surface will be used as the source of the copy.
     *
     * Note: This is limited to being able to copy from Window's with a non-null
     * DecorView. If {@link Window#peekDecorView()} is null this throws an
     * {@link IllegalArgumentException}. It will similarly throw an exception
     * if the DecorView has not yet acquired a backing surface. It is recommended
     * that {@link OnDrawListener} is used to ensure that at least one draw
     * has happened before trying to copy from the window, otherwise either
     * an {@link IllegalArgumentException} will be thrown or an error will
     * be returned to the {@link OnPixelCopyFinishedListener}.
     *
     * @param source The source from which to copy
     * @param srcRect The area of the source to copy from. If this is null
     * the copy area will be the entire surface. The rect will be clamped to
     * the bounds of the Surface.
     * @param dest The destination of the copy. The source will be scaled to
     * match the width, height, and format of this bitmap.
     * @param listener Callback for when the pixel copy request completes
     * @param listenerThread The callback will be invoked on this Handler when
     * the copy is finished.
     */
    public static void request(@NonNull Window source, @Nullable Rect srcRect,
            @NonNull Bitmap dest, @NonNull OnPixelCopyFinishedListener listener,
            @NonNull Handler listenerThread) {
        validateBitmapDest(dest);
        if (source == null) {
            throw new IllegalArgumentException("source is null");
        }
        if (source.peekDecorView() == null) {
            throw new IllegalArgumentException(
                    "Only able to copy windows with decor views");
        }
        Surface surface = null;
        if (source.peekDecorView().getViewRootImpl() != null) {
            surface = source.peekDecorView().getViewRootImpl().mSurface;
        }
        if (surface == null || !surface.isValid()) {
            throw new IllegalArgumentException(
                    "Window doesn't have a backing surface!");
        }
        request(surface, srcRect, dest, listener, listenerThread);
    }

    private static void validateBitmapDest(Bitmap bitmap) {
        // TODO: Pre-check max texture dimens if we can
        if (bitmap == null) {
            throw new IllegalArgumentException("Bitmap cannot be null");
        }
        if (bitmap.isRecycled()) {
            throw new IllegalArgumentException("Bitmap is recycled");
        }
        if (!bitmap.isMutable()) {
            throw new IllegalArgumentException("Bitmap is immutable");
        }
    }

    // Static utility class; not instantiable.
    private PixelCopy() {}
}
|
yipuran/yipuran-core | src/main/java/org/yipuran/xml/XmlParserImpl.java | package org.yipuran.xml;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.List;
import java.util.Locale;
import java.util.Properties;
import java.util.ResourceBundle;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
/**
* XmlParser 実装クラス.
* 解析対象 XMLファイル baseName と、AbstractXmlHandler インスタンスをコンストラクタで受け取る。
* @param <T> XML解析結果オブジェクトGeneric
*/
/**
 * XmlParser implementation.
 * Receives the base name of the XML resource to parse and an
 * {@link AbstractXmlHandler} instance via the constructor. Parsing is driven
 * through the {@link ResourceBundle} machinery so the result can be cached
 * according to {@code cacheTime}.
 * (Fixed: misspelled private field {@code steam} renamed to {@code stream};
 * comments translated to English.)
 * @param <T> type of the parse result produced by the handler
 */
final class XmlParserImpl<T> implements XmlParser{
    private String baseName;
    AbstractXmlHandler<T> handler;
    long cacheTime;

    /**
     * Constructor (no caching: TTL_DONT_CACHE).
     * @param baseName resource base name of the XML file
     * @param handler SAX handler that accumulates the parse result
     */
    protected XmlParserImpl(String baseName, AbstractXmlHandler<T> handler){
        this.baseName = baseName;
        this.handler = handler;
        this.cacheTime = ResourceBundle.Control.TTL_DONT_CACHE;
    }

    /**
     * Constructor with an explicit cache time-to-live.
     * @param baseName resource base name of the XML file
     * @param handler SAX handler that accumulates the parse result
     * @param cacheTime bundle cache TTL in milliseconds (or a
     *        ResourceBundle.Control TTL constant)
     */
    protected XmlParserImpl(String baseName, AbstractXmlHandler<T> handler, long cacheTime){
        this.baseName = baseName;
        this.handler = handler;
        this.cacheTime = cacheTime;
    }

    /*
     * @see XmlParser#parse()
     */
    @Override
    public Object parse(){
        // The bundle itself is intentionally discarded: loading it drives the
        // SAX parse as a side effect, and the handler holds the result.
        ResourceBundle.getBundle(this.baseName , new ResourceBundle.Control(){
            @Override
            public List<String> getFormats(String base_Name){
                if (base_Name==null) throw new NullPointerException();
                return Arrays.asList("xml");
            }
            @Override
            public ResourceBundle newBundle(String base_Name, Locale locale, String format, ClassLoader loader, boolean reload)
            throws IllegalAccessException, InstantiationException, IOException{
                if (base_Name == null || locale == null || format == null || loader == null)
                    throw new NullPointerException();
                if (format.equals("xml")){
                    String bundleName = toBundleName(base_Name, locale);
                    String resourceName = toResourceName(bundleName, format);
                    try(InputStream stream = loader.getResourceAsStream(resourceName)){
                        MXMLparse xmlParse = new MXMLparse(stream);
                        xmlParse.parse();
                    }catch(Exception e){
                        throw new IOException(e);
                    }
                }
                return new DummyBundle();
            }
            @Override
            public long getTimeToLive(String base_Name, Locale llocale){
                if (base_Name==null || llocale==null){
                    throw new NullPointerException();
                }
                return XmlParserImpl.this.cacheTime;
            }
        });
        return this.handler.result();
    }

    //------------------------------------------------
    /**
     * XML parse driver: feeds the resource stream to the outer handler.
     */
    class MXMLparse {
        private InputStream stream;
        /**
         * Constructor.
         * @param stream InputStream of the XML resource
         */
        MXMLparse(InputStream stream){
            this.stream = stream;
        }
        /**
         * Run the SAX parse over the stream with the outer class's handler.
         * @throws Exception on parser configuration or parse errors
         */
        void parse() throws Exception{
            SAXParserFactory factory = SAXParserFactory.newInstance();
            SAXParser saxParser = factory.newSAXParser();
            saxParser.parse(this.stream, XmlParserImpl.this.handler);
        }
    }

    //------------------------------------------------
    /**
     * Empty placeholder bundle returned to satisfy the ResourceBundle API.
     */
    class DummyBundle extends ResourceBundle {
        private Properties props;
        /** Constructor. */
        DummyBundle(){
            this.props = new Properties();
        }
        @Override
        protected Object handleGetObject(String key) {
            return this.props.getProperty(key);
        }
        @Override
        public Enumeration<String> getKeys() {
            return null;
        }
    }
}
|
licaon-kter/atalk-android | aTalk/src/main/java/org/atalk/impl/neomedia/RTPConnectorTCPOutputStream.java | /*
* Jitsi, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license. See terms of license at gnu.org.
*/
package org.atalk.impl.neomedia;
import org.atalk.service.neomedia.RawPacket;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.Socket;
/**
* RTPConnectorOutputStream implementation for TCP protocol.
*
* @author <NAME>
*/
/**
 * TCP-backed {@link RTPConnectorOutputStream}: writes RTP packet payloads
 * straight to a connected TCP socket's output stream.
 *
 * @author Sebastien Vincent
 */
public class RTPConnectorTCPOutputStream extends RTPConnectorOutputStream
{
    /** The connected TCP socket through which packet data is written. */
    private final Socket socket;

    /**
     * Creates an output stream bound to the given TCP socket.
     *
     * @param socket the connected <tt>Socket</tt> to write packet data to
     */
    public RTPConnectorTCPOutputStream(Socket socket)
    {
        this.socket = socket;
    }

    /**
     * Writes the payload bytes of <tt>packet</tt> to the TCP socket. The
     * <tt>target</tt> address is ignored: on a connected TCP socket the peer
     * is fixed at connect time.
     *
     * @param packet the <tt>RawPacket</tt> whose buffer region is written
     * @param target the nominal destination (unused for TCP)
     * @throws IOException if writing to the socket fails
     */
    @Override
    protected void sendToTarget(RawPacket packet, InetSocketAddress target)
        throws IOException
    {
        socket.getOutputStream()
                .write(packet.getBuffer(), packet.getOffset(), packet.getLength());
    }

    /**
     * Reports whether a socket has been supplied to write through.
     *
     * @return <tt>true</tt> when a socket is present, <tt>false</tt> otherwise
     */
    @Override
    protected boolean isSocketValid()
    {
        return socket != null;
    }
}
|
jianlins/easycie | fastner/src/main/java/edu/utah/bmi/nlp/fastner/FastRuleWGN.java | /*
* Copyright 2017 Department of Biomedical Informatics, University of Utah
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.utah.bmi.nlp.fastner;
import edu.utah.bmi.nlp.core.NERRule;
import edu.utah.bmi.nlp.core.Rule;
import edu.utah.bmi.nlp.core.Span;
import edu.utah.bmi.nlp.fastcner.UnicodeChecker;
import org.apache.commons.lang3.math.NumberUtils;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.function.BiFunction;
/**
 * <p>
 * This class is an extension of FastRuleWG with quantitative conditional support.
 * For instance, a rule "\&gt; 3" will match any number greater than 3, and bounds
 * can be chained ("\&gt; 3 \&lt; 4" matches numbers in the open interval (3, 4)).
 * </p>
 *
 * @author <NAME>
 */
public class FastRuleWGN extends FastRuleWG {
    // fields are defined in abstract class

    public FastRuleWGN() {
    }

    public FastRuleWGN(String ruleStr) {
        // support read from OWl file, TSV file or OWL file directory
        super(ruleStr);
    }

    public FastRuleWGN(String ruleStr, boolean caseSensitive) {
        super(ruleStr, caseSensitive);
    }

    public FastRuleWGN(HashMap<Integer, Rule> ruleStore) {
        super(ruleStore);
    }

    /**
     * Recursively matches the tokens of {@code contextTokens} (starting at
     * {@code currentPosition}) against the rule-trie node {@code rule},
     * collecting completed matches into {@code matches}.
     *
     * @param contextTokens   tokens of the context being scanned
     * @param getText         accessor returning a token's text
     * @param getBegin        accessor returning a token's begin offset
     * @param getEnd          accessor returning a token's end offset
     * @param rule            current node of the rule trie
     * @param matchBegin      token index where the (capture-group) match starts
     * @param matchEnd        token index where the match ends, or -1 if the group is not closed yet
     * @param currentPosition index of the token currently examined
     * @param matches         output: rule determinant name -&gt; matched spans
     */
    protected void process(ArrayList<?> contextTokens,
                           BiFunction<ArrayList, Integer, String> getText,
                           BiFunction<ArrayList, Integer, Integer> getBegin,
                           BiFunction<ArrayList, Integer, Integer> getEnd,
                           HashMap rule, int matchBegin, int matchEnd, int currentPosition,
                           HashMap<String, ArrayList<Span>> matches) {
        // when reach the end of the tunedcontext, end the iteration
        if (currentPosition < contextTokens.size()) {
            // start processing the tunedcontext tokens
            String thisToken = getText.apply(contextTokens, currentPosition);
//            System.out.println("thisToken-"+thisToken);
            // "\w+" is a wildcard element: consume this token whatever its text is
            if (rule.containsKey("\\w+")) {
                process(contextTokens, getText, getBegin, getEnd, (HashMap) rule.get("\\w+"),
                        matchBegin, matchEnd, currentPosition + 1, matches);
            }
            // if the end of a rule is met
            if (rule.containsKey(END)) {
                // if no () is used in this definition, use the whole rule string
                addDeterminants(rule, matches, getBegin.apply(contextTokens, matchBegin),
                        getEnd.apply(contextTokens, (matchEnd == -1 ? currentPosition - 1 : matchEnd)));
            }
            // if the current token match the element of a rule
            if (rule.containsKey(thisToken)) {
                process(contextTokens, getText, getBegin, getEnd, (HashMap) rule.get(thisToken),
                        matchBegin, matchEnd, currentPosition + 1, matches);
            }
            // "\d+" matches any numeric token
            if (rule.containsKey("\\d+") && UnicodeChecker.isNumber(thisToken)) {
                process(contextTokens, getText, getBegin, getEnd, (HashMap) rule.get("\\d+"),
                        matchBegin, matchEnd, currentPosition + 1, matches);
            }
            // "\(" opens the capture group here: matchBegin is reset, the token is not consumed
            if (rule.containsKey("\\(")) {
                process(contextTokens, getText, getBegin, getEnd, (HashMap) rule.get("\\("),
                        currentPosition, matchEnd, currentPosition, matches);
            }
            // "\)" closes the capture group at the previous token; the token is not consumed
            if (rule.containsKey("\\)")) {
                process(contextTokens, getText, getBegin, getEnd, (HashMap) rule.get("\\)"),
                        matchBegin, currentPosition - 1, currentPosition, matches);
            }
            // numeric comparison branches: processNumerics advances past the token on success
            if (rule.containsKey("\\>") && UnicodeChecker.isNumber(thisToken)) {
                processNumerics(contextTokens, getText, getBegin, getEnd, (HashMap) rule.get("\\>"),
                        matchBegin, matchEnd, currentPosition, matches,
                        thisToken, true);
            }
            if (rule.containsKey("\\<") && UnicodeChecker.isNumber(thisToken)) {
                processNumerics(contextTokens, getText, getBegin, getEnd, (HashMap) rule.get("\\<"),
                        matchBegin, matchEnd, currentPosition, matches,
                        thisToken, false);
            }
        } else if (currentPosition == contextTokens.size() && rule.containsKey(END)) {
            // ran off the last token with a rule ending here: finalize the match
            // if no () is used in this definition, use the whole rule string
            matchEnd = matchEnd == -1 ? currentPosition - 1 : matchEnd;
            addDeterminants(rule, matches, getBegin.apply(contextTokens, matchBegin), getEnd.apply(contextTokens, matchEnd));
        }
    }

    /**
     * Evaluates a numeric comparison branch ("\&gt;" or "\&lt;") of the rule trie.
     * Each key under {@code rule} is a numeric threshold (as a string); when the
     * current token satisfies the comparison, matching continues past the token.
     * Chained bounds ("\&gt; 3 \&lt; 4") are handled by recursing into the
     * opposite comparator before continuing.
     *
     * @param numericToken the current token, already verified to be a number
     * @param greaterThan  true to test token &gt; threshold, false for token &lt; threshold
     */
    protected void processNumerics(ArrayList<?> contextTokens,
                                   BiFunction<ArrayList, Integer, String> getText,
                                   BiFunction<ArrayList, Integer, Integer> getBegin,
                                   BiFunction<ArrayList, Integer, Integer> getEnd,
                                   HashMap rule, int matchBegin, int matchEnd, int currentPosition,
                                   HashMap<String, ArrayList<Span>> matches, String numericToken, boolean greaterThan) {
        Double num = NumberUtils.createDouble(numericToken.trim());
        // every key at this trie level is a threshold value for the comparison
        for (Object ruleValue : rule.keySet()) {
            Double ruleNumValue = NumberUtils.createDouble((String) ruleValue);
            if (greaterThan && num > ruleNumValue) {
                // if has a rule like "\> 3 \< 4"
                if (((HashMap) rule.get(ruleValue)).containsKey("\\<")) {
                    processNumerics(contextTokens, getText, getBegin, getEnd, (HashMap) ((HashMap) rule.get(ruleValue)).get("\\<"),
                            matchBegin, matchEnd, currentPosition, matches,
                            numericToken, false);
                }
                // if followed by ordinary rule elements
                process(contextTokens, getText, getBegin, getEnd, (HashMap) rule.get(ruleValue),
                        matchBegin, matchEnd, currentPosition + 1, matches);
            } else if (!greaterThan && num < ruleNumValue) {
                // if has a rule like "\< 6 \> 4"
                if (((HashMap) rule.get(ruleValue)).containsKey("\\>")) {
                    processNumerics(contextTokens, getText, getBegin, getEnd, (HashMap) ((HashMap) rule.get(ruleValue)).get("\\>"),
                            matchBegin, matchEnd, currentPosition, matches,
                            numericToken, true);
                }
                // if followed by ordinary rule elements
                process(contextTokens, getText, getBegin, getEnd, (HashMap) rule.get(ruleValue),
                        matchBegin, matchEnd, currentPosition + 1, matches);
            }
        }
    }
}
|
jasstionzyf/hbase | hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java | <filename>hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.util.List;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClusterMetrics;
import org.apache.hadoop.hbase.MetaMutationAnnotation;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.MasterSwitchType;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.SharedConnection;
import org.apache.hadoop.hbase.client.SnapshotDescription;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.coprocessor.BaseEnvironment;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.coprocessor.CoreCoprocessor;
import org.apache.hadoop.hbase.coprocessor.HasMasterServices;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.MasterObserver;
import org.apache.hadoop.hbase.coprocessor.MetricsCoprocessor;
import org.apache.hadoop.hbase.master.locking.LockProcedure;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.metrics.MetricRegistry;
import org.apache.hadoop.hbase.net.Address;
import org.apache.hadoop.hbase.procedure2.LockType;
import org.apache.hadoop.hbase.procedure2.LockedResource;
import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.quotas.GlobalQuotaSettings;
import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
import org.apache.hadoop.hbase.replication.SyncReplicationState;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.access.Permission;
import org.apache.hadoop.hbase.security.access.UserPermission;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.protobuf.Service;
/**
* Provides the coprocessor framework and environment for master oriented
* operations. {@link HMaster} interacts with the loaded coprocessors
* through this class.
*/
@InterfaceAudience.Private
public class MasterCoprocessorHost
extends CoprocessorHost<MasterCoprocessor, MasterCoprocessorEnvironment> {
private static final Logger LOG = LoggerFactory.getLogger(MasterCoprocessorHost.class);
/**
 * Coprocessor environment extension providing access to master related
 * services.
 */
private static class MasterEnvironment extends BaseEnvironment<MasterCoprocessor>
    implements MasterCoprocessorEnvironment {
  // Per-coprocessor metric registry; created at construction, removed on shutdown.
  private final MetricRegistry metricRegistry;
  // Master facade backing the environment queries below.
  private final MasterServices services;

  public MasterEnvironment(final MasterCoprocessor impl, final int priority, final int seq,
      final Configuration conf, final MasterServices services) {
    super(impl, priority, seq, conf);
    this.services = services;
    this.metricRegistry =
        MetricsCoprocessor.createRegistryForMasterCoprocessor(impl.getClass().getName());
  }

  /** @return the server name of the master hosting this environment */
  @Override
  public ServerName getServerName() {
    return this.services.getServerName();
  }

  /** @return the master's connection, wrapped in a SharedConnection */
  @Override
  public Connection getConnection() {
    return new SharedConnection(this.services.getConnection());
  }

  /** @return a new connection created from {@code conf} via the master services */
  @Override
  public Connection createConnection(Configuration conf) throws IOException {
    return this.services.createConnection(conf);
  }

  @Override
  public MetricRegistry getMetricRegistryForMaster() {
    return metricRegistry;
  }

  /** Removes this environment's metric registry in addition to the base cleanup. */
  @Override
  public void shutdown() {
    super.shutdown();
    MetricsCoprocessor.removeRegistry(this.metricRegistry);
  }
}
/**
 * Special version of MasterEnvironment that exposes MasterServices for Core Coprocessors only.
 * Temporary hack until Core Coprocessors are integrated into Core.
 */
private static class MasterEnvironmentForCoreCoprocessors extends MasterEnvironment
    implements HasMasterServices {
  // Unwrapped master services; createEnvironment hands this environment only to
  // implementations annotated with @CoreCoprocessor.
  private final MasterServices masterServices;

  public MasterEnvironmentForCoreCoprocessors(final MasterCoprocessor impl, final int priority,
      final int seq, final Configuration conf, final MasterServices services) {
    super(impl, priority, seq, conf, services);
    this.masterServices = services;
  }

  /**
   * @return An instance of MasterServices, an object NOT for general user-space Coprocessor
   * consumption.
   */
  @Override
  public MasterServices getMasterServices() {
    return this.masterServices;
  }
}
// The active master; backs service registration and every environment created below.
private MasterServices masterServices;

/**
 * Builds the host and loads the system (configuration-declared) master coprocessors.
 *
 * @param services the master services implementation backing all environments
 * @param conf cluster configuration, read for coprocessor class names and the enabled flag
 */
public MasterCoprocessorHost(final MasterServices services, final Configuration conf) {
  super(services);
  this.conf = conf;
  this.masterServices = services;
  // Log the state of coprocessor loading here; should appear only once or
  // twice in the daemon log, depending on HBase version, because there is
  // only one MasterCoprocessorHost instance in the master process
  boolean coprocessorsEnabled = conf.getBoolean(COPROCESSORS_ENABLED_CONF_KEY,
      DEFAULT_COPROCESSORS_ENABLED);
  LOG.trace("System coprocessor loading is {}", (coprocessorsEnabled ? "enabled" : "disabled"));
  loadSystemCoprocessors(conf, MASTER_COPROCESSOR_CONF_KEY);
}
/**
 * Creates the environment for one coprocessor instance, first registering any
 * RPC services the coprocessor exposes with the master.
 *
 * @return a MasterEnvironmentForCoreCoprocessors (with raw MasterServices access)
 *         when the implementation is annotated with {@code @CoreCoprocessor},
 *         otherwise a plain MasterEnvironment
 */
@Override
public MasterEnvironment createEnvironment(final MasterCoprocessor instance, final int priority,
    final int seq, final Configuration conf) {
  // If coprocessor exposes any services, register them.
  for (Service service : instance.getServices()) {
    masterServices.registerService(service);
  }
  // If a CoreCoprocessor, return a 'richer' environment, one laden with MasterServices.
  return instance.getClass().isAnnotationPresent(CoreCoprocessor.class)?
      new MasterEnvironmentForCoreCoprocessors(instance, priority, seq, conf, masterServices):
      new MasterEnvironment(instance, priority, seq, conf, masterServices);
}
/**
 * Instantiates {@code implClass} if (and only if) it implements MasterCoprocessor.
 *
 * @return a new instance, or null (with an error logged) when the class has the wrong type
 * @throws InstantiationException when reflective construction fails (original cause chained)
 */
@Override
public MasterCoprocessor checkAndGetInstance(Class<?> implClass)
    throws InstantiationException, IllegalAccessException {
  try {
    if (MasterCoprocessor.class.isAssignableFrom(implClass)) {
      return implClass.asSubclass(MasterCoprocessor.class).getDeclaredConstructor().newInstance();
    } else {
      LOG.error("{} is not of type MasterCoprocessor. Check the configuration of {}",
          implClass.getName(), CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY);
      return null;
    }
  } catch (NoSuchMethodException | InvocationTargetException e) {
    // Re-wrap reflection failures as the declared InstantiationException, keeping the cause.
    throw (InstantiationException) new InstantiationException(implClass.getName()).initCause(e);
  }
}
// Extracts the MasterObserver from a coprocessor; shared by every observer operation below.
private ObserverGetter<MasterCoprocessor, MasterObserver> masterObserverGetter =
    MasterCoprocessor::getMasterObserver;

/**
 * Base class for the anonymous operations built by the hook methods below;
 * binds the observer getter and, optionally, the acting user and bypassability.
 */
abstract class MasterObserverOperation extends
    ObserverOperationWithoutResult<MasterObserver> {
  public MasterObserverOperation(){
    super(masterObserverGetter);
  }

  public MasterObserverOperation(boolean bypassable) {
    this(null, bypassable);
  }

  public MasterObserverOperation(User user) {
    super(masterObserverGetter, user);
  }

  public MasterObserverOperation(User user, boolean bypassable) {
    super(masterObserverGetter, user, bypassable);
  }
}
//////////////////////////////////////////////////////////////////////////////////////////////////
// MasterObserver operations
//////////////////////////////////////////////////////////////////////////////////////////////////
// Each hook below builds an operation only when at least one coprocessor environment is
// loaded, then delegates to execOperation, which runs the matching observer callback.

/** Invoked before a namespace is created. */
public void preCreateNamespace(final NamespaceDescriptor ns) throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.preCreateNamespace(this, ns);
    }
  });
}

/** Invoked after a namespace has been created. */
public void postCreateNamespace(final NamespaceDescriptor ns) throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.postCreateNamespace(this, ns);
    }
  });
}

/** Invoked before a namespace is deleted. */
public void preDeleteNamespace(final String namespaceName) throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.preDeleteNamespace(this, namespaceName);
    }
  });
}

/** Invoked after a namespace has been deleted. */
public void postDeleteNamespace(final String namespaceName) throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.postDeleteNamespace(this, namespaceName);
    }
  });
}

/** Invoked before a namespace is modified; observers see both descriptors. */
public void preModifyNamespace(final NamespaceDescriptor currentNsDescriptor,
    final NamespaceDescriptor newNsDescriptor) throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.preModifyNamespace(this, currentNsDescriptor, newNsDescriptor);
    }
  });
}

/** Invoked after a namespace has been modified. */
public void postModifyNamespace(final NamespaceDescriptor oldNsDescriptor,
    final NamespaceDescriptor currentNsDescriptor) throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.postModifyNamespace(this, oldNsDescriptor, currentNsDescriptor);
    }
  });
}

/** Invoked before a namespace descriptor is fetched. */
public void preGetNamespaceDescriptor(final String namespaceName)
    throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.preGetNamespaceDescriptor(this, namespaceName);
    }
  });
}

/** Invoked after a namespace descriptor has been fetched. */
public void postGetNamespaceDescriptor(final NamespaceDescriptor ns)
    throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.postGetNamespaceDescriptor(this, ns);
    }
  });
}
/** Invoked before the list of namespaces is returned to a client. */
public void preListNamespaces(final List<String> namespaces) throws IOException {
  final MasterObserverOperation op = coprocEnvironments.isEmpty() ? null
      : new MasterObserverOperation() {
        @Override
        public void call(MasterObserver observer) throws IOException {
          observer.preListNamespaces(this, namespaces);
        }
      };
  execOperation(op);
}
/** Invoked after the list of namespaces has been assembled for a client. */
public void postListNamespaces(final List<String> namespaces) throws IOException {
  final MasterObserverOperation op = coprocEnvironments.isEmpty() ? null
      : new MasterObserverOperation() {
        @Override
        public void call(MasterObserver observer) throws IOException {
          observer.postListNamespaces(this, namespaces);
        }
      };
  execOperation(op);
}
/** Invoked before namespace descriptors are listed. */
public void preListNamespaceDescriptors(final List<NamespaceDescriptor> descriptors)
    throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.preListNamespaceDescriptors(this, descriptors);
    }
  });
}

/** Invoked after namespace descriptors have been listed. */
public void postListNamespaceDescriptors(final List<NamespaceDescriptor> descriptors)
    throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.postListNamespaceDescriptors(this, descriptors);
    }
  });
}
/* Implementation of hooks for invoking MasterObservers */

/**
 * Invoked before region infos are computed for a new table; observers may
 * replace the table descriptor, and the (possibly modified) descriptor is returned.
 */
public TableDescriptor preCreateTableRegionsInfos(TableDescriptor desc) throws IOException {
  if (coprocEnvironments.isEmpty()) {
    return desc;
  }
  return execOperationWithResult(
      new ObserverOperationWithResult<MasterObserver, TableDescriptor>(masterObserverGetter, desc) {
        @Override
        protected TableDescriptor call(MasterObserver observer) throws IOException {
          return observer.preCreateTableRegionsInfos(this, getResult());
        }
      });
}

/** Invoked before a table is created. */
public void preCreateTable(final TableDescriptor htd, final RegionInfo[] regions)
    throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.preCreateTable(this, htd, regions);
    }
  });
}

/** Invoked after a table has been created. */
public void postCreateTable(final TableDescriptor htd, final RegionInfo[] regions)
    throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.postCreateTable(this, htd, regions);
    }
  });
}

/** Invoked before the create-table procedure runs, on behalf of {@code user}. */
public void preCreateTableAction(final TableDescriptor htd, final RegionInfo[] regions,
    final User user) throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation(user) {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.preCreateTableAction(this, htd, regions);
    }
  });
}

/** Invoked after the create-table procedure has completed, on behalf of {@code user}. */
public void postCompletedCreateTableAction(
    final TableDescriptor htd, final RegionInfo[] regions, final User user) throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation(user) {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.postCompletedCreateTableAction(this, htd, regions);
    }
  });
}

/** Invoked before a table is deleted. */
public void preDeleteTable(final TableName tableName) throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.preDeleteTable(this, tableName);
    }
  });
}

/** Invoked after a table has been deleted. */
public void postDeleteTable(final TableName tableName) throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.postDeleteTable(this, tableName);
    }
  });
}

/** Invoked before the delete-table procedure runs, on behalf of {@code user}. */
public void preDeleteTableAction(final TableName tableName, final User user) throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation(user) {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.preDeleteTableAction(this, tableName);
    }
  });
}

/** Invoked after the delete-table procedure has completed, on behalf of {@code user}. */
public void postCompletedDeleteTableAction(final TableName tableName, final User user)
    throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation(user) {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.postCompletedDeleteTableAction(this, tableName);
    }
  });
}
/** Invoked before a table is truncated. */
public void preTruncateTable(final TableName tableName) throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.preTruncateTable(this, tableName);
    }
  });
}

/** Invoked after a table has been truncated. */
public void postTruncateTable(final TableName tableName) throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.postTruncateTable(this, tableName);
    }
  });
}

/** Invoked before the truncate-table procedure runs, on behalf of {@code user}. */
public void preTruncateTableAction(final TableName tableName, final User user)
    throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation(user) {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.preTruncateTableAction(this, tableName);
    }
  });
}

/** Invoked after the truncate-table procedure has completed, on behalf of {@code user}. */
public void postCompletedTruncateTableAction(final TableName tableName, final User user)
    throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation(user) {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.postCompletedTruncateTableAction(this, tableName);
    }
  });
}

/**
 * Invoked before a table is modified; observers may replace the new descriptor,
 * and the (possibly modified) descriptor is returned.
 */
public TableDescriptor preModifyTable(final TableName tableName,
    final TableDescriptor currentDescriptor, final TableDescriptor newDescriptor)
    throws IOException {
  if (coprocEnvironments.isEmpty()) {
    return newDescriptor;
  }
  return execOperationWithResult(
      new ObserverOperationWithResult<MasterObserver, TableDescriptor>(masterObserverGetter,
          newDescriptor) {
        @Override
        protected TableDescriptor call(MasterObserver observer) throws IOException {
          return observer.preModifyTable(this, tableName, currentDescriptor, getResult());
        }
      });
}

/** Invoked after a table has been modified. */
public void postModifyTable(final TableName tableName, final TableDescriptor oldDescriptor,
    final TableDescriptor currentDescriptor) throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.postModifyTable(this, tableName, oldDescriptor, currentDescriptor);
    }
  });
}

/** Invoked before the modify-table procedure runs, on behalf of {@code user}. */
public void preModifyTableAction(final TableName tableName,
    final TableDescriptor currentDescriptor, final TableDescriptor newDescriptor, final User user)
    throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation(user) {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.preModifyTableAction(this, tableName, currentDescriptor, newDescriptor);
    }
  });
}

/** Invoked after the modify-table procedure has completed, on behalf of {@code user}. */
public void postCompletedModifyTableAction(final TableName tableName,
    final TableDescriptor oldDescriptor, final TableDescriptor currentDescriptor, final User user)
    throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation(user) {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.postCompletedModifyTableAction(this, tableName, oldDescriptor, currentDescriptor);
    }
  });
}
/** Invoked before a table is enabled. */
public void preEnableTable(final TableName tableName) throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.preEnableTable(this, tableName);
    }
  });
}

/** Invoked after a table has been enabled. */
public void postEnableTable(final TableName tableName) throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.postEnableTable(this, tableName);
    }
  });
}

/** Invoked before the enable-table procedure runs, on behalf of {@code user}. */
public void preEnableTableAction(final TableName tableName, final User user) throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation(user) {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.preEnableTableAction(this, tableName);
    }
  });
}

/** Invoked after the enable-table procedure has completed, on behalf of {@code user}. */
public void postCompletedEnableTableAction(final TableName tableName, final User user)
    throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation(user) {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.postCompletedEnableTableAction(this, tableName);
    }
  });
}

/** Invoked before a table is disabled. */
public void preDisableTable(final TableName tableName) throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.preDisableTable(this, tableName);
    }
  });
}

/** Invoked after a table has been disabled. */
public void postDisableTable(final TableName tableName) throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.postDisableTable(this, tableName);
    }
  });
}

/** Invoked before the disable-table procedure runs, on behalf of {@code user}. */
public void preDisableTableAction(final TableName tableName, final User user) throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation(user) {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.preDisableTableAction(this, tableName);
    }
  });
}

/** Invoked after the disable-table procedure has completed, on behalf of {@code user}. */
public void postCompletedDisableTableAction(final TableName tableName, final User user)
    throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation(user) {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.postCompletedDisableTableAction(this, tableName);
    }
  });
}
/**
 * Invoked before a procedure is aborted.
 * Note: {@code procEnv} is accepted but not forwarded to the observer callback.
 */
public void preAbortProcedure(
    final ProcedureExecutor<MasterProcedureEnv> procEnv,
    final long procId) throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.preAbortProcedure(this, procId);
    }
  });
}

/** Invoked after a procedure abort has been requested. */
public void postAbortProcedure() throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.postAbortProcedure(this);
    }
  });
}

/** Invoked before the list of procedures is fetched. */
public void preGetProcedures() throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.preGetProcedures(this);
    }
  });
}

/**
 * Invoked after the list of procedures has been fetched.
 * Note: {@code procInfoList} is accepted but not forwarded to the observer callback.
 */
public void postGetProcedures(final List<Procedure<?>> procInfoList) throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.postGetProcedures(this);
    }
  });
}

/** Invoked before the list of locks is fetched. */
public void preGetLocks() throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.preGetLocks(this);
    }
  });
}

/**
 * Invoked after the list of locks has been fetched.
 * Note: {@code lockedResources} is accepted but not forwarded to the observer callback.
 */
public void postGetLocks(final List<LockedResource> lockedResources) throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.postGetLocks(this);
    }
  });
}
/** Invoked before a region is moved between servers. */
public void preMove(final RegionInfo region, final ServerName srcServer,
    final ServerName destServer) throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.preMove(this, region, srcServer, destServer);
    }
  });
}

/** Invoked after a region move has been initiated. */
public void postMove(final RegionInfo region, final ServerName srcServer,
    final ServerName destServer) throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.postMove(this, region, srcServer, destServer);
    }
  });
}

/** Invoked before a region is assigned. */
public void preAssign(final RegionInfo regionInfo) throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.preAssign(this, regionInfo);
    }
  });
}

/** Invoked after a region assignment has been requested. */
public void postAssign(final RegionInfo regionInfo) throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.postAssign(this, regionInfo);
    }
  });
}

/** Invoked before a region is unassigned; {@code force} reflects the client's flag. */
public void preUnassign(final RegionInfo regionInfo, final boolean force)
    throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.preUnassign(this, regionInfo, force);
    }
  });
}

/** Invoked after a region unassignment has been requested. */
public void postUnassign(final RegionInfo regionInfo, final boolean force) throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.postUnassign(this, regionInfo, force);
    }
  });
}

/** Invoked before a region is marked offline. */
public void preRegionOffline(final RegionInfo regionInfo) throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.preRegionOffline(this, regionInfo);
    }
  });
}

/** Invoked after a region has been marked offline. */
public void postRegionOffline(final RegionInfo regionInfo) throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.postRegionOffline(this, regionInfo);
    }
  });
}

/** Invoked before regions are merged. */
public void preMergeRegions(final RegionInfo[] regionsToMerge)
    throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.preMergeRegions(this, regionsToMerge);
    }
  });
}

/** Invoked after a region merge has been requested. */
public void postMergeRegions(final RegionInfo[] regionsToMerge)
    throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.postMergeRegions(this, regionsToMerge);
    }
  });
}
/**
 * Invoked before the balancer runs.
 *
 * @return the boolean reported by execOperation — NOTE(review): presumably
 *         indicates whether the balance should proceed / was not bypassed;
 *         confirm against CoprocessorHost#execOperation.
 */
public boolean preBalance() throws IOException {
  return execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.preBalance(this);
    }
  });
}

/** Invoked after the balancer has produced its region plans. */
public void postBalance(final List<RegionPlan> plans) throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.postBalance(this, plans);
    }
  });
}

/** Invoked before the split/merge switch is flipped to {@code newValue}. */
public void preSetSplitOrMergeEnabled(final boolean newValue,
    final MasterSwitchType switchType) throws IOException {
  execOperation(coprocEnvironments.isEmpty()? null: new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.preSetSplitOrMergeEnabled(this, newValue, switchType);
    }
  });
}

/** Invoked after the split/merge switch has been flipped. */
public void postSetSplitOrMergeEnabled(final boolean newValue,
    final MasterSwitchType switchType) throws IOException {
  execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
    @Override
    public void call(MasterObserver observer) throws IOException {
      observer.postSetSplitOrMergeEnabled(this, newValue, switchType);
    }
  });
}
  /**
   * Invoked just before calling the split region procedure
   * @param tableName the table where the region belongs to
   * @param splitRow the split point
   * @throws IOException if any observer throws
   */
  public void preSplitRegion(
      final TableName tableName,
      final byte[] splitRow) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preSplitRegion(this, tableName, splitRow);
      }
    });
  }

  /**
   * Invoked just before a split
   * @param tableName the table where the region belongs to
   * @param splitRow the split point
   * @param user the user the split procedure runs as
   * @throws IOException if any observer throws
   */
  public void preSplitRegionAction(
      final TableName tableName,
      final byte[] splitRow,
      final User user) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation(user) {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preSplitRegionAction(this, tableName, splitRow);
      }
    });
  }

  /**
   * Invoked just after a split
   * @param regionInfoA the new left-hand daughter region
   * @param regionInfoB the new right-hand daughter region
   * @param user the user the split procedure runs as
   * @throws IOException if any observer throws
   */
  public void postCompletedSplitRegionAction(
      final RegionInfo regionInfoA,
      final RegionInfo regionInfoB,
      final User user) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation(user) {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postCompletedSplitRegionAction(this, regionInfoA, regionInfoB);
      }
    });
  }

  /**
   * This will be called before update META step as part of split table region procedure.
   * @param splitKey the row the region is being split on
   * @param metaEntries mutations observers may add, to be applied to hbase:meta atomically
   *   with the split
   * @param user the user the split procedure runs as
   * @throws IOException if any observer throws
   */
  public void preSplitBeforeMETAAction(
      final byte[] splitKey,
      final List<Mutation> metaEntries,
      final User user) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation(user) {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preSplitRegionBeforeMETAAction(this, splitKey, metaEntries);
      }
    });
  }

  /**
   * This will be called after update META step as part of split table region procedure.
   * @param user the user the split procedure runs as
   * @throws IOException if any observer throws
   */
  public void preSplitAfterMETAAction(final User user) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation(user) {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preSplitRegionAfterMETAAction(this);
      }
    });
  }

  /**
   * Invoked just after the rollback of a failed split
   * @param user the user the split procedure runs as
   * @throws IOException if any observer throws
   */
  public void postRollBackSplitRegionAction(final User user) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation(user) {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postRollBackSplitRegionAction(this);
      }
    });
  }
  /**
   * Invoked just before a merge
   * @param regionsToMerge the regions to merge
   * @param user the user the merge procedure runs as
   * @throws IOException if any observer throws
   */
  public void preMergeRegionsAction(
      final RegionInfo[] regionsToMerge, final User user) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation(user) {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preMergeRegionsAction(this, regionsToMerge);
      }
    });
  }

  /**
   * Invoked after completing merge regions operation
   * @param regionsToMerge the regions to merge
   * @param mergedRegion the new merged region
   * @param user the user the merge procedure runs as
   * @throws IOException if any observer throws
   */
  public void postCompletedMergeRegionsAction(
      final RegionInfo[] regionsToMerge,
      final RegionInfo mergedRegion,
      final User user) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation(user) {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postCompletedMergeRegionsAction(this, regionsToMerge, mergedRegion);
      }
    });
  }

  /**
   * Invoked before merge regions operation writes the new region to hbase:meta
   * @param regionsToMerge the regions to merge
   * @param metaEntries mutations observers may add, to be applied to hbase:meta atomically
   *   with the merge
   * @param user the user the merge procedure runs as
   * @throws IOException if any observer throws
   */
  public void preMergeRegionsCommit(
      final RegionInfo[] regionsToMerge,
      final @MetaMutationAnnotation List<Mutation> metaEntries,
      final User user) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation(user) {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preMergeRegionsCommitAction(this, regionsToMerge, metaEntries);
      }
    });
  }

  /**
   * Invoked after merge regions operation writes the new region to hbase:meta
   * @param regionsToMerge the regions to merge
   * @param mergedRegion the new merged region
   * @param user the user the merge procedure runs as
   * @throws IOException if any observer throws
   */
  public void postMergeRegionsCommit(
      final RegionInfo[] regionsToMerge,
      final RegionInfo mergedRegion,
      final User user) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation(user) {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postMergeRegionsCommitAction(this, regionsToMerge, mergedRegion);
      }
    });
  }

  /**
   * Invoked after rollback merge regions operation
   * @param regionsToMerge the regions to merge
   * @param user the user the merge procedure runs as
   * @throws IOException if any observer throws
   */
  public void postRollBackMergeRegionsAction(
      final RegionInfo[] regionsToMerge, final User user) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation(user) {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postRollBackMergeRegionsAction(this, regionsToMerge);
      }
    });
  }
  // This hook allows Coprocessor change value of balance switch.
  /**
   * Invoked before the balance switch is changed.
   * NOTE(review): this hook uses an explicit empty-check-and-return instead of the
   * {@code isEmpty() ? null : ...} pattern used by sibling hooks; behavior is equivalent
   * (execOperation is skipped entirely when no coprocessors are loaded).
   * @param b the requested new switch value
   */
  public void preBalanceSwitch(final boolean b) throws IOException {
    if (this.coprocEnvironments.isEmpty()) {
      return;
    }
    execOperation(new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preBalanceSwitch(this, b);
      }
    });
  }

  /**
   * Invoked after the balance switch has been changed.
   * @param oldValue the previous switch value
   * @param newValue the new switch value
   */
  public void postBalanceSwitch(final boolean oldValue, final boolean newValue)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postBalanceSwitch(this, oldValue, newValue);
      }
    });
  }
  /**
   * Invoked before cluster shutdown. Uses {@code execShutdown} (not {@code execOperation})
   * so each coprocessor environment is stopped via {@code postEnvCall} after its observer
   * has been notified.
   */
  public void preShutdown() throws IOException {
    // While stopping the cluster all coprocessors method should be executed first then the
    // coprocessor should be cleaned up.
    if (coprocEnvironments.isEmpty()) {
      return;
    }
    execShutdown(new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preShutdown(this);
      }
      @Override
      public void postEnvCall() {
        // invoke coprocessor stop method
        shutdown(this.getEnvironment());
      }
    });
  }

  /**
   * Invoked before the master stops. Like {@link #preShutdown()}, runs the observer hook
   * first and then tears down each coprocessor environment.
   */
  public void preStopMaster() throws IOException {
    // While stopping master all coprocessors method should be executed first then the coprocessor
    // environment should be cleaned up.
    if (coprocEnvironments.isEmpty()) {
      return;
    }
    execShutdown(new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preStopMaster(this);
      }
      @Override
      public void postEnvCall() {
        // invoke coprocessor stop method
        shutdown(this.getEnvironment());
      }
    });
  }
  /** Invoked before master initialization; notifies all loaded MasterObservers. */
  public void preMasterInitialization() throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preMasterInitialization(this);
      }
    });
  }

  /** Invoked after the master has started; notifies all loaded MasterObservers. */
  public void postStartMaster() throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postStartMaster(this);
      }
    });
  }
  /** Invoked before a snapshot of {@code hTableDescriptor}'s table is taken. */
  public void preSnapshot(final SnapshotDescription snapshot,
      final TableDescriptor hTableDescriptor) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preSnapshot(this, snapshot, hTableDescriptor);
      }
    });
  }

  /** Invoked after a snapshot request has been submitted. */
  public void postSnapshot(final SnapshotDescription snapshot,
      final TableDescriptor hTableDescriptor) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postSnapshot(this, snapshot, hTableDescriptor);
      }
    });
  }

  /** Invoked after the snapshot operation has fully completed. */
  public void postCompletedSnapshotAction(SnapshotDescription snapshot,
      TableDescriptor hTableDescriptor) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postCompletedSnapshotAction(this, snapshot, hTableDescriptor);
      }
    });
  }

  /** Invoked before a snapshot is listed. */
  public void preListSnapshot(final SnapshotDescription snapshot) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preListSnapshot(this, snapshot);
      }
    });
  }

  /** Invoked after a snapshot has been listed. */
  public void postListSnapshot(final SnapshotDescription snapshot) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postListSnapshot(this, snapshot);
      }
    });
  }

  /** Invoked before a snapshot is cloned into a new table. */
  public void preCloneSnapshot(final SnapshotDescription snapshot,
      final TableDescriptor hTableDescriptor) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preCloneSnapshot(this, snapshot, hTableDescriptor);
      }
    });
  }

  /** Invoked after a snapshot has been cloned into a new table. */
  public void postCloneSnapshot(final SnapshotDescription snapshot,
      final TableDescriptor hTableDescriptor) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postCloneSnapshot(this, snapshot, hTableDescriptor);
      }
    });
  }

  /** Invoked before a table is restored from a snapshot. */
  public void preRestoreSnapshot(final SnapshotDescription snapshot,
      final TableDescriptor hTableDescriptor) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preRestoreSnapshot(this, snapshot, hTableDescriptor);
      }
    });
  }

  /** Invoked after a table has been restored from a snapshot. */
  public void postRestoreSnapshot(final SnapshotDescription snapshot,
      final TableDescriptor hTableDescriptor) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postRestoreSnapshot(this, snapshot, hTableDescriptor);
      }
    });
  }

  /** Invoked before a snapshot is deleted. */
  public void preDeleteSnapshot(final SnapshotDescription snapshot) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preDeleteSnapshot(this, snapshot);
      }
    });
  }

  /** Invoked after a snapshot has been deleted. */
  public void postDeleteSnapshot(final SnapshotDescription snapshot) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postDeleteSnapshot(this, snapshot);
      }
    });
  }
  /**
   * Invoked before table descriptors are fetched.
   * @param tableNamesList the requested table names (may be filtered by observers)
   * @param descriptors the output list observers may inspect or modify
   * @param regex the name regex used for the request, or null
   */
  public void preGetTableDescriptors(final List<TableName> tableNamesList,
      final List<TableDescriptor> descriptors, final String regex) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preGetTableDescriptors(this, tableNamesList, descriptors, regex);
      }
    });
  }

  /** Invoked after table descriptors have been fetched; observers may filter the results. */
  public void postGetTableDescriptors(final List<TableName> tableNamesList,
      final List<TableDescriptor> descriptors, final String regex) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postGetTableDescriptors(this, tableNamesList, descriptors, regex);
      }
    });
  }

  /** Invoked before table names are listed. */
  public void preGetTableNames(final List<TableDescriptor> descriptors,
      final String regex) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preGetTableNames(this, descriptors, regex);
      }
    });
  }

  /** Invoked after table names have been listed; observers may filter the results. */
  public void postGetTableNames(final List<TableDescriptor> descriptors,
      final String regex) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postGetTableNames(this, descriptors, regex);
      }
    });
  }

  /** Invoked before a table flush is triggered. */
  public void preTableFlush(final TableName tableName) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preTableFlush(this, tableName);
      }
    });
  }

  /** Invoked after a table flush has been triggered. */
  public void postTableFlush(final TableName tableName) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postTableFlush(this, tableName);
      }
    });
  }
  /** Invoked before a global quota is set for {@code user}. */
  public void preSetUserQuota(
      final String user, final GlobalQuotaSettings quotas) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preSetUserQuota(this, user, quotas);
      }
    });
  }

  /** Invoked after a global quota has been set for {@code user}. */
  public void postSetUserQuota(
      final String user, final GlobalQuotaSettings quotas) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postSetUserQuota(this, user, quotas);
      }
    });
  }

  /** Invoked before a per-table quota is set for {@code user} on {@code table}. */
  public void preSetUserQuota(
      final String user, final TableName table, final GlobalQuotaSettings quotas)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preSetUserQuota(this, user, table, quotas);
      }
    });
  }

  /** Invoked after a per-table quota has been set for {@code user} on {@code table}. */
  public void postSetUserQuota(
      final String user, final TableName table, final GlobalQuotaSettings quotas)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postSetUserQuota(this, user, table, quotas);
      }
    });
  }

  /** Invoked before a per-namespace quota is set for {@code user} in {@code namespace}. */
  public void preSetUserQuota(
      final String user, final String namespace, final GlobalQuotaSettings quotas)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preSetUserQuota(this, user, namespace, quotas);
      }
    });
  }

  /** Invoked after a per-namespace quota has been set for {@code user} in {@code namespace}. */
  public void postSetUserQuota(
      final String user, final String namespace, final GlobalQuotaSettings quotas)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postSetUserQuota(this, user, namespace, quotas);
      }
    });
  }

  /** Invoked before a quota is set on {@code table}. */
  public void preSetTableQuota(
      final TableName table, final GlobalQuotaSettings quotas) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preSetTableQuota(this, table, quotas);
      }
    });
  }

  /** Invoked after a quota has been set on {@code table}. */
  public void postSetTableQuota(
      final TableName table, final GlobalQuotaSettings quotas) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postSetTableQuota(this, table, quotas);
      }
    });
  }

  /** Invoked before a quota is set on {@code namespace}. */
  public void preSetNamespaceQuota(
      final String namespace, final GlobalQuotaSettings quotas) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preSetNamespaceQuota(this, namespace, quotas);
      }
    });
  }

  /** Invoked after a quota has been set on {@code namespace}. */
  public void postSetNamespaceQuota(
      final String namespace, final GlobalQuotaSettings quotas) throws IOException{
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postSetNamespaceQuota(this, namespace, quotas);
      }
    });
  }

  /** Invoked before a quota is set on {@code regionServer}. */
  public void preSetRegionServerQuota(final String regionServer, final GlobalQuotaSettings quotas)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preSetRegionServerQuota(this, regionServer, quotas);
      }
    });
  }

  /** Invoked after a quota has been set on {@code regionServer}. */
  public void postSetRegionServerQuota(final String regionServer, final GlobalQuotaSettings quotas)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postSetRegionServerQuota(this, regionServer, quotas);
      }
    });
  }
  /** Invoked before servers and tables are moved to RSGroup {@code targetGroup}. */
  public void preMoveServersAndTables(final Set<Address> servers, final Set<TableName> tables,
      final String targetGroup) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preMoveServersAndTables(this, servers, tables, targetGroup);
      }
    });
  }

  /** Invoked after servers and tables have been moved to RSGroup {@code targetGroup}. */
  public void postMoveServersAndTables(final Set<Address> servers, final Set<TableName> tables,
      final String targetGroup) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postMoveServersAndTables(this, servers, tables, targetGroup);
      }
    });
  }

  /** Invoked before servers are moved to RSGroup {@code targetGroup}. */
  public void preMoveServers(final Set<Address> servers, final String targetGroup)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preMoveServers(this, servers, targetGroup);
      }
    });
  }

  /** Invoked after servers have been moved to RSGroup {@code targetGroup}. */
  public void postMoveServers(final Set<Address> servers, final String targetGroup)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postMoveServers(this, servers, targetGroup);
      }
    });
  }

  /** Invoked before tables are moved to RSGroup {@code targetGroup}. */
  public void preMoveTables(final Set<TableName> tables, final String targetGroup)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preMoveTables(this, tables, targetGroup);
      }
    });
  }

  /** Invoked after tables have been moved to RSGroup {@code targetGroup}. */
  public void postMoveTables(final Set<TableName> tables, final String targetGroup)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postMoveTables(this, tables, targetGroup);
      }
    });
  }

  /** Invoked before an RSGroup named {@code name} is added. */
  public void preAddRSGroup(final String name)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preAddRSGroup(this, name);
      }
    });
  }

  /** Invoked after an RSGroup named {@code name} has been added. */
  public void postAddRSGroup(final String name)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postAddRSGroup(this, name);
      }
    });
  }

  /** Invoked before the RSGroup named {@code name} is removed. */
  public void preRemoveRSGroup(final String name)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preRemoveRSGroup(this, name);
      }
    });
  }

  /** Invoked after the RSGroup named {@code name} has been removed. */
  public void postRemoveRSGroup(final String name)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postRemoveRSGroup(this, name);
      }
    });
  }

  /** Invoked before the RSGroup named {@code name} is balanced. */
  public void preBalanceRSGroup(final String name)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preBalanceRSGroup(this, name);
      }
    });
  }

  /**
   * Invoked after the RSGroup named {@code name} has been balanced.
   * @param balanceRan whether the balance operation actually executed
   */
  public void postBalanceRSGroup(final String name, final boolean balanceRan)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postBalanceRSGroup(this, name, balanceRan);
      }
    });
  }

  /** Invoked before servers are removed from RSGroup membership. */
  public void preRemoveServers(final Set<Address> servers)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preRemoveServers(this, servers);
      }
    });
  }

  /** Invoked after servers have been removed from RSGroup membership. */
  public void postRemoveServers(final Set<Address> servers)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postRemoveServers(this, servers);
      }
    });
  }

  /** Invoked before RSGroup info for {@code groupName} is fetched. */
  public void preGetRSGroupInfo(final String groupName) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preGetRSGroupInfo(this, groupName);
      }
    });
  }

  /** Invoked after RSGroup info for {@code groupName} has been fetched. */
  public void postGetRSGroupInfo(final String groupName) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postGetRSGroupInfo(this, groupName);
      }
    });
  }

  /** Invoked before the RSGroup of {@code tableName} is looked up. */
  public void preGetRSGroupInfoOfTable(final TableName tableName) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preGetRSGroupInfoOfTable(this, tableName);
      }
    });
  }

  /** Invoked after the RSGroup of {@code tableName} has been looked up. */
  public void postGetRSGroupInfoOfTable(final TableName tableName) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postGetRSGroupInfoOfTable(this, tableName);
      }
    });
  }

  /** Invoked before RSGroups are listed. */
  public void preListRSGroups() throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preListRSGroups(this);
      }
    });
  }

  /** Invoked after RSGroups have been listed. */
  public void postListRSGroups() throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postListRSGroups(this);
      }
    });
  }

  /** Invoked before the tables in RSGroup {@code groupName} are listed. */
  public void preListTablesInRSGroup(final String groupName) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      protected void call(MasterObserver observer) throws IOException {
        observer.preListTablesInRSGroup(this, groupName);
      }
    });
  }

  /** Invoked after the tables in RSGroup {@code groupName} have been listed. */
  public void postListTablesInRSGroup(final String groupName) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      protected void call(MasterObserver observer) throws IOException {
        observer.postListTablesInRSGroup(this, groupName);
      }
    });
  }

  /** Invoked before the configured namespaces/tables of RSGroup {@code groupName} are fetched. */
  public void preGetConfiguredNamespacesAndTablesInRSGroup(final String groupName)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      protected void call(MasterObserver observer) throws IOException {
        observer.preGetConfiguredNamespacesAndTablesInRSGroup(this, groupName);
      }
    });
  }

  /** Invoked after the configured namespaces/tables of RSGroup {@code groupName} are fetched. */
  public void postGetConfiguredNamespacesAndTablesInRSGroup(final String groupName)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      protected void call(MasterObserver observer) throws IOException {
        observer.postGetConfiguredNamespacesAndTablesInRSGroup(this, groupName);
      }
    });
  }

  /** Invoked before the RSGroup of {@code server} is looked up. */
  public void preGetRSGroupInfoOfServer(final Address server) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preGetRSGroupInfoOfServer(this, server);
      }
    });
  }

  /** Invoked after the RSGroup of {@code server} has been looked up. */
  public void postGetRSGroupInfoOfServer(final Address server) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postGetRSGroupInfoOfServer(this, server);
      }
    });
  }
  /** Invoked before replication peer {@code peerId} is added. */
  public void preAddReplicationPeer(final String peerId, final ReplicationPeerConfig peerConfig)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preAddReplicationPeer(this, peerId, peerConfig);
      }
    });
  }

  /** Invoked after replication peer {@code peerId} has been added. */
  public void postAddReplicationPeer(final String peerId, final ReplicationPeerConfig peerConfig)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postAddReplicationPeer(this, peerId, peerConfig);
      }
    });
  }

  /** Invoked before replication peer {@code peerId} is removed. */
  public void preRemoveReplicationPeer(final String peerId) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preRemoveReplicationPeer(this, peerId);
      }
    });
  }

  /** Invoked after replication peer {@code peerId} has been removed. */
  public void postRemoveReplicationPeer(final String peerId) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postRemoveReplicationPeer(this, peerId);
      }
    });
  }

  /** Invoked before replication peer {@code peerId} is enabled. */
  public void preEnableReplicationPeer(final String peerId) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preEnableReplicationPeer(this, peerId);
      }
    });
  }

  /** Invoked after replication peer {@code peerId} has been enabled. */
  public void postEnableReplicationPeer(final String peerId) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postEnableReplicationPeer(this, peerId);
      }
    });
  }

  /** Invoked before replication peer {@code peerId} is disabled. */
  public void preDisableReplicationPeer(final String peerId) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preDisableReplicationPeer(this, peerId);
      }
    });
  }

  /** Invoked after replication peer {@code peerId} has been disabled. */
  public void postDisableReplicationPeer(final String peerId) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postDisableReplicationPeer(this, peerId);
      }
    });
  }

  /** Invoked before the config of replication peer {@code peerId} is fetched. */
  public void preGetReplicationPeerConfig(final String peerId) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preGetReplicationPeerConfig(this, peerId);
      }
    });
  }

  /** Invoked after the config of replication peer {@code peerId} has been fetched. */
  public void postGetReplicationPeerConfig(final String peerId) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postGetReplicationPeerConfig(this, peerId);
      }
    });
  }

  /** Invoked before the config of replication peer {@code peerId} is updated. */
  public void preUpdateReplicationPeerConfig(final String peerId,
      final ReplicationPeerConfig peerConfig) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preUpdateReplicationPeerConfig(this, peerId, peerConfig);
      }
    });
  }

  /** Invoked after the config of replication peer {@code peerId} has been updated. */
  public void postUpdateReplicationPeerConfig(final String peerId,
      final ReplicationPeerConfig peerConfig) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postUpdateReplicationPeerConfig(this, peerId, peerConfig);
      }
    });
  }

  /** Invoked before replication peers matching {@code regex} are listed. */
  public void preListReplicationPeers(final String regex) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preListReplicationPeers(this, regex);
      }
    });
  }

  /** Invoked after replication peers matching {@code regex} have been listed. */
  public void postListReplicationPeers(final String regex) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postListReplicationPeers(this, regex);
      }
    });
  }

  /** Invoked before peer {@code peerId} transitions to sync replication state {@code state}. */
  public void preTransitReplicationPeerSyncReplicationState(String peerId,
      SyncReplicationState state) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preTransitReplicationPeerSyncReplicationState(this, peerId, state);
      }
    });
  }

  /** Invoked after peer {@code peerId} transitioned sync replication state {@code from} -> {@code to}. */
  public void postTransitReplicationPeerSyncReplicationState(String peerId,
      SyncReplicationState from, SyncReplicationState to) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postTransitReplicationPeerSyncReplicationState(this, peerId, from, to);
      }
    });
  }
public void preRequestLock(String namespace, TableName tableName, RegionInfo[] regionInfos,
LockType type, String description) throws IOException {
execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
@Override
public void call(MasterObserver observer) throws IOException {
observer.preRequestLock(this, namespace, tableName, regionInfos, description);
}
});
}
public void postRequestLock(String namespace, TableName tableName, RegionInfo[] regionInfos,
LockType type, String description) throws IOException {
execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
@Override
public void call(MasterObserver observer) throws IOException {
observer.postRequestLock(this, namespace, tableName, regionInfos, description);
}
});
}
  /**
   * Notifies all master observers before a lock heartbeat; only the procedure's table name and
   * description are forwarded ({@code keepAlive} is not part of the observer callback).
   */
  public void preLockHeartbeat(LockProcedure proc, boolean keepAlive) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preLockHeartbeat(this, proc.getTableName(), proc.getDescription());
      }
    });
  }
  /** Notifies all master observers after a lock heartbeat. */
  public void postLockHeartbeat(LockProcedure proc, boolean keepAlive) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postLockHeartbeat(this);
      }
    });
  }
  /** Notifies all master observers before cluster metrics are fetched. */
  public void preGetClusterMetrics() throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preGetClusterMetrics(this);
      }
    });
  }
  /** Notifies all master observers after cluster metrics were fetched. */
  public void postGetClusterMetrics(ClusterMetrics status) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postGetClusterMetrics(this, status);
      }
    });
  }
  /** Notifies all master observers before the dead-server list is cleared. */
  public void preClearDeadServers() throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preClearDeadServers(this);
      }
    });
  }
  /** Notifies all master observers after dead servers were cleared (with the servers left uncleared). */
  public void postClearDeadServers(List<ServerName> servers,
      List<ServerName> notClearedServers) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postClearDeadServers(this, servers, notClearedServers);
      }
    });
  }
  /** Notifies all master observers before the given region servers are decommissioned. */
  public void preDecommissionRegionServers(List<ServerName> servers, boolean offload)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preDecommissionRegionServers(this, servers, offload);
      }
    });
  }
  /** Notifies all master observers after the given region servers were decommissioned. */
  public void postDecommissionRegionServers(List<ServerName> servers, boolean offload)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postDecommissionRegionServers(this, servers, offload);
      }
    });
  }
  /** Notifies all master observers before decommissioned region servers are listed. */
  public void preListDecommissionedRegionServers() throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preListDecommissionedRegionServers(this);
      }
    });
  }
  /** Notifies all master observers after decommissioned region servers were listed. */
  public void postListDecommissionedRegionServers() throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postListDecommissionedRegionServers(this);
      }
    });
  }
  /** Notifies all master observers before {@code server} is recommissioned with the given regions. */
  public void preRecommissionRegionServer(ServerName server, List<byte[]> encodedRegionNames)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preRecommissionRegionServer(this, server, encodedRegionNames);
      }
    });
  }
  /** Notifies all master observers after {@code server} was recommissioned. */
  public void postRecommissionRegionServer(ServerName server, List<byte[]> encodedRegionNames)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postRecommissionRegionServer(this, server, encodedRegionNames);
      }
    });
  }
  /** Notifies all master observers before the RPC throttle is switched on/off. */
  public void preSwitchRpcThrottle(boolean enable) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null :new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preSwitchRpcThrottle(this, enable);
      }
    });
  }
  /** Notifies all master observers after the RPC throttle switched from {@code oldValue} to {@code newValue}. */
  public void postSwitchRpcThrottle(final boolean oldValue, final boolean newValue)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postSwitchRpcThrottle(this, oldValue, newValue);
      }
    });
  }
  /** Notifies all master observers before the RPC throttle enabled-state is queried. */
  public void preIsRpcThrottleEnabled() throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preIsRpcThrottleEnabled(this);
      }
    });
  }
  /** Notifies all master observers after the RPC throttle enabled-state was queried. */
  public void postIsRpcThrottleEnabled(boolean enabled) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postIsRpcThrottleEnabled(this, enabled);
      }
    });
  }
  /** Notifies all master observers before the exceed-throttle quota is switched on/off. */
  public void preSwitchExceedThrottleQuota(boolean enable) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preSwitchExceedThrottleQuota(this, enable);
      }
    });
  }
  /** Notifies all master observers after the exceed-throttle quota switched from {@code oldValue} to {@code newValue}. */
  public void postSwitchExceedThrottleQuota(final boolean oldValue, final boolean newValue)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postSwitchExceedThrottleQuota(this, oldValue, newValue);
      }
    });
  }
  /** Notifies all master observers before a permission grant. */
  public void preGrant(UserPermission userPermission, boolean mergeExistingPermissions)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preGrant(this, userPermission, mergeExistingPermissions);
      }
    });
  }
  /** Notifies all master observers after a permission grant. */
  public void postGrant(UserPermission userPermission, boolean mergeExistingPermissions)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postGrant(this, userPermission, mergeExistingPermissions);
      }
    });
  }
  /** Notifies all master observers before a permission revoke. */
  public void preRevoke(UserPermission userPermission) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preRevoke(this, userPermission);
      }
    });
  }
  /** Notifies all master observers after a permission revoke. */
  public void postRevoke(UserPermission userPermission) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postRevoke(this, userPermission);
      }
    });
  }
  /** Notifies all master observers before user permissions are fetched. */
  public void preGetUserPermissions(String userName, String namespace, TableName tableName,
      byte[] family, byte[] qualifier) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preGetUserPermissions(this, userName, namespace, tableName, family, qualifier);
      }
    });
  }
  /** Notifies all master observers after user permissions were fetched. */
  public void postGetUserPermissions(String userName, String namespace, TableName tableName,
      byte[] family, byte[] qualifier) throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postGetUserPermissions(this, userName, namespace, tableName, family, qualifier);
      }
    });
  }
  /** Notifies all master observers before a has-user-permissions check. */
  public void preHasUserPermissions(String userName, List<Permission> permissions)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.preHasUserPermissions(this, userName, permissions);
      }
    });
  }
  /** Notifies all master observers after a has-user-permissions check. */
  public void postHasUserPermissions(String userName, List<Permission> permissions)
      throws IOException {
    execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
      @Override
      public void call(MasterObserver observer) throws IOException {
        observer.postHasUserPermissions(this, userName, permissions);
      }
    });
  }
}
|
ASM717/cpp_intensive | module05/ex01/Bureaucrat.cpp | <filename>module05/ex01/Bureaucrat.cpp
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* Bureaucrat.cpp :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: amuriel <<EMAIL>> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2021/10/01 11:20:15 by amuriel #+# #+# */
/* Updated: 2021/10/07 20:37:42 by amuriel ### ########.fr */
/* */
/* ************************************************************************** */
#include "Bureaucrat.hpp"
// Default constructor: start at the lowest valid grade (150). The original
// left m_grade uninitialized, so reading it (getGrade, operator<<) was
// undefined behavior until setGrade was called.
Bureaucrat::Bureaucrat() : m_grade(150) {}
// Construct a bureaucrat with the given name and grade.
// Valid grades run from 1 (highest) to 150 (lowest); anything outside that
// range throws the matching exception.
Bureaucrat::Bureaucrat(std::string const &name, int grade)
	: m_name(name), m_grade(grade)
{
	if (m_grade < 1)
		throw GradeTooHighException();
	if (m_grade > 150)
		throw GradeTooLowException();
}
// Copy constructor: copies the name via the initializer list, then reuses
// operator= so the copied grade is re-validated.
Bureaucrat::Bureaucrat(Bureaucrat const &ref) : m_name(ref.m_name)
{
	*this = ref;
}
// Copy assignment: only the grade is assigned (the name is fixed at
// construction). Delegates to setGrade, which performs exactly the same
// assignment-then-range-check as before. Self-assignment is a no-op.
Bureaucrat &Bureaucrat::operator=(Bureaucrat const &ref)
{
	if (this != &ref)
		setGrade(ref.m_grade);
	return *this;
}
// Destructor: nothing to release.
Bureaucrat::~Bureaucrat(){}
// Returns the bureaucrat's name (set once at construction).
std::string const & Bureaucrat::getName() const
{
	return m_name;
}
// Returns the current grade (1 = highest, 150 = lowest).
int Bureaucrat::getGrade() const
{
	return m_grade;
}
// Sets the grade, throwing if the new value is out of the 1..150 range.
// Note: the member is assigned before validation, so on throw the object is
// left holding the invalid value.
void Bureaucrat::setGrade(int grade)
{
	m_grade = grade;
	if (m_grade < 1)
		throw GradeTooHighException();
	if (m_grade > 150)
		throw GradeTooLowException();
}
// Promotes the bureaucrat: a better grade is a *smaller* number, so only the
// upper bound (grade < 1) can be violated here.
void Bureaucrat::incrementGrade()
{
	m_grade--;
	if (m_grade < 1)
		throw GradeTooHighException();
}
// Demotes the bureaucrat: a worse grade is a *larger* number, so only the
// lower bound (grade > 150) can be violated here.
void Bureaucrat::decrementGrade()
{
	m_grade++;
	if (m_grade > 150)
		throw GradeTooLowException();
}
// Human-readable message for a grade above the valid range (RED is a color
// escape macro prepended to the literal).
const char* Bureaucrat::GradeTooHighException::what() const throw()
{
	return RED"Bureaucrat::GradeTooHighException::Grade too High";
}
// Human-readable message for a grade below the valid range.
const char* Bureaucrat::GradeTooLowException::what() const throw()
{
	return RED"Bureaucrat::GradeTooLowException::Grade too Low";
}
// Attempts to sign *form* and reports the outcome on stdout.
// A bureaucrat may sign when the form is unsigned and its own grade is at
// least as good as the form's required sign grade, i.e. m_grade <= signGrade.
// The original used a strict '>' comparison, which wrongly rejected a
// bureaucrat whose grade exactly equals the requirement (off-by-one).
void Bureaucrat::signForm(Form &form) const
{
	if (!(form.getSigned()) && (form.getSignGrade() >= m_grade))
		std::cout << GREEN << m_name << " signs " << form.getFormName() << std::endl;
	else
		std::cout << YEL << m_name << " cannot sign " << form.getFormName()
		<< " because it's grade is too low or the form is already signed" << std::endl;
	// NOTE(review): this only prints a message; presumably Form::beSigned()
	// should also be called to actually mark the form signed — confirm
	// against Form's public API.
}
// Stream insertion: prints "<name>, bureaucrat grade <grade>" (SKY is a
// color escape macro).
std::ostream &operator<<(std::ostream &out, Bureaucrat const &src)
{
	out << SKY << src.getName() << ", bureaucrat grade " << src.getGrade();
	return (out);
}
|
zjxkenshine/codedemo | java-demo/java-basic-demo/src/main/java/com/kenshine/basic/_01_base/test08_system.java | <gh_stars>1-10
package com.kenshine.basic._01_base;
/**
* @author :kenshine
* @date :Created in 2022/1/8 23:07
* @description:
* @modified By:
* @version: $
*/
public class test08_system {
    public static void main(String[] args) {
        int[] source = {1, 2, 3, 5, 6};
        int[] target = {15, 16, 17, 18, 19};
        /*
         * Overwrite two elements of the target array, starting at index 1,
         * with the two source elements starting at index 2.
         */
        System.arraycopy(source, 2, target, 1, 2);
        // Only the target array is modified; the source stays untouched.
        for (int value : target) {
            System.out.println(value);
        }
    }
}
|
Krunal-K-SimformSolutions/Amplituda | app/src/main/java/linc/com/amplituda/callback/AmplitudaSuccessListener.java | <reponame>Krunal-K-SimformSolutions/Amplituda
package linc.com.amplituda.callback;
import linc.com.amplituda.AmplitudaResult;
/**
 * Callback interface invoked when amplitude processing completes successfully.
 *
 * @param <T> payload type carried by the {@link AmplitudaResult}
 */
public interface AmplitudaSuccessListener<T> {
    /** Called with the finished processing result. */
    void onSuccess(final AmplitudaResult<T> result);
}
|
sldblog/offender-management-allocation-manager | app/models/hmpps_api/movement.rb | <gh_stars>0
# frozen_string_literal: true
module HmppsApi
  # String constants for the direction code on a movement record.
  class MovementDirection
    IN = 'IN'
    OUT = 'OUT'
  end
  # A single offender movement record as returned by the HMPPS (elite2) API.
  class Movement
    include Deserialisable
    attr_accessor :offender_no, :create_date_time,
                  :from_agency, :from_agency_description,
                  :to_agency, :to_agency_description,
                  :from_city, :to_city,
                  :movement_type, :movement_type_description,
                  :direction_code, :movement_time,
                  :movement_reason, :comment_text
    def initialize(fields = {})
      # Allow this object to be reconstituted from a hash, we can't use
      # from_json as the one passed in will already be using the snake case
      # names whereas from_json is expecting the elite2 camelcase names.
      fields.each { |k, v| instance_variable_set("@#{k}", v) }
    end
    # True when the movement originates at a known prison.
    def from_prison?
      PrisonService::PRISONS.include?(from_agency)
    end
    # True when the movement arrives at a known prison.
    def to_prison?
      PrisonService::PRISONS.include?(to_agency)
    end
    # True for temporary-absence movement types.
    def temporary?
      movement_type == HmppsApi::MovementType::TEMPORARY
    end
    # True when the offender is moving out of the "from" location.
    def out?
      direction_code == MovementDirection::OUT
    end
    # Builds a Movement from the camelCase elite2 API payload. Fields read
    # with #fetch are required and raise KeyError when absent; the rest are
    # optional and default to nil.
    def self.from_json(payload)
      Movement.new.tap { |obj|
        obj.offender_no = payload.fetch('offenderNo')
        obj.create_date_time = deserialise_date_and_time(payload, 'createDateTime')
        obj.from_agency = payload['fromAgency']
        obj.from_agency_description = payload['fromAgencyDescription']
        obj.to_agency = payload['toAgency']
        obj.to_agency_description = payload['toAgencyDescription']
        obj.from_city = payload['fromCity']
        obj.to_city = payload['toCity']
        obj.movement_type = payload.fetch('movementType')
        obj.movement_type_description = payload['movementTypeDescription']
        obj.direction_code = payload.fetch('directionCode')
        obj.movement_time = payload['movementTime']
        obj.movement_reason = payload['movementReason']
        obj.comment_text = payload['commentText']
      }
    end
  end
end
|
zoozooll/MyExercise | VV/src/com/beem/project/btf/ui/activity/ClipPictureActivity.java | package com.beem.project.btf.ui.activity;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.HashMap;
import org.json.JSONArray;
import org.json.JSONObject;
import android.app.Activity;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.PointF;
import android.graphics.Rect;
import android.graphics.Bitmap.Config;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.support.v4.app.Fragment;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.View.OnTouchListener;
import android.view.ViewGroup.LayoutParams;
import android.view.ViewTreeObserver;
import android.view.ViewTreeObserver.OnGlobalLayoutListener;
import android.widget.ImageView;
import android.widget.ImageView.ScaleType;
import android.widget.RelativeLayout;
import android.widget.Toast;
import com.beem.project.btf.BeemApplication;
import com.beem.project.btf.R;
import com.beem.project.btf.manager.LoginManager;
import com.beem.project.btf.ui.activity.base.VVBaseActivity;
import com.beem.project.btf.ui.entity.EventBusData;
import com.beem.project.btf.ui.entity.EventBusData.EventAction;
import com.beem.project.btf.ui.entity.EventBusData.IEventBusAction;
import com.beem.project.btf.ui.views.ClipView;
import com.beem.project.btf.ui.views.ClipView.OnDrawListenerComplete;
import com.beem.project.btf.update.UploadUtil;
import com.beem.project.btf.utils.AppProperty;
import com.beem.project.btf.utils.BBSUtils;
import com.beem.project.btf.utils.PictureUtil;
import com.beem.project.btf.utils.ThreadUtils;
import com.beem.project.btf.utils.UIHelper;
import com.butterfly.vv.vv.utils.CToast;
import de.greenrobot.event.EventBus;
/**
 * Activity that lets the user drag/zoom/rotate a source photo under a clip
 * overlay, crop the framed region, and either save it locally or upload it,
 * depending on the {@link ClipType} passed in via the launch intent.
 */
public class ClipPictureActivity extends VVBaseActivity implements OnTouchListener, OnClickListener, IEventBusAction {
	// Intent extra keys.
	private static final String KEY_PHOTO_PATH = "photo_path";
	private static final String KEY_CLIP_TYPE = "CLIP_TYPE";
	private static final String KEY_CLIP_WIDTH = "KEY_CLIP_WIDTH";
	private static final String KEY_CLIP_HEIGHT = "KEY_CLIP_HEIGHT";
	private static final String KEY_CLIP_ROTATE = "KEY_CLIP_ROTATE";
	private ImageView srcPic;
	private ImageView ok_image_camera, cell_image_camera;
	private ClipView clipview;
	private Matrix matrix = new Matrix();
	private Matrix savedMatrix = new Matrix();
	/** Gesture state: none */
	private static final int NONE = 0;
	/** Gesture state: dragging */
	private static final int DRAG = 1;
	/** Gesture state: pinch-zooming */
	private static final int ZOOM = 2;
	/** Current gesture state */
	private int mode = NONE;
	/** Coordinates where the current gesture started */
	private PointF start = new PointF();
	/** Midpoint between the two fingers while zooming */
	private PointF mid = new PointF();
	private float oldDist = 1f;
	private Bitmap bitmap;
	private RelativeLayout relative_layout;
	private String photo_path;
	private Handler mHandler = new Handler();
	// private boolean isregister;
	private static final String TAG = ClipPictureActivity.class.getSimpleName();
	private String savePhoto;
	private UpLoadFileRunnable uploadTask;
	private int clipWidth;
	private int clipHeight;
	private float clipRatio;
	private ClipType clipType;
	private int rotate;
	// ---- Static launch helpers for the various clip configurations. ----
	public static void launch(Fragment fragment, String photo_path) {
		Intent lastIntent = new Intent(fragment.getActivity(), ClipPictureActivity.class);
		lastIntent.putExtra(KEY_PHOTO_PATH, photo_path);
		fragment.startActivity(lastIntent);
	}
	public static void launch(Activity activity, String photo_path, int requestCode) {
		launch(activity, photo_path, requestCode, 1.0f);
	}
	public static void launch(Activity activity, String photo_path, int requestCode, int width, int height) {
		Intent intent = new Intent(activity, ClipPictureActivity.class);
		intent.putExtra(KEY_PHOTO_PATH, photo_path);
		intent.putExtra(KEY_CLIP_WIDTH, width);
		intent.putExtra(KEY_CLIP_HEIGHT, height);
		activity.startActivityForResult(intent, requestCode);
	}
	public static void launch(Activity activity, String photo_path, int requestCode, int width, int height,
			ClipType clipType) {
		Intent intent = new Intent(activity, ClipPictureActivity.class);
		intent.putExtra(KEY_PHOTO_PATH, photo_path);
		intent.putExtra(KEY_CLIP_WIDTH, width);
		intent.putExtra(KEY_CLIP_HEIGHT, height);
		intent.putExtra(KEY_CLIP_TYPE, clipType);
		activity.startActivityForResult(intent, requestCode);
	}
	public static void launch(Activity activity, String photo_path, int requestCode, float clipRotate) {
		Intent intent = new Intent(activity, ClipPictureActivity.class);
		intent.putExtra(KEY_PHOTO_PATH, photo_path);
		intent.putExtra(KEY_CLIP_ROTATE, clipRotate);
		activity.startActivityForResult(intent, requestCode);
	}
	public static void launch(Activity activity, String photo_path, int requestCode, float clipRotate, ClipType clipType) {
		Intent intent = new Intent(activity, ClipPictureActivity.class);
		intent.putExtra(KEY_PHOTO_PATH, photo_path);
		intent.putExtra(KEY_CLIP_ROTATE, clipRotate);
		intent.putExtra(KEY_CLIP_TYPE, clipType);
		activity.startActivityForResult(intent, requestCode);
	}
	public static void launch(Fragment fragment, String photo_path, int requestCode, boolean isRegister) {
		Intent intent = new Intent(fragment.getActivity(), ClipPictureActivity.class);
		intent.putExtra(KEY_PHOTO_PATH, photo_path);
		intent.putExtra(KEY_CLIP_TYPE, ClipType.REGISTER);
		fragment.startActivityForResult(intent, requestCode);
	}
	public static void launch(Fragment fragment, String photo_path, int requestCode, ClipType clipType) {
		Intent intent = new Intent(fragment.getActivity(), ClipPictureActivity.class);
		intent.putExtra(KEY_PHOTO_PATH, photo_path);
		intent.putExtra(KEY_CLIP_TYPE, clipType);
		fragment.startActivityForResult(intent, requestCode);
	}
	public static void launch(Fragment fragment, String photo_path, int requestCode, int width, int height) {
		Intent intent = new Intent(fragment.getActivity(), ClipPictureActivity.class);
		intent.putExtra(KEY_PHOTO_PATH, photo_path);
		intent.putExtra(KEY_CLIP_WIDTH, width);
		intent.putExtra(KEY_CLIP_HEIGHT, height);
		fragment.startActivityForResult(intent, requestCode);
	}
	public static void launch(Fragment fragment, String photo_path, int requestCode, float clipRotate) {
		Intent intent = new Intent(fragment.getActivity(), ClipPictureActivity.class);
		intent.putExtra(KEY_PHOTO_PATH, photo_path);
		intent.putExtra(KEY_CLIP_ROTATE, clipRotate);
		fragment.startActivityForResult(intent, requestCode);
	}
	public static void launch(Activity activity, String photo_path, int requestCode, boolean isRegister) {
		Intent intent = new Intent(activity, ClipPictureActivity.class);
		intent.putExtra(KEY_PHOTO_PATH, photo_path);
		intent.putExtra(KEY_CLIP_TYPE, ClipType.REGISTER);
		activity.startActivityForResult(intent, requestCode);
	}
	public static void launch(Activity activity, String photo_path, int requestCode, ClipType clipType) {
		Intent intent = new Intent(activity, ClipPictureActivity.class);
		intent.putExtra(KEY_PHOTO_PATH, photo_path);
		intent.putExtra(KEY_CLIP_TYPE, clipType);
		activity.startActivityForResult(intent, requestCode);
	}
	public static void launch(Fragment fragment) {
		Intent intent = new Intent(fragment.getActivity(), ClipPictureActivity.class);
		fragment.startActivity(intent);
	}
	@Override
	public void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		setContentView(R.layout.camera_photo_cat_head);
		srcPic = (ImageView) this.findViewById(R.id.src_pic);
		srcPic.setOnTouchListener(this);
		rotate = 0;
		// srcPic.setScaleType(ScaleType.CENTER_INSIDE);
		relative_layout = (RelativeLayout) findViewById(R.id.relative_layout);
		// Read the clip configuration passed by the launch helpers.
		photo_path = getIntent().getStringExtra(KEY_PHOTO_PATH);
		clipWidth = getIntent().getIntExtra(KEY_CLIP_WIDTH, -1);
		clipHeight = getIntent().getIntExtra(KEY_CLIP_HEIGHT, -1);
		clipRatio = getIntent().getFloatExtra(KEY_CLIP_ROTATE, 1.0f);
		clipType = (ClipType) getIntent().getSerializableExtra(KEY_CLIP_TYPE);
		// Defer clip-overlay setup until the layout pass has produced sizes.
		ViewTreeObserver observer = srcPic.getViewTreeObserver();
		observer.addOnGlobalLayoutListener(new OnGlobalLayoutListener() {
			@Override
			public void onGlobalLayout() {
				srcPic.getViewTreeObserver().removeGlobalOnLayoutListener(this);
				initClipView(relative_layout.getHeight());
			}
		});
		ok_image_camera = (ImageView) this.findViewById(R.id.ok_image_camera);
		ok_image_camera.setOnClickListener(this);
		cell_image_camera = (ImageView) this.findViewById(R.id.cell_image_camera);
		cell_image_camera.setOnClickListener(this);
		View imv_rotate = findViewById(R.id.imv_rotate);
		imv_rotate.setOnClickListener(this);
		// Log.i(tag, "onCreate");
		EventBus.getDefault().register(this);
	}
	/**
	 * Initializes the clip overlay and scales/centers the source image to fit
	 * the clip frame. The bitmap is decoded on a worker thread, then the view
	 * work is posted back to the main thread.
	 * @param top height reserved above the clip area (the custom top bar)
	 */
	private void initClipView(final int top) {
		ThreadUtils.executeTask(new Runnable() {
			@Override
			public void run() {
				bitmap = PictureUtil.getBitMapFromPath(mContext, photo_path);
				mHandler.post(new Runnable() {
					@Override
					public void run() {
						clipview = new ClipView(ClipPictureActivity.this);
						clipview.setCustomTopBarHeight(top);
						clipview.setClipWidth(clipWidth);
						clipview.setClipHeight(clipHeight);
						clipview.setClipRatio(clipRatio);
						clipview.addOnDrawCompleteListener(new OnDrawListenerComplete() {
							@Override
							public void onDrawCompelete() {
								clipview.removeOnDrawCompleteListener();
								int clipHeight = clipview.getClipHeight();
								int clipWidth = clipview.getClipWidth();
								int midX = clipview.getClipLeftMargin() + (clipWidth / 2);
								int midY = clipview.getClipTopMargin() + (clipHeight / 2);
								if (bitmap != null) {
									int imageWidth = bitmap.getWidth();
									int imageHeight = bitmap.getHeight();
									Log.i(TAG, "~imageWidth~" + imageWidth + "~imageHeight~" + imageHeight);
									// Compute the scale so the image covers the clip frame.
									float scale = (clipWidth * 1.0f) / imageWidth;
									if (imageWidth > imageHeight) {
										scale = (clipHeight * 1.0f) / imageHeight;
									}
									// Center of the scaled image.
									float imageMidX = imageWidth * scale / 2;
									float imageMidY = +imageHeight * scale / 2;
									srcPic.setScaleType(ScaleType.MATRIX);
									// Scale...
									matrix.postScale(scale, scale);
									// ...then translate so the image is centered in the clip frame.
									matrix.postTranslate(midX - imageMidX, midY - imageMidY);
								}
								// matrix.postTranslate(0,40);
								srcPic.setImageMatrix(matrix);
								srcPic.setImageBitmap(bitmap);
							}
						});
						addContentView(clipview, new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
					}
				});
			}
		});
	}
	/**
	 * Drag / pinch-zoom handler for the source image: single-finger drags
	 * translate the image matrix, two-finger pinches scale it around the
	 * midpoint between the fingers.
	 */
	@Override
	public boolean onTouch(View v, MotionEvent event) {
		ImageView view = (ImageView) v;
		switch (event.getAction() & MotionEvent.ACTION_MASK) {
		case MotionEvent.ACTION_DOWN:
			savedMatrix.set(matrix);
			// Record where the drag started.
			start.set(event.getX(), event.getY());
			mode = DRAG;
			break;
		case MotionEvent.ACTION_POINTER_DOWN:
			oldDist = spacing(event);
			if (oldDist > 10f) {
				savedMatrix.set(matrix);
				midPoint(mid, event);
				mode = ZOOM;
			}
			break;
		case MotionEvent.ACTION_UP:
		case MotionEvent.ACTION_POINTER_UP:
			mode = NONE;
			break;
		case MotionEvent.ACTION_MOVE:
			if (mode == DRAG) {
				matrix.set(savedMatrix);
				matrix.postTranslate(event.getX() - start.x, event.getY() - start.y);
			} else if (mode == ZOOM) {
				float newDist = spacing(event);
				if (newDist > 10f) {
					matrix.set(savedMatrix);
					float scale = newDist / oldDist;
					matrix.postScale(scale, scale, mid.x, mid.y);
				}
			}
			break;
		}
		view.setImageMatrix(matrix);
		return true;
	}
	/**
	 * Distance between the first two pointers of a multi-touch event.
	 * @param event
	 * @return
	 */
	private float spacing(MotionEvent event) {
		float x = event.getX(0) - event.getX(1);
		float y = event.getY(0) - event.getY(1);
		return (float) Math.sqrt(x * x + y * y);
	}
	/**
	 * Stores the midpoint of the first two pointers into {@code point}.
	 * @param point
	 * @param event
	 */
	private void midPoint(PointF point, MotionEvent event) {
		float x = event.getX(0) + event.getX(1);
		float y = event.getY(0) + event.getY(1);
		point.set(x / 2, y / 2);
	}
	@Override
	public void onClick(View v) {
		switch (v.getId()) {
		case R.id.cell_image_camera:// Cancel
			ClipPictureActivity.this.finish();
			break;
		case R.id.ok_image_camera: {
			// Confirm: crop and either upload or hand the result back,
			// depending on the clip type.
			if (clipview == null || !clipview.isDrawComplete()) {
				CToast.showToast(BeemApplication.getContext(), "图片正在加载,请稍后...", Toast.LENGTH_SHORT);
				return;
			}
			if (clipType == ClipType.OTHERS || clipType == null) {
				if (uploadTask == null) {
					String audioAddress = AppProperty.getInstance().VVAPI
							+ AppProperty.getInstance().UPLOAD_PORTRAIT;
					uploadTask = new UpLoadFileRunnable(savePhoto(), audioAddress);
					ThreadUtils.executeTask(uploadTask);
				}
			} else if (clipType == ClipType.REGISTER) {
				savePhoto = savePhoto();
				EventBus.getDefault().post(new EventBusData(EventAction.RegisterCacheImage, savePhoto));
				finish();
			} else if (clipType == ClipType.CLIP_NEWSTV) {
				Bitmap bmp = getBitmap();
				String file = PictureUtil.getClipTempImage().getPath();
				PictureUtil.saveBitmapFile(bmp, file);
				bmp.recycle();
				setResult(RESULT_OK);
				finish();
			}
			break;
		}
		case R.id.imv_rotate: {
			/*Bitmap bmTemp = bitmap;
			Bitmap bmDest = PictureUtil.rotaingImageView(90, bmTemp);
			matrix.reset();
			savedMatrix.reset();
			srcPic.setImageBitmap(bmDest);
			srcPic.setImageMatrix(matrix);
			srcPic.setScaleType(ScaleType.CENTER_INSIDE);
			bitmap = bmDest;*/
			// Rotate the preview view by another 90 degrees.
			rotate += 90;
			srcPic.setRotation(rotate);
		}
			break;
		default:
			break;
		}
	}
	/**
	 * Returns the screenshot cropped to the clip frame: snapshots the whole
	 * window via the drawing cache, then copies out the clip rectangle.
	 * @return
	 */
	private Bitmap getBitmap() {
		Bitmap matricBitmap = drawable2Bitmap(srcPic.getDrawable(), srcPic.getWidth(), srcPic.getHeight());
		PictureUtil.saveBitmapFile(matricBitmap, "/sdcard/matric.jpg");
		// Grab a screenshot of the whole window.
		View view = this.getWindow().getDecorView();
		view.setDrawingCacheEnabled(true);
		view.buildDrawingCache();
		// Offset by the status bar height.
		Rect frame = new Rect();
		this.getWindow().getDecorView().getWindowVisibleDisplayFrame(frame);
		int statusBarHeight = frame.top;
		int x = clipview.getClipLeftMargin();
		if (x < 0)
			x = 0;
		int y = clipview.getClipTopMargin() + statusBarHeight;
		if (y < 0)
			y = 0;
		Bitmap cacheBmp = view.getDrawingCache();
		/*Bitmap finalBitmap = Bitmap.createBitmap(cacheBmp, x,
				y, clipview.getClipWidth(), clipview.getClipHeight());*/
		int outputWidth = clipview.getClipWidth();
		if (outputWidth < 0) {
			outputWidth = clipview.getWidth();
		}
		int outputHeight = clipview.getClipHeight();
		if (outputHeight < 0) {
			outputHeight = (int) (outputWidth * clipRatio);
		}
		Bitmap finalBitmap = Bitmap.createBitmap(outputWidth, outputHeight, Bitmap.Config.ARGB_8888);
		if (cacheBmp != null) {
			Canvas c = new Canvas(finalBitmap);
			c.drawBitmap(cacheBmp, new Rect(x, y, x + outputWidth, y + outputHeight), new Rect(0, 0, outputWidth,
					outputHeight), new Paint());
		}
		// Release the drawing cache.
		view.destroyDrawingCache();
		return finalBitmap;
	}
	// Persists the cropped bitmap as a JPEG on external storage; returns the
	// file path, or null if storage is unavailable or the write fails.
	private String savePhoto() {
		if (!Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED))
			return null;
		Bitmap savedbmp = getBitmap();
		String filepath = BBSUtils.getTakePhotoPath(this, System.currentTimeMillis() + ".jpg");
		File savedfile = new File(filepath);
		BufferedOutputStream bos = null;
		try {
			bos = new BufferedOutputStream(new FileOutputStream(savedfile));
			savedbmp.compress(Bitmap.CompressFormat.JPEG, 80, bos);
			bos.flush();
		} catch (Exception e) {
			e.printStackTrace();
			return null;
		} finally {
			if (savedbmp != null && !savedbmp.isRecycled()) {
				savedbmp.recycle();
				savedbmp = null;
			}
			if (bos != null) {
				try {
					bos.close();
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
		}
		return filepath;
	}
	@Override
	public void registerVVBroadCastReceivers() {
	}
	@Override
	protected void onDestroy() {
		super.onDestroy();
		EventBus.getDefault().unregister(this);
		// Free the decoded source bitmap.
		if (bitmap != null) {
			bitmap.recycle();
			bitmap = null;
		}
	}
	@Override
	public void onEventMainThread(EventBusData data) {
	}
	/**
	 * Converts a Drawable to a Bitmap, falling back to the given default
	 * dimensions when the drawable reports no intrinsic size. Returns null on
	 * OutOfMemoryError.
	 * @param drawable
	 * @return
	 */
	private Bitmap drawable2Bitmap(Drawable drawable, int defWidth, int defHeight) {
		try {
			if (drawable == null) {
				return null;
			}
			if (drawable instanceof BitmapDrawable) {
				return ((BitmapDrawable) drawable).getBitmap();
			}
			int intrinsicWidth = drawable.getIntrinsicWidth();
			int intrinsicHeight = drawable.getIntrinsicHeight();
			Bitmap bitmap = Bitmap.createBitmap(intrinsicWidth <= 0 ? defWidth : intrinsicWidth,
					intrinsicHeight <= 0 ? defHeight : intrinsicHeight, Config.ARGB_8888);
			Canvas canvas = new Canvas(bitmap);
			drawable.setBounds(0, 0, canvas.getWidth(), canvas.getHeight());
			drawable.draw(canvas);
			return bitmap;
		} catch (OutOfMemoryError e) {
			return null;
		}
	}
	/** The different crop flows this activity supports. */
	public static enum ClipType {
		REGISTER, CLIP_NEWSTV, OTHERS;
		public static ClipType get(int index) {
			switch (index) {
			case 0:
				return REGISTER;
			case 1:
				return CLIP_NEWSTV;
			default:
				return OTHERS;
			}
		}
	}
	/**
	 * Background task that uploads the saved crop as a portrait and reports
	 * the result back on the main thread.
	 */
	private class UpLoadFileRunnable implements Runnable {
		private File getFile;
		private String url;
		private UpLoadFileRunnable(String path, String url) {
			super();
			this.getFile = new File(path);
			this.url = url;
			showWaitingDialog();
		}
		private void showWaitingDialog() {
			UIHelper.showDialogForLoading(mContext, "请稍候", true);
		}
		@Override
		public void run() {
			HashMap<String, String> params = new HashMap<String, String>();
			String jidParsed = LoginManager.getInstance().getJidParsed();
			params.put("tm_id", jidParsed);
			params.put("session_id", LoginManager.getInstance().getSessionId());
			String result = UploadUtil.uploadImage(new String[] { getFile.getPath() }, url, params, "portrait_file",
					true);
			String[] uploadUrl = null;
			try {
				JSONObject jsonObject = new JSONObject(result);
				JSONArray jsonArray = jsonObject.getJSONArray("url");
				uploadUrl = new String[] { jsonArray.getString(0), jsonArray.getString(1) };
			} catch (Exception e) {
				e.printStackTrace();
			} finally {
				onPostExecute(uploadUrl);
			}
			return;
		}
		protected void onPostExecute(final String[] uploadUrl) {
			mHandler.post(new Runnable() {
				@Override
				public void run() {
					UIHelper.hideDialogForLoading();
					if (uploadUrl != null) {
						// Delete the temporary photo after a successful upload.
						getFile.delete();
						Intent intent = new Intent();
						intent.putExtra("savePhoto", uploadUrl);
						setResult(Activity.RESULT_OK, intent);
						EventBus.getDefault().post(new EventBusData(EventAction.SendResultFAlbum, uploadUrl));
						finish();
						CToast.showToast(mContext, "上传成功", Toast.LENGTH_SHORT);
					} else {
						Log.i(TAG, "上传失败");
						CToast.showToast(mContext, "上传失败,请重新上传", Toast.LENGTH_SHORT);
					}
				}
			});
		}
	}
}
|
exasol/virtual-schema-common-document | src/main/java/com/exasol/adapter/document/edml/deserializer/KeyTypeDeserializer.java | <reponame>exasol/virtual-schema-common-document<gh_stars>0
package com.exasol.adapter.document.edml.deserializer;
import java.io.IOException;
import java.util.Locale;

import com.exasol.adapter.document.edml.KeyType;
import com.exasol.errorreporting.ExaError;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
/**
* EDML Deserializer for {@link KeyType}.
*/
public class KeyTypeDeserializer extends JsonDeserializer<KeyType> {
    /**
     * Deserializes an EDML key-type string into a {@link KeyType}.
     * An absent or empty value maps to {@link KeyType#NONE}; anything that is
     * not a known enum constant raises an IllegalArgumentException with an
     * Exasol error code.
     */
    @Override
    public KeyType deserialize(final JsonParser jsonParser, final DeserializationContext deserializationContext)
            throws IOException {
        final String edmlName = jsonParser.getCodec().readValue(jsonParser, String.class);
        if (edmlName == null || edmlName.isEmpty()) {
            return KeyType.NONE;
        } else {
            try {
                // Locale.ROOT avoids locale-sensitive case mapping (e.g. the
                // Turkish dotless 'i'), which the bare toUpperCase() used.
                return KeyType.valueOf(edmlName.toUpperCase(Locale.ROOT));
            } catch (final IllegalArgumentException exception) {
                // Chain the original exception so the offending valueOf failure
                // is preserved in the stack trace (it was previously dropped).
                throw new IllegalArgumentException(
                        ExaError.messageBuilder("E-VSD-86").message("Unknown key type {{type}}.", edmlName).toString(),
                        exception);
            }
        }
    }
}
|
thiago5171/python. | exercicios/ex039.py | <gh_stars>1-10
# Read the weight of five people and print the largest and then the
# smallest weight entered, exactly as the original exercise requires.
# (Original prompt, translated: "write a program that reads the weight of
# five people; at the end, show the largest and the smallest weight read.")
weights = [float(input()) for _ in range(5)]
print(max(weights))
print(min(weights))
|
vlab-research/fly | dashboard-client/src/components/LinkModal/index.js | export { default } from './LinkModal';
|
stas-vilchik/bdd-ml | data/9139.js | {
if (n)
if (o(n)) {
Array.isArray(n) && (n = g(n));
var a;
for (var s in n)
!(function(o) {
if ("class" === o || "style" === o || yi(o)) a = t;
else {
var s = t.attrs && t.attrs.type;
a =
r || Si.mustUseProp(e, s, o)
? t.domProps || (t.domProps = {})
: t.attrs || (t.attrs = {});
}
o in a ||
((a[o] = n[o]),
i &&
((t.on || (t.on = {}))["update:" + o] = function(t) {
n[o] = t;
}));
})(s);
} else;
return t;
}
|
xavave/testnfc | mfoc_hard/hardnested/hardnested_bf_core.h | //-----------------------------------------------------------------------------
// Copyright (C) 2016, 2017 by piwi
//
// This code is licensed to you under the terms of the GNU GPL, version 2 or,
// at your option, any later version. See the LICENSE.txt file for the text of
// the license.
//-----------------------------------------------------------------------------
// Implements a card only attack based on crypto text (encrypted nonces
// received during a nested authentication) only. Unlike other card only
// attacks this doesn't rely on implementation errors but only on the
// inherent weaknesses of the crypto1 cypher. Described in
// <NAME>, <NAME>, "Ciphertext-only Cryptanalysis on Hardened
// Mifare Classic Cards" in Proceedings of the 22nd ACM SIGSAC Conference on
// Computer and Communications Security, 2015
//-----------------------------------------------------------------------------
//
// brute forcing is based on @aczids bitsliced brute forcer
// https://github.com/aczid/crypto1_bs with some modifications. Mainly:
// - don't rollback. Start with 2nd byte of nonce instead
// - reuse results of filter subfunctions
// - reuse results of previous nonces if some first bits are identical
//
//-----------------------------------------------------------------------------
// aczid's Copyright notice:
//
// Bit-sliced Crypto-1 brute-forcing implementation
// Builds on the data structures returned by CraptEV1 craptev1_get_space(nonces, threshold, uid)
/*
Copyright (c) 2015-2016 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
#ifndef HARDNESTED_BF_CORE_H__
#define HARDNESTED_BF_CORE_H__

#include "hardnested_bruteforce.h" // statelist_t

/* SIMD instruction-set selector for the bitsliced brute-forcer.
 * SIMD_AUTO defers the choice to runtime detection; the remaining values
 * force a specific implementation (from widest to none). */
typedef enum {
	SIMD_AUTO,
	SIMD_AVX512,
	SIMD_AVX2,
	SIMD_AVX,
	SIMD_SSE2,
	SIMD_MMX,
	SIMD_NONE,
} SIMDExecInstr;

/* Force a specific SIMD implementation (overrides auto-detection). */
extern void SetSIMDInstr(SIMDExecInstr instr);
/* Return the best SIMD instruction set detected for this CPU. */
extern SIMDExecInstr GetSIMDInstrAuto();
/* Bitsliced brute-force over the candidate state list p, updating the
 * keys-found / keys-tested counters; return value semantics are defined by
 * the implementation -- presumably the recovered key or a sentinel (verify). */
extern const uint64_t crack_states_bitsliced(uint32_t cuid, uint8_t *best_first_bytes, statelist_t *p, uint32_t *keys_found, uint64_t *num_keys_tested, uint32_t nonces_to_bruteforce, uint8_t *bf_test_nonces_2nd_byte, noncelist_t *nonces);
/* Prepare the bitsliced representation of the test nonces and their parities. */
extern void bitslice_test_nonces(uint32_t nonces_to_bruteforce, uint32_t *bf_test_nonces, uint8_t *bf_test_nonce_par);
#endif
|
STRIDES-Codes/Exploring-the-Microbiome- | src/metaspades/src/projects/hammer/expander.cpp | <gh_stars>0
//***************************************************************************
//* Copyright (c) 2015 Saint Petersburg State University
//* Copyright (c) 2011-2014 Saint Petersburg Academic University
//* All Rights Reserved
//* See file LICENSE for details.
//***************************************************************************
#include "expander.hpp"
#include "config_struct_hammer.hpp"
#include "globals.hpp"
#include "kmer_data.hpp"
#include "valid_kmer_generator.hpp"
#include "io/reads/read.hpp"
#include <vector>
#include <cstring>
// Expands the set of "good" (solid) k-mers: if every position of a trimmed
// read is covered by at least one solid k-mer, all k-mers of that read are
// promoted to good. `changed_` counts the newly promoted k-mers.
// Always returns false (the return value appears unused by the caller -- verify).
bool Expander::operator()(std::unique_ptr<Read> r) {
  uint8_t trim_quality = (uint8_t)cfg::get().input_trim_quality;

  // FIXME: Get rid of this
  Read cr = *r;
  size_t sz = cr.trimNsAndBadQuality(trim_quality);

  // Too short after trimming to contain even one k-mer.
  if (sz < hammer::K)
    return false;

  // Per-position flags/indices; -1ull marks "no k-mer known at this position".
  std::vector<unsigned> covered_by_solid(sz, false);
  std::vector<size_t> kmer_indices(sz, -1ull);

  // Pass 1: record each valid k-mer's index and mark positions covered by
  // k-mers already classified as good.
  ValidKMerGenerator<hammer::K> gen(cr);
  while (gen.HasMore()) {
    hammer::KMer kmer = gen.kmer();
    size_t idx = data_.checking_seq_idx(kmer);
    if (idx != -1ULL) {
      size_t read_pos = gen.pos() - 1;
      kmer_indices[read_pos] = idx;
      if (data_[idx].good()) {
        for (size_t j = read_pos; j < read_pos + hammer::K; ++j)
          covered_by_solid[j] = true;
      }
    }
    gen.Next();
  }

  // Bail out unless the entire read is covered by solid k-mers.
  for (size_t j = 0; j < sz; ++j)
    if (!covered_by_solid[j])
      return false;

  // Pass 2: promote every k-mer of this fully-covered read to good.
  for (size_t j = 0; j < sz; ++j) {
    if (kmer_indices[j] == -1ull)
      continue;

    // FIXME: Do not lock everything
    KMerStat &kmer_data = data_[kmer_indices[j]];
    if (!kmer_data.good()) {
#     pragma omp atomic
      changed_ += 1;

      kmer_data.lock();
      kmer_data.mark_good();
      kmer_data.unlock();
    }
  }

  return false;
}
|
tusharchoudhary0003/Custom-Football-Game | sources/com/google/ads/mediation/MediationBannerAdapter.java | <filename>sources/com/google/ads/mediation/MediationBannerAdapter.java
package com.google.ads.mediation;
import android.app.Activity;
import android.view.View;
import com.google.ads.mediation.C8092e;
import com.google.ads.mediation.MediationServerParameters;
import p019d.p271g.p272a.C12722c;
/**
 * Mediation adapter interface for banner ads (decompiled/obfuscated names).
 * Extends the generic adapter contract C8082b with banner-specific calls.
 */
@Deprecated
public interface MediationBannerAdapter<ADDITIONAL_PARAMETERS extends C8092e, SERVER_PARAMETERS extends MediationServerParameters> extends C8082b<ADDITIONAL_PARAMETERS, SERVER_PARAMETERS> {
    /** Returns the banner view produced by the last successful ad request. */
    View getBannerView();

    /** Requests a banner ad; cVar receives load callbacks -- exact roles of the
     *  obfuscated parameters should be confirmed against the mediation SDK docs. */
    void requestBannerAd(C8083c cVar, Activity activity, SERVER_PARAMETERS server_parameters, C12722c cVar2, C8081a aVar, ADDITIONAL_PARAMETERS additional_parameters);
}
|
floryst/ITK | Modules/Filtering/LabelMap/include/itkRegionFromReferenceLabelMapFilter.h | /*=========================================================================
*
* Copyright Insight Software Consortium
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0.txt
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*=========================================================================*/
/*=========================================================================
*
* Portions of this file are subject to the VTK Toolkit Version 3 copyright.
*
* Copyright (c) <NAME>, <NAME>, <NAME>
*
* For complete copyright, license and disclaimer of warranty information
* please refer to the NOTICE file at the top of the ITK source tree.
*
*=========================================================================*/
#ifndef itkRegionFromReferenceLabelMapFilter_h
#define itkRegionFromReferenceLabelMapFilter_h
#include "itkChangeRegionLabelMapFilter.h"
namespace itk
{
/** \class RegionFromReferenceLabelMapFilter
* \brief Set the region from a reference image
*
* Change the region of a label map to be the same as one of a reference image.
* This filter implements the same feature as its superclass, but with the input region
* well integrated in the pipeline architecture.
* If the output cannot contain some of the objects' lines, they are truncated or removed.
* All objects fully outside the output region are removed.
*
* This implementation was taken from the Insight Journal paper:
* https://hdl.handle.net/1926/584 or
* http://www.insight-journal.org/browse/publication/176
*
* \author <NAME>. Biologie du Developpement et de la Reproduction, INRA de Jouy-en-Josas, France.
*
* \ingroup ImageEnhancement MathematicalMorphologyImageFilters
* \ingroup ITKLabelMap
*/
template< typename TInputImage >
class ITK_TEMPLATE_EXPORT RegionFromReferenceLabelMapFilter:public ChangeRegionLabelMapFilter< TInputImage >
{
public:
  ITK_DISALLOW_COPY_AND_ASSIGN(RegionFromReferenceLabelMapFilter);

  /** Standard class type aliases. */
  using Self = RegionFromReferenceLabelMapFilter;
  using Superclass = ChangeRegionLabelMapFilter< TInputImage >;
  using Pointer = SmartPointer< Self >;
  using ConstPointer = SmartPointer< const Self >;

  /** Run-time type information (and related methods). */
  itkTypeMacro(RegionFromReferenceLabelMapFilter, ChangeRegionImageFilter);

  /** Standard New method. */
  itkNewMacro(Self);

  /** Superclass type alias. */
  using OutputImageType = typename Superclass::OutputImageType;
  using OutputImagePointer = typename Superclass::OutputImagePointer;
  using OutputImageRegionType = typename Superclass::OutputImageRegionType;
  using OutputImagePixelType = typename Superclass::OutputImagePixelType;

  /** Some convenient type alias. */
  using InputImageType = TInputImage;
  using InputImagePointer = typename InputImageType::Pointer;
  using InputImageConstPointer = typename InputImageType::ConstPointer;
  using InputImageRegionType = typename InputImageType::RegionType;
  using InputImagePixelType = typename InputImageType::PixelType;
  using LabelObjectType = typename InputImageType::LabelObjectType;

  using PixelType = typename InputImageType::PixelType;
  using IndexType = typename InputImageType::IndexType;
  using SizeType = typename InputImageType::SizeType;
  using RegionType = typename InputImageType::RegionType;

  using TOutputImage = TInputImage;

  /** ImageDimension constants */
  static constexpr unsigned int InputImageDimension = TInputImage::ImageDimension;
  static constexpr unsigned int OutputImageDimension = TOutputImage::ImageDimension;
  static constexpr unsigned int ImageDimension = TOutputImage::ImageDimension;

  /** Any ImageBase of matching dimension can serve as the region reference. */
  using ReferenceImageType = ImageBase< Self::ImageDimension >;

  /** Copy the output information from another Image. */
  void SetReferenceImage(const ReferenceImageType *image);

  const ReferenceImageType * GetReferenceImage() const;

  /** Set the input image */
  void SetInput1(const TInputImage *input)
  {
    this->SetInput(input);
  }

  /** Set the reference image */
  void SetInput2(const ReferenceImageType *input)
  {
    this->SetReferenceImage(input);
  }

protected:
  /** Requires two inputs: the label map (0) and the reference image (1). */
  RegionFromReferenceLabelMapFilter()
  {
    this->SetNumberOfRequiredInputs(2);
  }

  ~RegionFromReferenceLabelMapFilter() override = default;

  /** Copies the output region from the reference image (see .hxx). */
  void GenerateOutputInformation() override;

  void PrintSelf(std::ostream & os, Indent indent) const override;
};
} // end namespace itk
#ifndef ITK_MANUAL_INSTANTIATION
#include "itkRegionFromReferenceLabelMapFilter.hxx"
#endif
#endif
|
anthonyfuentes/hubzero-cms | core/plugins/groups/forum/assets/js/emailSettings.js | /**
* @package hubzero-cms
* @copyright Copyright 2005-2019 HUBzero Foundation, LLC.
* @license http://opensource.org/licenses/MIT MIT
*/
// Email-subscription settings form: on submit, diff the checked categories
// against the server-known state and issue create/delete API calls for the
// difference only, then sync the hidden "preexisting" field and notify the user.
$(document).ready(() => {
  Hubzero.initApi(() => {
    const $emailSettingsForm = $('#email-settings')
    // Hidden input holding a comma-separated list of category ids the user
    // is already subscribed to (server-rendered).
    const $preexistingSubscriptionIdsElement = $('input[id=preexisting-subscriptions]')

    $emailSettingsForm.submit((e) => {
      e.preventDefault()
      const subscriptionsDelta = _determineSubscriptionDelta()
      const changeMade = _subscriptionsWereChanged(subscriptionsDelta)

      if (changeMade) {
        _updateUserSubscriptions(subscriptionsDelta)
      }
    })

    // Compute {delete: [...ids], create: [...ids]} from the old vs. new state.
    const _determineSubscriptionDelta = () => {
      const preexistingSubscriptionIds = _getPreexistingSubscriptionIds()
      const updatedSubscriptions = _getUpdatedSubscriptions()
      const delta = {}

      delta.delete = preexistingSubscriptionIds.filter((categoryId) => {
        return !updatedSubscriptions.includes(categoryId)
      })

      delta.create = updatedSubscriptions.filter((categoryId) => {
        return !preexistingSubscriptionIds.includes(categoryId)
      })

      return delta
    }

    // Parse the hidden input; note the ids are strings here.
    const _getPreexistingSubscriptionIds = () => {
      const inputValue = $preexistingSubscriptionIdsElement.val()
      const preexistingSubscriptionsIds = (inputValue === "") ? [] : inputValue.split(',')

      return preexistingSubscriptionsIds
    }

    // The checkbox inputs are named by category id; checked ones serialize as 'on'.
    const _getUpdatedSubscriptions = () => {
      const formData = $emailSettingsForm.serializeArray()

      const updatedSubscriptions = formData.filter((input) => {
        return input.value === 'on'
      })

      const updatedSubscriptionsIds = updatedSubscriptions.map((input) => {
        return input.name
      })

      return updatedSubscriptionsIds
    }

    const _subscriptionsWereChanged = (subscriptionsDelta) => {
      return subscriptionsDelta.delete.length > 0 || subscriptionsDelta.create.length > 0
    }

    // Fire the delete/create requests in parallel, then (once both settle)
    // notify the user and update the hidden preexisting-ids field.
    const _updateUserSubscriptions = (subscriptionsDelta) => {
      const userId = $emailSettingsForm.find('input[id=user-id]').val()
      const deleteResponse = _deleteSubscriptions(subscriptionsDelta.delete, userId)
      const createResponse = _createSubscriptions(subscriptionsDelta.create, userId)

      Promise.all([deleteResponse, createResponse])
        .then((responses) => {
          const enrichedResponses = _getEnrichedResponses({
            delete: responses[0], create: responses[1]
          })
          let [deleteResponse, createResponse] = enrichedResponses

          _notifyUser(deleteResponse, createResponse)
          _updatePreexistingIdsElement(deleteResponse, createResponse)
        })
    }

    // Returns undefined (resolved immediately by Promise.all) when there is
    // nothing to delete; _enrichResponse handles that as a "null response".
    const _deleteSubscriptions = (subscriptionsToDelete, userId) => {
      if (subscriptionsToDelete.length > 0) {
        const promise = $.ajax({
          url: '/api/v2.0/forum/userscategories/destroy',
          method: 'DELETE',
          data: {
            userId,
            categoriesIds: subscriptionsToDelete
          }
        })

        return promise
      }
    }

    // Same convention as _deleteSubscriptions: undefined when nothing to create.
    const _createSubscriptions = (subscriptionsToCreate, userId) => {
      if (subscriptionsToCreate.length > 0) {
        const promise = $.ajax({
          url: '/api/v2.0/forum/userscategories/create',
          method: 'POST',
          data: {
            userId,
            categoriesIds: subscriptionsToCreate
          }
        })

        return promise
      }
    }

    // Normalize both raw responses into parsed objects tagged with their action.
    const _getEnrichedResponses = (responses) => {
      const enrichedResponses = []

      Object.keys(responses).forEach((action) => {
        const response = responses[action]
        const enrichedResponse = _enrichResponse(response, action)
        enrichedResponses.push(enrichedResponse)
      })

      return enrichedResponses
    }

    const _enrichResponse = (response, action) => {
      let enrichedResponse

      if (response) {
        enrichedResponse = JSON.parse(response)
        enrichedResponse.action = action
      } else {
        enrichedResponse = _getNullResponse(action)
      }

      return enrichedResponse
    }

    // Placeholder "success with no records" response for skipped requests.
    // NOTE(review): the `action` parameter is accepted but unused here.
    const _getNullResponse = (action) => {
      return {
        status: 'success',
        records: [],
        null: true
      }
    }

    const _updatePreexistingIdsElement = (deleteResponse, createResponse) => {
      const updatedSubscriptionIds =_getUpdatedSubscriptionIds(deleteResponse, createResponse)
      updatedSubscriptionIds.sort()
      $preexistingSubscriptionIdsElement.val(updatedSubscriptionIds.join(','))
    }

    // Apply the successful deletes/creates to the locally-known id list.
    const _getUpdatedSubscriptionIds = (deleteResponse, createResponse) => {
      const preexistingSubscriptionIds = _getPreexistingSubscriptionIds()

      if (_requestSucceeded(deleteResponse)) {
        _removeIds(preexistingSubscriptionIds, deleteResponse.records)
      }
      if (_requestSucceeded(createResponse)) {
        _addIds(preexistingSubscriptionIds, createResponse.records)
      }

      return preexistingSubscriptionIds
    }

    const _requestSucceeded = (response) => {
      return !response.null && response.status === 'success'
    }

    // NOTE(review): indexOf compares record.category_id against string ids
    // parsed from the hidden input -- assumes the API returns string ids too;
    // TODO confirm, otherwise indexOf yields -1 and splice removes the wrong entry.
    const _removeIds = (preexistingSubscriptionIds, records) => {
      records.forEach((record) => {
        let index = preexistingSubscriptionIds.indexOf(record.category_id)
        preexistingSubscriptionIds.splice(index, 1)
      })
    }

    const _addIds = (preexistingSubscriptionIds, records) => {
      records.forEach((record) => {
        preexistingSubscriptionIds.push(record.category_id)
      })
    }

    const _notifyUser = (deleteResponse, createResponse) => {
      const notificationMessage = _generateNotificationMessage(deleteResponse, createResponse)
      const notificationType = _getNotificationType(deleteResponse, createResponse)

      Notify[notificationType](notificationMessage)
    }

    const _generateNotificationMessage = (deleteResponse, createResponse) => {
      let notifications = []

      if (!deleteResponse.null) {
        notifications.push(_generateDeleteNotification(deleteResponse))
      }
      if (!createResponse.null) {
        notifications.push(_generateCreateNotification(createResponse))
      }

      return notifications.join("<br>")
    }

    const _generateDeleteNotification = (deleteResponse) => {
      let notificationMessage

      if (_requestFailed(deleteResponse)) {
        notificationMessage = _generateDeleteErrorNotification(deleteResponse.errors)
      } else {
        notificationMessage = 'The specified subscriptions were deleted.'
      }

      return notificationMessage
    }

    const _generateDeleteErrorNotification = (errors) => {
      const baseErrorMessage = 'There were errors when attempting to delete the specified subscriptions'
      const notificationMessage = _generateErrorNotification(baseErrorMessage, errors)

      return notificationMessage
    }

    const _generateCreateNotification = (createResponse) => {
      let notificationMessage

      if (_requestFailed(createResponse)) {
        notificationMessage = _generateCreateErrorNotification(createResponse.errors)
      } else {
        notificationMessage = 'Subscriptions created.'
      }

      return notificationMessage
    }

    const _generateCreateErrorNotification = (errors) => {
      const baseErrorMessage = 'There were errors when attempting to create the specified subscriptions'
      const notificationMessage = _generateErrorNotification(baseErrorMessage, errors)

      return notificationMessage
    }

    // NOTE(review): returns undefined when `errors` joins to an empty string;
    // callers then show an "undefined" message -- confirm whether intentional.
    const _generateErrorNotification = (baseMessage, errors) => {
      let errorNotification
      const joinedErrors = errors.join(', ')

      if (joinedErrors != '') {
        errorNotification = `${baseMessage}: ${joinedErrors}`
      }

      return errorNotification
    }

    // error: everything failed/skipped; warn: partial failure; success: otherwise.
    const _getNotificationType = (deleteResponse, createResponse) => {
      let notificationType
      const deleteFailed = _requestFailed(deleteResponse)
      const createFailed = _requestFailed(createResponse)

      if ((deleteResponse.null || deleteFailed ) && (createResponse.null || createFailed)) {
        notificationType = 'error'
      } else if (deleteFailed || createFailed) {
        notificationType = 'warn'
      } else {
        notificationType = 'success'
      }

      return notificationType
    }

    const _requestFailed = (response) => {
      return !response.null && response.status === 'error'
    }
  })
})
|
nanoNeel/foam2 | src/foam/lib/json/UnknownParser.java | /**
* @license
* Copyright 2018 The FOAM Authors. All Rights Reserved.
* http://www.apache.org/licenses/LICENSE-2.0
*/
package foam.lib.json;
import foam.lib.parse.*;
public class UnknownParser
extends ProxyParser
{
public UnknownParser() {
super(new Parser() {
private Parser delegate = new Alt(
NullParser.instance(),
BooleanParser.instance(),
//double parser should be before LongParser()
new DoubleParser(),
new LongParser(),
new UnknownStringParser(),
new UnknownReferenceParser());
public PStream parse(PStream ps, ParserContext x) {
PStream ps1 = ps.apply(delegate, x);
if ( ps1 == null ) {
return null;
}
Object o = ps1.value();
Object value = null;
if ( o == null ) {
value = "null";
} else {
value = o.toString();
}
return ps1.setValue(value);
}
});
}
} |
vitek-karas/WarPlusPlus | src/Common/ServerClient/ControlConnection.h | #ifndef SC_CONTROLCONNECTION_H_
#define SC_CONTROLCONNECTION_H_
enum EControlRequest{
ControlRequest_Init = 1, // Initialization (followed by nothing)
};
enum EControlAnswer{
ControlAnswer_Init = 1, // Initialization, followed by SControlAnswerInit
};
struct tagSControlAnswerInit
{
DWORD m_dwCivilizationID; // Civilization ID of the connected client
DWORD m_dwTimeslice; // Current timeslice number on the server
DWORD m_dwTimesliceInterval; // Length of the timeslice in milliseconds
DWORD m_dwStartPositionX; // Start position of the viewport
DWORD m_dwStartPositionY;
};
typedef tagSControlAnswerInit SControlAnswerInit;
#endif |
lightvector/arimaasharp | pattern/patternsolver.h | /*
* patternsolver.h
* Author: davidwu
*/
#include "../core/global.h"
#include "../pattern/pattern.h"
// NOTE(review): this header appears to lack an include guard / #pragma once --
// confirm it is never included twice in one translation unit.
//
// Possibility models what is (possibly) known about the contents of one board
// square: owner, rabbit-ness, and a special "assumed frozen" flag.
struct Possibility
{
  pla_t owner; //0 = silv, 1 = gold, 2 = npla
  int isRabbit; //0 = false, 1 = true, 2 = unknown

  //This field has the status of being just a special flag.
  //If set to 1, then we just outright assume a piece is frozen,
  //and we set it back to 0 whenever the piece is touched by a friendly
  //or a hostile nonrabbit piece is removed from being adjacent. We also delete the possiblity
  //entirely if a friendly piece is moved from adjacent, or if the piece itself is pushpulled
  //We never set it back to 1, and instead use actual testing of the surrounding
  //squares at move legality checking time to see if normal freezing might happen.
  //We only use this for the maybe player.
  int isSpecialFrozen; //0 = possible false, 1 = true

  Possibility();
  Possibility(pla_t owner, int isRabbit, int isSpecialFrozen);

  //Constructors
  static Possibility npla();
  static Possibility silv();
  static Possibility gold();

  //All of the functions assume adjacency and onboardness
  //All of the definite functions assume that dest is npla.
  //All of the maybe functions assume that dest is maybe npla.
  //Obviously, none of these test actual frozenness, since that depends on adjacent squares on the board.
  //But they do test special frozenness
  bool canDefiniteStep(pla_t pla, loc_t src, loc_t dest) const;
  //Same as canDefiniteStep, but npla is allowed
  bool canDefinitePhantomStep(pla_t pla, loc_t src, loc_t dest) const;
  //Pullers and pushers must be nonrabbit pla.
  bool canDefinitePhantomPush(const Possibility& destP, pla_t pla, loc_t src, loc_t dest, loc_t dest2) const;
  bool canDefinitePhantomPull(const Possibility& destP, pla_t pla, loc_t src, loc_t dest, loc_t dest2) const;
  //Any combination of plas and empties is fine, due to freezyness, might be legal
  //even if neither is legal individually.
  bool canDefinitePhantomStepStep(const Possibility& destP, pla_t pla, loc_t src, loc_t dest, loc_t dest2) const;

  bool maybeCanStep(pla_t pla, loc_t src, loc_t dest) const;
  bool maybeCanPush(const Possibility& destP, pla_t pla, loc_t src, loc_t dest, loc_t dest2) const;
  bool maybeCanPull(const Possibility& destP, pla_t pla, loc_t src, loc_t dest, loc_t dest2) const;

  //Debug/serialization helpers.
  static void write(ostream& out, const Possibility& p);
  static string write(const Possibility& p);
  friend ostream& operator<<(ostream& out, const Possibility& p);
};
// Knowledge base of per-square Possibility lists used by the pattern solver.
// Each square holds a circular doubly linked list of Possibility nodes, plus
// pairwise strength relations between nodes; the static canDefinitelyGoal /
// canMaybeStopGoal entry points run the actual search.
class KB
{
  public:
  static void init();

  private:
  //Comparison codes used in strength relations between nodes.
  static const int GT = 0;
  static const int GEQ = 1;
  static const int LEQ = 2;
  static const int LT = 3;
  STRUCT_NAMED_PAIR(int,node,int,cmp,StrRelation);
  //GT<->LT, GEQ<->LEQ (relies on the 0..3 encoding above).
  static inline int invertCmp(int cmp) {return 3-cmp;}

  struct PNode
  {
    bool isHead; //True if this is the head node for a list.
    Possibility possibility; //The possibility at this node
    int nextNode; //Index of next node in pNodes
    int prevNode; //Index of prev node in pNodes

    //Strength relationships to other nodes, the other nodes will have matching reversed relations to this node
    //These are unique per node pair.
    vector<StrRelation>* strRelations;

    PNode();
    ~PNode();
    PNode(const PNode& other);
    PNode& operator=(const PNode& other);
  };

  vector<PNode> pNodes; //Linked list nodes for locations
  int pHead[BSIZE]; //Pointer to head node of circular doubly linked list of possibilities for this location.
  int unusedList; //Pointer to head node of list of unused nodes

  //The pattern has freezing for this player, for correctness, this player must be the maybe player
  pla_t hasFrozenPla;

  public:
  KB(const Pattern& pattern);
  ~KB();

  //Properties
  bool definitelyNPla(loc_t loc) const;
  bool maybeNPla(loc_t loc) const;
  bool maybeOccupied(loc_t loc) const;
  bool definitelyRabbit(pla_t pla, loc_t loc) const;
  bool definitelyPla(pla_t pla, loc_t loc) const;
  bool maybePla(pla_t pla, loc_t loc) const;
  bool maybePlaNonRab(pla_t pla, loc_t loc) const;
  bool maybePlaElephant(pla_t pla, loc_t loc) const;
  bool definitelyPlaOrNPla(pla_t pla, loc_t loc) const;
  bool definitelyHasDefender(pla_t pla, loc_t loc) const;
  bool maybeHasDefender(pla_t pla, loc_t loc) const;
  bool plaPiecesAtAreAsGeneralAs(pla_t pla, loc_t loc, loc_t loc2) const;
  bool anySpecialFrozen(pla_t pla, loc_t loc) const;
  bool anySpecialFrozenAround(pla_t pla, loc_t loc) const;

  //Returns the node added
  int addPossibilityToLoc(const Possibility& p, loc_t loc);

  //Board actions and movement
  bool canDefiniteStep(pla_t pla, loc_t src, loc_t dest) const;
  bool tryDefiniteStep(pla_t pla, loc_t src, loc_t dest);
  bool tryDefinitePhantomStep(pla_t pla, loc_t src, loc_t dest);
  bool tryDefinitePhantomPush(pla_t pla, loc_t src, loc_t dest, loc_t dest2);
  bool tryDefinitePhantomPull(pla_t pla, loc_t src, loc_t dest, loc_t dest2);
  bool tryDefinitePhantomStepStep(pla_t pla, loc_t src, loc_t dest, loc_t dest2);

  bool tryMaybeStep(pla_t pla, loc_t src, loc_t dest);
  bool tryMaybePush(pla_t pla, loc_t src, loc_t dest, loc_t dest2);
  bool tryMaybePull(pla_t pla, loc_t src, loc_t dest, loc_t dest2);

  //Top-level goal search entry points.
  static bool canDefinitelyGoal(const KB& kb, pla_t pla, int numSteps, int verbosity=0);
  static bool canMaybeStopGoal(const KB& kb, pla_t pla, int numSteps, int verbosity=0);

  void write(ostream& out) const;
  static void write(ostream& out, const KB& p);
  static string write(const KB& p);
  friend ostream& operator<<(ostream& out, const KB& p);

  private:
  //Basic list operations
  int next(int node) const;
  bool atLeastOne(int head) const;
  void addNode(int head, int node);
  int removeNode(int node);

  //More fancy list operations
  int getUnusedNode();
  void swapLists(int head1, int head2);
  void clear(int head);

  //Strength relation management
  int numStrengthRelations(int node) const;
  bool hasStrengthRelation(int node, int otherNode) const;
  bool isDefinitelyGt(int node, int otherNode) const;
  bool isDefinitelyGeq(int node, int otherNode) const;
  bool isMaybeGt(int node, int otherNode) const;
  bool isMaybeGeq(int node, int otherNode) const;
  void addStrengthRelationVsLoc(int node, int cmp, loc_t loc);
  void removeAllStrengthRelations(int node);

  //Combined with other properties
  bool definitelyDominates(loc_t loc, int otherNode) const;
  bool maybeDominates(loc_t loc, int otherNode) const;
  bool definitelyHasDominator(loc_t loc, int node) const;
  bool maybeHasDominator(loc_t loc, int node) const;
  bool definitelyUnfrozen(loc_t loc, int node) const;
  bool maybeUnfrozen(loc_t loc, int node) const;

  //Internal utility actions
  bool removeAllPlasAt(pla_t pla, loc_t loc);
  bool removeAllSpecialFrozenPlasAt(pla_t pla, loc_t loc);
  void removeAllSpecialFrozenPlasAround(pla_t pla, loc_t loc);
  void unSpecialFreeze(pla_t pla, loc_t loc);
  void unSpecialFreezeAround(pla_t pla, loc_t loc);
  void resolveTrapCapturesAround(pla_t pla, loc_t loc);

  //Testing
  void checkListConsistency(int head, const char* message) const;
  void checkListConsistency(int head, const string& message) const;
  void checkConsistency(const char* message) const;
  void checkConsistency(const string& message) const;
};
|
malliina/musicpimp | musicpimp/app/com/malliina/musicpimp/messaging/PushKeys.scala | <filename>musicpimp/app/com/malliina/musicpimp/messaging/PushKeys.scala<gh_stars>1-10
package com.malliina.musicpimp.messaging
/** String constants used in MusicPimp push-notification payloads. */
object PushKeys {
  val Cmd = "cmd"
  val Stop = "stop"
  val Tag = "tag"
}
|
hoeoy/wwj | src/main/java/com/iandtop/model/OrderStyleModel.java | <reponame>hoeoy/wwj<gh_stars>0
package com.iandtop.model;
import java.util.LinkedList;
import java.util.List;
/**
* Created by Administrator on 2017/5/15.
*/
/**
 * View model for an order style (top-level category) and the collection of
 * its child order models.
 * Created by Administrator on 2017/5/15.
 */
public class OrderStyleModel {
    private int id;           // style id
    private String stylename; // style display name
    private List<OrderModelVo> orderModelVos = new LinkedList<OrderModelVo>(); // collection of all sub-category models

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getStylename() {
        return stylename;
    }

    public void setStylename(String stylename) {
        this.stylename = stylename;
    }

    public List<OrderModelVo> getOrderModelVos() {
        return orderModelVos;
    }

    public void setOrderModelVos(List<OrderModelVo> orderModelVos) {
        this.orderModelVos = orderModelVos;
    }
}
|
MetricsGroup/IERT-Webapp | src/diploma/admin.py | from django.contrib import admin
from .models import diploma_facultys
# in case of different admin panel
#####################################################
# from django.contrib.admin import AdminSite
#
# class DiplomaAdminSite(AdminSite):
# site_header = "IERT DIPLOMA ADMIN"
# site_title = "IERT DIPLOMA ADMIN"
# index_title = "Welcome to IERT Diploma Admin Portal"
#
# diploma_admin_site = DiplomaAdminSite(name='iert_diploma_admin')
########################################################################
# Register the diploma faculty model with the default Django admin site.
admin.site.register(diploma_facultys)
|
SocioProphet/aigents-java | src/main/java/net/webstructor/main/Logger.java | /*
* MIT License
*
* Copyright (c) 2005-2020 by <NAME>, Aigents®
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package net.webstructor.main;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.util.Date;
import java.text.SimpleDateFormat;
import net.webstructor.al.Time;
/**
 * Daily-rotating file logger. Writes timestamped, ticketed lines to a file
 * named {@code <prefix>-<yyyy-MM-dd>-log.txt}, reopening the file when the
 * date changes, and optionally deletes logs older than a retention window.
 *
 * Thread-safety: the public log methods and the lazy singleton accessor are
 * synchronized, since both the writer and {@link SimpleDateFormat} (which is
 * not thread-safe) are shared mutable state.
 */
public class Logger {

	static Logger m_logger = null;

	/** Lazily creates the shared singleton; synchronized to avoid a racy double-init. */
	public static synchronized Logger getLogger() {
		if (m_logger == null)
			m_logger = new Logger();
		return m_logger;
	}

	private long m_ticket = 0;                    // monotonically increasing id for logIn tickets
	private Date m_date = null;                   // day (midnight-truncated) the current file belongs to
	private BufferedWriter m_writer = null;
	private OutputStreamWriter m_streamWriter = null;
	private SimpleDateFormat m_timeFormat;        // per-line timestamp format; not thread-safe
	private String m_prefix = "webcat";           // log file name prefix
	private int m_retentionDays = 0;              // 0 disables cleanup

	public Logger() {
		m_timeFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SS");
	}

	public Logger(String prefix) {
		this();
		m_prefix = prefix;
	}

	/** Sets how many days of log files to keep; 0 or less disables cleanup. */
	public void setRetentionDays(int days) {
		synchronized (this){
			this.m_retentionDays = days;
		}
	}

	/**
	 * Deletes log files older than the retention window, walking backwards
	 * day by day (up to a year) and stopping at the first missing or
	 * undeletable file.
	 */
	public void cleanup() {
		int days;
		synchronized (this){
			days = m_retentionDays;
		}
		if (days > 0){
			Date day = Time.date(new Date(), -days);//last day to keep
			for (int i=0;i<=365;i++){//look as far as one year back
				day = Time.date(day,-1);//next day to clean
				File log = new File(getLogFileName(day));
				if (!log.exists())
					break;//delete all what we can
				if (!log.delete())
					break;
			}
		}
	}

	/** Builds the log file name for the given date: {@code <prefix>-<yyyy-MM-dd>-log.txt}. */
	public String getLogFileName(Date date) {
		String name = new SimpleDateFormat("yyyy-MM-dd").format(date);
		return m_prefix+"-"+name+"-log.txt";
	}

	private synchronized long getTicket() {
		return ++m_ticket;
	}

	/** (Re)opens the UTF-8 log file for the given date in append mode, closing any previous writer. */
	private void reopenFile(Date date) throws IOException {
		if (m_writer != null)
			m_writer.close();
		m_writer = new BufferedWriter(m_streamWriter = new OutputStreamWriter(
				new FileOutputStream(getLogFileName(date),true), "UTF-8"));
	}

	private void flush() throws IOException {
		//TODO: make flushing based on options
		m_writer.flush();
		m_streamWriter.flush();
	}

	/** Reopens the log file if the calendar day has rolled over since the last write. */
	private void checkReopenFile(Date now) throws IOException {
		//TODO: check if file is missed and re-create if so
		Date date = Time.date(now);
		if (m_date == null || !date.equals(m_date)){
			reopenFile(now);
			m_date = date;
		}
	}

	private void writeToFile(String text) throws IOException {
		m_writer.write(text);
		m_writer.newLine();
	}

	/** Formats one log line as {@code ticket:pref:timestamp:text} and flushes it. */
	private void writeToFile(long ticket, Date now, String pref, String text) throws IOException {
		StringBuilder sb = new StringBuilder();
		sb
			.append(ticket).append(':') // StringBuilder.append(long); avoids deprecated new Long(...)
			.append(pref).append(':')
			.append(m_timeFormat.format(now)).append(':')
			.append(text);
		writeToFile(sb.toString());
		flush();
	}

	/** Logs a tagged message at the current time. */
	public void log(String text,String tag) throws IOException {
		log(new Date(),text,tag);
	}

	/** Logs a tagged message at the given time; ticket is the current thread id. */
	public synchronized void log(Date now, String text,String tag) throws IOException {
		checkReopenFile(now);
		writeToFile(Thread.currentThread().getId(),now,tag,text);
	}

	/** Logs outgoing traffic under a previously issued ticket and returns it. */
	public synchronized long logOut(String out,long ticket) throws IOException {
		Date now = new Date();
		checkReopenFile(now);
		writeToFile(ticket,now,"output",out);
		return ticket;
	}

	/** Logs incoming traffic under a freshly issued ticket and returns it. */
	public synchronized long logIn(String in) throws IOException {
		long ticket = getTicket();
		Date now = new Date();
		checkReopenFile(now);
		writeToFile(ticket,now,"input",in);
		return ticket;
	}
}
|
ChestnutGames/chestnut | lualib-src/chestnut/lua-stack.c | #define LUA_LIB
#include <lua.h>
#include <lauxlib.h>
#include <stdlib.h>
#include <stdbool.h>
#include <assert.h>
/* stack:push(value)
 * Pushes `value` onto the stack table.  The table keeps its element
 * count at integer key 0; elements live at keys 1..count.
 * Raises an error if `value` is nil.  Returns nothing. */
static int
lpush(lua_State *L) {
	lua_settop(L, 2);	/* forbid extra arguments beyond (self, value) */
	luaL_checktype(L, 1, LUA_TTABLE);
	lua_rawgeti(L, 1, 0);	/* current element count, stored at key 0 */
	lua_Integer top = luaL_checkinteger(L, -1);
	if (lua_type(L, 2) == LUA_TNIL) {
		luaL_error(L, "args #2 must not be nil.");
		return 0;
	}
	lua_pushvalue(L, 2); // copy the value so it survives the rawseti
	lua_rawseti(L, 1, ++top);	/* t[count+1] = value */
	lua_pushinteger(L, top);
	lua_rawseti(L, 1, 0);	/* store the updated count */
	return 0;
}
/* stack:pop() -> top value, or nothing when the stack is empty.
 * Removes the top element by decrementing the count at key 0; the old
 * slot is left in place and simply overwritten by a later push. */
static int
lpop(lua_State *L) {
	luaL_checktype(L, 1, LUA_TTABLE);
	lua_rawgeti(L, 1, 0);	/* element count at key 0 */
	lua_Integer top = luaL_checkinteger(L, -1);
	if (top <= 0) {
		return 0;	/* empty stack: return nothing */
	}
	lua_rawgeti(L, 1, top--);	/* push t[count] as the result */
	lua_pushinteger(L, top);
	lua_rawseti(L, 1, 0);	/* store the decremented count */
	return 1;
}
/* stack:peek() -> top value without removing it (nothing when empty). */
static int
lpeek(lua_State *L) {
	luaL_checktype(L, 1, LUA_TTABLE);
	lua_rawgeti(L, 1, 0);	/* element count at key 0 */
	lua_Integer top = luaL_checkinteger(L, -1);
	if (top <= 0) {
		return 0;	/* empty stack: return nothing */
	}
	lua_rawgeti(L, 1, top);	/* push t[count] without touching the count */
	return 1;
}
/* __newindex metamethod: direct assignment into the stack table is
 * forbidden; mutation must go through push()/pop().
 * Fixed typo in the raised error message ("suppoort" -> "supported"). */
static int
lnewindex(lua_State *L) {
	luaL_error(L, "not supported.");
	return 0;
}
/* __len metamethod: #stack returns the element count stored at key 0. */
static int
llen(lua_State *L) {
	luaL_checktype(L, 1, LUA_TTABLE);
	lua_rawgeti(L, 1, 0);
	return 1;
}
/* Stateless iterator used by lpairs: walks the stack from the top
 * element down to index 1, returning (index, value) pairs, then stops. */
static int
lnext(lua_State *L) {
	luaL_checktype(L, 1, LUA_TTABLE);
	lua_Integer idx;
	if (lua_isnoneornil(L, 2)) {
		/* first call: start at the stored element count (the top) */
		lua_rawgeti(L, 1, 0);
		idx = luaL_checkinteger(L, -1);
	} else {
		idx = lua_tointeger(L, 2);
		--idx;	/* continue downwards */
	}
	if (idx <= 0) {
		return 0;	/* walked past the bottom: end of iteration */
	}
	lua_pushinteger(L, idx);
	lua_rawgeti(L, 1, idx);
	return 2;
}
/* __pairs metamethod: pairs(stack) iterates the elements from the top
 * of the stack down to the bottom via the lnext iterator. */
static int
lpairs(lua_State *L) {
	luaL_checktype(L, 1, LUA_TTABLE);
	lua_pushcfunction(L, lnext);	/* iterator function */
	lua_pushvalue(L, 1);	/* state: the stack table itself */
	lua_pushnil(L);	/* initial control value */
	return 3;
}
/* __gc metamethod: the stack owns no C-side resources, so there is
 * nothing to release. */
static int
lfree(lua_State *L) {
	return 0;
}
/* Stack constructor (closure over the shared metatable as upvalue 1).
 * Creates a new stack table pre-sized for the initial arguments, sets
 * the metatable, copies the arguments into slots 1..n and records the
 * element count at key 0.  Returns the new stack. */
static int
lalloc(lua_State *L) {
	int prearr = 16;
	int n = lua_gettop(L);
	while (n > prearr) {
		prearr *= 2;	/* round the array capacity up to a power of two */
	}
	lua_createtable(L, prearr, 1);
	lua_pushvalue(L, lua_upvalueindex(1));	/* shared metatable */
	lua_setmetatable(L, -2);
	for (int i = 1; i <= n; i++) {
		lua_pushvalue(L, i);
		lua_rawseti(L, -2, i);	/* t[i] = arg i */
	}
	lua_pushinteger(L, n);
	lua_rawseti(L, -2, 0);	/* element count lives at key 0 */
	return 1;
}
/* Module entry point for require"chestnut.stack".
 * Builds the stack metatable (__newindex/__pairs/__len/__gc, with the
 * push/pop/peek methods reachable through __index) and returns the
 * constructor closure, which carries that metatable as its upvalue. */
LUAMOD_API int
luaopen_chestnut_stack(lua_State *L) {
	luaL_checkversion(L);
	luaL_Reg l[] = {
		{ "__newindex", lnewindex },
		{ "__pairs", lpairs },
		{ "__len", llen },
		{ "__gc", lfree },
		{ NULL, NULL },
	};
	luaL_newlib(L, l); // metatable for all stacks
	luaL_Reg il[] = {
		{ "push", lpush },
		{ "pop", lpop },
		{ "peek", lpeek },
		{ NULL, NULL },
	};
	luaL_newlib(L, il);	/* method table */
	lua_setfield(L, -2, "__index");	/* metatable.__index = methods */
	lua_pushcclosure(L, lalloc, 1);	/* constructor closing over the metatable */
	return 1;
}
chcbaram/arm_seminar_fw | tutorial/04/result/mission_2/04_m2/src/ap/ap.c | <reponame>chcbaram/arm_seminar_fw
/*
* ap.c
*
* Created on: 2016. 7. 13.
* Author: Baram
*/
#include "ap.h"
int demoCmdif(int argc, char **argv);
/* One-time application-layer initialization (currently nothing to do). */
void apInit(void)
{
}
/* Application main loop: starts the command interface on UART1 at
 * 115200 baud, registers the "demo" command, then services the
 * command interface forever (never returns). */
void apMain(void)
{
  cmdifBegin(_DEF_UART1, 115200);
  cmdifAdd("demo", demoCmdif);
  while(1)
  {
    cmdifLoop();
  }
}
//-- demoCmdif
//
/* "demo" shell command handler.
 * Usage: demo led <time_ms> — toggles LED 0 every <time_ms> milliseconds
 * until any character arrives on the command interface.  Any other
 * invocation prints a usage line.  Always returns 0. */
int demoCmdif(int argc, char **argv)
{
  if (argc == 3 && strcmp("led", argv[1]) == 0)
  {
    uint32_t period_ms = (uint32_t) strtoul(argv[2], NULL, 0);

    /* Blink until a key is pressed. */
    while(cmdifRxAvailable() == 0)
    {
      ledToggle(0);
      delay(period_ms);
    }
    return 0;
  }

  cmdifPrintf( "demo led time(ms) ...\n");
  return 0;
}
|
hyena/weasyl | weasyl/test/resetpassword/test_request.py | import pytest
import arrow
from weasyl.test import db_utils
from weasyl import resetpassword
from weasyl import define as d
from weasyl.error import WeasylError
class Bag(object):
    """Minimal attribute container: every keyword argument becomes an attribute."""

    def __init__(self, **kw):
        self.__dict__.update(kw)
@pytest.mark.usefixtures('db')
def test_user_must_exist_for_a_forgotten_password_request_to_be_made():
    """A reset request for an unknown user raises 'loginRecordMissing'."""
    # Read the clock once: three separate arrow.now() calls could straddle
    # a day/month boundary and produce an inconsistent birth date.
    now = arrow.now()
    form = Bag(email="<EMAIL>", username="test",
               day=now.day, month=now.month, year=now.year)

    with pytest.raises(WeasylError) as err:
        resetpassword.request(form)
    assert 'loginRecordMissing' == err.value.value
@pytest.mark.usefixtures('db')
def test_email_must_match_email_stored_in_DB():
    """A reset request whose email does not match the stored one raises 'emailInvalid'."""
    user_name = "test"
    db_utils.create_user(email_addr="<EMAIL>", username=user_name)

    # Single clock read keeps day/month/year mutually consistent even if
    # the test runs across a day/month boundary.
    now = arrow.now()
    form = Bag(email="<EMAIL>", username=user_name,
               day=now.day, month=now.month, year=now.year)

    with pytest.raises(WeasylError) as err:
        resetpassword.request(form)
    assert 'emailInvalid' == err.value.value
@pytest.mark.usefixtures('db')
def test_verify_success_if_valid_information_provided():
    """A valid reset request stores a 100-character token that checktoken accepts."""
    user_name = "test"
    email_addr = "<EMAIL>"
    user_id = db_utils.create_user(email_addr=email_addr, username=user_name)

    # Single clock read keeps day/month/year mutually consistent even if
    # the test runs across a day/month boundary.
    now = arrow.now()
    form = Bag(email=email_addr, username=user_name,
               day=now.day, month=now.month, year=now.year)
    resetpassword.request(form)

    pw_reset_token = d.engine.scalar("SELECT token FROM forgotpassword WHERE userid = %(id)s", id=user_id)
    assert 100 == len(pw_reset_token)
    assert resetpassword.checktoken(pw_reset_token)
|
zeta1999/rootba | src/rootba/util/enum_utils.hpp | <gh_stars>100-1000
/**
BSD 3-Clause License
This file is part of the RootBA project.
https://github.com/NikolausDemmel/rootba
Copyright (c) 2021, <NAME>.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#pragma once
#include <type_traits>
///////////////////////////////////////////////////////////////////////////////
#include <wise_enum/wise_enum.h>
///////////////////////////////////////////////////////////////////////////////
#include <flags/flags.hpp>
// helpers to declare flags for enums in the rootba namespace
//
// NOTE: ROOTBA_DECLARE_FLAGS and ROOTBA_REGISTER_CLASS_FLAGS deliberately
// close and reopen `namespace rootba` (the stray `}` / `namespace rootba {`
// lines) because ALLOW_FLAGS_FOR_ENUM must be invoked at global scope.
// They are therefore only usable directly inside `namespace rootba { ... }`.
#define ROOTBA_DECLARE_FLAGS(FLAGTYPE, ENUMTYPE)  \
  }                                               \
  ALLOW_FLAGS_FOR_ENUM(rootba::ENUMTYPE);         \
  namespace rootba {                              \
  using FLAGTYPE = flags::flags<ENUMTYPE>

// Declares a flags alias for an enum nested inside a class (registration
// must be done separately with ROOTBA_REGISTER_CLASS_FLAGS).
#define ROOTBA_DECLARE_CLASS_FLAGS(FLAGTYPE, ENUMTYPE) \
  using FLAGTYPE = flags::flags<ENUMTYPE>

// Registers a class-nested enum with the flags library; the trailing
// static_assert(true) swallows the caller's semicolon.
#define ROOTBA_REGISTER_CLASS_FLAGS(ENUMTYPE) \
  }                                           \
  ALLOW_FLAGS_FOR_ENUM(rootba::ENUMTYPE);     \
  namespace rootba {                          \
  static_assert(true)
///////////////////////////////////////////////////////////////////////////////
namespace rootba {
// "all flags" for wise-enum supported flag types
// "all flags" for wise-enum supported flag types
// Computes the union (bitwise OR) of every enumerator declared in the
// flag type's underlying wise-enum, i.e. the mask with all flags set.
template <class FlagsType>
constexpr FlagsType  // NOLINT(readability-const-return-type)
compute_all_flags_union() {
  // Note: beware of https://github.com/grisumbras/enum-flags/issues/15
  // which is why we cannot do simply "~FlagsType(flags::empty)"
  auto res = std::decay_t<FlagsType>(flags::empty);
  for (const auto& [value, _] :
       wise_enum::range<typename std::decay_t<FlagsType>::enum_type>) {
    res = res | value;
  }
  return res;
}

// The empty flag set (no bits set).
template <typename FlagsType>
constexpr FlagsType no_flags = FlagsType(flags::empty);

// The full flag set (every declared enumerator set).
template <typename FlagsType>
constexpr FlagsType all_flags = compute_all_flags_union<FlagsType>();

// type trait for flags: true iff F is an instantiation of flags::flags
template <class F>
struct is_flags_type : std::false_type {};

template <class E>
struct is_flags_type<flags::flags<E>> : std::true_type {};

// type trait for flags of wise-enums: additionally requires the wrapped
// enum to be a wise-enum (SFINAE removes the specialization otherwise)
template <class F>
struct is_wise_enum_flags_type : std::false_type {};

template <class E>
struct is_wise_enum_flags_type<flags::flags<E>>
    : std::enable_if_t<wise_enum::is_wise_enum_v<E>, std::true_type> {};
} // namespace rootba
|
Rose2073/RoseCppSource | NKZX_NOI_OJ/P1496.cpp | #include<cstdio>
// Print the lower-triangular 9x9 multiplication table, one row per line,
// with each row's products listed from row*row down to row*1.
int main(){
    for(int row = 1; row <= 9; ++row){
        for(int col = row; col >= 1; --col){
            printf(" %d*%d=%2d", row, col, row * col);
        }
        putchar('\n');
    }
    return 0;
}
|
JamesCao2048/BlizzardData | Corpus/aspectj/909.java | aspect X {
intertype Target {
int i = 5;
}
}
class Target {}
|
hei6775/light-protoactor-go | router/broadcast_router.go | package router
import "github.com/hei6775/light-protoactor-go/actor"
// broadcastGroupRouter routes to an externally supplied, fixed group of PIDs.
type broadcastGroupRouter struct {
	GroupRouter
}

// broadcastPoolRouter routes to a pool of actors spawned and owned by the router.
type broadcastPoolRouter struct {
	PoolRouter
}

// broadcastRouterState holds the current routee set of a broadcast router.
type broadcastRouterState struct {
	routees *actor.PIDSet
}
// SetRoutees replaces the router's current routee set.
func (state *broadcastRouterState) SetRoutees(routees *actor.PIDSet) {
	state.routees = routees
}

// GetRoutees returns the router's current routee set.
func (state *broadcastRouterState) GetRoutees() *actor.PIDSet {
	return state.routees
}

// RouteMessage forwards the message to every routee, preserving the
// original sender so replies go back to it rather than to the router.
func (state *broadcastRouterState) RouteMessage(message interface{}, sender *actor.PID) {
	state.routees.ForEach(func(i int, pid actor.PID) {
		actor.Request(&pid, message, sender)
	})
}
// NewBroadcastPool returns props for a router that spawns and owns a pool
// of `size` actors and broadcasts every message to all of them.
func NewBroadcastPool(size int) *actor.Props {
	return actor.FromSpawnFunc(spawner(&broadcastPoolRouter{PoolRouter{PoolSize: size}}))
}

// NewBroadcastGroup returns props for a router that broadcasts every
// message to the given, externally managed PIDs.
func NewBroadcastGroup(routees ...*actor.PID) *actor.Props {
	return actor.FromSpawnFunc(spawner(&broadcastGroupRouter{GroupRouter{Routees: actor.NewPIDSet(routees...)}}))
}
// CreateRouterState returns a fresh, empty broadcast routing state.
func (config *broadcastPoolRouter) CreateRouterState() Interface {
	return &broadcastRouterState{}
}

// CreateRouterState returns a fresh, empty broadcast routing state.
func (config *broadcastGroupRouter) CreateRouterState() Interface {
	return &broadcastRouterState{}
}
|
zaion520/ATtomato | release/src-rt-6.x.4708/router/samba3/source3/rpc_server/eventlog/srv_eventlog_reg.c | <reponame>zaion520/ATtomato
/*
* Unix SMB/CIFS implementation.
*
* Eventlog RPC server keys initialization
*
* Copyright (c) 2005 <NAME>
* Copyright (c) 2005 <NAME>
* Copyright (c) 2005 <NAME>
* Copyright (c) 2011 <NAME> <<EMAIL>>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, see <http://www.gnu.org/licenses/>.
*/
#include "includes.h"
#include "../librpc/gen_ndr/ndr_winreg_c.h"
#include "rpc_client/cli_winreg_int.h"
#include "rpc_client/cli_winreg.h"
#include "rpc_server/eventlog/srv_eventlog_reg.h"
#include "auth.h"
#undef DBGC_CLASS
#define DBGC_CLASS DBGC_REGISTRY
#define TOP_LEVEL_EVENTLOG_KEY "SYSTEM\\CurrentControlSet\\Services\\Eventlog"
/*
 * Initialise the eventlog registry keys if needed.
 *
 * Opens HKLM\SYSTEM\CurrentControlSet\Services\Eventlog as the SYSTEM
 * session, enumerates the existing per-log subkeys, and for each log
 * named in the smb.conf "eventlog list" that has no subkey yet creates:
 *
 *   Eventlog\<log>        with MaxSize, Retention, PrimaryModule,
 *                         File (the log's tdb path) and Sources values
 *   Eventlog\<log>\<log>  with CategoryCount and CategoryMessageFile
 *
 * Returns true on success, false on the first fatal RPC/registry error.
 */
bool eventlog_init_winreg(struct messaging_context *msg_ctx)
{
	struct dcerpc_binding_handle *h = NULL;
	uint32_t access_mask = SEC_FLAG_MAXIMUM_ALLOWED;
	struct policy_handle hive_hnd, key_hnd;
	uint32_t uiMaxSize = 0x00080000;	/* default log size: 512 KiB */
	uint32_t uiRetention = 0x93A80;		/* 604800 s = 7 days retention */
	const char **elogs = lp_eventlog_list();
	const char **subkeys = NULL;
	uint32_t num_subkeys = 0;
	uint32_t i;
	char *key = NULL;
	NTSTATUS status;
	WERROR result = WERR_OK;
	bool ok = false;
	TALLOC_CTX *tmp_ctx;

	tmp_ctx = talloc_stackframe();
	if (tmp_ctx == NULL) {
		return false;
	}

	DEBUG(3, ("Initialise the eventlog registry keys if needed.\n"));

	/* NOTE(review): talloc_strdup() result is not NULL-checked here. */
	key = talloc_strdup(tmp_ctx, TOP_LEVEL_EVENTLOG_KEY);

	/* Open the top-level Eventlog key under HKLM as SYSTEM. */
	status = dcerpc_winreg_int_hklm_openkey(tmp_ctx,
						get_session_info_system(),
						msg_ctx,
						&h,
						key,
						false,
						access_mask,
						&hive_hnd,
						&key_hnd,
						&result);
	if (!NT_STATUS_IS_OK(status)) {
		DEBUG(0, ("eventlog_init_winreg: Could not open %s - %s\n",
			key, nt_errstr(status)));
		goto done;
	}
	if (!W_ERROR_IS_OK(result)) {
		DEBUG(0, ("eventlog_init_winreg: Could not open %s - %s\n",
			key, win_errstr(result)));
		goto done;
	}

	/* Enumerate the per-log subkeys that already exist. */
	status = dcerpc_winreg_enum_keys(tmp_ctx,
					 h,
					 &key_hnd,
					 &num_subkeys,
					 &subkeys,
					 &result);
	if (!NT_STATUS_IS_OK(status)) {
		DEBUG(0, ("eventlog_init_winreg: Could enum keys at %s - %s\n",
			key, nt_errstr(status)));
		goto done;
	}
	if (!W_ERROR_IS_OK(result)) {
		DEBUG(0, ("eventlog_init_winreg: Could enum keys at %s - %s\n",
			key, win_errstr(result)));
		goto done;
	}

	if (is_valid_policy_hnd(&key_hnd)) {
		dcerpc_winreg_CloseKey(h, tmp_ctx, &key_hnd, &result);
	}

	/* create subkeys if they don't exist */
	while (elogs && *elogs) {
		enum winreg_CreateAction action = REG_ACTION_NONE;
		char *evt_tdb = NULL;
		struct winreg_String wkey;
		struct winreg_String wkeyclass;
		bool skip = false;

		/* Skip logs whose registry key already exists. */
		for (i = 0; i < num_subkeys; i++) {
			if (strequal(subkeys[i], *elogs)) {
				skip = true;
			}
		}

		if (skip) {
			elogs++;
			continue;
		}

		ZERO_STRUCT(key_hnd);
		ZERO_STRUCT(wkey);

		/* Eventlog\<log> */
		wkey.name = talloc_asprintf(tmp_ctx, "%s\\%s", key, *elogs);
		if (wkey.name == NULL) {
			result = WERR_NOMEM;
			goto done;
		}

		ZERO_STRUCT(wkeyclass);
		wkeyclass.name = "";

		status = dcerpc_winreg_CreateKey(h,
						 tmp_ctx,
						 &hive_hnd,
						 wkey,
						 wkeyclass,
						 0,
						 access_mask,
						 NULL,
						 &key_hnd,
						 &action,
						 &result);
		if (!NT_STATUS_IS_OK(status)) {
			DEBUG(0, ("eventlog_init_winreg_keys: Could not create key %s: %s\n",
				wkey.name, nt_errstr(status)));
			goto done;
		}
		if (!W_ERROR_IS_OK(result)) {
			DEBUG(0, ("eventlog_init_winreg_keys: Could not create key %s: %s\n",
				wkey.name, win_errstr(result)));
			goto done;
		}

		/*
		 * NOTE(review): the NTSTATUS of the following set-value calls
		 * is assigned but never checked, so a failure to write any
		 * individual value is silently ignored — confirm intended.
		 */
		status = dcerpc_winreg_set_dword(tmp_ctx,
						 h,
						 &key_hnd,
						 "MaxSize",
						 uiMaxSize,
						 &result);

		status = dcerpc_winreg_set_dword(tmp_ctx,
						 h,
						 &key_hnd,
						 "Retention",
						 uiRetention,
						 &result);

		status = dcerpc_winreg_set_sz(tmp_ctx,
					      h,
					      &key_hnd,
					      "PrimaryModule",
					      *elogs,
					      &result);

		/* Path of the tdb file backing this log. */
		evt_tdb = talloc_asprintf(tmp_ctx,
					  "%%SystemRoot%%\\system32\\config\\%s.tdb",
					  *elogs);
		if (evt_tdb == NULL) {
			goto done;
		}
		status = dcerpc_winreg_set_expand_sz(tmp_ctx,
						     h,
						     &key_hnd,
						     "File",
						     evt_tdb,
						     &result);
		TALLOC_FREE(evt_tdb);

		status = dcerpc_winreg_add_multi_sz(tmp_ctx,
						    h,
						    &key_hnd,
						    "Sources",
						    *elogs,
						    &result);

		if (is_valid_policy_hnd(&key_hnd)) {
			dcerpc_winreg_CloseKey(h, tmp_ctx, &key_hnd, &result);
		}

		/* sub-subkeys: Eventlog\<log>\<log> with category info */
		{
			uint32_t uiCategoryCount = 0x00000007;

			/* wkey.name becomes "<top>\<log>\<log>". */
			wkey.name = talloc_asprintf(tmp_ctx,
						    "%s\\%s",
						    wkey.name, *elogs);
			if (wkey.name == NULL) {
				result = WERR_NOMEM;
				goto done;
			}

			status = dcerpc_winreg_CreateKey(h,
							 tmp_ctx,
							 &hive_hnd,
							 wkey,
							 wkeyclass,
							 0,
							 access_mask,
							 NULL,
							 &key_hnd,
							 &action,
							 &result);
			if (!NT_STATUS_IS_OK(status)) {
				DEBUG(0, ("eventlog_init_winreg_keys: Could not create key %s: %s\n",
					wkey.name, nt_errstr(status)));
				goto done;
			}
			if (!W_ERROR_IS_OK(result)) {
				DEBUG(0, ("eventlog_init_winreg_keys: Could not create key %s: %s\n",
					wkey.name, win_errstr(result)));
				goto done;
			}

			status = dcerpc_winreg_set_dword(tmp_ctx,
							 h,
							 &key_hnd,
							 "CategoryCount",
							 uiCategoryCount,
							 &result);

			status = dcerpc_winreg_set_expand_sz(tmp_ctx,
							     h,
							     &key_hnd,
							     "CategoryMessageFile",
							     "%SystemRoot%\\system32\\eventlog.dll",
							     &result);

			if (is_valid_policy_hnd(&key_hnd)) {
				dcerpc_winreg_CloseKey(h, tmp_ctx, &key_hnd, &result);
			}
		}

		elogs++;
	} /* loop */

	ok = true;
done:
	TALLOC_FREE(tmp_ctx);
	return ok;
}
/* vim: set ts=8 sw=8 noet cindent syntax=c.doxygen: */
|
softwartechnik/catalin | core/src/test/java/de/softwartechnik/catalin/core/repository/map/AbstractMapRepositoryTest.java | package de.softwartechnik.catalin.core.repository.map;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import de.softwartechnik.catalin.core.model.CatalinModel;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/**
 * Unit tests for {@code AbstractMapRepository}, backed by a plain
 * in-memory {@code HashMap} pre-populated with three models.
 */
class AbstractMapRepositoryTest {

    private static final CatalinModel TEST_MODEL_1 = new Model(1);
    private static final CatalinModel TEST_MODEL_2 = new Model(2);
    private static final CatalinModel TEST_MODEL_3 = new Model(3);

    private static final long TEST_MODEL_COUNT = 3;
    private static final long ID_NOT_FOUND = 5;

    // Backing storage handed to the repository; double-brace initialization
    // registers the three fixture models under their own ids.
    private final Map<Long, CatalinModel> storage = new HashMap<Long, CatalinModel>() {{
        put(TEST_MODEL_1.getId(), TEST_MODEL_1);
        put(TEST_MODEL_2.getId(), TEST_MODEL_2);
        put(TEST_MODEL_3.getId(), TEST_MODEL_3);
    }};

    public AbstractMapRepository myMapRep;

    @BeforeEach
    void setUp() {
        myMapRep = new AbstractMapRepository<>(storage, CatalinModel.class);
    }

    @Test
    void save() {
        CatalinModel TEST_MODEL_4 = new Model(4);
        assertEquals(TEST_MODEL_4, myMapRep.save(TEST_MODEL_4));
        // Assert against storage.size() directly: verifying via
        // myMapRep.findAll() would rely on another method under test.
        // #Testception
        assertEquals(TEST_MODEL_COUNT + 1, storage.size());
    }

    @Test
    void find() {
        assertEquals(TEST_MODEL_1, myMapRep.find(TEST_MODEL_1.getId()));
        assertEquals(TEST_MODEL_2, myMapRep.find(TEST_MODEL_2.getId()));
        assertNull(myMapRep.find(ID_NOT_FOUND));
        assertNotEquals(TEST_MODEL_1, myMapRep.find(TEST_MODEL_2.getId()));
    }

    @Test
    void findAll() {
        List<Model> resList = myMapRep.findAll();
        CatalinModel[] models = {TEST_MODEL_1, TEST_MODEL_2, TEST_MODEL_3};
        assertEquals(TEST_MODEL_COUNT, resList.size());
        // assertArrayEquals also checks element order... strictly more than
        // our repository guarantees, but it holds for this fixture.
        assertArrayEquals(models, resList.toArray());
    }

    @Test
    void remove() {
        myMapRep.remove(TEST_MODEL_1);
        assertNull(myMapRep.find(TEST_MODEL_1.getId()));
        assertEquals(TEST_MODEL_COUNT - 1, storage.size());
    }

    /** Minimal CatalinModel implementation used as the test fixture type. */
    public static class Model implements CatalinModel {

        private long id;

        public Model(long id) {
            this.id = id;
        }

        @Override
        public long getId() {
            return id;
        }

        @Override
        public void setId(long id) {
            this.id = id;
        }
    }
}
|
sourceryinstitute/rose-sourcery-institute | projects/CodeThorn/src/RersCounterexample.h | <reponame>sourceryinstitute/rose-sourcery-institute<filename>projects/CodeThorn/src/RersCounterexample.h
#ifndef RERS_COUNTEREXAMPLE_H
#define RERS_COUNTEREXAMPLE_H
#include "ExecutionTrace.h"
namespace CodeThorn {
class EState;
class TransitionGraph;
class Analyzer;
/**
* @brief An input/output counterexample in a syntax resembling that of RERS
*
* \author <NAME>
* \date 2017
*/
class RersCounterexample : public ExecutionTrace {
public:
  // Renders the counterexample in RERS syntax using input states only.
  std::string toRersIString() const;
  // Renders the counterexample in RERS syntax with inputs and outputs.
  std::string toRersIOString() const;
  // Returns a copy containing only the states for which `predicate` holds.
  RersCounterexample onlyStatesSatisfying(std::function<bool(const EState*)> predicate) const;
  // Convenience filters built on onlyStatesSatisfying (input / input+output states).
  RersCounterexample onlyIStates() const;
  RersCounterexample onlyIOStates() const;
private:
  // Shared implementation of the two public to-string variants.
  std::string toRersIOString(bool withOutput) const;
  // Maps a state value to its RERS character representation.
  char toRersChar(int value) const;
};
}
#endif
|
JamesCao2048/BlizzardData | Corpus/birt/154.java | <filename>Corpus/birt/154.java
/*******************************************************************************
* Copyright (c) 2004 Actuate Corporation.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Actuate Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.birt.report.designer.internal.ui.views.attributes.page;
import java.util.List;
import org.eclipse.birt.report.designer.internal.ui.views.attributes.provider.LibraryDescriptorProvider;
import org.eclipse.birt.report.designer.internal.ui.views.attributes.section.FormTextSection;
import org.eclipse.birt.report.designer.internal.ui.views.attributes.section.SeperatorSection;
import org.eclipse.birt.report.designer.internal.ui.views.attributes.section.TextSection;
import org.eclipse.birt.report.designer.nls.Messages;
import org.eclipse.birt.report.designer.ui.IReportGraphicConstants;
import org.eclipse.birt.report.designer.ui.ReportPlatformUIImages;
import org.eclipse.birt.report.designer.util.ColorManager;
import org.eclipse.birt.report.designer.util.DEUtil;
import org.eclipse.birt.report.designer.util.FontManager;
import org.eclipse.birt.report.model.api.GroupElementHandle;
import org.eclipse.jface.resource.JFaceResources;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.Font;
import org.eclipse.swt.graphics.FontData;
import org.eclipse.swt.widgets.Composite;
/**
 * Base class for "General" attribute pages.  Adds a library section (name,
 * "modified" note and separator) that is shown only when the current
 * selection consists of library-extended elements; subclasses contribute
 * their own content via {@link #buildContent()}.
 */
public abstract class GeneralPage extends GeneralFontPage
{

	// Section showing the name of the library the selection extends.
	private TextSection librarySection;

	// Horizontal separator below the library/note sections.
	private SeperatorSection seperatorSection;

	// Note shown when the extended elements carry local property overrides.
	private FormTextSection noteSection;

	/**
	 * Builds the page UI: library section, "modified" note, separator,
	 * then the subclass content, and finally creates and lays out all
	 * registered sections.
	 */
	public void buildUI( Composite parent )
	{
		super.buildUI( parent );
		container.setLayout( WidgetUtil.createGridLayout( 6, 15 ) );
		LibraryDescriptorProvider provider = new LibraryDescriptorProvider( );
		librarySection = new TextSection( provider.getDisplayName( ),
				container,
				true );
		librarySection.setWidth( 500 );
		librarySection.setFillText( false );
		librarySection.setProvider( provider );
		addSection( PageSectionId.GENERAL_LIBRARY, librarySection );
		noteSection = new FormTextSection( "", container, true ); //$NON-NLS-1$
		noteSection.setWidth( 500 );
		noteSection.setFillText( false );
		// Eclipse Forms markup: the restore-properties icon is embedded in
		// the localized note text via the "image" href placeholder.
		noteSection.setText( "<form><p><span color=\"color\">" + //$NON-NLS-1$
				Messages.getFormattedString( "GeneralPage.Library.Modified.Note",
						new Object[]{
							"</span> <img href=\"image\"/> <span color=\"color\">"} ) + //$NON-NLS-1$
				"</span></p></form>" ); //$NON-NLS-1$
		noteSection.setImage( "image", //$NON-NLS-1$
				ReportPlatformUIImages.getImage( IReportGraphicConstants.ICON_ENABLE_RESTORE_PROPERTIES ) );
		noteSection.setColor( "color", ColorManager.getColor( 127, 127, 127 ) ); //$NON-NLS-1$
		addSection( PageSectionId.GENERAL_LIBRARY_NOTE, noteSection );
		seperatorSection = new SeperatorSection( container, SWT.HORIZONTAL );
		addSection( PageSectionId.GENERAL_SEPERATOR, seperatorSection );
		buildContent( );
		createSections( );
		layoutSections( );
	}

	/**
	 * Refreshes the page: shows the library/note/separator sections only
	 * when the selection is library-extended (the note only when local
	 * property overrides exist), then delegates to the superclass and
	 * re-lays out the container.
	 */
	public void refresh( )
	{
		if ( input instanceof List
				&& DEUtil.getMultiSelectionHandle( (List) input )
						.isExtendedElements( ) )
		{
			librarySection.setHidden( false );
			if ( hasLocalProperties( ) )
			{
				noteSection.setHidden( false );
				// Render the note one point smaller than the dialog font.
				Font font = JFaceResources.getDialogFont( );
				FontData fData = font.getFontData( )[0];
				fData.setHeight( fData.getHeight( ) - 1 );
				noteSection.getTextControl( )
						.setFont( FontManager.getFont( fData ) );
			}
			else
				noteSection.setHidden( true );
			seperatorSection.setHidden( false );
			librarySection.load( );
		}
		else
		{
			librarySection.setHidden( true );
			noteSection.setHidden( true );
			seperatorSection.setHidden( true );
		}
		super.refresh( );
		container.layout( true );
		container.redraw( );
	}

	/** Returns true when the selected extended elements have local property overrides. */
	private boolean hasLocalProperties( )
	{
		GroupElementHandle groupHandle = DEUtil.getGroupElementHandle( (List) input );
		return groupHandle.hasLocalPropertiesForExtendedElements( );
	}

	/**
	 * Builds UI content of this page.
	 *
	 * @param content
	 *            parent composite.
	 */
	protected abstract void buildContent( );
}
|
antonmedv/year | packages/1989/01/27/index.js | module.exports = new Date(1989, 0, 27)
|
chriskim06/go-sdk | fileutil/parse_file_size_test.go | <reponame>chriskim06/go-sdk
/*
Copyright (c) 2021 - Present. Blend Labs, Inc. All rights reserved
Use of this source code is governed by a MIT license that can be found in the LICENSE file.
*/
package fileutil
import (
"testing"
"github.com/blend/go-sdk/assert"
)
// Test_FileParseSize exercises ParseFileSize with unit-suffixed sizes
// ("2gb", "3mb", "123kb"), a bare byte count, the empty string (parses
// to 0 without error) and a malformed value (error, 0).
func Test_FileParseSize(t *testing.T) {
	assert := assert.New(t)

	parsed, err := ParseFileSize("2gb")
	assert.Nil(err)
	assert.Equal(2*Gigabyte, parsed)

	parsed, err = ParseFileSize("3mb")
	assert.Nil(err)
	assert.Equal(3*Megabyte, parsed)

	parsed, err = ParseFileSize("123kb")
	assert.Nil(err)
	assert.Equal(123*Kilobyte, parsed)

	// No suffix: the value is taken as a raw byte count.
	parsed, err = ParseFileSize("12345")
	assert.Nil(err)
	assert.Equal(12345, parsed)

	// Empty input is treated as zero, not as an error.
	parsed, err = ParseFileSize("")
	assert.Nil(err)
	assert.Equal(0, parsed)

	parsed, err = ParseFileSize("bogus")
	assert.NotNil(err)
	assert.Equal(0, parsed)
}
|
beckerhe/orbitprofiler | OrbitCore/OrbitSession.h | //-----------------------------------
// Copyright <NAME> 2013-2017
//-----------------------------------
#pragma once
#include "Core.h"
#include "Serialization.h"
//-----------------------------------------------------------------------------
// A module captured in a saved debugging session: its name plus the
// hashes of the selected functions and the names of watched variables.
struct SessionModule {
  std::string m_Name;                            // module name
  std::vector<uint64_t> m_FunctionHashes;        // hashes identifying selected functions
  std::vector<std::wstring> m_WatchedVariables;  // names of watched variables

  // Cereal serialization: archives all three members as named values.
  template <class Archive>
  void serialize(Archive& archive, uint32_t /*version*/) {
    archive(CEREAL_NVP(m_Name), CEREAL_NVP(m_FunctionHashes),
            CEREAL_NVP(m_WatchedVariables));
  }
};
//-----------------------------------------------------------------------------
// A saved profiling/debugging session: the target process, how it was
// launched, and the per-module selections (see SessionModule).
class Session {
 public:
  Session();
  ~Session();

  ORBIT_SERIALIZABLE;  // declares serialization support (see macro definition)

  std::string m_FileName;          // file this session is saved to / loaded from
  std::string m_ProcessFullPath;   // full path of the target executable
  std::string m_WorkingDirectory;  // working directory used to launch the target
  std::string m_Arguments;         // command-line arguments for the target
  std::map<std::string, SessionModule> m_Modules;  // per-module session data, keyed by module name
};
|
zyj-nanjing/SmartCloud | SmartCloud-infrastructure/src/main/java/www/bwsensing/com/device/gatewayimpl/database/ProductModelMapper.java | <reponame>zyj-nanjing/SmartCloud<filename>SmartCloud-infrastructure/src/main/java/www/bwsensing/com/device/gatewayimpl/database/ProductModelMapper.java<gh_stars>0
package www.bwsensing.com.device.gatewayimpl.database;
import www.bwsensing.com.device.gatewayimpl.database.dataobject.ProductModelDO;
import org.apache.ibatis.annotations.Param;
import java.util.List;
/**
* @author macos-zyj
*/
/**
 * MyBatis mapper for product model persistence and the model/industry
 * association table.
 *
 * @author macos-zyj
 */
public interface ProductModelMapper {

    /**
     * Counts how many product models use the given name.
     *
     * @param name model name to check
     * @return number of models with that name
     */
    Integer countModelByName(String name);

    /**
     * Queries a product model by primary key.
     *
     * @param id product model primary key
     * @return the product model, or null if not found
     */
    ProductModelDO getProductModelById(Integer id);

    /**
     * Queries the list of product models matching the given filter.
     *
     * @param productModel filter holding the product model criteria
     * @return matching product models
     */
    List<ProductModelDO> queryProductModelBySort(ProductModelDO productModel);

    /**
     * Inserts a new product model.
     *
     * @param productModel product model to insert
     * @return number of affected rows
     */
    int saveProductModel(ProductModelDO productModel);

    /**
     * Updates an existing product model.
     *
     * @param productModel product model carrying the new values
     * @return number of affected rows
     */
    int updateProductModel(ProductModelDO productModel);

    /**
     * Deletes a product model by primary key.
     *
     * @param id product model primary key
     * @return number of affected rows
     */
    int deleteProductModelById(Integer id);

    /**
     * Deletes product models in batch.
     *
     * @param ids primary keys of the rows to delete
     * @return number of affected rows
     */
    int deleteProductModelByIds(String[] ids);

    /**
     * Saves the association between a product model and an industry sector.
     *
     * @param modelId    product model id
     * @param industryId industry sector id
     */
    void saveProductModelWithIndustry(@Param("modelId")Integer modelId, @Param("industryId")Integer industryId);

    /**
     * Deletes all model/industry associations for the given model id.
     *
     * @param modelId product model id
     */
    void deleteProductModelWithIndustryByModelId(Integer modelId);
}
|
pettaroni/nablaweb | nablapps/core/tests/test_primary_dir.py | <gh_stars>0
from django.test import TestCase
from django.http import HttpRequest
from nablapps.core.context_processors import get_primary_dir
class TestPrimaryDir(TestCase):
    """Class for testing the primary_dir context processor"""

    def _context_for(self, path):
        """Build a request for *path* and run it through the processor."""
        request = HttpRequest()
        request.path = path
        return get_primary_dir(request)

    def test_batman(self):
        """Test batman as primary dir."""
        context = self._context_for("/batman/cakes")
        self.assertEqual(context["primary_dir"], "batman")
        self.assertEqual(context["primary_dir_slashes"], "/batman/")

    def test_no_primary_dir(self):
        """The site root yields an empty primary directory."""
        context = self._context_for("/")
        self.assertEqual(context["primary_dir"], "")
        self.assertEqual(context["primary_dir_slashes"], "/")
|
addstone/unrealengine3 | Development/Src/Engine/Debugger/UnDebuggerCore.cpp | /*=============================================================================
UnDebuggerCore.cpp: Debugger Core Logic
Copyright 1997-2001 Epic Games, Inc. All Rights Reserved.
Revision history:
* Created by <NAME>, <NAME>
* Revised by <NAME>
=============================================================================*/
#include "..\Src\EnginePrivate.h"
#include "UnDebuggerCore.h"
#include "UnDebuggerInterface.h"
#include "UnDelphiInterface.h"
#include "OpCode.h"
/*-----------------------------------------------------------------------------
UDebuggerCore.
-----------------------------------------------------------------------------*/
static TCHAR GDebuggerIni[1024] = TEXT("");
/*
 * Constructs the debugger core.
 *
 * Resolves the UI interface DLL name from <game config dir>/Debugger.ini
 * (falling back to "DebuggerInterface.dll"), loads and initialises the
 * Delphi interface, installs the debugger log output device, creates the
 * call stack and breakpoint manager, enters the idle state and reads the
 * per-type recursion limits from the ini file.
 */
UDebuggerCore::UDebuggerCore()
: IsDebugging(0),
  IsClosing(0),
  CurrentState(NULL),
  PendingState(NULL),
  BreakpointManager(NULL),
  CallStack(NULL),
  Interface(NULL),
  AccessedNone(0),
  BreakOnNone(0),
  BreakASAP(0),
  ProcessDebugInfo(0),
  ArrayMemberIndex(INDEX_NONE)
{
	// Path of the debugger configuration: <game config dir>/Debugger.ini.
	appStrcpy( GDebuggerIni, *(appGameConfigDir() + TEXT("Debugger.ini")) );
	if ( !GConfig->GetString(TEXT("DEBUGGER.DEBUGGER"), TEXT("InterfaceFilename"), InterfaceDllName, GDebuggerIni) )
		InterfaceDllName = TEXT("DebuggerInterface.dll");
	// Ensure the configured name carries a .dll extension.
	if ( InterfaceDllName.Right(4) != TEXT(".dll") )
		InterfaceDllName += TEXT(".dll");
	Interface = new DelphiInterface(*InterfaceDllName);
	if ( !Interface->Initialize( this ))
		appErrorf( TEXT("Could not initialize the debugger interface!") );
	DebuggerLog = new FDebuggerLog();
	GLog->AddOutputDevice( DebuggerLog );
	CallStack = new FCallStack( this );
	ChangeState(new DSIdleState(this));
	BreakpointManager = new FBreakpointManager();
	debugf( NAME_Init, TEXT("UnrealScript Debugger Core Initialized.") );
	// Init recursion limits
	// (per-type Max*Recursion caps read from [DEBUGGER.RECURSION];
	// NOTE(review): INDEX_NONE as a default presumably means "unlimited" —
	// confirm against the consumers of these limits.)
	FString Value;
	if ( GConfig->GetString(TEXT("DEBUGGER.RECURSION"), TEXT("OBJECTMAX"), Value, GDebuggerIni) )
		MaxObjectRecursion = appAtoi(*Value);
	else MaxObjectRecursion = 1;
	if ( GConfig->GetString(TEXT("DEBUGGER.RECURSION"),TEXT("STRUCTMAX"),Value,GDebuggerIni) )
		MaxStructRecursion = appAtoi(*Value);
	else MaxStructRecursion = INDEX_NONE;
	if ( GConfig->GetString(TEXT("DEBUGGER.RECURSION"),TEXT("CLASSMAX"),Value,GDebuggerIni) )
		MaxClassRecursion = appAtoi(*Value);
	else MaxClassRecursion = 1;
	if ( GConfig->GetString(TEXT("DEBUGGER.RECURSION"),TEXT("STATICARRAYMAX"),Value,GDebuggerIni) )
		MaxStaticArrayRecursion = appAtoi(*Value);
	else MaxStaticArrayRecursion = 2;
	if ( GConfig->GetString(TEXT("DEBUGGER.RECURSION"),TEXT("DYNAMICARRAYMAX"),Value,GDebuggerIni) )
		MaxDynamicArrayRecursion = appAtoi(*Value);
	else MaxDynamicArrayRecursion = 1;
	// Reset the live recursion-depth counters.
	CurrentObjectRecursion = CurrentStructRecursion = CurrentClassRecursion = CurrentStaticArrayRecursion = CurrentDynamicArrayRecursion = 0;
}
// Tear down the debugger core: persist the recursion limits back to
// Debugger.ini, free all owned subsystems, close the interface DLL, and
// unhook the debugger log from GLog.
UDebuggerCore::~UDebuggerCore()
{
	debugf( NAME_Init, TEXT("UnrealScript Debugger Core Exit.") );
	// Save the (possibly user-adjusted) recursion limits for the next session.
	GConfig->SetString(TEXT("DEBUGGER.RECURSION"),TEXT("OBJECTMAX"),appItoa(MaxObjectRecursion),GDebuggerIni);
	GConfig->SetString(TEXT("DEBUGGER.RECURSION"),TEXT("STRUCTMAX"),appItoa(MaxStructRecursion),GDebuggerIni);
	GConfig->SetString(TEXT("DEBUGGER.RECURSION"),TEXT("CLASSMAX"),appItoa(MaxClassRecursion),GDebuggerIni);
	GConfig->SetString(TEXT("DEBUGGER.RECURSION"),TEXT("STATICARRAYMAX"),appItoa(MaxStaticArrayRecursion),GDebuggerIni);
	GConfig->SetString(TEXT("DEBUGGER.RECURSION"),TEXT("DYNAMICARRAYMAX"),appItoa(MaxDynamicArrayRecursion),GDebuggerIni);
	GConfig->Flush(0, GDebuggerIni);
	delete CurrentState;
	CurrentState = NULL;
	delete PendingState;
	PendingState = NULL;
	delete BreakpointManager;
	BreakpointManager = NULL;
	delete CallStack;
	CallStack = NULL;
	// Close the interface before deleting it so the external UI shuts down.
	if( Interface )
	{
		Interface->Close();
		delete Interface;
	}
	Interface = NULL;
	GLog->RemoveOutputDevice( DebuggerLog );
	delete DebuggerLog;
	DebuggerLog = NULL;
}
// Shut down an active debugging session (idempotent via IsClosing): empty the
// call stack, clear the current stack node, and fall back to the idle state.
void UDebuggerCore::Close()
{
	if ( IsClosing )
		return;
	IsClosing = 1;
	if ( CallStack )
		CallStack->Empty();
	// Passing NULL refreshes watches against no context.
	StackChanged(NULL);
	if ( CurrentState )
		CurrentState->SetCurrentNode(NULL);
	ChangeState(new DSIdleState(this));
}
// Called at the start of an engine tick; re-arms debug-info processing.
void UDebuggerCore::NotifyBeginTick()
{
	ProcessDebugInfo = 1;
}

// Forward a user action (step, continue, exit, ...) to the active debugger
// state, which decides how to react.
void UDebuggerCore::ProcessInput( enum EUserAction UserAction )
{
	CurrentState->HandleInput(UserAction);
}

// Return the stack node the debugger currently considers active: the state's
// node if it has one, otherwise the top of the call stack.
const FStackNode* UDebuggerCore::GetCurrentNode() const
{
	const FStackNode* Node = CurrentState ? CurrentState->GetCurrentNode() : NULL;
	if ( !Node )
		Node = CallStack->GetTopNode();
	return Node;
}
// Add a new user watch. If the debugger is currently broken into script,
// immediately evaluate the watch against the active stack node and rebuild
// the user-watch pane.
void UDebuggerCore::AddWatch(const TCHAR* watchName)
{
	FDebuggerWatch* NewWatch = new(Watches) FDebuggerWatch(ErrorDevice, watchName);
	FDebuggerState* State = GetCurrentState();
	if ( IsDebugging && State && NewWatch )
	{
		const FStackNode* Node = State->GetCurrentNode();
		if ( Node )
		{
			NewWatch->Refresh( Node->Object, Node->StackNode );
			// Lock the pane while rebuilding so the UI doesn't repaint mid-update.
			Interface->LockWatch(Interface->WATCH_WATCH);
			Interface->ClearAWatch(Interface->WATCH_WATCH);
			RefreshUserWatches();
			Interface->UnlockWatch(Interface->WATCH_WATCH);
		}
	}
}

// Remove the first user watch whose text matches watchName exactly.
void UDebuggerCore::RemoveWatch(const TCHAR* watchName)
{
	for ( INT i = 0; i < Watches.Num(); i++ )
	{
		if ( Watches(i).WatchText == watchName )
		{
			Watches.Remove(i);
			return;
		}
	}
}

// Remove all user watches.
void UDebuggerCore::ClearWatches()
{
	Watches.Empty();
}
// The parent-chain map stores (index + 1) so that a TMap::FindRef miss
// (which returns 0) maps back to INDEX_NONE through GETPARENT.
#define SETPARENT(m,c,i) m.Set(c,i + 1)
#define GETPARENT(m,c) m.FindRef(c) - 1

// Populate ParentChain with one watch-window row per superclass of BaseClass,
// recording each class's watch-item index so properties can later be filed
// under the class that declares them.
void UDebuggerCore::BuildParentChain( INT WatchType, TMap<UClass*,INT>& ParentChain, UClass* BaseClass, INT ParentIndex )
{
	if ( !BaseClass )
		return;
	ParentChain.Empty();
	SETPARENT(ParentChain,BaseClass,ParentIndex);
	for ( UClass* Parent = BaseClass->GetSuperClass(); Parent; Parent = Parent->GetSuperClass() )
	{
		// Guard against duplicates in the chain (FindRef miss == not seen yet).
		if ( ParentChain.Find(Parent) == NULL )
		{
			ParentIndex = Interface->AddAWatch( WatchType, ParentIndex, *FString::Printf(TEXT("[[ %s ]]"), Parent->GetName()), TEXT("[[ Base Class ]]") );
			SETPARENT(ParentChain,Parent,ParentIndex);
		}
	}
}
// Insert a given property into the watch window.
// Entry point for top-level properties: resets the static-array member index
// and, for the global watch, (re)builds the class inheritance chain so each
// property is filed under its declaring class.
void UDebuggerCore::PropertyToWatch( UProperty* Prop, BYTE* PropAddr, UBOOL bResetIndex, INT watch, const TCHAR* PropName )
{
	INT ParentIndex = INDEX_NONE;
	// NOTE(review): this map persists across calls (static); callers pass
	// bResetIndex on the first property of a batch to rebuild or clear it.
	static TMap<UClass*,INT> InheritanceChain;
	if ( bResetIndex )
	{
		if ( watch == Interface->GLOBAL_WATCH )
			// PropAddr points inside the object, so backing up by the property
			// offset recovers the owning UObject.
			BuildParentChain(watch, InheritanceChain, ((UObject*)(PropAddr - Prop->Offset))->GetClass());
		else
			InheritanceChain.Empty();
	}
	ArrayMemberIndex = INDEX_NONE;
	ParentIndex = GETPARENT(InheritanceChain,Prop->GetOwnerClass());
	PropertyToWatch(Prop, PropAddr, 0, watch, ParentIndex, PropName);
}
// Extract the value of a given property at a given address.
// Recursive worker: adds one watch row for the property's value, then expands
// nested structs, classes, objects, and static/dynamic arrays up to the
// per-category recursion limits configured in Debugger.ini (INDEX_NONE limit
// means unlimited). ArrayMemberIndex (a member, not a parameter) tracks the
// index of the element currently being expanded; INDEX_NONE means "not an
// array element".
void UDebuggerCore::PropertyToWatch(UProperty* Prop, BYTE* PropAddr, INT CurrentDepth, INT watch, INT watchParent, const TCHAR* PropName )
{
	// This SHOULD be sufficient.
	FString VarName, VarValue;
	if ( ArrayMemberIndex < INDEX_NONE )
		ArrayMemberIndex = INDEX_NONE;

	// Static (fixed-size) array, not yet expanded: add a container row and
	// recurse once per element with ArrayMemberIndex set.
	if ( Prop->ArrayDim > 1 && ArrayMemberIndex < 0 )
	{
		if ( CurrentStaticArrayRecursion < MaxStaticArrayRecursion || MaxStaticArrayRecursion == INDEX_NONE )
		{
			VarName = PropName ? PropName : FString::Printf( TEXT("%s ( Static %s Array )"), Prop->GetName(), GetShortName(Prop) );
			VarValue = FString::Printf(TEXT("%i Elements"), Prop->ArrayDim);
			INT WatchID = Interface->AddAWatch(watch, watchParent, *VarName, *VarValue);
			CurrentStaticArrayRecursion++;
			for ( INT i = 0; i < Prop->ArrayDim; i++ )
			{
				ArrayMemberIndex++;
				PropertyToWatch(Prop, PropAddr + Prop->ElementSize * i, CurrentDepth + 1, watch, WatchID);
			}
			CurrentStaticArrayRecursion--;
			ArrayMemberIndex = INDEX_NONE;
		}
		return;
	}

	// Build the display name: explicit override, "name[index]" for array
	// elements, or "name ( Type )". Delegates display their function's name.
	VarName = PropName
		? PropName : (ArrayMemberIndex >= 0
		? FString::Printf(TEXT("%s[%i]"), (Prop->IsA(UDelegateProperty::StaticClass()) ? Cast<UDelegateProperty>(Prop)->Function->GetName() : Prop->GetName()), ArrayMemberIndex)
		: (FString::Printf(TEXT("%s ( %s )"), (Prop->IsA(UDelegateProperty::StaticClass()) ? Cast<UDelegateProperty>(Prop)->Function->GetName() : Prop->GetName()), GetShortName(Prop))));

	// Build the display value for this row.
	if ( Prop->IsA(UStructProperty::StaticClass()) )
		VarValue = GetShortName(Prop);
	else if ( Prop->IsA(UArrayProperty::StaticClass()) )
		VarValue = FString::Printf(TEXT("%i %s %s"), ((FArray*)PropAddr)->Num(), GetShortName(Cast<UArrayProperty>(Prop)->Inner), ((FArray*)PropAddr)->Num() != 1 ? TEXT("Elements") : TEXT("Element"));
	else if ( Prop->IsA(UObjectProperty::StaticClass()) )
	{
		if ( *(UObject**)PropAddr )
			VarValue = (*(UObject**)PropAddr)->GetName();
		else VarValue = TEXT("None");
	}
	else
	{
		// All other property types export their own text representation.
		VarValue = TEXT("");
		Prop->ExportTextItem( VarValue, PropAddr, PropAddr, PPF_Delimited );
	}

	int ID = Interface->AddAWatch(watch, watchParent, *VarName, *VarValue);

	// Expand struct members under this row.
	if ( Prop->IsA(UStructProperty::StaticClass()) && (CurrentStructRecursion < MaxStructRecursion || MaxStructRecursion == INDEX_NONE) )
	{
		// Save/restore ArrayMemberIndex around the recursion since members are
		// not elements of the current array.
		INT CurrentIndex = ArrayMemberIndex;
		ArrayMemberIndex = INDEX_NONE;
		CurrentStructRecursion++;
		// Recurse every property in this struct, and copy it's value into Result;
		for( TFieldIterator<UProperty> It(Cast<UStructProperty>(Prop)->Struct); It; ++It )
		{
			if (Prop == *It) continue;
			// Special case for nested stuff, don't leave it up to VarName/VarValue since we need to recurse
			PropertyToWatch(*It, PropAddr + It->Offset, CurrentDepth + 1, watch, ID);
		}
		ArrayMemberIndex = CurrentIndex;
		CurrentStructRecursion--;
	}
	// Expand a class reference's default property values, grouped by declaring class.
	else if ( Prop->IsA(UClassProperty::StaticClass()) && (CurrentClassRecursion < MaxClassRecursion || MaxClassRecursion == INDEX_NONE) )
	{
		UClass* ClassResult = *(UClass**)PropAddr;
		if ( !ClassResult )
			return;
		INT CurrentIndex = ArrayMemberIndex, CurrentID;
		ArrayMemberIndex = INDEX_NONE;
		TMap<UClass*,INT> ParentChain;
		UClass* PropOwner = NULL;
		BuildParentChain(watch, ParentChain, ClassResult, ID);
		CurrentClassRecursion++;
		for ( TFieldIterator<UProperty> It(ClassResult); It; ++It )
		{
			if ( Prop == *It ) continue;
			PropOwner = It->GetOwnerClass();
			// Skip UObject's own properties - not interesting in a watch.
			if ( PropOwner == UObject::StaticClass() ) continue;
			CurrentID = GETPARENT(ParentChain,PropOwner);
			PropertyToWatch(*It, (BYTE*) &ClassResult->Defaults(It->Offset), CurrentDepth + 1, watch, CurrentID);
		}
		CurrentClassRecursion--;
		ArrayMemberIndex = CurrentIndex;
	}
	// Expand an object reference's current property values, grouped by declaring class.
	else if(Prop->IsA(UObjectProperty::StaticClass()) && (CurrentObjectRecursion < MaxObjectRecursion || MaxObjectRecursion == INDEX_NONE) )
	{
		UObject* ObjResult = *(UObject**)PropAddr;
		if ( !ObjResult )
			return;
		INT CurrentIndex = ArrayMemberIndex, CurrentID;
		ArrayMemberIndex = INDEX_NONE;
		TMap<UClass*,INT> ParentChain;
		BuildParentChain(watch, ParentChain, ObjResult->GetClass(), ID);
		CurrentObjectRecursion++;
		UClass* PropOwner = NULL;
		for( TFieldIterator<UProperty> It( ObjResult->GetClass() ); It; ++It )
		{
			if (Prop == *It) continue;
			PropOwner = It->GetOwnerClass();
			if ( PropOwner == UObject::StaticClass() ) continue;
			CurrentID = GETPARENT(ParentChain,PropOwner);
			PropertyToWatch( *It, (BYTE*)ObjResult + It->Offset, CurrentDepth + 1, watch, CurrentID );
		}
		ArrayMemberIndex = CurrentIndex;
		CurrentObjectRecursion--;
	}
	// Expand each element of a dynamic array.
	else if (Prop->IsA( UArrayProperty::StaticClass() ) && (CurrentDynamicArrayRecursion < MaxDynamicArrayRecursion || MaxDynamicArrayRecursion == INDEX_NONE) )
	{
		const INT Size = Cast<UArrayProperty>(Prop)->Inner->ElementSize;
		FArray* Array = ((FArray*)PropAddr);
		INT CurrentIndex = ArrayMemberIndex;
		ArrayMemberIndex = INDEX_NONE;
		CurrentDynamicArrayRecursion++;
		for ( INT i = 0; i < Array->Num(); i++ )
		{
			ArrayMemberIndex++;
			PropertyToWatch(Cast<UArrayProperty>(Prop)->Inner, (BYTE*)Array->GetData() + i * Size, CurrentDepth + 1, watch, ID );
		}
		ArrayMemberIndex = CurrentIndex;
		CurrentDynamicArrayRecursion--;
	}
}
// Record that script just accessed a None reference; checked elsewhere
// together with BreakOnNone.
void UDebuggerCore::NotifyAccessedNone()
{
	AccessedNone=1;
}

// Enable/disable breaking on accessed-none, clearing any pending flag.
void UDebuggerCore::SetBreakOnNone(UBOOL inBreakOnNone)
{
	BreakOnNone = inBreakOnNone;
	AccessedNone = 0;
}

// Set a conditional break. NOTE(review): the implementation is commented out,
// so this is currently a no-op aside from the shutdown guard.
void UDebuggerCore::SetCondition( const TCHAR* ConditionName, const TCHAR* ConditionValue )
{
	if ( GIsRequestingExit || IsClosing )
		return;

//	ChangeState( new DSCondition(this,ConditionName,ConditionValue,CurrentState) );
}

// Install a data breakpoint that fires when the named watch's value changes.
void UDebuggerCore::SetDataBreakpoint( const TCHAR* BreakpointName )
{
	if ( GIsRequestingExit || IsClosing )
		return;

	ChangeState( new DSBreakOnChange(this,BreakpointName,CurrentState) );
}

// Garbage-collection hook; intentionally empty.
void UDebuggerCore::NotifyGC()
{
}

// Break into the debugger when a script assertion fails.
// Returns 1 if the session is still alive after the user resumes.
UBOOL UDebuggerCore::NotifyAssertionFailed( const INT LineNumber )
{
	if ( GIsRequestingExit || IsClosing )
		return 0;

	debugf(TEXT("Assertion failed, line %i"), LineNumber);

	ChangeState( new DSWaitForInput(this), 1 );
	return !(GIsRequestingExit || IsClosing);
}

// Break into the debugger when the script recursion limit is hit.
// Returns 1 if the session is still alive after the user resumes.
UBOOL UDebuggerCore::NotifyInfiniteLoop()
{
	if ( GIsRequestingExit || IsClosing )
		return 0;

	debugf(TEXT("Recursion limit reached...breaking UDebugger"));

	ChangeState( new DSWaitForInput(this), 1 );
	return !(GIsRequestingExit || IsClosing);
}
// Re-evaluate all user watches against a new current stack node (NULL clears
// their context).
void UDebuggerCore::StackChanged( const FStackNode* CurrentNode )
{
	// For now, simply refresh user watches
	// later, we can modify this to work for all watches, allowing the ability to view values from anywhere on the callstack
	const UObject* Obj = CurrentNode ? CurrentNode->Object : NULL;
	const FFrame* Node = CurrentNode ? CurrentNode->StackNode : NULL;
	for ( INT i = 0; i < Watches.Num(); i++ )
		Watches(i).Refresh(Obj, Node);
}
// Update the interface: push the current source location, watch values, and
// the full call stack to the external debugger UI. No-op unless a session is
// active and the call stack has a top node.
void UDebuggerCore::UpdateInterface()
{
	if ( IsDebugging && CallStack)
	{
		const FStackNode* TopNode = CallStack->GetTopNode();
		if ( !TopNode )
			return;

		// Get package name
		const TCHAR* cName = TopNode->GetClass()->GetName(),
			*pName = TopNode->GetClass()->GetOuter()->GetName();

		Interface->Update(	cName,
							pName,
							TopNode->GetLine(),
							TopNode->GetInfo(),
							TopNode->Object->GetName());

		RefreshWatch( TopNode );

		// Rebuild the call-stack display from the bottom of the stack up.
		TArray<FString> StackNames;
		for(int i=0;i < CallStack->StackDepth;i++)
		{
			const FStackNode* TestNode = CallStack->GetNode(i);
			if (TestNode && TestNode->StackNode && TestNode->StackNode->Node)
				new(StackNames) FString( TestNode->StackNode->Node->GetFullName() );
		}
		Interface->UpdateCallStack( StackNames );
	}
}
// Update the Watch ListView with all the current variables the Stack/Object contain.
// Rebuilds the local, global, and user watch panes for the given stack node,
// locking each pane during the rebuild.
void UDebuggerCore::RefreshWatch( const FStackNode* CNode )
{
	TArray<INT> foundWatchNamesIndicies;

	if ( CNode == NULL )
		return;

	Interface->LockWatch(Interface->GLOBAL_WATCH);
	Interface->LockWatch(Interface->LOCAL_WATCH);
	Interface->LockWatch(Interface->WATCH_WATCH);
	Interface->ClearAWatch(Interface->GLOBAL_WATCH);
	Interface->ClearAWatch(Interface->LOCAL_WATCH);
	Interface->ClearAWatch(Interface->WATCH_WATCH);

	UFunction* Function = Cast<UFunction>(CNode->GetFrame()->Node);
	const UObject* ContextObject = CNode->GetObject();
	UProperty* Parm;

	// Setup the local variable watch: one row per function parameter/local.
	// (Parm == PropertyLink is true only for the first property, which resets
	// PropertyToWatch's internal state.)
	if ( Function )
	{
		for ( Parm = Function->PropertyLink; Parm; Parm = Parm->PropertyLinkNext )
			PropertyToWatch( Parm, CNode->GetFrame()->Locals + Parm->Offset, Parm == Function->PropertyLink, Interface->LOCAL_WATCH );
	}

	// Setup the global vars watch. Parm holds the first property from the
	// iterator so the first call passes bResetIndex == true.
	TFieldIterator<UProperty,CLASS_IsAUProperty> PropertyIt(ContextObject->GetClass());
	for( Parm = *PropertyIt; PropertyIt; ++PropertyIt )
		PropertyToWatch( *PropertyIt, (BYTE*)ContextObject + PropertyIt->Offset, *PropertyIt == Parm, Interface->GLOBAL_WATCH );

	RefreshUserWatches();

	Interface->UnlockWatch(Interface->GLOBAL_WATCH);
	Interface->UnlockWatch(Interface->LOCAL_WATCH);
	Interface->UnlockWatch(Interface->WATCH_WATCH);
}
// Re-evaluate each user watch and add a row for it: the resolved value on
// success, or the accumulated error text on failure.
void UDebuggerCore::RefreshUserWatches()
{
	// Fill the custom watch values from the context of the current node
	for ( INT i = 0; i < Watches.Num(); i++ )
	{
		UProperty* Prop = NULL;
		BYTE* PropAddr = NULL;

		// ErrorDevice collects any evaluation error messages for this watch.
		ErrorDevice.Empty();

		FDebuggerWatch& Watch = Watches(i);
		if ( Watch.GetWatchValue((const UProperty *&) Prop, (const BYTE *&) PropAddr, ArrayMemberIndex) )
			PropertyToWatch(Prop, PropAddr, 0, Interface->WATCH_WATCH, INDEX_NONE, *Watch.WatchText);
		else Interface->AddAWatch( Interface->WATCH_WATCH, -1, *Watch.WatchText, *ErrorDevice );
	}
}
// Load every package listed under [Editor.EditorEngine] EditPackages (except
// UnrealEd itself) so their classes are available, then refresh the
// interface's class tree. Raises a fatal error if a listed package is missing.
void UDebuggerCore::LoadEditPackages()
{
	TArray<FString> EditPackages;
	TMultiMap<FString,FString>* Sec = GConfig->GetSectionPrivate( TEXT("Editor.EditorEngine"), 0, 1, GEngineIni );
	// The section may be absent from the ini; the original code dereferenced
	// Sec unconditionally, which would crash in that case.
	if ( Sec )
		Sec->MultiFind( FString(TEXT("EditPackages")), EditPackages );

	TObjectIterator<UEngine> EngineIt;
	if ( EngineIt )
		for( INT i=0; i<EditPackages.Num(); i++ )
		{
			if(appStrcmp(*EditPackages(i), TEXT("UnrealEd"))) // don't load the UnrealEd package
			{
				if( !EngineIt->LoadPackage( NULL, *EditPackages(i), LOAD_NoWarn ) )
					appErrorf( TEXT("Can't find edit package '%s'"), *EditPackages(i) );
			}
		}

	Interface->UpdateClassTree();
}
// Resolve the UClass that owns a stack frame's node: a function's outer is
// its class, a state's outer's outer is its class, and an expanded class
// frame is the class itself. Asserts if no class can be resolved.
UClass* UDebuggerCore::GetStackOwnerClass( const FFrame* Stack ) const
{
	UClass* RClass;

	// Function?
	RClass = Cast<UClass>( Stack->Node->GetOuter() );

	// Nope, a state, we need to go one level higher to get the class
	if ( RClass == NULL )
		RClass = Cast<UClass>( Stack->Node->GetOuter()->GetOuter() );

	if ( RClass == NULL )
		RClass = Cast<UClass>( Stack->Node );

	// Make sure it's a real class
	check(RClass!=NULL);

	return RClass;
}
/**
* Routes message to the debugger if present.
*
* @param Msg Message to route
* @param Event Event type of message
*/
void FDebuggerLog::Serialize( const TCHAR* Msg, EName Event )
{
	// Window-title updates are not log content - skip them.
	if ( Event != NAME_Title )
	{
		UDebuggerCore* Debugger = (UDebuggerCore*)GDebugger;
		// Only forward once the external interface DLL is loaded.
		if ( Debugger && Debugger->Interface && Debugger->Interface->IsLoaded() )
		{
			Debugger->Interface->AddToLog( *FString::Printf(TEXT("%s: %s"), FName::SafeString(Event), Msg) );
		}
	}
}
/*-----------------------------------------------------------------------------
FCallStack.
-----------------------------------------------------------------------------*/
// Call stack owned by the debugger core; tracks script stack nodes and any
// queued step commands.
FCallStack::FCallStack( UDebuggerCore* InParent )
: Parent(InParent), StackDepth(0)
{
}

FCallStack::~FCallStack()
{
	Empty();
	Parent = NULL;
}

// Discard all stack nodes and queued commands and reset the depth counter.
void FCallStack::Empty()
{
	QueuedCommands.Empty();
	Stack.Empty();
	StackDepth = 0;
}
/*-----------------------------------------------------------------------------
FStackNode Implementation
-----------------------------------------------------------------------------*/
// One entry in the debugger's script call stack. The parallel Lines /
// Positions / Depths / OpCodes arrays record each visit to this frame; the
// most recent entry (Last()) is the frame's current location.
FStackNode::FStackNode( const UObject* Debugee, const FFrame* Stack, UClass* InClass, INT CurrentDepth, INT InLineNumber, INT InPos, BYTE InCode )
: Object(Debugee), StackNode(Stack), Class(InClass)
{
	Lines.AddItem(InLineNumber);
	Positions.AddItem(InPos);
	Depths.AddItem(CurrentDepth);
	OpCodes.AddItem(InCode);
}

// Human-readable name of the most recent opcode executed in this frame.
const TCHAR* FStackNode::GetInfo() const
{
	return GetOpCodeName(OpCodes.Last());
}

// Debug dump of this node to the log.
void FStackNode::Show() const
{
	debugf(TEXT("Object:%s  Class:%s  Line:%i  Code:%s"),
		Object ? Object->GetName() : TEXT("NULL"),
		Class ? Class->GetName() : TEXT("NULL"),
		GetLine(), GetInfo());
}
/*-----------------------------------------------------------------------------
FDebuggerWatch
-----------------------------------------------------------------------------*/
// Split a watch expression at the next top-level '.' member separator.
// Mutates the buffer in place: terminates the current name and returns a
// pointer to the remainder (or NULL if there is no further component).
// Dots inside [...] array indices or 'quoted' literal object names are
// ignored; a '(' rewrites o to point past it (cast-style syntax) and ')' is
// stripped.
static TCHAR* ParseNextName( TCHAR*& o )
{
	INT count(0);
	bool literal=false;	// literal object name

	TCHAR* c = o;
	while ( c && *c )
	{
		// Track [] nesting depth so dots inside an index are skipped.
		if ( *c == '[' )
			count++;
		else if ( *c == ']')
			count--;
		else if ( count == 0 )
		{
			if ( *c == '\'' )
				literal = !literal;
			else if ( !literal )
			{
				if ( *c == '(' )
				{
					// Terminate at '(' and advance o past it.
					o = c;
					*o++ = 0;
				}
				else if ( *c == ')' )
					*c = 0;
				else if ( *c == '.' )
				{
					// Top-level separator: split here.
					*c++ = 0;
					return c;
				}
			}
		}
		c++;
	}

	return NULL;
}
// A user watch: stores the raw watch text and owns the head of the parsed
// node chain that evaluates it.
FDebuggerWatch::FDebuggerWatch(FStringOutputDevice& ErrorHandler, const TCHAR* WatchString )
: WatchText(WatchString)
{
	WatchNode = new FDebuggerWatchNode(ErrorHandler, WatchString);
}

// Rebind the watch to a new evaluation context (current object + stack frame).
void FDebuggerWatch::Refresh( const UObject* CurrentObject, const FFrame* CurrentFrame )
{
	if ( !CurrentObject || !CurrentFrame )
		return;

	Object = CurrentObject;
	Class = CurrentObject->GetClass();
	Function = Cast<UFunction>(CurrentFrame->Node);

	if ( WatchNode )
		WatchNode->ResetBase(Class, Object, Function, (BYTE*)Object, CurrentFrame->Locals);
}

// Evaluate the watch; on success fills in the resolved property, its address,
// and the static-array index override. Returns 0 on any evaluation failure.
UBOOL FDebuggerWatch::GetWatchValue( const UProperty*& OutProp, const BYTE*& OutPropAddr, INT& ArrayIndexOverride )
{
	if ( WatchNode && WatchNode->Refresh(Class, Object, (BYTE*)Object ) )
		return WatchNode->GetWatchValue(OutProp, OutPropAddr, ArrayIndexOverride);

	return 0;
}

FDebuggerWatch::~FDebuggerWatch()
{
	if ( WatchNode )
		delete WatchNode;
	WatchNode = NULL;
}
/*-----------------------------------------------------------------------------
FDebuggerDataWatch
-----------------------------------------------------------------------------*/
// Data watch used by DSBreakOnChange to detect value modification.
// NOTE(review): Refresh and GetWatchValue are stubbed out - data breakpoints
// appear unfinished in this revision.
FDebuggerDataWatch::FDebuggerDataWatch( FStringOutputDevice& ErrorHandler, const TCHAR* WatchString )
: FDebuggerWatch(ErrorHandler, WatchString)
{ }

void FDebuggerDataWatch::Refresh( const UObject* CurrentObject, const FFrame* CurrentFrame )
{
	// reset the current value of the watch
}

UBOOL FDebuggerDataWatch::GetWatchValue( const UProperty*& OutProp, const BYTE*& OutPropAddr, INT& ArrayIndexOverride )
{
	return 0;
}
// Return whether the watched value has changed since it was captured.
UBOOL FDebuggerDataWatch::Modified() const
{
	check(Property);

	// TODO for arrays that have been reduced in size, this will crash
	// Identical() is TRUE when the value is unchanged, so the watch has been
	// modified exactly when the comparison fails. (The original returned the
	// un-negated result, reporting "modified" for unchanged values.)
	return !Property->Identical( OriginalValue, DataAddress );
}
/*-----------------------------------------------------------------------------
FDebuggerWatchNode
-----------------------------------------------------------------------------*/
// One component of a parsed watch expression ("a.b[c].d" becomes a linked
// list of nodes). The constructor splits off this node's name, chains the
// remainder into NextNode, and parses any [index] into an ArrayNode.
FDebuggerWatchNode::FDebuggerWatchNode( FStringOutputDevice& ErrorHandler, const TCHAR* NodeText )
: NextNode(NULL), ArrayNode(NULL), PropAddr(NULL), Property(NULL), GlobalData(NULL), Base(NULL), LocalData(NULL),
  Function(NULL), TopObject(NULL), ContextObject(NULL), TopClass(NULL), ContextClass(NULL), Error(ErrorHandler)
{
	// ParseNextName mutates the string, so work on a private copy.
	TCHAR* Buffer = new TCHAR [ appStrlen(NodeText) + 1 ];
	appStrncpy(Buffer, NodeText, appStrlen(NodeText) + 1);

	TCHAR* NodeName = Buffer;
	TCHAR* Next = ParseNextName(NodeName);
	if ( Next )
		NextNode = new FDebuggerWatchNode(Error, Next);

	PropertyName = NodeName;
	FString ArrayDelim;
	if ( GetArrayDelimiter(PropertyName, ArrayDelim) )
		AddArrayNode(*ArrayDelim);

	delete[] Buffer;
}

FDebuggerWatchNode::~FDebuggerWatchNode()
{
	if ( NextNode )
		delete NextNode;
	if ( ArrayNode )
		delete ArrayNode;
	NextNode = NULL;
	ArrayNode = NULL;
}
// If Test contains an array index ("name[expr]"), strip it from Test and
// return the index expression in Result. Returns the length of Result, i.e.
// nonzero when an index was found.
UBOOL FDebuggerWatchNode::GetArrayDelimiter( FString& Test, FString& Result ) const
{
	Result = TEXT("");
	INT pos = Test.InStr(TEXT("["));
	if ( pos != INDEX_NONE )
	{
		Result = Test.Mid(pos+1);
		Test = Test.Left(pos);
		pos = Result.InStr(TEXT("]"),1);
		if ( pos != INDEX_NONE )
			Result = Result.Left(pos);
	}

	return Result.Len();
}

// Attach a child node that evaluates this node's array index expression.
void FDebuggerWatchNode::AddArrayNode( const TCHAR* ArrayText )
{
	if ( !ArrayText || !(*ArrayText) )
		return;

	ArrayNode = new FDebuggerArrayNode(Error, ArrayText);
}
// Propagate the top-level evaluation context (class, object, function, global
// and local data) down the node chain and into any array-index node.
void FDebuggerWatchNode::ResetBase( const UClass* CurrentClass, const UObject* CurrentObject, const UFunction* CurrentFunction, const BYTE* CurrentBase, const BYTE* CurrentLocals )
{
	TopClass = CurrentClass;
	TopObject = CurrentObject;
	Function = CurrentFunction;
	GlobalData = CurrentBase;
	LocalData = CurrentLocals;

	if ( NextNode )
		NextNode->ResetBase(CurrentClass, CurrentObject, CurrentFunction, CurrentBase, CurrentLocals);

	if ( ArrayNode )
		ArrayNode->ResetBase(CurrentClass, CurrentObject, CurrentFunction, CurrentBase, CurrentLocals);
}
// Resolve this node's property within the given context (class/object/data
// base). Local function variables are only searched when evaluating at the
// top-level frame (Data == GlobalData). Returns 0 with an error logged when
// the name cannot be resolved or the array index is invalid.
UBOOL FDebuggerWatchNode::Refresh( const UStruct* RelativeClass, const UObject* RelativeObject, const BYTE* Data )
{
	ContextObject = RelativeObject;
	ContextClass = RelativeClass;
	check(ContextClass);

	if ( !Data )
	{
		// Hard failure: break into a native debugger if one is attached.
		// NOTE(review): Function may be NULL here (it is only a UFunction cast
		// of the frame node) - the appErrorf would then dereference NULL.
		if ( appIsDebuggerPresent() )
			appDebugBreak();
		else
			appErrorf(NAME_FriendlyError, TEXT("Corrupted data found in user watch %s (class:%s function:%s)"), *PropertyName, ContextClass->GetName(), Function->GetName());
		return 0;
	}

	Property = NULL;
	PropAddr = NULL;
	Base = Data;

	if ( Data == GlobalData )
	{
		// Current context is the current function - allow searching the local parameters for properties
		Property = FindField<UProperty>( const_cast<UFunction*>(Function), *PropertyName);
		if ( Property )
			PropAddr = LocalData + Property->Offset;
	}

	if ( !Property )
	{
		// Fall back to the context class's (instance) properties.
		Property = FindField<UProperty>( const_cast<UStruct*>(ContextClass), *PropertyName);
		if ( Property )
			PropAddr = Base + Property->Offset;
	}

/*	if ( !Property )
	{
		UObject* Obj = FindObject<UObject>(ANY_PACKAGE,*PropertyName);
		if ( Obj )
		{
			ContextObject = Obj;
			ContextClass = Obj->GetClass();
		}
	}
*/
	ArrayIndex = GetArrayIndex();

	if ( !Property )
	{
		Error.Logf(TEXT("Member '%s' couldn't be found in local or global scope '%s'"), *PropertyName, ContextClass->GetName());
		return 0;
	}

	// ArrayIndex < INDEX_NONE signals an index expression that failed to
	// evaluate (see FDebuggerArrayNode::GetArrayIndex).
	if ( ArrayIndex < INDEX_NONE )
		return 0;

	return 1;
}
// Evaluate this node's [index] expression, or INDEX_NONE when there is none.
INT FDebuggerWatchNode::GetArrayIndex() const
{
	if ( ArrayNode )
		return ArrayNode->GetArrayIndex();

	return INDEX_NONE;
}
// ArrayIndexOverride is to prevent PropertyToWatch from incorrectly interpreting individual elements of static arrays as the entire array
// Resolve this node's value. For terminal nodes, fills OutProp/OutPropAddr/
// ArrayIndexOverride; for non-terminal nodes (NextNode != NULL), re-binds the
// child chain to the dereferenced object/class/struct and recurses. Returns 0
// with an error logged on any failure (unresolved name, None dereference,
// index out of bounds).
UBOOL FDebuggerWatchNode::GetWatchValue( const UProperty*& OutProp, const BYTE*& OutPropAddr, INT& ArrayIndexOverride )
{
	if ( Property == NULL )
	{
//		if ( PropAddr == NULL )
//		{
			Error.Logf(TEXT("Member '%s' couldn't be found in local or global scope '%s'"), *PropertyName, ContextClass->GetName());
			return 0;
//		}
	}
	else if ( PropAddr == NULL )
	{
		Error.Logf(TEXT("Member '%s' couldn't be found in local or global scope '%s'"), *PropertyName, ContextClass->GetName());
		return 0;
	}

	// Classify the property; the checks are ordered so only one of these is
	// non-NULL (class before object, since UClassProperty is-a UObjectProperty).
	const UStructProperty* StructProperty = NULL;
	const UArrayProperty* ArrayProperty = NULL;
	const UObjectProperty* ObjProperty = NULL;
	const UClassProperty* ClassProperty = ConstCast<UClassProperty>(Property);
	if ( !ClassProperty )
	{
		ObjProperty = ConstCast<UObjectProperty>(Property);
		if ( !ObjProperty )
		{
			ArrayProperty = ConstCast<UArrayProperty>(Property);
			if ( !ArrayProperty )
				StructProperty = ConstCast<UStructProperty>(Property);
		}
	}

	if ( ObjProperty )
	{
		// Max(ArrayIndex,0) treats "no index" (INDEX_NONE) as element 0.
		const BYTE* Data = PropAddr + Max(ArrayIndex,0) * Property->ElementSize;
		const UObject* Obj = *(UObject**)Data;
		if ( NextNode )
		{
			if ( !Obj )
			{
				Error.Logf(TEXT("Expression could not be evaluated: Value of '%s' is None"), *PropertyName);
				return 0;
			}

			if ( !NextNode->Refresh( Obj ? Obj->GetClass() : ObjProperty->PropertyClass, Obj, (BYTE*)Obj ) )
				return 0;

			return NextNode->GetWatchValue( OutProp, OutPropAddr, ArrayIndexOverride );
		}

		OutProp = Property;
		OutPropAddr = Data;
		ArrayIndexOverride = ArrayIndex;
		return 1;
	}

	else if ( ClassProperty )
	{
		const BYTE* Data = PropAddr + Max(ArrayIndex,0) * Property->ElementSize;
		UClass* Cls = *(UClass**)Data;
		if ( NextNode )
		{
			if ( !Cls )
			{
				Error.Logf(TEXT("Expression couldn't be evaluated: Value of '%s' is None"), *PropertyName);
				return 0;
			}

			// Child nodes evaluate against the class's default property data.
			if ( !NextNode->Refresh( Cls ? Cls : ClassProperty->MetaClass, Cls ? Cls->GetDefaultObject() : NULL, (BYTE*)&Cls->Defaults(0)) )
				return 0;

			return NextNode->GetWatchValue( OutProp, OutPropAddr, ArrayIndexOverride );
		}

		OutProp = Property;
		OutPropAddr = Data;
		ArrayIndexOverride = ArrayIndex;
		return 1;
	}

	else if ( StructProperty )
	{
		const BYTE* Data = PropAddr + Max(ArrayIndex,0) * Property->ElementSize;
		UStruct* Struct = StructProperty->Struct;
		if ( Struct )
		{
			if ( NextNode )
			{
				// Struct members are addressed relative to the struct itself.
				if ( !NextNode->Refresh( Struct, ContextObject, Data ) )
					return 0;

				return NextNode->GetWatchValue(OutProp, OutPropAddr, ArrayIndexOverride);
			}

			OutProp = StructProperty;
			OutPropAddr = Data;
			ArrayIndexOverride = ArrayIndex;
			return 1;
		}

		Error.Logf(TEXT("No data could be found for struct '%s'"), Property->GetName());
		return 0;
	}

	else if ( ArrayProperty )
	{
		const FArray* Array = (FArray*)PropAddr;
		if ( Array )
		{
			// If the array index is -1, then we want the entire array, not just a single element
			if ( ArrayIndex != INDEX_NONE )
			{
				if ( ArrayIndex < 0 || ArrayIndex >= Array->Num() )
				{
					Error.Logf(TEXT("Index (%i) out of bounds: %s array only has %i element%s"), ArrayIndex, Property->GetName(), Array->Num(), Array->Num() == 1 ? TEXT("") : TEXT("s"));
					return 0;
				}

				// Re-classify the array's inner property the same way as above.
				ObjProperty = NULL;
				StructProperty = NULL;
				ClassProperty = ConstCast<UClassProperty>(ArrayProperty->Inner);
				if ( !ClassProperty )
				{
					ObjProperty = Cast<UObjectProperty>(ArrayProperty->Inner);
					if ( !ObjProperty )
						StructProperty = ConstCast<UStructProperty>(ArrayProperty->Inner);
				}

				if ( ObjProperty )
				{
					const BYTE* Data = ((BYTE*)Array->GetData() + ArrayIndex * ObjProperty->ElementSize);
					const UObject* Obj = *(UObject**) Data;

					// object is none
					if ( NextNode )
					{
						if ( !NextNode->Refresh( Obj ? Obj->GetClass() : ObjProperty->PropertyClass, Obj, (BYTE*)Obj ) )
							return 0;

						return NextNode->GetWatchValue( OutProp, OutPropAddr, ArrayIndexOverride );
					}

					OutProp = ObjProperty;
					OutPropAddr = Data;
					ArrayIndexOverride = ArrayIndex;
					return 1;
				}

				else if ( ClassProperty )
				{
					const BYTE* Data = ((BYTE*)Array->GetData() + ClassProperty->ElementSize * ArrayIndex);
					UClass* Cls = *(UClass**) Data;
					if ( NextNode )
					{
						if ( !NextNode->Refresh( Cls ? Cls : ClassProperty->MetaClass, Cls ? Cls->GetDefaultObject() : NULL, Cls ? (BYTE*)&Cls->Defaults(0) : NULL ) )
							return 0;

						return NextNode->GetWatchValue(OutProp, OutPropAddr, ArrayIndexOverride);
					}

					OutProp = ClassProperty;
					OutPropAddr = Data;
					ArrayIndexOverride = ArrayIndex;
					return 1;
				}

				else if ( StructProperty )
				{
					const BYTE* Data = (BYTE*)Array->GetData() + StructProperty->ElementSize * ArrayIndex;
					UStruct* Struct = StructProperty->Struct;
					if ( Struct )
					{
						if ( NextNode )
						{
							if ( !NextNode->Refresh( Struct, NULL, Data ) )
								return 0;

							return NextNode->GetWatchValue(OutProp, OutPropAddr, ArrayIndexOverride);
						}

						OutProp = StructProperty;
						OutPropAddr = Data;
						ArrayIndexOverride = ArrayIndex;
						return 1;
					}

					Error.Logf(TEXT("No data could be found for struct '%s'"), StructProperty->GetName());
					return 0;
				}

				else
				{
					// Simple inner type: return the element directly.
					OutProp = ArrayProperty->Inner;
					OutPropAddr = (BYTE*)Array->GetData() + OutProp->ElementSize * ArrayIndex;
					ArrayIndexOverride = ArrayIndex;
					return 1;
				}
			}
		}
		else
		{
			Error.Logf(TEXT("No data could be found for array '%s'"), Property->GetName());
			return 0;
		}
	}

	// Simple property (or whole dynamic array): terminal result.
	OutProp = Property;
	OutPropAddr = PropAddr + Max(ArrayIndex,0) * Property->ElementSize;
	ArrayIndexOverride = ArrayIndex;
	return 1;
}
/*-----------------------------------------------------------------------------
FDebuggerArrayNode
-----------------------------------------------------------------------------*/
// Watch node for an array index expression: either a numeric literal or a
// nested watch expression that must evaluate to an integer.
FDebuggerArrayNode::FDebuggerArrayNode(FStringOutputDevice& ErrorHandler, const TCHAR* ArrayText )
: FDebuggerWatchNode(ErrorHandler, ArrayText)
{
}

FDebuggerArrayNode::~FDebuggerArrayNode()
{
}

// Bind the index expression to the current context. Numeric literals are
// evaluated immediately and skip the watch machinery entirely.
void FDebuggerArrayNode::ResetBase( const UClass* CurrentClass, const UObject* CurrentObject, const UFunction* CurrentFunction, const BYTE* CurrentBase, const BYTE* CurrentLocals )
{
	Value = INDEX_NONE;

	if ( PropertyName.IsNumeric() )
	{
		Value = appAtoi(*PropertyName);
		return;
	}

	FDebuggerWatchNode::ResetBase(CurrentClass, CurrentObject, CurrentFunction, CurrentBase, CurrentLocals);
	Refresh(CurrentClass, CurrentObject, CurrentBase);
}
// Evaluate the index expression to an integer. Returns the cached literal
// value, or exports the watched property's text and converts it. Returns
// INDEX_NONE - 1 (i.e. a value below INDEX_NONE) on failure, which callers
// detect as "index expression failed".
INT FDebuggerArrayNode::GetArrayIndex()
{
	// if the property is simply a number, just return that
	if ( Value != INDEX_NONE && PropertyName.IsNumeric() )
		return Value;

	const UProperty* Prop = NULL;
	const BYTE* Data = NULL;

	INT dummy(0);
	if ( GetWatchValue(Prop, Data, dummy) )
	{
		FString Buffer = TEXT("");
		// Must const_cast here because ExportTextItem isn't made const even though it doesn't modify the property value.
		Prop->ExportTextItem(Buffer, const_cast<BYTE*>(Data), NULL, NULL);
		Value = appAtoi(*Buffer);
	}
	else return INDEX_NONE - 1;

	return Value;
}
/*-----------------------------------------------------------------------------
Breakpoints
-----------------------------------------------------------------------------*/
// A source breakpoint keyed on (class name, line); created enabled.
FBreakpoint::FBreakpoint( const TCHAR* InClassName, INT InLine )
{
	ClassName = InClassName;
	Line = InLine;
	IsEnabled = true;
}

// Return 1 if an enabled breakpoint exists at (sClassName, sLine).
UBOOL FBreakpointManager::QueryBreakpoint( const TCHAR* sClassName, INT sLine )
{
	for(int i=0;i<Breakpoints.Num();i++)
	{
		FBreakpoint& Breakpoint = Breakpoints(i);
		if ( Breakpoint.IsEnabled && Breakpoint.ClassName == sClassName && Breakpoint.Line == sLine )
		{
			return 1;
		}
	}

	return 0;
}

// Add a breakpoint at (sClassName, sLine); no-op if one already exists there.
void FBreakpointManager::SetBreakpoint( const TCHAR* sClassName, INT sLine )
{
	for(int i=0;i<Breakpoints.Num();i++)
	{
		if ( Breakpoints(i).ClassName == sClassName && Breakpoints(i).Line == sLine )
			return;
	}

	new(Breakpoints) FBreakpoint( sClassName, sLine );
}

// Remove every breakpoint at (sClassName, sLine). The i-- compensates for the
// element shift caused by Remove.
void FBreakpointManager::RemoveBreakpoint( const TCHAR* sClassName, INT sLine )
{
	for( INT i=0; i<Breakpoints.Num(); i++ )
	{
		if ( Breakpoints(i).ClassName == sClassName && Breakpoints(i).Line == sLine )
		{
			Breakpoints.Remove(i--);
		}
	}
}
/*-----------------------------------------------------------------------------
Debugger states
-----------------------------------------------------------------------------*/
// Base debugger state: captures the stack depth at creation (used by the
// stepping states) and the current line number if a node is active.
FDebuggerState::FDebuggerState(UDebuggerCore* const inDebugger)
: CurrentNode(NULL), Debugger(inDebugger), LineNumber(INDEX_NONE), EvalDepth(Debugger->CallStack->StackDepth)
{
	const FStackNode* Node = Debugger->GetCurrentNode();
	if ( Node )
		LineNumber = Node->GetLine();
}

FDebuggerState::~FDebuggerState()
{
}

// Set the state's current node, notifying the debugger when the node actually
// changes while a session is active; a NULL node ends the debugging session.
void FDebuggerState::UpdateStackInfo( const FStackNode* CNode )
{
	if ( Debugger != NULL && ((Debugger->IsDebugging && CNode != GetCurrentNode()) || (CNode == NULL)) )
	{
		Debugger->StackChanged(CNode);
		if ( CNode == NULL )
			Debugger->IsDebugging = 0;
	}

	SetCurrentNode(CNode);
}
/*-----------------------------------------------------------------------------
Constructors.
-----------------------------------------------------------------------------*/
// Idle: no session active; clears the debugging flag.
DSIdleState::DSIdleState(UDebuggerCore* const inDebugger)
: FDebuggerState(inDebugger)
{
	Debugger->IsDebugging = 0;
}

// Broken into script, waiting for a user action; sets the debugging flag.
DSWaitForInput::DSWaitForInput(UDebuggerCore* const inDebugger)
: FDebuggerState(inDebugger)
{
	Debugger->IsDebugging = 1;
}

// Base for states that run freely until some condition breaks execution.
DSWaitForCondition::DSWaitForCondition(UDebuggerCore* const inDebugger)
: FDebuggerState(inDebugger)
{
	Debugger->IsDebugging = 0;
}

// Data breakpoint: wraps another state (SubState) and additionally watches
// WatchText for modification via an FDebuggerDataWatch.
DSBreakOnChange::DSBreakOnChange( UDebuggerCore* const inDebugger, const TCHAR* WatchText, FDebuggerState* NewState )
: DSWaitForCondition(inDebugger), SubState(NewState), Watch(NULL), bDataBreak(0)
{
	Watch = new FDebuggerDataWatch(Debugger->ErrorDevice, WatchText);
	const FStackNode* StackNode = SubState ? SubState->GetCurrentNode() : NULL;
	const UObject* Obj = StackNode ? StackNode->Object : NULL;
	const FFrame* Node = StackNode ? StackNode->StackNode : NULL;

	Watch->Refresh( Obj, Node );
}

DSBreakOnChange::~DSBreakOnChange()
{
	if ( SubState )
		delete SubState;
	SubState = NULL;
}

// Run until execution reaches the cursor position.
DSRunToCursor::DSRunToCursor( UDebuggerCore* const inDebugger )
: DSWaitForCondition(inDebugger)
{ }

// Run until the current function returns to its caller.
DSStepOut::DSStepOut( UDebuggerCore* const inDebugger )
: DSWaitForCondition(inDebugger)
{ }

// Execute a single line, descending into calls.
DSStepInto::DSStepInto( UDebuggerCore* const inDebugger )
: DSWaitForCondition(inDebugger)
{ }

// Execute a single line within the given object's frame, skipping over calls.
DSStepOverStack::DSStepOverStack( const UObject* inObject, UDebuggerCore* const inDebugger )
: DSWaitForCondition(inDebugger), EvalObject(inObject)
{
}
/*-----------------------------------------------------------------------------
DSBreakOnChange specifics.
-----------------------------------------------------------------------------*/
/*-----------------------------------------------------------------------------
    DSBreakOnChange specifics.
-----------------------------------------------------------------------------*/
// Forward the current-node bookkeeping to the wrapped sub-state when present,
// otherwise fall back to the normal condition-state behaviour.
void DSBreakOnChange::SetCurrentNode( const FStackNode* Node )
{
    if ( SubState )
        SubState->SetCurrentNode(Node);
    else
        DSWaitForCondition::SetCurrentNode(Node);
}
// Return the innermost active state: delegate to the wrapped sub-state when
// one is installed, otherwise answer for ourselves.
FDebuggerState* DSBreakOnChange::GetCurrent()
{
    return SubState ? SubState->GetCurrent() : FDebuggerState::GetCurrent();
}
// Return the current stack node of the innermost active state.
const FStackNode* DSBreakOnChange::GetCurrentNode() const
{
    return SubState ? SubState->GetCurrentNode() : FDebuggerState::GetCurrentNode();
}
// Called before the debugger installs NewState. Returns 1 when this state
// absorbs NewState as its wrapped sub-state (keeping the data-break wrapper
// installed as the outer state); the caller must then NOT install NewState
// itself. Ownership of NewState transfers to this object on success.
UBOOL DSBreakOnChange::InterceptNewState( FDebuggerState* NewState )
{
    if ( !NewState )
        return 0;
    if ( SubState )
    {
        // Give the existing wrapped state a chance to absorb the transition first.
        if ( SubState->InterceptNewState(NewState) )
            return 1;
        // It declined: replace it, releasing the old sub-state.
        delete SubState;
    }
    SubState = NewState;
    return 1;
}
// Called before the debugger deletes OldState. Returns 1 when OldState is
// (or is held by) our wrapped sub-state, meaning the caller must not delete
// it — this wrapper still owns it.
UBOOL DSBreakOnChange::InterceptOldState( FDebuggerState* OldState )
{
    // Nothing to intercept without both states, and never intercept ourselves.
    if ( !OldState || !SubState || OldState == this )
        return 0;
    // SubState is guaranteed non-NULL by the guard above, so the original
    // redundant `SubState &&` re-check has been dropped.
    if ( SubState->InterceptOldState(OldState) )
        return 1;
    return OldState == SubState;
}
/*-----------------------------------------------------------------------------
HandleInput.
-----------------------------------------------------------------------------*/
/*-----------------------------------------------------------------------------
    HandleInput.
-----------------------------------------------------------------------------*/
// Process a user action received while this data-break state is active.
// Clears the pending data-break flag and forwards the action to the wrapped
// sub-state so normal stepping commands keep working.
void DSBreakOnChange::HandleInput( EUserAction Action )
{
    if ( Action >= UA_MAX )
        appErrorf(NAME_FriendlyError, TEXT("Invalid UserAction received by HandleInput()!"));
    if ( Action != UA_Exit && Action != UA_None && bDataBreak )
    {
        // refresh the watch's value with the current value
        // NOTE(review): the refresh itself is not implemented here yet.
    }
    bDataBreak = 0;
    if ( SubState )
        SubState->HandleInput(Action);
}
// Dispatch a user command issued while the debugger is broken into.
// Always releases the message-pump wait first (ContinueExecution), then
// installs the debugger state corresponding to the requested action.
void DSWaitForInput::HandleInput( EUserAction UserInput )
{
    ContinueExecution();
    switch ( UserInput )
    {
        case UA_RunToCursor:
            /*CHARRANGE sel;
            RichEdit_ExGetSel (Parent->Edit.hWnd, &sel);
            if ( sel.cpMax != sel.cpMin )
            {
                //appMsgf(0,TEXT("Invalid cursor position"));
                return;
            }
            Parent->ChangeState( new DSRunToCursor( sel.cpMax, Parent->GetCallStack()->GetStackDepth() ) );*/
            // Run-to-cursor is currently disabled; just resume free execution.
            Debugger->IsDebugging = 0;
            break;
        case UA_Exit:
            // Shut the whole engine down, not just the debugger session.
            GIsRequestingExit = 1;
            Debugger->Close();
            break;
        case UA_StepInto:
            Debugger->ChangeState( new DSStepInto(Debugger) );
            break;
        case UA_StepOver:
            /*  if ( CurrentInfo != TEXT("RETURN") && CurrentInfo != TEXT("RETURNNOTHING") )
            {
                Debugger->ChangeState( new DSStepOver( CurrentObject,
                                                     CurrentClass,
                                                     CurrentStack,
                                                     CurrentLine,
                                                     CurrentPos,
                                                     CurrentInfo,
                                                     Debugger->GetCallStack()->GetStackDepth(), Debugger ) );
            }*/
            debugf(TEXT("Warning: UA_StepOver currently unimplemented"));
            //          Debugger->IsDebugging = 1;
            break;
        case UA_StepOverStack:
        {
            // Step over at stack granularity: stay at the depth of the
            // current top-of-stack frame's object.
            const FStackNode* Top = Debugger->CallStack->GetTopNode();
            check(Top);
            Debugger->ChangeState( new DSStepOverStack(Top->Object,Debugger) );
        }
            break;
        case UA_StepOut:
            Debugger->ChangeState( new DSStepOut(Debugger) );
            break;
        case UA_Go:
            // Resume normal execution until the next breakpoint.
            Debugger->ChangeState( new DSIdleState(Debugger) );
            break;
    }
}
/*-----------------------------------------------------------------------------
Process.
-----------------------------------------------------------------------------*/
/*-----------------------------------------------------------------------------
    Process.
-----------------------------------------------------------------------------*/
// Default per-opcode processing: break into the debugger as soon as the
// state's break condition evaluates true (and there is a live call stack).
void FDebuggerState::Process( UBOOL bOptional )
{
    if ( !Debugger->IsClosing && Debugger->CallStack->StackDepth && EvaluateCondition(bOptional) )
        Debugger->Break();
}
// Blocking processing for the broken-in state: refresh the debugger UI, show
// it, then spin the Windows message pump until the user resumes execution.
void DSWaitForInput::Process(UBOOL bOptional)
{
    if( Debugger->IsClosing )
        return;
    // Reset deferred-break requests now that we are actually broken in.
    Debugger->AccessedNone = 0;
    Debugger->BreakASAP = 0;
    Debugger->UpdateInterface();
    bContinue = 0;
    Debugger->GetInterface()->Show();
    PumpMessages();  // blocks until ContinueExecution() or shutdown
}
// Condition-driven processing: when the break condition is met, hand control
// to an interactive (user-controlled) state via Debugger->Break().
void DSWaitForCondition::Process(UBOOL bOptional)
{
    check(Debugger);
    const FStackNode* Node = GetCurrentNode();
    check(Node);
    if ( !Debugger->IsClosing && Debugger->CallStack->StackDepth && EvaluateCondition(bOptional) )
    {
        // Condition was MET. We now delegate control to a
        // user-controlled state.
        // Guard against breaking while executing a frame whose node is a
        // UClass (not a function/state) — drop to a native debugger instead.
        if ( Node && Node->StackNode && Node->StackNode->Node &&
            Node->StackNode->Node->IsA(UClass::StaticClass()) )
        {
            if ( appIsDebuggerPresent() )
                appDebugBreak();
            return;
        }
        Debugger->Break();
    }
}
// Data-break processing: break when the watch (or wrapped state's condition)
// fires; otherwise let the wrapped sub-state do its own processing.
// NOTE(review): the leading portion duplicates DSWaitForCondition::Process.
void DSBreakOnChange::Process(UBOOL bOptional)
{
    check(Debugger);
    const FStackNode* Node = GetCurrentNode();
    check(Node);
    if ( !Debugger->IsClosing && Debugger->CallStack->StackDepth && EvaluateCondition(bOptional) )
    {
        // Same UClass-frame guard as DSWaitForCondition::Process.
        if ( Node && Node->StackNode && Node->StackNode->Node &&
            Node->StackNode->Node->IsA(UClass::StaticClass()) )
        {
            if ( appIsDebuggerPresent() )
                appDebugBreak();
            return;
        }
        // TODO : post message box with reason for breaking the udebugger
        Debugger->Break();
        return;
    }
    if ( SubState )
        SubState->Process(bOptional);
}
// Run a local Win32 message loop while the debugger has the game halted.
// Engine client message processing is suspended around each drain so the
// game itself does not advance; WM_QUIT converts into an engine exit request.
// Returns when ContinueExecution() sets bContinue, the debugger is closing,
// or an exit has been requested.
void DSWaitForInput::PumpMessages()
{
    while( !bContinue && !Debugger->IsClosing && !GIsRequestingExit )
    {
        check(GEngine->Client);
        GEngine->Client->AllowMessageProcessing( FALSE );
        MSG Msg;
        while( PeekMessageW(&Msg,NULL,0,0,PM_REMOVE) )
        {
            if( Msg.message == WM_QUIT )
            {
                GIsRequestingExit = 1;
                ContinueExecution();
            }
            TranslateMessage( &Msg );
            DispatchMessageW( &Msg );
        }
        GEngine->Client->AllowMessageProcessing( TRUE );
    }
}
// Signal PumpMessages() to stop blocking and let script execution resume.
void DSWaitForInput::ContinueExecution()
{
    bContinue = TRUE;
}
// Base break condition: returns 1 iff a user breakpoint is set on the line
// currently being executed (looked up by the owning class's path name).
UBOOL FDebuggerState::EvaluateCondition( UBOOL bOptional )
{
    const FStackNode* Node = GetCurrentNode();
    check(Node);
    check(Debugger->CallStack->StackDepth);
    check(Debugger);
    check(Debugger->BreakpointManager);
    check(!Debugger->IsClosing);
    // Check if we've hit a breakpoint
    INT Line = Node->GetLine();
    if ( /*Line != LineNumber && */Debugger->BreakpointManager->QueryBreakpoint(*Debugger->GetStackOwnerClass(Node->StackNode)->GetPathName(), Line) )
        return 1;
    return 0;
}
// DSWaitForCondition adds no condition of its own — breakpoints only.
UBOOL DSWaitForCondition::EvaluateCondition(UBOOL bOptional)
{
    return FDebuggerState::EvaluateCondition(bOptional);
}
// Run-to-cursor is not implemented yet; behaves like plain breakpoint checks.
UBOOL DSRunToCursor::EvaluateCondition(UBOOL bOptional)
{
    return DSWaitForCondition::EvaluateCondition(bOptional);
}
// Step-out condition: break once the call stack has become shallower than the
// depth recorded when the step-out was requested (EvalDepth); until then only
// ordinary breakpoints can interrupt.
UBOOL DSStepOut::EvaluateCondition(UBOOL bOptional)
{
    check(Debugger->CallStack->StackDepth);
    check(!Debugger->IsClosing);
    const FStackNode* Node = GetCurrentNode();
    check(Node);
    if ( Debugger->CallStack->StackDepth >= EvalDepth )
        return FDebuggerState::EvaluateCondition(bOptional);
    // ?! Is this the desired result?
    // This seems like it could possibly result in the udebugger skipping a function while stepping out, if the
    // opcode was DI_PrevStack when 'stepout' was received
    /*  if ( bOptional )
    {
        if ( !Debugger->CallStack->StackDepth < EvalDepth - 1 )
            return DSIdleState::EvaluateCondition(bOptional);
    }
    else*/
    if ( Debugger->CallStack->StackDepth < EvalDepth )
        return 1;
    return DSWaitForCondition::EvaluateCondition(bOptional);
}
// Step-into condition: break as soon as either the stack depth or the line
// number differs from where the step was issued — i.e. any new line.
UBOOL DSStepInto::EvaluateCondition(UBOOL bOptional)
{
    check(Debugger->CallStack->StackDepth);
    check(!Debugger->IsClosing);
    const FStackNode* Node = GetCurrentNode();
    check(Node);
    return Debugger->CallStack->StackDepth != EvalDepth || Node->GetLine() != LineNumber;
}
// Step-over condition: break on the next line at the same depth (unless the
// check is an "optional" mid-line evaluation), break immediately when the
// stack unwinds past the original depth, and fall back to breakpoint checks
// while executing deeper (called) frames.
UBOOL DSStepOverStack::EvaluateCondition(UBOOL bOptional)
{
    check(Debugger->CallStack->StackDepth);
    check(!Debugger->IsClosing);
    const FStackNode* Node = GetCurrentNode();
    check(Node);
    if ( Debugger->CallStack->StackDepth != EvalDepth || Node->GetLine() != LineNumber )
    {
        if ( Debugger->CallStack->StackDepth < EvalDepth )
            return 1;   // returned above the starting frame: stop now
        if ( Debugger->CallStack->StackDepth == EvalDepth )
            return !bOptional;  // new line at the same depth: stop on a real step
        return FDebuggerState::EvaluateCondition(bOptional);  // inside a call: breakpoints only
    }
    return 0;
}
// Data-break condition: break when the watched value changed (setting
// bDataBreak so HandleInput knows why we stopped); otherwise defer to the
// wrapped sub-state's condition, or to plain breakpoint checks.
UBOOL DSBreakOnChange::EvaluateCondition( UBOOL bOptional )
{
    // TODO
    //first, evaluate whether our data watch has changed...if so, set a flag to indicate that we've requested the udbegger to break
    // (will be checked in HandleInput), and break
    // otherwise, just execute normal behavior
    check(Watch);
    if ( Watch->Modified() )
    {
        bDataBreak = 1;
        return 1;
    }
    if ( SubState )
        return SubState->EvaluateCondition(bOptional);
    return DSWaitForCondition::EvaluateCondition(bOptional);
}
/*-----------------------------------------------------------------------------
Primary UDebugger methods.
-----------------------------------------------------------------------------*/
/*-----------------------------------------------------------------------------
    Primary UDebugger methods.
-----------------------------------------------------------------------------*/
// Queue NewState as the debugger's next state, replacing any state already
// queued. When bImmediately is set the transition is applied right away and
// the new state is given one processing pass with the current stack node.
// Takes ownership of NewState.
void UDebuggerCore::ChangeState( FDebuggerState* NewState, UBOOL bImmediately )
{
    if( PendingState )
        delete PendingState;
    // The original `NewState ? NewState : NULL` was a tautology; a plain
    // assignment is equivalent (NULL simply clears the pending state).
    PendingState = NewState;
    if ( bImmediately && PendingState )
    {
        // Cancel deferred break requests; the explicit transition wins.
        AccessedNone = 0;
        BreakASAP = 0;
        // Seed the new state with the stack node the old state was showing.
        PendingState->UpdateStackInfo(CurrentState ? CurrentState->GetCurrentNode() : NULL);
        ProcessPendingState();
        CurrentState->Process();
    }
}
// Apply a queued state transition, honouring the interception protocol:
// the outgoing state may absorb the new one (InterceptNewState), and the
// incoming state may claim ownership of the old one (InterceptOldState);
// otherwise the old state is deleted here.
void UDebuggerCore::ProcessPendingState()
{
    if ( PendingState )
    {
        if ( CurrentState )
        {
            if ( CurrentState->InterceptNewState(PendingState) )
            {
                // Current state adopted PendingState — it stays installed.
                PendingState = NULL;
                return;
            }
            if ( !PendingState->InterceptOldState(CurrentState) )
                delete CurrentState;
        }
        CurrentState = PendingState;
        PendingState = NULL;
    }
}
// Main debugger entry point
void UDebuggerCore::DebugInfo( const UObject* Debugee, const FFrame* Stack, BYTE OpCode, INT LineNumber, INT InputPos )
{
if( !ProcessDebugInfo )
return;
// Weird Devastation fix
if ( Stack->Node->IsA( UClass::StaticClass() ) )
{
if ( appIsDebuggerPresent() )
appDebugBreak();
return;
}
// Process any waiting states
ProcessPendingState();
check(CurrentState);
if ( CallStack && BreakpointManager && CurrentState )
{
if ( IsClosing )
{
if ( Interface->IsLoaded() )
Interface->Close();
}
else if ( !GIsRequestingExit )
{
// Returns true if it handled updating the stack
if ( CallStack->UpdateStack(Debugee, Stack, LineNumber, InputPos, OpCode) )
return;
if ( CallStack->StackDepth > 0 )
{
CurrentState->UpdateStackInfo( CallStack->GetTopNode() );
// Halt execution, and wait for user input if we have a breakpoint for this line
if ( (AccessedNone && BreakOnNone) || BreakASAP )
{
Break();
}
else
{
// Otherwise, update the debugger's state with the currently executing stacknode, then
// pass control to debugger state for further processing (i.e. if stepping over, are we ready to break again)
CurrentState->Process();
}
}
}
}
}
// Update the call stack, adding new FStackNodes if necessary
// Take into account latent state stack anomalies...
// Update the call stack, adding new FStackNodes if necessary
// Take into account latent state stack anomalies...
//
// Mirrors the VM's call stack in response to a single debug opcode.
// Returns 1 when the opcode was fully consumed here (the caller should not
// do any further processing for it), 0 when the caller should continue.
// NOTE(review): the exact push/pop and queued-command ordering below is load-
// bearing for latent state code; treat any reordering as a behaviour change.
UBOOL FCallStack::UpdateStack( const UObject* Debugee, const FFrame* FStack, int LineNumber, int InputPos, BYTE OpCode )
{
    check(StackDepth == Stack.Num());
    if ( StackDepth == 0 )
        QueuedCommands.Empty();
    FDebuggerState* CurrentState = Parent->GetCurrentState();
    switch ( OpCode )
    {
        // Check if stack change is due to a latent function in a state (meaning thread of execution
        case DI_PrevStackLatent:
        {
            // Latent return is only legal from state code, i.e. depth exactly 1.
            if ( StackDepth != 1 )
            {
                Parent->DumpStack();
                appErrorf(NAME_FriendlyError, TEXT("PrevStackLatent received with stack depth != 1.  Verify that all packages have been compiled in debug mode."));
            }
            Stack.Pop();
            StackDepth--;
            CurrentState->UpdateStackInfo(NULL);
            return 1;
        }
        // Normal change... pop the top stack off the call stack
        case DI_PrevStack:
        {
            if ( StackDepth <= 0 )
            {
                Parent->DumpStack();
                appErrorf(NAME_FriendlyError, TEXT("PrevStack received with StackDepth <= 0.  Verify that all packages have been compiled in debug mode."));
            }
            FStackNode* Last = &Stack.Last();
            if ( Last->StackNode != FStack )
            {
                if ( !Last->StackNode->Node->IsA(UState::StaticClass()) && FStack->Node->IsA(UState::StaticClass()) )
                {
                    // We've received a call to execDebugInfo() from UObject::GotoState() as a result of the state change,
                    // but we were executing a function, not state code.
                    // Queue this prevstack until we're back in state code
                    new(QueuedCommands) StackCommand( FStack, OpCode, LineNumber );
                    return 1;
                }
            }
            if ( Last->StackNode != FStack )
                appErrorf(NAME_FriendlyError, TEXT("UDebugger CallStack inconsistency detected.  Verify that all packages have been compiled in debug mode."));
            Stack.Pop();
            StackDepth--;
            // If we're returning to state code (StackDepth == 1 && stack node is an FStateFrame), and the current object has been marked
            // to be deleted, we'll never receive the PREVSTACK (since state code isn't executed for actors marked bDeleteMe)
            // Remove this stacknode now, but don't change the current state of the debugger (in case we were stepping into, or out of)
            if ( StackDepth == 1 )
            {
                const FFrame* Node = Stack(0).StackNode;
                if ( Node && Node->Node && Node->Node->IsA(UState::StaticClass()) && Node->Object->IsPendingKill() )
                {
                    Stack.Pop();
                    StackDepth--;
                    CurrentState->UpdateStackInfo(NULL);
                    return 1;
                }
            }
            if ( StackDepth == 0 )
                CurrentState->UpdateStackInfo(NULL);
            else
            {
                // Give the state an optional (bOptional=1) chance to break on
                // the frame we just returned into.
                CurrentState->UpdateStackInfo( &Stack.Last() );
                CurrentState->Process(1);
            }
            // If we're returning to state code and we have a queued command for this state, execute it now
            if ( StackDepth == 1 && QueuedCommands.Num() )
            {
                StackCommand Command = QueuedCommands(0);
                if ( Command.Frame == Stack(0).StackNode )
                {
                    QueuedCommands.Remove(0);
                    UpdateStack( Debugee, Command.Frame, Command.LineNumber, InputPos, Command.OpCode );
                }
            }
            return 1;
        }
        case DI_PrevStackState:
        {
            // State being popped directly: re-issue as a normal DI_PrevStack
            // against the state frame itself.
            if ( StackDepth == 1 && FStack->Node->IsA(UState::StaticClass()) )
            {
                FStackNode& Node = Stack(0);
                UpdateStack( Debugee, Node.StackNode, Node.Lines.Last() + 1, 0, DI_PrevStack );
                return 1;
            }
            break;
        }
        case DI_NewStack:
        {
            FStackNode* CurrentTop = NULL;
            if (StackDepth)
                CurrentTop = &Stack.Last();
            if ( CurrentTop && CurrentTop->StackNode == FStack )
            {
                Parent->DumpStack();
                appErrorf(NAME_FriendlyError, TEXT("Received call for new stack with identical stack node!  Verify that all packages have been compiled in debug mode."));
            }
            // Push the new frame and let the state decide whether to break in it.
            CurrentTop = new(Stack)
                FStackNode( Debugee, FStack, Parent->GetStackOwnerClass(FStack),
                            StackDepth, LineNumber, InputPos, OpCode );
            CurrentState->UpdateStackInfo( CurrentTop );
            StackDepth++;
            CurrentState->Process();
            return 1;
        }
        case DI_NewStackLatent:
        {
            // Latent resume must begin with an empty mirrored stack.
            if ( StackDepth )
            {
                Parent->DumpStack();
                appErrorf(NAME_FriendlyError,TEXT("Received LATENTNEWSTACK with stack depth Object:%s Class:%s Line:%i OpCode:%s"), Parent->GetStackOwnerClass(FStack)->GetName(), Debugee->GetName(), LineNumber, OpCode);
            }
            CurrentState->UpdateStackInfo(new(Stack) FStackNode(Debugee, FStack, Parent->GetStackOwnerClass(FStack), StackDepth, LineNumber,InputPos,OpCode));
            StackDepth++;
            CurrentState->Process();
            return 1;
        }
        case DI_NewStackLabel:
        {
            if ( StackDepth == 0 )
            {
                // was result of a native gotostate
                CurrentState->UpdateStackInfo(new(Stack) FStackNode(Debugee, FStack, Parent->GetStackOwnerClass(FStack), StackDepth, LineNumber,InputPos,OpCode));
                StackDepth++;
                CurrentState->Process();
                return 1;
            }
            else
            {
                // Label reached inside an existing frame: record it but let
                // the caller continue normal processing.
                Stack.Last().Update( LineNumber, InputPos, OpCode, StackDepth );
                return 0;
            }
        }
    }
    // Stack has not changed.  Update the current node with line number and current opcode type
    if ( StackDepth <= 0 )
    {
        Parent->DumpStack();
        appErrorf(NAME_FriendlyError,TEXT("Received call to UpdateStack with CallStack depth of 0.  Verify that all packages have been compiled in debug mode."));
    }
    FStackNode* Last = &Stack.Last();
    if ( Last->StackNode != FStack )
    {
        if ( !Last->StackNode->Node->IsA(UState::StaticClass()) && FStack->Node->IsA(UState::StaticClass()) )
        {
            // We've received a call to execDebugInfo() from UObject::GotoState() as a result of the state change,
            // but we were executing a function, not state code.
            // Back up the state's pointer to the EX_DebugInfo, and ignore this update.
            FFrame* HijackStack = const_cast<FFrame*>(FStack);
            while ( --HijackStack->Code && *HijackStack->Code != EX_DebugInfo );
            return 1;
        }
        Parent->DumpStack();
        if ( appIsDebuggerPresent() )
            appDebugBreak();
        else
            appErrorf(NAME_FriendlyError,TEXT("Received call to UpdateStack with stack out of sync Object:%s Class:%s Line:%i OpCode:%s"), Parent->GetStackOwnerClass(FStack)->GetName(), Debugee->GetName(), LineNumber, OpCode);
    }
    Last->Update( LineNumber, InputPos, OpCode, StackDepth );
    // Skip over OPEREFP & FORINIT opcodes to simplify stepping into/over
    return OpCode == DI_EFPOper || OpCode == DI_ForInit;
}
// Halt script execution now: switch immediately into the interactive
// wait-for-input state (debug builds also echo the current node to the log).
void UDebuggerCore::Break()
{
#ifdef _DEBUG
    if ( GetCurrentNode() )
        GetCurrentNode()->Show();
#endif
    ChangeState( new DSWaitForInput(this), 1 );
}
// Diagnostic dump of the mirrored call stack, used just before fatal
// "stack out of sync" errors: one line per frame plus its recorded
// line/opcode/depth history.
void UDebuggerCore::DumpStack()
{
    check(CallStack);
    debugf(TEXT("CALLSTACK DUMP - SOMETHING BAD HAPPENED STACKDEPTH: %i !"), CallStack->StackDepth);
    for ( INT i = 0; i < CallStack->Stack.Num(); i++ )
    {
        FStackNode* Node = &CallStack->Stack(i);
        if ( !Node )
            debugf(TEXT("%i)  INVALID NODE"), i);
        else
        {
            debugf(TEXT("%i) Class '%s'  Object '%s'  Node '%s'"),
                i,
                Node->Class ? Node->Class->GetName() : TEXT("NONE"),
                Node->Object ? Node->Object->GetFullName() : TEXT("NONE"),
                Node->StackNode && Node->StackNode->Node
                    ? Node->StackNode->Node->GetFullName() : TEXT("NONE") );
            for ( INT j = 0; j < Node->Lines.Num() && j < Node->OpCodes.Num(); j++ )
                debugf(TEXT("   %i): Line '%i'  OpCode '%s'  Depth '%i'"), j, Node->Lines(j), GetOpCodeName(Node->OpCodes(j)), Node->Depths(j));
        }
    }
}
|
Cyniikal/labelbox-python | tests/integration/test_ontology.py | <filename>tests/integration/test_ontology.py
import pytest
from labelbox import OntologyBuilder
from labelbox.orm.model import Entity
import json
import time
@pytest.mark.skip(reason="normalized ontology contains Relationship, "
                  "which is not finalized yet. introduce this back when"
                  "Relationship feature is complete and we introduce"
                  "a Relationship object to the ontology that we can parse")
def test_from_project_ontology(project) -> None:
    # Round-trip check: an OntologyBuilder constructed from a project must
    # serialize back to exactly the project's normalized ontology dict.
    o = OntologyBuilder.from_project(project)
    assert o.asdict() == project.ontology().normalized
def _get_attr_stringify_json(obj, attr):
value = getattr(obj, attr.name)
if attr.field_type.name.lower() == "json":
return json.dumps(value, sort_keys=True)
return value
def test_feature_schema_create_read(client, rand_gen):
    # End-to-end create/read round trip for a top-level feature schema
    # against a live backend (client fixture).
    name = f"test-root-schema-{rand_gen(str)}"
    feature_schema_cat_normalized = {
        'tool': 'polygon',
        'name': name,
        'color': 'black',
        'classifications': [],
    }
    created_feature_schema = client.create_feature_schema(
        feature_schema_cat_normalized)
    queried_feature_schema = client.get_feature_schema(
        created_feature_schema.uid)
    # Fetch-by-uid must return a field-for-field identical schema
    # (JSON fields compared via their sorted-key serialisation).
    for attr in Entity.FeatureSchema.fields():
        assert _get_attr_stringify_json(created_feature_schema,
                                        attr) == _get_attr_stringify_json(
                                            queried_feature_schema, attr)

    time.sleep(3)  # Slight delay for searching
    # Name search must surface exactly the schema just created ...
    queried_feature_schemas = list(client.get_feature_schemas(name))
    assert [feature_schema.name for feature_schema in queried_feature_schemas
           ] == [name]
    queried_feature_schema = queried_feature_schemas[0]

    # ... and the searched copy must also match field-for-field.
    for attr in Entity.FeatureSchema.fields():
        assert _get_attr_stringify_json(created_feature_schema,
                                        attr) == _get_attr_stringify_json(
                                            queried_feature_schema, attr)
def test_ontology_create_read(client, rand_gen):
    # Create an ontology from an existing feature schema, then verify both
    # direct fetch and name search return identical ontology objects.
    ontology_name = f"test-ontology-{rand_gen(str)}"
    tool_name = f"test-ontology-tool-{rand_gen(str)}"
    feature_schema_cat_normalized = {
        'tool': 'polygon',
        'name': tool_name,
        'color': 'black',
        'classifications': [],
    }
    feature_schema = client.create_feature_schema(feature_schema_cat_normalized)
    created_ontology = client.create_ontology_from_feature_schemas(
        name=ontology_name, feature_schema_ids=[feature_schema.uid])
    # The ontology's single tool must reflect the source feature schema,
    # with server-assigned ids filled in.
    tool_normalized = created_ontology.normalized['tools'][0]
    for k, v in feature_schema_cat_normalized.items():
        assert tool_normalized[k] == v
    assert tool_normalized['schemaNodeId'] is not None
    assert tool_normalized['featureSchemaId'] == feature_schema.uid

    # Fetch-by-uid must match field-for-field.
    queried_ontology = client.get_ontology(created_ontology.uid)
    for attr in Entity.Ontology.fields():
        assert _get_attr_stringify_json(created_ontology,
                                        attr) == _get_attr_stringify_json(
                                            queried_ontology, attr)

    time.sleep(3)  # Slight delay for searching
    # Name search must surface exactly the ontology just created ...
    queried_ontologies = list(client.get_ontologies(ontology_name))
    assert [ontology.name for ontology in queried_ontologies] == [ontology_name]
    queried_ontology = queried_ontologies[0]

    # ... and the searched copy must match field-for-field as well.
    for attr in Entity.Ontology.fields():
        assert _get_attr_stringify_json(created_ontology,
                                        attr) == _get_attr_stringify_json(
                                            queried_ontology, attr)
|
jsoagger/jsoagger-fx | jsoagger-jfxcore-engine/src/main/java/io/github/jsoagger/jfxcore/components/CoreComponentsPresentersBeanProvider.java | <filename>jsoagger-jfxcore-engine/src/main/java/io/github/jsoagger/jfxcore/components/CoreComponentsPresentersBeanProvider.java
/**
*
*/
package io.github.jsoagger.jfxcore.components;
import io.github.jsoagger.core.bridge.operation.IOperation;
import io.github.jsoagger.core.ioc.api.annotations.Bean;
import io.github.jsoagger.core.ioc.api.annotations.BeansProvider;
import io.github.jsoagger.core.ioc.api.annotations.Named;
import io.github.jsoagger.jfxcore.api.presenter.ModelIconPresenter;
import io.github.jsoagger.jfxcore.api.presenter.ModelIdentityPresenter;
import io.github.jsoagger.jfxcore.api.presenter.ModelSecondaryLabelPresenter;
import io.github.jsoagger.jfxcore.api.services.Services;
import io.github.jsoagger.jfxcore.components.actions.FlowItemActionPresenter;
import io.github.jsoagger.jfxcore.components.presenter.ModelMasterAttributePresenter;
import io.github.jsoagger.jfxcore.components.presenter.ModelRevisionPresenter;
import io.github.jsoagger.jfxcore.components.presenter.ModelStatusPresenter;
import io.github.jsoagger.jfxcore.components.presenter.ModelWorkStatusPresenter;
import io.github.jsoagger.jfxcore.components.presenter.PeopleFullIdentityPresenter;
import io.github.jsoagger.jfxcore.components.presenter.RCIteratedFullIdentityHeaderPresenter;
import io.github.jsoagger.jfxcore.components.presenter.RCIteratedFullIdentityPresenter;
import io.github.jsoagger.jfxcore.components.presenter.RCMasterDescriptionPresenter;
import io.github.jsoagger.jfxcore.components.presenter.RCMasterFullIdentityHeaderPresenter;
import io.github.jsoagger.jfxcore.components.presenter.RCMasterFullIdentityPresenter;
import io.github.jsoagger.jfxcore.components.presenter.RCMasterNameIdentityPresenter;
import io.github.jsoagger.jfxcore.components.presenter.RCMasterNamePresenter;
import io.github.jsoagger.jfxcore.engine.components.listform.DoActionPresenterFactory;
import io.github.jsoagger.jfxcore.engine.components.listform.IListFormDataLoader;
import io.github.jsoagger.jfxcore.engine.components.listform.LegalMentionItemPresenter;
import io.github.jsoagger.jfxcore.engine.components.listform.ListFormCellFactory;
import io.github.jsoagger.jfxcore.engine.components.listform.MultiSelectionListFormItemPresenter;
import io.github.jsoagger.jfxcore.engine.components.listform.ParentItemPresenter;
import io.github.jsoagger.jfxcore.engine.components.listform.PreferenceDoActionPresenterFactory;
import io.github.jsoagger.jfxcore.engine.components.listform.SystemUpdateListFormCellPresenter;
import io.github.jsoagger.jfxcore.engine.components.listform.SystemVersionListFormCellPresenter;
import io.github.jsoagger.jfxcore.engine.components.listform.YesNoListFormCellPresenter;
import io.github.jsoagger.jfxcore.engine.components.presenter.LargeItemPresenterFactory;
import io.github.jsoagger.jfxcore.engine.components.presenter.MediumItemPresenterFactory;
import io.github.jsoagger.jfxcore.engine.components.presenter.SmallItemPresenterFactory;
import io.github.jsoagger.jfxcore.engine.components.presenter.impl.ModelAttributePresenter;
import io.github.jsoagger.jfxcore.engine.components.presenter.impl.iconprovider.AdminStaticIconPresenter;
import io.github.jsoagger.jfxcore.engine.components.presenter.impl.quickactions.DoActionPresenter;
/**
 * Bean provider registering the core UI "presenter" components of the JSoagger
 * JavaFX engine. Each {@code @Bean}/{@code @Named} factory method builds one
 * presenter instance; the {@code @Named} value is the lookup key used by the
 * IoC container (see {@link Services#getBean}).
 *
 * <p>NOTE(review): factory method names mix camelCase and PascalCase
 * (e.g. {@code rCMasterNamePresenter()} vs {@code RCMasterDescriptionPresenter()});
 * left unchanged here because renaming could affect reflective registration —
 * confirm before normalising.</p>
 *
 * @author <NAME>
 *
 */
@BeansProvider
public class CoreComponentsPresentersBeanProvider {
  public CoreComponentsPresentersBeanProvider() {}

  // --- Model identity / attribute presenters -------------------------------

  @Bean
  @Named("ModelMasterAttributePresenter")
  public ModelMasterAttributePresenter modelMasterAttributePresenter() {
    return new ModelMasterAttributePresenter();
  }

  @Bean
  @Named("RCMasterFullIdentityPresenter")
  public RCMasterFullIdentityPresenter rCMasterFullIdentityPresenter() {
    return new RCMasterFullIdentityPresenter();
  }

  @Bean
  @Named("RCMasterNameIdentityPresenter")
  public RCMasterNameIdentityPresenter rCMasterNameIdentityPresenter() {
    return new RCMasterNameIdentityPresenter();
  }

  @Bean
  @Named("RCMasterDescriptionPresenter")
  public RCMasterDescriptionPresenter RCMasterDescriptionPresenter() {
    return new RCMasterDescriptionPresenter();
  }

  @Bean
  @Named("RCIteratedFullIdentityHeaderPresenter")
  public RCIteratedFullIdentityHeaderPresenter rCIteratedFullIdentityHeaderPresenter() {
    return new RCIteratedFullIdentityHeaderPresenter();
  }

  @Bean
  @Named("PeopleFullIdentityPresenter")
  public PeopleFullIdentityPresenter peopleFullIdentityPresenter() {
    return new PeopleFullIdentityPresenter();
  }

  @Bean
  @Named("PeopleEmailPresenter")
  public ModelAttributePresenter peopleEmailPresenter() {
    // Generic attribute presenter bound to the person's e-mail attribute path.
    ModelAttributePresenter p = new ModelAttributePresenter();
    p.setAttributePath("attributes.email");
    return p;
  }

  @Bean
  @Named("RCMasterFullIdentityHeaderPresenter")
  public RCMasterFullIdentityHeaderPresenter rCMasterFullIdentityHeaderPresenter() {
    return new RCMasterFullIdentityHeaderPresenter();
  }

  @Bean
  @Named("ModelWorkStatusPresenter")
  public ModelWorkStatusPresenter modelWorkStatusPresenter() {
    return new ModelWorkStatusPresenter();
  }

  @Bean
  @Named("ModelRevisionPresenter")
  public ModelRevisionPresenter modelRevisionPresenter() {
    return new ModelRevisionPresenter();
  }

  @Bean
  @Named("RCMasterNamePresenter")
  public RCMasterNamePresenter rCMasterNamePresenter() {
    return new RCMasterNamePresenter();
  }

  @Bean
  @Named("ModelStatusPresenter")
  public ModelStatusPresenter modelStatusPresenter() {
    return new ModelStatusPresenter();
  }

  @Bean
  @Named("RCIteratedFullIdentityPresenter")
  public RCIteratedFullIdentityPresenter rCIteratedFullIdentityPresenter() {
    return new RCIteratedFullIdentityPresenter();
  }

  @Bean
  @Named("HRCIteratedFullIdentityPresenter")
  public RCIteratedFullIdentityPresenter hRCIteratedFullIdentityPresenter() {
    // Horizontal variant of the iterated full-identity presenter.
    RCIteratedFullIdentityPresenter rcit = new RCIteratedFullIdentityPresenter();
    rcit.setOrientation("horizontal");
    return rcit;
  }

  // --- List-form / preferences presenters ----------------------------------

  @Bean
  @Named("applicationVersionValuePresenter")
  public MultiSelectionListFormItemPresenter applicationVersionValuePresenter() {
    MultiSelectionListFormItemPresenter p = new MultiSelectionListFormItemPresenter();
    p.setDataLoader((IListFormDataLoader) Services.getBean("applicationVersionDataLoader"));
    return p;
  }

  @Bean
  @Named("defaultMultiSelectionPreferenceValuePresenter")
  public MultiSelectionListFormItemPresenter defaultMultiSelectionPreferenceValuePresenter() {
    MultiSelectionListFormItemPresenter p = new MultiSelectionListFormItemPresenter();
    p.setDataLoader((IListFormDataLoader) Services.getBean("ListFormDataLoader"));
    return p;
  }

  @Bean
  @Named("legalMentionItemPresenter")
  public LegalMentionItemPresenter LegalMentionItemPresenter() {
    LegalMentionItemPresenter p = new LegalMentionItemPresenter();
    return p;
  }

  @Bean
  @Named("parentItemPresenter")
  public ParentItemPresenter ParentItemPresenter() {
    ParentItemPresenter p = new ParentItemPresenter();
    return p;
  }

  @Bean
  @Named("systemUpdatePresenter")
  public SystemUpdateListFormCellPresenter SystemUpdateListFormCellPresenter() {
    SystemUpdateListFormCellPresenter p = new SystemUpdateListFormCellPresenter();
    return p;
  }

  @Bean
  @Named("systemVersionPresenter")
  public SystemVersionListFormCellPresenter SystemVersionListFormCellPresenter() {
    SystemVersionListFormCellPresenter p = new SystemVersionListFormCellPresenter();
    return p;
  }

  @Bean
  @Named("PreferencesListCellFactory")
  public ListFormCellFactory PreferencesListCellFactory() {
    ListFormCellFactory p = new ListFormCellFactory();
    return p;
  }

  @Bean
  @Named("PreferenceDoActionPresenterFactory")
  public PreferenceDoActionPresenterFactory PreferenceDoActionPresenterFactory() {
    PreferenceDoActionPresenterFactory p = new PreferenceDoActionPresenterFactory();
    return p;
  }

  @Bean
  @Named("DoActionPresenterFactory")
  public DoActionPresenterFactory DoActionPresenterFactory() {
    DoActionPresenterFactory p = new DoActionPresenterFactory();
    return p;
  }

  @Bean
  @Named("DoActionPresenter")
  public DoActionPresenter DoActionPresenter() {
    DoActionPresenter p = new DoActionPresenter();
    return p;
  }

  @Bean
  @Named("AdminStaticIconPresenter")
  public static ModelIconPresenter AdminStaticIconPresenter() {
    AdminStaticIconPresenter p = new AdminStaticIconPresenter();
    return p;
  }

  @Bean
  @Named("FlowItemActionPresenter")
  public FlowItemActionPresenter FlowItemActionPresenter() {
    return new FlowItemActionPresenter();
  }

  @Bean
  @Named("yesNoItemPresenter")
  public YesNoListFormCellPresenter yesNoItemPresenter() {
    // Yes/no preference cell wired to the shared "set preference" operation.
    YesNoListFormCellPresenter p = new YesNoListFormCellPresenter();
    p.setSetPreferenceValueOperation((IOperation) Services.getBean("SetPreferencesValueOperation"));
    return p;
  }

  // --- Search-result item presenter factories (small/medium/large) ---------

  @Bean
  @Named("SmallSearchResultItemPresenter")
  public SmallItemPresenterFactory smallSearchResultItemPresenter() {
    SmallItemPresenterFactory pr = new SmallItemPresenterFactory();
    pr.setIconPresenter((ModelIconPresenter) Services.getBean("ModelSoftTypeIconPresenter"));
    pr.setIdentityPresenter(
        (ModelIdentityPresenter) Services.getBean("ModelNameIdentityPresenter"));
    pr.setSecondaryLabelPresenter(
        (ModelSecondaryLabelPresenter) Services.getBean("ModelDescriptionPresenter"));
    return pr;
  }

  @Bean
  @Named("MediumItemPresenterFactory")
  public MediumItemPresenterFactory mediumItemPresenterFactory() {
    MediumItemPresenterFactory pr = new MediumItemPresenterFactory();
    pr.setIconPresenter((ModelIconPresenter) Services.getBean("ModelSoftTypeIconPresenter"));
    pr.setIdentityPresenter(
        (ModelIdentityPresenter) Services.getBean("ModelNameIdentityPresenter"));
    pr.setSecondaryLabelPresenter(
        (ModelSecondaryLabelPresenter) Services.getBean("ModelDescriptionPresenter"));
    return pr;
  }

  @Bean
  @Named("LargerSearchResultItemPresenter")
  public LargeItemPresenterFactory largerSearchResultItemPresenter() {
    LargeItemPresenterFactory pr = new LargeItemPresenterFactory();
    pr.setIconPresenter((ModelIconPresenter) Services.getBean("ModelSoftTypeIconPresenter"));
    pr.setIdentityPresenter(
        (ModelIdentityPresenter) Services.getBean("ModelNameIdentityPresenter"));
    pr.setSecondaryLabelPresenter(
        (ModelSecondaryLabelPresenter) Services.getBean("ModelDescriptionPresenter"));
    return pr;
  }

  @Bean
  @Named("LargerSearchResultItemPresenter2")
  public LargeItemPresenterFactory LargerSearchResultItemPresenter2() {
    // Variant of the large presenter using the *master* description label.
    LargeItemPresenterFactory pr = new LargeItemPresenterFactory();
    pr.setIconPresenter((ModelIconPresenter) Services.getBean("ModelSoftTypeIconPresenter"));
    pr.setIdentityPresenter(
        (ModelIdentityPresenter) Services.getBean("ModelNameIdentityPresenter"));
    pr.setSecondaryLabelPresenter(
        (ModelSecondaryLabelPresenter) Services.getBean("ModelMasterDescriptionPresenter"));
    return pr;
  }

  @Bean
  @Named("SmallFixedSearchResultItemPresenter")
  public SmallItemPresenterFactory SmallItemPresenterFactory() {
    // Small presenter without an icon presenter (fixed layout).
    SmallItemPresenterFactory pr = new SmallItemPresenterFactory();
    pr.setIdentityPresenter(
        (ModelIdentityPresenter) Services.getBean("ModelNameIdentityPresenter"));
    pr.setSecondaryLabelPresenter(
        (ModelSecondaryLabelPresenter) Services.getBean("ModelDescriptionPresenter"));
    return pr;
  }
}
|
apache/sis | core/sis-feature/src/main/java/org/apache/sis/image/AnnotatedImage.java | <reponame>apache/sis
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sis.image;
import java.util.Locale;
import java.util.Arrays;
import java.util.Objects;
import java.util.WeakHashMap;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.stream.Collector;
import java.awt.Image;
import java.awt.Shape;
import java.awt.Rectangle;
import java.awt.image.RenderedImage;
import java.awt.image.Raster;
import java.awt.image.ImagingOpException;
import org.apache.sis.util.ArraysExt;
import org.apache.sis.util.resources.Errors;
import org.apache.sis.util.collection.Cache;
import org.apache.sis.internal.coverage.j2d.TileOpExecutor;
import org.apache.sis.internal.coverage.j2d.ImageUtilities;
import org.apache.sis.internal.util.Strings;
/**
* An image which wraps an existing image unchanged, except for properties which are computed
* on the fly when first requested. All {@link RenderedImage} methods delegate to the wrapped
* image except {@link #getSources()} and the methods for getting the property names or values.
*
* <p>The name of the computed property is given by {@link #getComputedPropertyName()}.
* If an exception is thrown during calculation and {@link #failOnException} is {@code false},
* then {@code AnnotatedImage} automatically creates another property with the same name and
* {@value #WARNINGS_SUFFIX} suffix. That property will contain the exception encapsulated
* in a {@link LogRecord} in order to retain additional information such as the instant when
* the first error occurred.</p>
*
* <p>The computation results are cached by this class. The cache strategy assumes that the
* property value depend only on sample values, not on properties of the source image.</p>
*
* <div class="note"><b>Design note:</b>
* most non-abstract methods are final because {@link PixelIterator} (among others) relies
* on the fact that it can unwrap this image and still get the same pixel values.</div>
*
* @author <NAME> (Geomatys)
* @version 1.2
* @since 1.1
* @module
*/
abstract class AnnotatedImage extends ImageAdapter {
    /**
     * The suffix to add to property name for errors that occurred during computation.
     * A property with suffix is automatically created if an exception is thrown during
     * computation and {@link #failOnException} is {@code false}.
     */
    public static final String WARNINGS_SUFFIX = ".warnings";
    /**
     * An arbitrary value that we use for storing {@code null} values in the {@linkplain #cache}.
     */
    private static final Object NULL = Void.TYPE;
    /**
     * Cache of properties already computed for images. That map shall contain computation results only,
     * never the {@link AnnotatedImage} instances that computed those results, as doing so would create
     * memory leak (because of the {@link #source} reference preventing the key to be garbage-collected).
     * All accesses to this cache shall be synchronized on the {@code CACHE} instance.
     *
     * <p>In current implementation we cache only the values that have been computed without warnings.
     * We do that because otherwise, an {@code AnnotatedImage} with {@link #failOnException} flag set
     * could wrongly return a partially computed value if that value has been cached by another image
     * instance with the {@link #failOnException} flag unset. As a consequence of this policy, if the
     * computation failed for some tiles, that computation will be redone again for the same property
     * every time it is requested, until it eventually fully succeeds and the result become cached.</p>
     */
    private static final WeakHashMap<RenderedImage, Cache<Object,Object>> CACHE = new WeakHashMap<>();
    /**
     * Cache of property values computed for the {@linkplain #source} image. This is an entry from the
     * global {@link #CACHE}. This cache is shared by all {@link AnnotatedImage} instances wrapping the
     * same {@linkplain #source} image in order to avoid computing the same property many times if an
     * {@code AnnotatedImage} wrapper is recreated many times for the same operation on the same image.
     *
     * <p>Note that {@code null} is a valid result. Since {@link Cache} can not store null values,
     * those results are replaced by {@link #NULL}.</p>
     *
     * <p>Keys are {@link String} instances containing directly the property name when {@link #areaOfInterest}
     * and {@link #getExtraParameter()} are {@code null}, or {@link CacheKey} instances otherwise.</p>
     */
    private final Cache<Object,Object> cache;
    /**
     * Keys in the {@link AnnotatedImage#cache} when {@link AnnotatedImage#areaOfInterest} is non-null.
     */
    private static final class CacheKey {
        /** The property name (never null). */
        private final String property;
        /** The area of interest, or null if none. */
        private final Shape areaOfInterest;
        /** Parameter specific to subclass, or null if none. */
        private final Object[] extraParameter;
        /** Creates a new key for the given property and AOI. */
        CacheKey(final String property, final Shape areaOfInterest, final Object[] extraParameter) {
            this.property       = property;
            this.areaOfInterest = areaOfInterest;
            this.extraParameter = extraParameter;
        }
        /** Returns a hash code value for this key. */
        @Override public int hashCode() {
            return property.hashCode()
                    + 19 * Objects.hashCode(areaOfInterest)
                    + 37 * Arrays.hashCode(extraParameter);
        }
        /** Compares this key with the given object for equality. */
        @Override public boolean equals(final Object obj) {
            if (obj instanceof CacheKey) {
                final CacheKey other = (CacheKey) obj;
                return property.equals(other.property)
                        && Objects.equals(areaOfInterest, other.areaOfInterest)
                        && Arrays.equals(extraParameter, other.extraParameter);
            }
            return false;
        }
        /** Returns a string representation of this key for debugging purpose. */
        @Override public String toString() {
            return Strings.toString(getClass(), "property", property, "areaOfInterest", areaOfInterest);
        }
    }
    /**
     * Pixel coordinates of the region for which to compute the values, or {@code null} for the whole image.
     * If non-null, the {@link Shape#contains(double, double)} method may be invoked for testing if a pixel
     * shall be included in the computation or not.
     *
     * <p>This shape should not be modified, either by this class or by the caller who provided the shape.
     * The {@code Shape} implementation shall be thread-safe, assuming its state stay unmodified, unless
     * the {@link #parallel} argument specified to the constructor was {@code false}.</p>
     *
     * <p>If {@code areaOfInterest} is {@code null}, then {@link #boundsOfInterest} is always {@code null}.
     * However the converse is not necessarily true.</p>
     */
    protected final Shape areaOfInterest;
    /**
     * Bounds of {@link #areaOfInterest} intersected with image bounds, or {@code null} for the whole image.
     * If the area of interest fully contains those bounds, then {@link #areaOfInterest} is set to the same
     * reference than {@code boundsOfInterest}. Subclasses can use {@code areaOfInterest == boundsOfInterest}
     * for quickly testing if the area of interest is rectangular.
     *
     * <p>If {@link #areaOfInterest} is {@code null}, then {@code boundsOfInterest} is always {@code null}.
     * However the converse is not necessarily true.</p>
     */
    protected final Rectangle boundsOfInterest;
    /**
     * The errors that occurred while computing the result, or {@code null} if none or not yet determined.
     * This field is never set if {@link #failOnException} is {@code true}.
     */
    private volatile ErrorHandler.Report errors;
    /**
     * Whether parallel execution is authorized for the {@linkplain #source} image.
     * If {@code true}, then {@link RenderedImage#getTile(int, int)} implementation should be concurrent.
     */
    private final boolean parallel;
    /**
     * Whether errors occurring during computation should be propagated instead of wrapped in a {@link LogRecord}.
     */
    private final boolean failOnException;
    /**
     * Creates a new annotated image wrapping the given image.
     * The annotations are the additional properties computed by the subclass.
     *
     * @param  source           the image to wrap for adding properties (annotations).
     * @param  areaOfInterest   pixel coordinates of AOI, or {@code null} for the whole image.
     * @param  parallel         whether parallel execution is authorized.
     * @param  failOnException  whether errors occurring during computation should be propagated.
     */
    protected AnnotatedImage(RenderedImage source, Shape areaOfInterest,
                             final boolean parallel, final boolean failOnException)
    {
        super(source);
        Rectangle bounds = null;
        if (areaOfInterest != null) {
            bounds = areaOfInterest.getBounds();
            ImageUtilities.clipBounds(source, bounds);
            if (bounds.isEmpty()) {
                // Keep the empty region anchored at the image origin so the bounds stay valid.
                bounds.x = getMinX();
                bounds.y = getMinY();
                bounds.width = 0;
                bounds.height = 0;
            }
            if (areaOfInterest.contains(bounds)) {
                areaOfInterest = bounds;
            }
            /*
             * If the rectangle contains the full image, replace them by a null value.
             * It allows optimizations (avoid the need to check for point inclusion)
             * and allows the cache to detect that a value already exist.
             */
            if (bounds.x == getMinX() && bounds.width == getWidth() &&
                bounds.y == getMinY() && bounds.height == getHeight())
            {
                if (bounds == areaOfInterest) {
                    areaOfInterest = null;
                }
                bounds = null;
            }
        }
        this.boundsOfInterest = bounds;
        this.areaOfInterest = areaOfInterest;
        this.parallel = parallel;
        this.failOnException = failOnException;
        /*
         * The `this.source` field should be as specified, even if it is another `AnnotatedImage`,
         * for allowing computation of properties managed by those other instances. However we try
         * to apply the cache on a deeper source if possible, for increasing the chances that the
         * cache is shared by all images using the same data. This is okay if calculation depends
         * only on sample value, not on other data.
         */
        while (source instanceof ImageAdapter) {
            if (source instanceof AnnotatedImage) {
                cache = ((AnnotatedImage) source).cache; // Cache for the source of the source.
                return;
            }
            source = ((ImageAdapter) source).source;
        }
        synchronized (CACHE) {
            cache = CACHE.computeIfAbsent(source, (k) -> new Cache<>(8, 200, false));
        }
    }
    /**
     * Returns an optional parameter specific to subclass. This is used for caching purpose
     * and for {@link #equals(Object)} and {@link #hashCode()} method implementations only,
     * i.e. for distinguishing between two {@code AnnotatedImage} instances that are identical
     * except for subclass-defined parameters.
     *
     * <div class="note"><b>API note:</b>
     * the return value is an array because there is typically one parameter value per band.
     * This method will not modify the returned array.</div>
     *
     * @return subclass specific extra parameter, or {@code null} if none.
     */
    Object[] getExtraParameter() {
        return null;
    }
    /**
     * If the source image is the same operation for the same area of interest, returns that source.
     * Otherwise returns {@code this} or a previous instance doing the same operation than {@code this}.
     *
     * @see #equals(Object)
     */
    final RenderedImage unique() {
        if (source.getClass() == getClass() && equalParameters((AnnotatedImage) source)) {
            return source;
        } else {
            return ImageProcessor.unique(this);
        }
    }
    /**
     * Returns the key to use for entries in the {@link #cache} map.
     *
     * @param  property  value of {@link #getPropertyNames()}.
     */
    private Object getCacheKey(final String property) {
        final Object[] extraParameter = getExtraParameter();
        // Plain property name is sufficient (and cheaper) when there is no AOI and no extra parameter.
        return (areaOfInterest != null || extraParameter != null)
                ? new CacheKey(property, areaOfInterest, extraParameter) : property;
    }
    /**
     * Returns the name of the property which is computed by this image.
     *
     * @return name of property computed by this image. Shall not be null.
     */
    protected abstract String getComputedPropertyName();
    /**
     * Returns an array of names recognized by {@link #getProperty(String)}.
     * The default implementation returns the {@linkplain #source} properties names
     * followed by {@link #getComputedPropertyName()}. If that property has already
     * been computed and an error occurred, then the names returned by this method
     * will include the property name with {@value #WARNINGS_SUFFIX} suffix.
     *
     * @return all recognized property names.
     */
    @Override
    public String[] getPropertyNames() {
        // The warnings property is advertised only after an error actually occurred.
        final boolean hasErrors = (errors != null);
        final String[] names = new String[hasErrors ? 2 : 1];
        names[0] = getComputedPropertyName();
        if (hasErrors) {
            names[1] = names[0] + WARNINGS_SUFFIX;
        }
        return ArraysExt.concatenate(source.getPropertyNames(), names);
    }
    /**
     * Returns whether the given name is the name of the error property.
     * The implementation of this method avoids the creation of concatenated string.
     *
     * @param  cn    name of the computed property.
     * @param  name  the property name to test (may be {@code null}).
     * @return whether {@code name} is {@code cn} + {@value #WARNINGS_SUFFIX}.
     */
    private static boolean isErrorProperty(final String cn, final String name) {
        return name != null && name.length() == cn.length() + WARNINGS_SUFFIX.length()
                            && name.startsWith(cn) && name.endsWith(WARNINGS_SUFFIX);
    }
    /**
     * Gets a property from this image or from its source. If the given name is for the property
     * to be computed by this class and if that property has not been computed before, then this
     * method invokes {@link #computeProperty()} and caches its result.
     *
     * @param  name  name of the property to get.
     * @return the property for the given name ({@code null} is a valid result),
     *         or {@link Image#UndefinedProperty} if the given name is not a recognized property name.
     */
    @Override
    public final Object getProperty(final String name) {
        Object value;
        final String property = getComputedPropertyName();
        if (property.equals(name)) {
            /*
             * Get the previously computed value. Note that the value may have been computed by another
             * `AnnotatedImage` instance of the same class wrapping the same image, which is why we do
             * not store the result in this class.
             */
            final Object key = getCacheKey(property);
            value = cache.peek(key);
            if (value == null) {
                // Tracks whether computation completed without warnings; only such results are cached.
                boolean success = false;
                final Cache.Handler<Object> handler = cache.lock(key);
                try {
                    value = handler.peek();
                    if (value == null) try {
                        value = computeProperty();
                        if (value == null) value = NULL;
                        success = (errors == null);
                    } catch (Exception e) {
                        if (failOnException) {
                            throw (ImagingOpException) new ImagingOpException(
                                    Errors.format(Errors.Keys.CanNotCompute_1, property)).initCause(e);
                        }
                        /*
                         * Stores the exception in a log record. We use a log record in order to initialize
                         * the timestamp and thread ID to the values they had at the time the error occurred.
                         * We do not synchronize because all worker threads should have finished now.
                         */
                        ErrorHandler.Report report = errors;
                        if (report == null) {
                            errors = report = new ErrorHandler.Report();
                        }
                        report.add(null, e, () -> Errors.getResources((Locale) null)
                                    .getLogRecord(Level.WARNING, Errors.Keys.CanNotCompute_1, property));
                    }
                } finally {
                    handler.putAndUnlock(success ? value : null); // Cache only if no error occurred.
                }
                if (value == NULL) value = null;
                else value = cloneProperty(property, value);
            }
        } else if (isErrorProperty(property, name)) {
            value = errors;
        } else {
            value = source.getProperty(name);
        }
        return value;
    }
    /**
     * If an error occurred, logs the message with the specified class and method as the source.
     * The {@code classe} and {@code method} arguments overwrite the {@link LogRecord#getSourceClassName()}
     * and {@link LogRecord#getSourceMethodName()} values. The log record is cleared by this method call
     * and will no longer be reported, unless the property is recomputed.
     *
     * <h4>Context of use</h4>
     * This method should be invoked only on images that are going to be disposed after the caller extracted
     * the computed property value. This method should not be invoked on image accessible by the user,
     * because clearing the error may be surprising.
     *
     * @param  classe   the class to report as the source of the logging message.
     * @param  method   the method to report as the source of the logging message.
     * @param  handler  where to send the log message.
     */
    final void logAndClearError(final Class<?> classe, final String method, final ErrorHandler handler) {
        final ErrorHandler.Report report = errors;
        if (report != null) {
            synchronized (report) {
                final LogRecord record = report.getDescription();
                record.setSourceClassName(classe.getCanonicalName());
                record.setSourceMethodName(method);
                errors = null;
            }
            handler.handle(report);
        }
    }
    /**
     * Invoked when the property needs to be computed. If the property can not be computed,
     * then the result will be {@code null} and the exception thrown by this method will be
     * wrapped in a property of the same name with the {@value #WARNINGS_SUFFIX} suffix.
     *
     * <p>The default implementation makes the following choice:</p>
     * <ul class="verbose">
     *   <li>If {@link #parallel} is {@code true}, {@link #collector()} returns a non-null value
     *       and the area of interest covers at least two tiles, then this method distributes
     *       calculation on many threads using the functions provided by the collector.
     *       See {@link #collector()} Javadoc for more information.</li>
     *   <li>Otherwise this method delegates to {@link #computeSequentially()}.</li>
     * </ul>
     *
     * @return the computed property value. Note that {@code null} is a valid result.
     * @throws Exception if an error occurred while computing the property.
     */
    protected Object computeProperty() throws Exception {
        if (parallel) {
            final TileOpExecutor executor = new TileOpExecutor(source, boundsOfInterest);
            if (executor.isMultiTiled()) {
                final Collector<? super Raster,?,?> collector = collector();
                if (collector != null) {
                    if (!failOnException) {
                        executor.setErrorHandler((e) -> errors = e, AnnotatedImage.class, "getProperty");
                    }
                    executor.setAreaOfInterest(source, areaOfInterest);
                    return executor.executeOnReadable(source, collector);
                }
            }
        }
        return computeSequentially();
    }
    /**
     * Invoked when the property needs to be computed sequentially (all computations in current thread).
     * If the property can not be computed, then the result will be {@code null} and the exception thrown
     * by this method will be wrapped in a property of the same name with the {@value #WARNINGS_SUFFIX} suffix.
     *
     * <p>This method is invoked when this class does not support parallel execution ({@link #collector()}
     * returned {@code null}), or when it is not worth to parallelize (image has only one tile), or when
     * the {@linkplain #source} image may be non-thread safe ({@link #parallel} is {@code false}).</p>
     *
     * @return the computed property value. Note that {@code null} is a valid result.
     * @throws Exception if an error occurred while computing the property.
     */
    protected abstract Object computeSequentially() throws Exception;
    /**
     * Returns the function to execute for computing the property value, together with other required functions
     * (supplier of accumulator, combiner, finisher). Those functions allow multi-threaded property calculation.
     * This collector is used in a way similar to {@link java.util.stream.Stream#collect(Collector)}. A typical
     * approach is two define 3 private methods in the subclass as below (where <var>P</var> is the type of the
     * property to compute):
     *
     * {@preformat java
     *     private P createAccumulator() {
     *         // Create an object holding the information to be computed by a single thread.
     *         // This is invoked for each worker thread before the worker starts its execution.
     *     }
     *
     *     private static P combine(P previous, P computed) {
     *         // Invoked after a thread finished to process all its tiles and
     *         // wants to combine its result with the result of another thread.
     *     }
     *
     *     private static void compute(P accumulator, Raster tile) {
     *         // Perform the actual computation using one tile and update the accumulator with the result.
     *         // The accumulator may already contain data, which need to be augmented (not overwritten).
     *     }
     *
     *     @Override
     *     protected Collector<Raster,P,P> collector() {
     *         return Collector.of(this::createAccumulator, MyClass::compute, MyClass::combine);
     *     }
     * }
     *
     * @return functions for multi-threaded computation of property value, or {@code null} if unsupported.
     */
    protected Collector<? super Raster, ?, ?> collector() {
        return null;
    }
    /**
     * Invoked when a property of the given name has been requested and that property is cached.
     * If the property is mutable, subclasses may want to clone it before to return it to users.
     * The default implementation returns {@code value} unchanged.
     *
     * @param  name   the property name.
     * @param  value  the property value (never {@code null}).
     * @return the property value to give to user.
     */
    protected Object cloneProperty(final String name, final Object value) {
        return value;
    }
    /**
     * Appends the name of the computed property in the {@link #toString()} representation,
     * after the class name and before the string representation of the wrapped image.
     */
    @Override
    final Class<AnnotatedImage> appendStringContent(final StringBuilder buffer) {
        final String property = getComputedPropertyName();
        if (cache.containsKey(getCacheKey(property))) {
            buffer.append("Cached ");
        }
        buffer.append('"').append(property).append('"');
        return AnnotatedImage.class;
    }
    /**
     * Returns a hash code value for this image. This method should be quick;
     * it should not compute the hash code from sample values.
     *
     * @return a hash code value based on a description of the operation performed by this image.
     */
    @Override
    public int hashCode() {
        return super.hashCode() + Objects.hashCode(areaOfInterest) + Boolean.hashCode(failOnException);
    }
    /**
     * Compares the given object with this image for equality. This method should be quick and compare
     * how images compute their values from their sources; it should not compare the actual pixel values.
     *
     * @param  object  the object to compare with this image.
     * @return {@code true} if the given object is an image performing the same calculation than this image.
     */
    @Override
    public boolean equals(final Object object) {
        return super.equals(object) && equalParameters((AnnotatedImage) object);
    }
    /**
     * Returns {@code true} if the area of interest and some other fields are equal.
     * The {@link #boundsOfInterest} is omitted because it is derived from {@link #areaOfInterest}.
     * The {@link #errors} is omitted because it is part of computation results.
     */
    private boolean equalParameters(final AnnotatedImage other) {
        return parallel == other.parallel && failOnException == other.failOnException
                && Objects.equals(areaOfInterest, other.areaOfInterest)
                && Arrays.equals(getExtraParameter(), other.getExtraParameter());
    }
}
|
MDGSF/interviews | javascript/src/algorithms/sorting/__test__/Sort.test.js | <filename>javascript/src/algorithms/sorting/__test__/Sort.test.js
const Sort = require("../Sort");
describe("Sort", () => {
  it("should throw an error when trying to call Sort.sort() method directly", () => {
    // Sort is an abstract base class: calling sort() on it must raise.
    const attemptAbstractSort = () => {
      new Sort().sort();
    };
    expect(attemptAbstractSort).toThrow();
  });
});
|
s360/StudentSuccessLink-Website | app/scripts/controllers/student/student.js | <gh_stars>0
(function($) {
'use strict';
angular.module('sslv2App')
.controller('StudentCtrl', StudentCtrl);
StudentCtrl.$inject = ['$timeout','StudentService','$filter','$confirm', '$cookies'];
function StudentCtrl($timeout,StudentService,$filter,$confirm, $cookies) {
var vm = this;
vm.show_user = false;
vm.header_name_selected = true;
vm.header_district_id_selected = false;
vm.header_grade_level_selected = false;
vm.header_school_district = false;
vm.header_current_school_selected = false;
vm.header_attendance_selected = false;
vm.header_behavior_selected = false;
vm.sort_name = true;
vm.sort_school_district_id = false;
vm.sort_grade_level = false;
vm.sort_school_district = false;
vm.sort_current_school = false;
vm.sort_attendance = false;
vm.sort_behavior = false;
var data ="";
var success = "";
var student ={};
var list_of_students =[];
var list_of_district_options = [];
var list_of_school_options = [];
var list_of_trend = [];
vm.students = "";
var temp_template_attendance ="";
var temp_template_behavior ="";
var attendance_template = "<div class='list-modal'><dl><dt></dt><dd>{days_missed_in_month} {days_missed_in_year}</dd></dl></div>";
var trend_template = "<div class='list-modal'>{trend}</div>";
var behavior_template = "<div class='list-modal'><dl><dt></dt><dd>Student has {behavior_month} {incident_month} in the latest term of which we have data.</dd><dt></dt><dd>Student has {behavior_academic} {incident_academic} in the current academic year.</dd></dl></div>";
vm.organization_name = $cookies.get('organization_name');
vm.deleteStudent = deleteStudent;
vm.school_selected = school_selected;
vm.district_selected = district_selected;
vm.sort = sort;
function sort(status,col) {
switch (col){
case 'name':
vm.header_name_selected = true;
vm.header_district_id_selected = false;
vm.header_grade_level_selected = false;
vm.header_school_district = false;
vm.header_current_school_selected = false;
vm.header_attendance_selected = false;
vm.header_behavior_selected = false;
if(status == false){
vm.students = _.sortBy(vm.students, [function(o) { return o.first_name; }]);
vm.sort_name = !status;
}else{
vm.students = vm.students.reverse();
vm.sort_name = !status;
}
break;
case 'district_id':
vm.header_name_selected = false;
vm.header_district_id_selected = true;
vm.header_grade_level_selected = false;
vm.header_school_district = false;
vm.header_current_school_selected = false;
vm.header_attendance_selected = false;
vm.header_behavior_selected = false;
if(status == false){
vm.students = _.sortBy(vm.students, [function(o) { return parseInt(o.district_student_id, 10); }]);
vm.sort_school_district_id = !status;
}else{
vm.students = vm.students.reverse();
vm.sort_school_district_id = !status;
}
break;
case 'school_district':
vm.header_name_selected = false;
vm.header_district_id_selected = false;
vm.header_grade_level_selected = false;
vm.header_school_district = true;
vm.header_current_school_selected = false;
vm.header_attendance_selected = false;
vm.header_behavior_selected = false;
if(status == false){
vm.students = _.sortBy(vm.students, [function(o) { return o.school_district; }]);
vm.sort_school_district = !status;
}else{
vm.students = vm.students.reverse();
vm.sort_school_district = !status;
}
break;
case 'grade_level':
vm.header_name_selected = false;
vm.header_district_id_selected = false;
vm.header_grade_level_selected = true;
vm.header_school_district = false;
vm.header_current_school_selected = false;
vm.header_attendance_selected = false;
vm.header_behavior_selected = false;
if(status == false){
vm.students = _.sortBy(vm.students, [function(o) { return o.xsre.grade_level; }]);
vm.sort_grade_level = !status;
}else{
vm.students = vm.students.reverse();
vm.sort_grade_level = !status;
}
break;
case 'current_school':
vm.header_name_selected = false;
vm.header_district_id_selected = false;
vm.header_grade_level_selected = false;
vm.header_school_district = false;
vm.header_current_school_selected = true;
vm.header_attendance_selected = false;
vm.header_behavior_selected = false;
if(status == false){
vm.students = _.sortBy(vm.students, [function(o) { return o.xsre.school_name; }]);
vm.sort_current_school = !status;
}else{
vm.students = vm.students.reverse();
vm.sort_current_school = !status;
}
break;
case 'attendance':
vm.header_name_selected = false;
vm.header_district_id_selected = false;
vm.header_grade_level_selected = false;
vm.header_school_district = false;
vm.header_current_school_selected = false;
vm.header_attendance_selected = true;
vm.header_behavior_selected = false;
if(status == false){
vm.students = _.sortBy(vm.students, [function(o) { return o.xsre.attendance.academic.count; }]);
vm.sort_attendance = !status;
}else{
vm.students = vm.students.reverse();
vm.sort_attendance = !status;
}
break;
case 'behavior':
vm.header_name_selected = false;
vm.header_district_id_selected = false;
vm.header_grade_level_selected = false;
vm.header_school_district = false;
vm.header_current_school_selected = false;
vm.header_attendance_selected = false;
vm.header_behavior_selected = true;
if(status == false){
vm.students = _.sortBy(vm.students, [function(o) { return o.xsre.behavior.academic.behavior_academic_count; }]);
vm.sort_behavior = !status;
}else{
vm.students = vm.students.reverse();
vm.sort_behavior = !status;
}
break;
}
}
init();
function init(){
vm.selected_schools =[];
vm.selected_districts =[];
vm.schools_options = [];
vm.districts_options = [];
clearVariables();
StudentService.getAllStudent()
.then(function(response){
$timeout(getAll(response),500);
},function(error){
});
StudentService.getStudentSummary()
.then(function(response){
$timeout(getSummary(response),500);
},function(error){
});
}
function school_selected(){
return function(val){
if(vm.selected_schools.length > 0){
for(var i in vm.selected_schools){
if(val.xsre.school_name === vm.selected_schools[i].name){
return true;
}
}
}else{
return true;
}
}
}
function district_selected(){
return function(val){
if(vm.selected_districts.length > 0){
for(var i in vm.selected_districts){
if(val.school_district === vm.selected_districts[i].name){
return true;
}
}
}else{
return true;
}
}
}
function deleteStudent(id,index){
$confirm({
title: 'Delete Student Record',
text:'Are you sure you want to delete this record?',
ok: 'Delete',
})
.then(function(){
StudentService.deleteStudent(id)
.then(function(response){
if(response.data.success === true){
vm.students.splice(index,1);
}
},function(error){
})
});
}
function clearVariables(){
student = {
id:0,
address:'',
addresses:[],
college_bound:'',
created:'',
creator:'',
district_student_id:'',
email:'',
emergency1_email:'',
emergency1_name:'',
emergency1_phone:'',
emergency1_relationship:'',
emergency2_email:'',
emergency2_name:'',
emergency2_phone:'',
emergency2_relationship:'',
first_name:'',
last_name:'',
last_updated:'',
organization:'',
phone:'',
programs:[],
school_district:'',
xsre:{
attendance:{
month:{
attendance_month_count:'',
flag:'',
type:''
},
academic:{
attendance_academic_count:'',
flag:'',
type:''
},
template:''
},
attendance_risk:{
day_absent:'',
risk_level:'',
trend:''
},
behavior:{
month:{
behavior_month_count:'',
flag:'',
type:''
},
academic:{
behavior_month_count:'',
flag:'',
type:''
}
},
grade_level:'',
latest_date:'',
latest_date_time:'',
on_track_to_graduate:'',
school_name:'',
school_year:''
}
}
}
function getAll(response){
success = _.get(response,"data.success",false);
data = _.get(response,"data.data","");
if(success === true && data !== ""){
var student_profiles = [];
var single_profile = {};
_.forEach(data,function(data){
clearVariables();
student.id = _.get(data,"_id","");
student.address = _.get(data,"address","");
student.addresses = _.get(data,"addresses","");
student.college_bound = _.get(data,"college_bound","");
student.created = $filter('date')(_.get(data,"created",""), "yyyy/MM/dd");
student.creator = _.get(data,"creator","");
student.district_student_id = _.get(data,"district_student_id","");
student.email = _.get(data,"email","");
student.emergency1_email = _.get(data,"emergency1_email","");
student.emergency1_name = _.get(data,"emergency1_name","");
student.emergency1_phone = _.get(data,"emergency1_phone","");
student.emergency1_relationship = _.get(data,"emergency1_relationship","");
student.emergency2_email = _.get(data,"emergency2_email","");
student.emergency2_name = _.get(data,"emergency2_name","");
student.emergency2_phone = _.get(data,"emergency2_phone","");
student.emergency2_relationship = _.get(data,"emergency2_relationship","");
student.first_name = _.get(data,"first_name", _.get(data, 'xsre.firstName', ""));
student.last_name = _.get(data,"last_name", _.get(data, 'xsre.lastName', ""));
student.last_updated = $filter('date')(_.get(data,"last_updated",""), "yyyy/MM/dd");
student.organization = _.get(data,"organization","");
student.phone = _.get(data,"phone","");
student.programs = _.get(data,"programs",[]);
student.school_district = _.get(data,"school_district","");
student.xsre.grade_level = _.get(data,"xsre.gradeLevel","");
student.xsre.latest_date = $filter('date')(_.get(data,"latestDate",""), "yyyy/MM/dd");
student.xsre.latest_date_time = $filter('date')(_.get(data,"latestDateTime",""), "yyyy/MM/dd");
student.xsre.on_track_to_graduate = _.get(data,"xsre.onTrackToGraduate","");
student.xsre.school_name = _.get(data,"xsre.schoolName","");
student.xsre.school_year = _.get(data,"xsre.schoolYear","");
single_profile = {
id:student.id,
value:"#!/student/"+student.id+"/detail",
on_track_graduate : _.get(data,"xsre.onTrackToGraduate","")
}
student_profiles.push(single_profile);
_.forEach(data.xsre.attendanceRiskFlag,function(value){
student.xsre.attendance_risk.day_absent = value.daysAbsent;
student.xsre.attendance_risk.risk_level = value.riskLevel;
student.xsre.attendance_risk.trend = value.trend;
student.xsre.attendance_risk.template = _.replace(trend_template,'{trend}',value.trend);
});
_.forEach(_.get(data,"xsre.behaviorCount",[]),function(value,key){
if(key === 0){
temp_template_behavior = behavior_template;
}
if(value.type === "lastMonth"){
var incident="";
if(value.count === 0){value.count = '';}
student.xsre.behavior.month.count = value.count;
student.xsre.behavior.month.behavior_month_count = value.count;
student.xsre.behavior.month.flag = value.flag.toLowerCase();
student.xsre.behavior.month.type = value.type;
if(value.count === 1){
incident = "incident";
}else{
incident = "incidents";
}
temp_template_behavior = _.replace(temp_template_behavior,'{behavior_month}',value.count);
temp_template_behavior = _.replace(temp_template_behavior,'{incident_month}',incident);
}else if(value.type === "currentAcademicYear"){
if(value.count === 1){
incident = "incident";
}else{
incident = "incidents";
}
if(value.count === 0){value.count = '';}
student.xsre.behavior.academic.count = value.count;
student.xsre.behavior.academic.behavior_academic_count = value.count;
student.xsre.behavior.academic.flag = value.flag.toLowerCase();
student.xsre.behavior.academic.type = value.type;
temp_template_behavior = _.replace(temp_template_behavior,'{behavior_academic}',value.count);
temp_template_behavior = _.replace(temp_template_behavior,'{incident_academic}',incident);
}
if(key === 1){
student.xsre.behavior.template = temp_template_behavior;
}
});
_.forEach(_.get(data,"xsre.attendanceCount",[]),function(value,key){
if(key === 0){
temp_template_attendance = attendance_template;
}
if(value.type === "lastMonth"){
var day="";
student.xsre.attendance.month.count = value.count;
student.xsre.attendance.month.attendance_month_count = value.count;
student.xsre.attendance.month.flag = value.flag.toLowerCase();
student.xsre.attendance.month.type = value.type;
if(value.count === 1){
day = " day";
}else{
day = " days";
}
if(value.count === 0){
var days_missed_in_month = '';
}else{
var days_missed_in_month = 'Missed '.concat(value.count,day, ' this month.');
}
temp_template_attendance = _.replace(temp_template_attendance,'{days_missed_in_month}',days_missed_in_month);
}else if(value.type === "currentAcademicYear"){
var day="";
student.xsre.attendance.academic.count = parseInt(value.count);
student.xsre.attendance.academic.attendance_academic_count = parseInt(value.count);
student.xsre.attendance.academic.flag = value.flag.toLowerCase();
student.xsre.attendance.academic.type = value.type;
if(value.count === 1){
day = " day";
}else{
day = " days";
}
//temp_template_attendance = _.replace(temp_template_attendance,'{current_academic}',value.count);
if(value.count === 0){
var days_missed_in_year = '';
}else{
var days_missed_in_year = ' '.concat(parseInt(value.count), day, ' missed this year.');
}
temp_template_attendance = _.replace(temp_template_attendance,'{days_missed_in_year}',days_missed_in_year);
}else{
console.log(true);
student.xsre.attendance.academic.attendance_academic_count = "";
}
if(key === 1){
student.xsre.attendance.template = temp_template_attendance;
}
});
list_of_district_options.push(student.school_district);
list_of_school_options.push(student.xsre.school_name);
list_of_students.push(student);
});
vm.students = list_of_students;
//console.log(vm.students);
list_of_district_options = _.uniqBy(list_of_district_options,function(value){
return value;
});
list_of_school_options = _.uniqBy(list_of_school_options,function(value){
return value;
});
_.forEach(list_of_district_options,function(value){
vm.districts_options.push({
id:value,
name:value
})
});
_.forEach(list_of_school_options,function(value){
if(value.length !== 0){
vm.schools_options.push({
id:value,
name:value
})
}
});
if(localStorage.getItem("student_profiles")!== null){
localStorage.removeItem("student_profiles");
}
//sessionStorage.setItem("student_profiles",JSON.stringify(student_profiles));
localStorage.setItem("student_profiles",JSON.stringify(student_profiles));
vm.show_user = true;
}else{
vm.show_user = false;
}
}
function getSummary(response){
}
}
})(jQuery);
|
mackron/openchernobyl | source/oc/ocEngine/ocPath.cpp | // Copyright (C) 2018 <NAME>. See included LICENSE file.
ocBool32 ocPathSegmentsEqual(const char* s0Path, const ocPathSegment s0, const char* s1Path, const ocPathSegment s1)
{
    // Two segments are equal only when both backing paths are non-null, the
    // segments cover the same number of characters, and those characters match.
    ocBool32 inputsValid = (s0Path != NULL && s1Path != NULL);
    if (!inputsValid || s0.length != s1.length) {
        return OC_FALSE;
    }

    const char* lhs = s0Path + s0.offset;
    const char* rhs = s1Path + s1.offset;
    return strncmp(lhs, rhs, s0.length) == 0;
}
ocBool32 ocPathFirst(const char* path, ocPathIterator* i)
{
    // Initializes *i to the first segment of `path`. Fails on a NULL iterator,
    // a NULL path, or an empty path. For a leading separator the first segment
    // is the zero-length root.
    if (i == 0) return OC_FALSE;

    i->path = path;
    i->segment.offset = 0;
    i->segment.length = 0;

    if (path == 0 || path[0] == '\0') {
        return OC_FALSE;
    }

    // Extend the segment until the first separator or the end of the string.
    for (;;) {
        char c = i->path[i->segment.length];
        if (c == '\0' || c == '/' || c == '\\') {
            break;
        }
        i->segment.length += 1;
    }

    return OC_TRUE;
}
ocBool32 ocPathLast(const char* path, ocPathIterator* i)
{
    // Initializes *i to the last segment of `path` by parking the cursor just
    // past the end of the string and stepping backwards once with ocPathPrev().
    if (i == 0) return OC_FALSE;

    i->path = path;
    i->segment.offset = 0;
    i->segment.length = 0;

    if (path == 0 || path[0] == '\0') {
        return OC_FALSE;
    }

    // Place the cursor on the null terminator; ocPathPrev() finds the final
    // segment. (The original redundantly re-assigned i->path and re-zeroed
    // segment.length here; those dead stores have been removed.)
    i->segment.offset = strlen(path);
    return ocPathPrev(i);
}
// Advances the iterator to the next path segment, skipping over any run of
// separators ('/' or '\\'). Returns OC_FALSE when the iterator is invalid or
// there are no segments left (the cursor is then on the null terminator).
ocBool32 ocPathNext(ocPathIterator* i)
{
    if (i == NULL || i->path == NULL) {
        return OC_FALSE;
    }

    // Move the cursor just past the segment we were on.
    i->segment.offset = i->segment.offset + i->segment.length;
    i->segment.length = 0;

    // Skip the separator run between segments.
    while (i->path[i->segment.offset] != '\0' && (i->path[i->segment.offset] == '/' || i->path[i->segment.offset] == '\\')) {
        i->segment.offset += 1;
    }

    if (i->path[i->segment.offset] == '\0') {
        return OC_FALSE;
    }

    // Measure the new segment up to the next separator or the end of string.
    while (i->path[i->segment.offset + i->segment.length] != '\0' && (i->path[i->segment.offset + i->segment.length] != '/' && i->path[i->segment.offset + i->segment.length] != '\\')) {
        i->segment.length += 1;
    }

    return OC_TRUE;
}
// Steps the iterator back to the previous segment. Returns OC_FALSE when the
// iterator is invalid or already at the start. A path with a leading '/' is
// reported as a zero-length root segment at offset 0.
ocBool32 ocPathPrev(ocPathIterator* i)
{
    if (i == NULL || i->path == NULL || i->segment.offset == 0) {
        return OC_FALSE;
    }

    i->segment.length = 0;

    // Walk backwards off the current segment, over the separator run that
    // precedes it.
    do
    {
        i->segment.offset -= 1;
    } while (i->segment.offset > 0 && (i->path[i->segment.offset] == '/' || i->path[i->segment.offset] == '\\'));

    if (i->segment.offset == 0) {
        if (i->path[i->segment.offset] == '/' || i->path[i->segment.offset] == '\\') {
            // Unix-style root: reported as an empty segment at offset 0.
            i->segment.length = 0;
            return OC_TRUE;
        }

        // BUG FIX: landing on a non-separator at offset 0 means the previous
        // segment is exactly one character long (the "a" in "a/b", or the
        // whole of the path "a") -- a longer first segment would have stopped
        // the loop above at offset >= 1. The original returned OC_FALSE here,
        // which made such segments unreachable: ocPathLast("a") failed and
        // cleaning "a/b" dropped the "a".
        i->segment.length = 1;
        return OC_TRUE;
    }

    // Find the start of the previous segment: scan back to the separator (or
    // the start of the string) and step forward off the separator if needed.
    size_t offsetEnd = i->segment.offset + 1;
    while (i->segment.offset > 0 && (i->path[i->segment.offset] != '/' && i->path[i->segment.offset] != '\\')) {
        i->segment.offset -= 1;
    }

    if (i->path[i->segment.offset] == '/' || i->path[i->segment.offset] == '\\') {
        i->segment.offset += 1;
    }

    i->segment.length = offsetEnd - i->segment.offset;

    return OC_TRUE;
}
ocBool32 ocPathAtEnd(ocPathIterator i)
{
    // The iterator is at the end when it has no path at all, or when its
    // cursor sits on the null terminator.
    ocBool32 hasPath = (i.path != 0);
    return !hasPath || i.path[i.segment.offset] == '\0';
}
ocBool32 ocPathAtStart(ocPathIterator i)
{
    // At the start means: a valid path and a cursor at offset 0.
    ocBool32 hasPath = (i.path != 0);
    return hasPath && i.segment.offset == 0;
}
// Compares the *current segments* of two iterators; equality is based purely
// on the characters each segment covers, not on the whole paths.
ocBool32 ocPathIteratorsEqual(const ocPathIterator i0, const ocPathIterator i1)
{
    return ocPathSegmentsEqual(i0.path, i0.segment, i1.path, i1.segment);
}
// Returns true when `path` is a root in either style: Unix ("/...") or a bare
// Win32 drive designator ("C:").
ocBool32 ocPathIsRoot(const char* path)
{
    return ocPathIsUnixStyleRoot(path) || ocPathIsWin32StyleRoot(path);
}
// Segment-level variant of ocPathIsRoot(): true when the given segment of
// `path` is a Unix- or Win32-style root segment.
ocBool32 ocPathIsRootSegment(const char* path, const ocPathSegment segment)
{
    return ocPathIsUnixStyleRootSegment(path, segment) || ocPathIsWin32StyleRootSegment(path, segment);
}
ocBool32 ocPathIsUnixStyleRoot(const char* path)
{
    // A Unix-style root is simply a leading forward slash.
    if (path != NULL && path[0] == '/') {
        return OC_TRUE;
    }

    return OC_FALSE;
}
ocBool32 ocPathIsUnixStyleRootSegment(const char* path, const ocPathSegment segment)
{
    // The root of a "/..." style path iterates as an empty segment at offset 0.
    if (path == NULL) {
        return OC_FALSE;
    }

    ocBool32 isEmptyLeadingSegment = (segment.offset == 0 && segment.length == 0);
    return isEmptyLeadingSegment ? OC_TRUE : OC_FALSE;
}
ocBool32 ocPathIsWin32StyleRoot(const char* path)
{
    // A Win32-style root is a bare drive designator such as "C:" -- note the
    // string must end immediately after the colon.
    if (path == NULL) {
        return OC_FALSE;
    }

    char drive = path[0];
    ocBool32 isDriveLetter = (drive >= 'a' && drive <= 'z') || (drive >= 'A' && drive <= 'Z');
    if (isDriveLetter && path[1] == ':' && path[2] == '\0') {
        return OC_TRUE;
    }

    return OC_FALSE;
}
ocBool32 ocPathIsWin32StyleRootSegment(const char* path, const ocPathSegment segment)
{
    // A Win32-style root segment is a two-character leading segment of the
    // form "C:" (any drive letter, either case).
    if (path == NULL) {
        return OC_FALSE;
    }

    if (segment.offset != 0 || segment.length != 2) {
        return OC_FALSE;
    }

    char drive = path[0];
    ocBool32 isDriveLetter = (drive >= 'a' && drive <= 'z') || (drive >= 'A' && drive <= 'Z');
    if (isDriveLetter && path[1] == ':') {
        return OC_TRUE;
    }

    return OC_FALSE;
}
void ocPathToForwardSlashes(char* path)
{
    // In-place conversion of every '\\' to '/'. No-op on NULL.
    if (path == NULL) {
        return;
    }

    for (char* p = path; *p != '\0'; ++p) {
        if (*p == '\\') {
            *p = '/';
        }
    }
}
void ocPathToBackSlashes(char* path)
{
    // In-place conversion of every '/' to '\\'. No-op on NULL.
    if (path == NULL) {
        return;
    }

    for (char* p = path; *p != '\0'; ++p) {
        if (*p == '/') {
            *p = '\\';
        }
    }
}
// Returns true when `descendantAbsolutePath` lies anywhere underneath
// `parentAbsolutePath` (at any depth). Comparison is segment-wise, so
// separator style and repeated slashes do not matter.
ocBool32 ocPathIsDescendant(const char* descendantAbsolutePath, const char* parentAbsolutePath)
{
    ocPathIterator iChild;
    if (!ocPathFirst(descendantAbsolutePath, &iChild)) {
        return OC_FALSE; // The descendant is an empty string which makes it impossible for it to be a descendant.
    }

    ocPathIterator iParent;
    if (ocPathFirst(parentAbsolutePath, &iParent)) {
        do {
            // If the segment is different, the paths are different and thus it is not a descendant.
            if (!ocPathIteratorsEqual(iParent, iChild)) {
                return OC_FALSE;
            }

            if (!ocPathNext(&iChild)) {
                return OC_FALSE; // The descendant is shorter which means it's impossible for it to be a descendant.
            }
        } while (ocPathNext(&iParent));
    }

    // Every parent segment matched and the child still had segments left.
    // Note: an empty parent path falls straight through to here.
    return OC_TRUE;
}
// Like ocPathIsDescendant(), but true only for a *direct* child (exactly one
// segment deeper than the parent). The shared prefix walk below mirrors
// ocPathIsDescendant(); only the final check differs.
ocBool32 ocPathIsChild(const char* childAbsolutePath, const char* parentAbsolutePath)
{
    ocPathIterator iChild;
    if (!ocPathFirst(childAbsolutePath, &iChild)) {
        return OC_FALSE; // The descendant is an empty string which makes it impossible for it to be a descendant.
    }

    ocPathIterator iParent;
    if (ocPathFirst(parentAbsolutePath, &iParent)) {
        do {
            // If the segment is different, the paths are different and thus it is not a descendant.
            if (!ocPathIteratorsEqual(iParent, iChild)) {
                return OC_FALSE;
            }

            if (!ocPathNext(&iChild)) {
                return OC_FALSE; // The descendant is shorter which means it's impossible for it to be a descendant.
            }
        } while (ocPathNext(&iParent));
    }

    // At this point we have finished iteration of the parent, which should be shorter one. We now do one more iterations of
    // the child to ensure it is indeed a direct child and not a deeper descendant.
    return !ocPathNext(&iChild);
}
const char* ocPathFileName(const char* path)
{
    // Returns a pointer into `path` at the start of the final component (the
    // text after the last '/' or '\\'). Returns NULL for a NULL input; for a
    // path ending in a separator the result is the empty string.
    if (path == NULL) {
        return NULL;
    }

    const char* fileName = path;
    for (const char* p = path; *p != '\0'; ++p) {
        if (*p == '/' || *p == '\\') {
            fileName = p + 1;   // Candidate start: the character after this separator.
        }
    }

    return fileName;
}
// Returns a pointer to the text after the last '.' in the file-name part of
// `path`, or a pointer to the trailing null terminator when there is no
// extension. A NULL input is treated as an empty path.
const char* ocPathExtension(const char* path)
{
    if (path == NULL) {
        path = "";
    }

    // Scan only the file-name portion so dots in directory names are ignored.
    const char* extension = ocPathFileName(path);
    const char* lastOccurance = NULL;

    // Find the last '.'; the extension starts on the character after it.
    // BUG FIX: the original advanced the cursor twice whenever it saw a '.',
    // which (a) skipped the character directly after a dot, mis-handling
    // consecutive dots (e.g. "a..txt" yielded ".txt" instead of "txt"), and
    // (b) stepped past the null terminator for paths ending in '.', reading
    // out of bounds on the next loop-condition check.
    while (extension[0] != '\0') {
        if (extension[0] == '.') {
            lastOccurance = extension + 1;
        }

        extension += 1;
    }

    return (lastOccurance != NULL) ? lastOccurance : extension;
}
// Segment-wise path equality: the iteration normalizes separator style and
// runs of slashes, so "a/b" and "a\\b" compare equal. NULL compares unequal
// to everything; two empty paths compare equal.
ocBool32 ocPathEqual(const char* path1, const char* path2)
{
    if (path1 == NULL || path2 == NULL) {
        return OC_FALSE;
    }

    if (path1 == path2 || (path1[0] == '\0' && path2[0] == '\0')) {
        return OC_TRUE; // Two empty paths are treated as the same.
    }

    ocPathIterator iPath1;
    ocPathIterator iPath2;
    if (ocPathFirst(path1, &iPath1) && ocPathFirst(path2, &iPath2)) {
        ocBool32 isPath1Valid;
        ocBool32 isPath2Valid;
        do {
            if (!ocPathIteratorsEqual(iPath1, iPath2)) {
                return OC_FALSE;
            }

            isPath1Valid = ocPathNext(&iPath1);
            isPath2Valid = ocPathNext(&iPath2);
        } while (isPath1Valid && isPath2Valid);

        // At this point either iPath1 and/or iPath2 have finished iterating. If both of them are at the end, the two paths are equal.
        return isPath1Valid == isPath2Valid && iPath1.path[iPath1.segment.offset] == '\0' && iPath2.path[iPath2.segment.offset] == '\0';
    }

    return OC_FALSE;
}
ocBool32 ocPathExtensionEqual(const char* path, const char* extension)
{
    // Case-insensitive comparison between `extension` and the extension of
    // `path`. Returns OC_FALSE when either input is NULL.
    if (path == NULL || extension == NULL) {
        return OC_FALSE;
    }

#ifdef _MSC_VER
    return _stricmp(extension, ocPathExtension(path)) == 0;
#else
    return strcasecmp(extension, ocPathExtension(path)) == 0;
#endif
}
ocBool32 ocPathIsRelative(const char* path)
{
    // A path is relative when its first segment is not a root ("/" or "C:").
    // NULL is not considered relative; an empty path is.
    if (path == NULL) {
        return OC_FALSE;
    }

    ocPathIterator first;
    if (!ocPathFirst(path, &first)) {
        // Empty path: treated as relative.
        return OC_TRUE;
    }

    return !ocPathIsRootSegment(first.path, first.segment);
}
// A path is absolute exactly when it is not relative (note: this makes NULL
// "absolute", because ocPathIsRelative(NULL) returns OC_FALSE).
ocBool32 ocPathIsAbsolute(const char* path)
{
    return !ocPathIsRelative(path);
}
// Copies the directory portion of `path` (everything before the last
// separator, with any run of trailing separators trimmed) into pathOut.
// Returns the required buffer size including the null terminator, or 0 for a
// NULL path.
size_t ocPathBasePath(char* pathOut, size_t pathOutSize, const char* path)
{
    if (pathOut != NULL && pathOutSize > 0) {
        pathOut[0] = '\0';
    }

    if (path == NULL) {
        return 0;
    }

    const char* pathorig = path;
    const char* baseend = path;

    // We just loop through the path until we find the last slash.
    while (path[0] != '\0') {
        if (path[0] == '/' || path[0] == '\\') {
            baseend = path;
        }

        path += 1;
    }

    // Walk backwards over any run of consecutive separators so the result is
    // not left with trailing slashes ("C:/MyFolder////MyFile" -> "C:/MyFolder").
    // BUG FIX: the original compared `baseend > path`, but `path` now points
    // at the null terminator, so that loop could never execute and trailing
    // separators were kept.
    while (baseend > pathorig && (baseend[-1] == '/' || baseend[-1] == '\\')) {
        baseend -= 1;
    }

    size_t pathOutLen = (size_t)(baseend - pathorig);
    if (pathOut != NULL) {
        oc_strncpy_s(pathOut, pathOutSize, pathorig, pathOutLen);
    }

    return pathOutLen + 1;  // +1 for null terminator.
}
size_t ocPathFileNameWithoutExtension(char* pathOut, size_t pathOutSize, const char* path)
{
    // Strip the directory part first, then drop the extension from what is left.
    const char* fileName = ocPathFileName(path);
    return ocPathRemoveExtension(pathOut, pathOutSize, fileName);
}
// Shared worker for the append family: joins `base` and the first `otherLen`
// characters of `other`, inserting a single '/' only when `base` is non-empty
// and does not already end in a separator. Returns the required buffer size
// including the null terminator, or 0 when `other` is absolute and `base` is
// non-empty. pathOut may alias base; it may also be NULL to only measure.
static size_t ocPathAppend_Internal(char* pathOut, size_t pathOutSize, const char* base, const char* other, size_t otherLen)
{
    if (base == NULL) {
        base = "";
    }

    if (other == NULL) {
        other = "";
        otherLen = 0;
    }

    // It only ever makes sense to "append" an absolute path to a blank path. If the other path is absolute, but the base
    // path is not blank we need to return an error because it just doesn't make sense to do this.
    if (ocPathIsAbsolute(other) && base[0] != '\0') {
        return 0;
    }

    if (other[0] == '\0') {
        // Nothing to append: the result is just `base`.
        if (pathOut != NULL) {
            if (pathOut != base) {
                oc_strcpy_s(pathOut, pathOutSize, base);
            }
        }

        return strlen(base) + 1; // +1 for null terminator.
    }

    size_t path1Length = strlen(base);
    size_t path2Length = otherLen;

    // Separator needed only when base is non-empty and not already terminated
    // by one.
    size_t slashLength = 0;
    if (path1Length > 0 && base[path1Length-1] != '/' && base[path1Length-1] != '\\') {
        slashLength = 1;
    }

    size_t pathOutLength = path1Length + slashLength + path2Length;

    if (pathOut != NULL) {
        // Copy only if everything (including the terminator) fits.
        if (pathOutLength+1 <= pathOutSize) {
            if (pathOut != base) {
                oc_strncpy_s(pathOut, pathOutSize, base, path1Length);
            }

            oc_strncpy_s(pathOut + path1Length, pathOutSize - path1Length, "/", slashLength);
            oc_strncpy_s(pathOut + path1Length + slashLength, pathOutSize - path1Length - slashLength, other, path2Length);
        }
    }

    return pathOutLength + 1; // +1 for null terminator.
}
size_t ocPathAppend(char* pathOut, size_t pathOutSize, const char* base, const char* other)
{
    // Convenience wrapper: appends the whole of `other` (treated as "" when NULL).
    const char* safeOther = (other != NULL) ? other : "";
    return ocPathAppend_Internal(pathOut, pathOutSize, base, safeOther, strlen(safeOther));
}
size_t ocPathAppendIterator(char* pathOut, size_t pathOutSize, const char* base, ocPathIterator i)
{
    // Appends only the characters covered by the iterator's current segment.
    const char* segmentStart = i.path + i.segment.offset;
    return ocPathAppend_Internal(pathOut, pathOutSize, base, segmentStart, i.segment.length);
}
// Appends ".extension" to `base`, writing the result to pathOut (which may
// alias base, or be NULL to only measure). An empty extension copies base
// unchanged. Returns the required buffer size including the null terminator.
size_t ocPathAppendExtension(char* pathOut, size_t pathOutSize, const char* base, const char* extension)
{
    if (base == NULL) {
        base = "";
    }

    if (extension == NULL) {
        extension = "";
    }

    if (extension[0] == '\0') {
        // Nothing to append.
        if (pathOut != NULL) {
            if (pathOut != base) {
                oc_strcpy_s(pathOut, pathOutSize, base);
            }
        }

        return strlen(base) + 1; // +1 for null terminator.
    }

    size_t baseLength = strlen(base);
    size_t extLength = strlen(extension);

    // base + '.' + extension.
    size_t pathOutLength = baseLength + 1 + extLength;

    if (pathOut != NULL) {
        // Copy only when the whole result (plus terminator) fits.
        if (pathOutLength+1 <= pathOutSize) {
            if (pathOut != base) {
                oc_strcpy_s(pathOut + 0, pathOutSize - 0, base);
            }

            oc_strcpy_s(pathOut + baseLength, pathOutSize - baseLength, ".");
            oc_strcpy_s(pathOut + baseLength + 1, pathOutSize - baseLength - 1, extension);
        }
    }

    return pathOutLength + 1; // +1 for null terminator.
}
// Recursive worker for ocPathClean() / ocPathAppendAndClean(). The iterator
// array acts as a stack of path sources (most-derived last); segments are
// consumed from the back and written front-to-first via the recursion, with
// '/' inserted between written segments. "." segments are dropped and ".."
// segments swallow the preceding regular segment via `ignoreCounter`.
// Returns the number of bytes written, excluding the null terminator.
// pathOut may be NULL to only measure.
size_t ocPathClean_TryWrite(ocPathIterator* iterators, unsigned int iteratorCount, char* pathOut, size_t pathOutSize, unsigned int ignoreCounter)
{
    if (iteratorCount == 0) {
        return 0;
    }

    ocPathIterator isegment = iterators[iteratorCount - 1];

    // If this segment is a ".", we ignore it. If it is a ".." we ignore it and increment "ignoreCount".
    // Initial value: a pending ".." (ignoreCounter > 0) swallows this segment if it is non-empty.
    ocBool32 ignoreThisSegment = ignoreCounter > 0 && isegment.segment.length > 0;

    if (isegment.segment.length == 1 && isegment.path[isegment.segment.offset] == '.') {
        // "."
        ignoreThisSegment = OC_TRUE;
    } else {
        if (isegment.segment.length == 2 && isegment.path[isegment.segment.offset] == '.' && isegment.path[isegment.segment.offset + 1] == '.') {
            // ".."
            ignoreThisSegment = OC_TRUE;
            ignoreCounter += 1;
        } else {
            // It's a regular segment, so decrement the ignore counter.
            if (ignoreCounter > 0) {
                ignoreCounter -= 1;
            }
        }
    }

    // The previous segment needs to be written before we can write this one.
    size_t bytesWritten = 0;

    ocPathIterator prev = isegment;
    if (!ocPathPrev(&prev)) {
        // Exhausted this iterator; fall back to the previous one on the stack,
        // if any.
        if (iteratorCount > 1) {
            iteratorCount -= 1;
            prev = iterators[iteratorCount - 1];
        } else {
            prev.path = NULL;
            prev.segment.offset = 0;
            prev.segment.length = 0;
        }
    }

    if (prev.segment.length > 0) {
        iterators[iteratorCount - 1] = prev;
        bytesWritten = ocPathClean_TryWrite(iterators, iteratorCount, pathOut, pathOutSize, ignoreCounter);
    }

    if (!ignoreThisSegment) {
        // Advance the output cursor past what the recursion already wrote.
        if (pathOut != NULL) {
            pathOut += bytesWritten;
            if (pathOutSize >= bytesWritten) {
                pathOutSize -= bytesWritten;
            } else {
                pathOutSize = 0;
            }
        }

        // Separator between the previously written part and this segment.
        if (bytesWritten > 0) {
            if (pathOut != NULL) {
                pathOut[0] = '/';
                pathOut += 1;

                if (pathOutSize >= 1) {
                    pathOutSize -= 1;
                } else {
                    pathOutSize = 0;
                }
            }

            bytesWritten += 1;
        }

        if (pathOut != NULL) {
            oc_strncpy_s(pathOut, pathOutSize, isegment.path + isegment.segment.offset, isegment.segment.length);
        }

        bytesWritten += isegment.segment.length;
    }

    return bytesWritten;
}
// Cleans `path`: resolves "." and ".." segments and collapses separators to
// '/'. A leading '/' on the input is preserved. Returns the required buffer
// size including the null terminator, or 0 for a NULL/empty path. pathOut
// may be NULL to only measure.
size_t ocPathClean(char* pathOut, size_t pathOutSize, const char* path)
{
    if (path == NULL) {
        return 0;
    }

    ocPathIterator last;
    if (ocPathLast(path, &last)) {
        size_t bytesWritten = 0;
        // Preserve an absolute (Unix-style) prefix.
        if (path[0] == '/') {
            if (pathOut != NULL && pathOutSize > 1) {
                pathOut[0] = '/';
            }
            bytesWritten = 1;
        }

        if (pathOut == NULL || pathOutSize <= bytesWritten) {
            // Measuring pass only.
            bytesWritten += ocPathClean_TryWrite(&last, 1, NULL, 0, 0);
        } else {
            bytesWritten += ocPathClean_TryWrite(&last, 1, pathOut + bytesWritten, pathOutSize - bytesWritten - 1, 0);  // -1 to ensure there is enough room for a null terminator later on.
        }

        if (pathOut != NULL && pathOutSize > bytesWritten) {
            pathOut[bytesWritten] = '\0';
        }

        return bytesWritten + 1;
    }

    return 0;
}
// Appends `other` to `base` and cleans the result in one pass by feeding two
// iterators (base first, other second) to ocPathClean_TryWrite(). Returns the
// required buffer size including the null terminator, or 0 on NULL inputs or
// two empty paths.
size_t ocPathAppendAndClean(char* pathOut, size_t pathOutSize, const char* base, const char* other)
{
    if (base == NULL || other == NULL) {
        return 0;
    }

    ocPathIterator last[2] = {
        {NULL, {0, 0}},
        {NULL, {0, 0}}
    };

    ocBool32 isPathEmpty0 = !ocPathLast(base, last + 0);
    ocBool32 isPathEmpty1 = !ocPathLast(other, last + 1);

    int iteratorCount = !isPathEmpty0 + !isPathEmpty1;
    if (iteratorCount == 0) {
        return 0;   // Both input strings are empty.
    }

    size_t bytesWritten = 0;
    // Preserve an absolute (Unix-style) base prefix.
    if (base[0] == '/') {
        if (pathOut != NULL && pathOutSize > 1) {
            pathOut[0] = '/';
        }
        bytesWritten = 1;
    }

    // NOTE(review): the literal 2 is passed below even when one of the inputs
    // was empty (iteratorCount == 1) -- this appears to rely on the empty
    // iterator's zero-length segment being skipped by the worker; confirm
    // (e.g. an empty `other` looks like it can leave a trailing '/').
    if (pathOut == NULL || pathOutSize <= bytesWritten) {
        bytesWritten += ocPathClean_TryWrite(last, 2, NULL, 0, 0);
    } else {
        bytesWritten += ocPathClean_TryWrite(last, 2, pathOut + bytesWritten, pathOutSize - bytesWritten - 1, 0);  // -1 to ensure there is enough room for a null terminator later on.
    }

    if (pathOut != NULL && pathOutSize > bytesWritten) {
        pathOut[bytesWritten] = '\0';
    }

    return bytesWritten + 1;
}
// Copies `path` into pathOut with its extension (and the preceding '.')
// removed. pathOut may be NULL to only measure.
// NOTE(review): unlike the other sizing functions in this file, the return
// value does NOT include the null terminator -- callers such as
// ocPathFileNameWithoutExtensionStr() compensate with their own +1, so
// confirm before "fixing" this for consistency.
size_t ocPathRemoveExtension(char* pathOut, size_t pathOutSize, const char* path)
{
    if (path == NULL) {
        path = "";
    }

    const char* extension = ocPathExtension(path);
    if (extension != NULL && extension[0] != '\0') {
        extension -= 1; // -1 to ensure the dot is removed as well.
    }

    size_t pathOutLength = (size_t)(extension - path);
    if (pathOut != NULL) {
        oc_strncpy_s(pathOut, pathOutSize, path, pathOutLength);
    }

    return pathOutLength;
}
// Copies `path` into pathOut with its final (file-name) segment removed, not
// leaving a trailing slash. Root segments are never removed. Returns the
// required buffer size including the null terminator, or 0 on an empty path
// or a bare root. See ocPathRemoveFileNameInPlace() for the in-place twin.
size_t ocPathRemoveFileName(char* pathOut, size_t pathOutSize, const char* path)
{
    if (path == NULL) {
        path = "";
    }

    // We just create an iterator that starts at the last segment. We then move back one and place a null terminator at the end of
    // that segment. That will ensure the resulting path is not left with a slash.
    ocPathIterator iLast;
    if (!ocPathLast(path, &iLast)) {
        return 0;   // The path is empty.
    }

    // Don't remove root segments.
    if (ocPathIsRootSegment(iLast.path, iLast.segment)) {
        return 0;
    }

    // If the last segment (the file name portion of the path) is the only segment, just return an empty string. Otherwise we copy
    // up to the end of the second last segment.
    ocPathIterator iSecondLast = iLast;
    if (ocPathPrev(&iSecondLast)) {
        size_t pathOutLength;
        // For "/file" keep the leading slash by cutting at the file name's offset.
        if (ocPathIsUnixStyleRootSegment(iSecondLast.path, iSecondLast.segment)) {
            pathOutLength = iLast.segment.offset;
        } else {
            pathOutLength = iSecondLast.segment.offset + iSecondLast.segment.length;
        }

        if (pathOut != NULL) {
            oc_strncpy_s(pathOut, pathOutSize, path, pathOutLength);
        }

        return pathOutLength + 1;
    } else {
        if (pathOut != NULL && pathOutSize > 0) {
            pathOut[0] = '\0';
        }

        return 1;   // Return 1 because we need to include the null terminator.
    }
}
// In-place variant of ocPathRemoveFileName(): truncates `path` at the end of
// its second-to-last segment (same root/leading-slash rules). Returns the
// resulting length including the null terminator, or 0 on an empty path or a
// bare root.
size_t ocPathRemoveFileNameInPlace(char* path)
{
    if (path == NULL) {
        return 0;
    }

    // We just create an iterator that starts at the last segment. We then move back one and place a null terminator at the end of
    // that segment. That will ensure the resulting path is not left with a slash.
    ocPathIterator iLast;
    if (!ocPathLast(path, &iLast)) {
        return 0;   // The path is empty.
    }

    // Don't remove root segments.
    if (ocPathIsRootSegment(iLast.path, iLast.segment)) {
        return 0;
    }

    // If the last segment (the file name portion of the path) is the only segment, just return an empty string. Otherwise we copy
    // up to the end of the second last segment.
    ocPathIterator iSecondLast = iLast;
    if (ocPathPrev(&iSecondLast)) {
        size_t pathOutLength;
        // For "/file" keep the leading slash by cutting at the file name's offset.
        if (ocPathIsUnixStyleRootSegment(iSecondLast.path, iSecondLast.segment)) {
            pathOutLength = iLast.segment.offset;
        } else {
            pathOutLength = iSecondLast.segment.offset + iSecondLast.segment.length;
        }

        path[pathOutLength] = '\0';
        return pathOutLength + 1;
    } else {
        path[0] = 0;
        return 1;   // Return 1 because we need to include the null terminator.
    }
}
// Computes the path of `absolutePathToMakeRelative` relative to
// `absolutePathToMakeRelativeTo`: one ".." per divergent base segment, then
// the remaining target segments. Both inputs must be absolute. Returns the
// required buffer size including the null terminator, or 0 on failure.
// pathOut may be NULL to only measure.
size_t ocPathToRelative(char* pathOut, size_t pathOutSize, const char* absolutePathToMakeRelative, const char* absolutePathToMakeRelativeTo)
{
    // We do this in two phases. The first phase just iterates past each segment of both the path to convert and the
    // base path until we find two that are not equal. The second phase just adds the appropriate ".." segments.

    if (pathOut != NULL && pathOutSize > 0) {
        pathOut[0] = '\0';
    }

    if (!ocPathIsAbsolute(absolutePathToMakeRelative) || !ocPathIsAbsolute(absolutePathToMakeRelativeTo)) {
        return 0;
    }

    ocPathIterator iPath;
    ocPathIterator iBase;
    ocBool32 isPathEmpty = !ocPathFirst(absolutePathToMakeRelative, &iPath);
    ocBool32 isBaseEmpty = !ocPathFirst(absolutePathToMakeRelativeTo, &iBase);

    if (isPathEmpty && isBaseEmpty) {
        return 0;   // Looks like both paths are empty.
    }

    // Phase 1: Get past the common section.
    int isPathAtEnd = 0;
    int isBaseAtEnd = 0;
    while (!isPathAtEnd && !isBaseAtEnd && ocPathIteratorsEqual(iPath, iBase)) {
        isPathAtEnd = !ocPathNext(&iPath);
        isBaseAtEnd = !ocPathNext(&iBase);
    }

    // If the target iterator never advanced the two paths diverged immediately.
    if (iPath.segment.offset == 0) {
        return 0;   // The path is not relative to the base path.
    }

    // Phase 2: Append ".." segments - one for each remaining segment in the base path.
    size_t pathOutLength = 0;
    if (!ocPathAtEnd(iBase)) {
        do {
            if (pathOutLength == 0) {
                // It's the first segment, so we need to ensure we don't lead with a slash.
                if (pathOut != NULL && pathOutLength+2 < pathOutSize) {
                    pathOut[pathOutLength + 0] = '.';
                    pathOut[pathOutLength + 1] = '.';
                }

                pathOutLength += 2;
            } else {
                // It's not the first segment. Make sure we lead with a slash.
                if (pathOut != NULL && pathOutLength+3 < pathOutSize) {
                    pathOut[pathOutLength + 0] = '/';
                    pathOut[pathOutLength + 1] = '.';
                    pathOut[pathOutLength + 2] = '.';
                }

                pathOutLength += 3;
            }
        } while (ocPathNext(&iBase));
    }

    // Now we just append whatever is left of the main path. We want the path to be clean, so we append segment-by-segment.
    if (!ocPathAtEnd(iPath)) {
        do {
            // Leading slash, if required.
            if (pathOutLength != 0) {
                if (pathOut != NULL && pathOutLength+1 < pathOutSize) {
                    pathOut[pathOutLength] = '/';
                }

                pathOutLength += 1;
            }

            if (pathOut != NULL) {
                oc_strncpy_s(pathOut + pathOutLength, pathOutSize - pathOutLength, iPath.path + iPath.segment.offset, iPath.segment.length);
            }

            pathOutLength += iPath.segment.length;
        } while (ocPathNext(&iPath));
    }

    // Always null terminate.
    if (pathOut != NULL && pathOutLength+1 <= pathOutSize) {
        pathOut[pathOutLength] = '\0';
    }

    return pathOutLength + 1;   // +1 for null terminator.
}
// Resolves `relativePathToMakeAbsolute` against `basePath` by appending and
// cleaning; see ocPathAppendAndClean() for return/buffer-size semantics.
size_t ocPathToAbsolute(char* pathOut, size_t pathOutSize, const char* relativePathToMakeAbsolute, const char* basePath)
{
    return ocPathAppendAndClean(pathOut, pathOutSize, basePath, relativePathToMakeAbsolute);
}
///////////////////////////////////////////////////////////////////////////////
//
// High Level APIs
//
///////////////////////////////////////////////////////////////////////////////
// Heap-allocating convenience wrapper: returns the file name of `path`
// without its extension as a newly allocated ocString, or NULL on an empty
// result or allocation failure. Caller owns the returned string.
ocString ocPathFileNameWithoutExtensionStr(const char* path)
{
    size_t len = ocPathFileNameWithoutExtension(NULL, 0, path);
    if (len == 0) {
        return NULL;
    }

    // NOTE(review): `len` comes via ocPathRemoveExtension(), which (unlike the
    // other sizing APIs in this file) does not include the null terminator;
    // the +1 here accounts for it -- keep in sync if that ever changes.
    ocString str = ocMallocString(len+1);
    if (str == NULL) {
        return NULL;
    }

    ocPathFileNameWithoutExtension(str, len+1, path);
    return str;
}
|
sap-contributions/libjvm | manifest_test.go | /*
* Copyright 2018-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package libjvm_test
import (
"io/ioutil"
"os"
"path/filepath"
"testing"
. "github.com/onsi/gomega"
"github.com/sclevine/spec"
"github.com/paketo-buildpacks/libjvm"
)
// testManifest verifies libjvm.NewManifest: a missing META-INF/MANIFEST.MF
// yields an empty manifest, a present one is parsed into key/value pairs, and
// wrapped values (a continuation line starting with a space, per the JAR
// manifest format) are joined back into a single value.
// NOTE(review): ioutil.TempDir/WriteFile are deprecated since Go 1.16 in
// favor of os.MkdirTemp/os.WriteFile -- candidate cleanup if the module's
// minimum Go version allows.
func testManifest(t *testing.T, context spec.G, it spec.S) {
	var (
		Expect = NewWithT(t).Expect
		path string
	)
	// Each test runs against a fresh temp dir standing in for an exploded JAR.
	it.Before(func() {
		var err error
		path, err = ioutil.TempDir("", "manifest")
		Expect(err).NotTo(HaveOccurred())
	})
	it.After(func() {
		Expect(os.RemoveAll(path)).To(Succeed())
	})
	it("returns empty manifest if file doesn't exist", func() {
		m, err := libjvm.NewManifest(path)
		Expect(err).NotTo(HaveOccurred())
		Expect(m.Len()).To(Equal(0))
	})
	it("returns populated manifest if file exists", func() {
		Expect(os.MkdirAll(filepath.Join(path, "META-INF"), 0755)).To(Succeed())
		Expect(ioutil.WriteFile(filepath.Join(path, "META-INF", "MANIFEST.MF"), []byte("test-key=test-value"), 0644)).To(Succeed())
		m, err := libjvm.NewManifest(path)
		Expect(err).NotTo(HaveOccurred())
		k, ok := m.Get("test-key")
		Expect(ok).To(BeTrue())
		Expect(k).To(Equal("test-value"))
	})
	// "Start-Class" below is wrapped; the leading-space continuation line must
	// be stitched back onto it.
	it("returns proper values when lines are broken", func() {
		Expect(os.MkdirAll(filepath.Join(path, "META-INF"), 0755)).To(Succeed())
		Expect(ioutil.WriteFile(filepath.Join(path, "META-INF", "MANIFEST.MF"), []byte(`
Manifest-Version: 1.0
Implementation-Title: petclinic
Implementation-Version: 2.1.0.BUILD-SNAPSHOT
Start-Class: org.springframework.samples.petclinic.PetClinicApplicatio
 n
Spring-Boot-Classes: BOOT-INF/classes/
Spring-Boot-Lib: BOOT-INF/lib/
Build-Jdk-Spec: 1.8
Spring-Boot-Version: 2.1.6.RELEASE
Created-By: Maven Archiver 3.4.0
Main-Class: org.springframework.boot.loader.JarLauncher
`), 0644)).To(Succeed())
		m, err := libjvm.NewManifest(path)
		Expect(err).NotTo(HaveOccurred())
		k, ok := m.Get("Start-Class")
		Expect(ok).To(BeTrue())
		Expect(k).To(Equal("org.springframework.samples.petclinic.PetClinicApplication"))
	})
}
|
petercdcn/LeetCode-Sol-Res | src/main/java/com/freetymekiyan/algorithms/level/medium/PathSum2.java | <filename>src/main/java/com/freetymekiyan/algorithms/level/medium/PathSum2.java
package com.freetymekiyan.algorithms.level.medium;
import com.freetymekiyan.algorithms.utils.Utils.TreeNode;
import java.util.ArrayList;
import java.util.List;
/**
* 113. Path Sum II
*
* Given a binary tree and a sum, find all root-to-leaf paths where each path's sum equals the given sum.
*
* Note: A leaf is a node with no children.
*
* Example:
*
* Given the below binary tree and sum = 22,
*
* 5
* / \
* 4 8
* / / \
* 11 13 4
* / \ / \
* 7 2 5 1
* Return:
*
* [
* [5,4,11,2],
* [5,8,4,5]
* ]
*
* Related Topics: Tree, Depth-first Search
*
* Similar Questions: Path Sum (E), Binary Tree Paths (E), Path Sum III (E), Path Sum IV (M)
*/
public class PathSum2 {
public List<List<Integer>> pathSum(TreeNode root, int sum) {
List<List<Integer>> res = new ArrayList<>();
if (root == null) return res;
dfs(root, sum, new ArrayList<>(), res);
return res;
}
public void dfs(TreeNode root, int sum, List<Integer> path, List<List<Integer>> res) {
if (root == null) return;
sum -= root.val;
if (root.left == null && root.right == null && sum == 0) {
path.add(root.val);
res.add(new ArrayList<>(path));
path.remove(path.size() - 1);
return;
}
path.add(root.val);
dfs(root.left, sum, path, res);
dfs(root.right, sum, path, res);
path.remove(path.size() - 1);
}
} |
shaochangbin/chromium-crosswalk | tools/telemetry/telemetry/core/platform/profiler/android_prebuilt_profiler_helper.py | <filename>tools/telemetry/telemetry/core/platform/profiler/android_prebuilt_profiler_helper.py
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Android-specific, downloads and installs pre-built profilers.
These pre-built binaries are stored in Cloud Storage, and they were
built from AOSP source. Specific profilers using this helper class contain
more detailed information.
"""
import logging
import os
import posixpath
import sys

from telemetry import decorators
from telemetry.core import util
from telemetry.page import cloud_storage
# On-device directory that prebuilt profiler binaries are pushed to (POSIX path).
_DEVICE_PROFILER_DIR = '/data/local/tmp/profilers/'
def GetDevicePath(profiler_binary):
  """Returns the on-device destination path for `profiler_binary`.

  Device-side paths are always POSIX-style, so posixpath is used rather than
  os.path: on a Windows host, os.path.join would produce a
  backslash-separated path that is invalid on the Android device.
  """
  return posixpath.join(_DEVICE_PROFILER_DIR, os.path.basename(profiler_binary))
def GetHostPath(profiler_binary):
  """Returns the host-side path of the prebuilt `profiler_binary`."""
  prebuilt_dir = os.path.join(util.GetTelemetryDir(), 'bin', 'prebuilt', 'android')
  return os.path.join(prebuilt_dir, profiler_binary)
def GetIfChanged(profiler_binary):
  """Downloads `profiler_binary` from Cloud Storage if the local copy is stale."""
  host_path = GetHostPath(profiler_binary)
  cloud_storage.GetIfChanged(host_path, cloud_storage.PUBLIC_BUCKET)
@decorators.Cache
def InstallOnDevice(adb, profiler_binary):
  """Pushes `profiler_binary` to the device, downloading it first if needed.

  Returns True on success, False when no suitable binary could be found.
  Decorated with @decorators.Cache, so repeated calls with the same
  arguments are no-ops.
  """
  host_binary_path = util.FindSupportBinary(profiler_binary)
  if not host_binary_path:
    # No locally built binary; fall back to the prebuilt one, which is only
    # available for Linux hosts targeting ARM devices.
    has_prebuilt = (
        sys.platform.startswith('linux') and
        adb.system_properties['ro.product.cpu.abi'].startswith('armeabi'))
    if has_prebuilt:
      GetIfChanged(profiler_binary)
      host_binary_path = GetHostPath(profiler_binary)
    else:
      logging.error('Profiler binary "%s" not found. Could not be installed',
                    profiler_binary)
      return False

  device_binary_path = GetDevicePath(profiler_binary)
  adb.PushIfNeeded(host_binary_path, device_binary_path)
  # The pushed binary must be executable by the shell/app user.
  adb.RunShellCommand('chmod 777 ' + device_binary_path)
  return True
|
Polidea/SiriusObfuscator | SymbolExtractorAndRenamer/lldb/include/lldb/Symbol/ObjectContainer.h | <filename>SymbolExtractorAndRenamer/lldb/include/lldb/Symbol/ObjectContainer.h<gh_stars>100-1000
//===-- ObjectContainer.h ---------------------------------------*- C++ -*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
#ifndef liblldb_ObjectContainer_h_
#define liblldb_ObjectContainer_h_
// C Includes
// C++ Includes
// Other libraries and framework includes
// Project includes
#include "lldb/Core/DataExtractor.h"
#include "lldb/Core/ModuleChild.h"
#include "lldb/Core/PluginInterface.h"
#include "lldb/Host/Endian.h"
#include "lldb/Host/FileSpec.h"
#include "lldb/lldb-private.h"
namespace lldb_private {
//----------------------------------------------------------------------
/// @class ObjectContainer ObjectContainer.h "lldb/Symbol/ObjectContainer.h"
/// @brief A plug-in interface definition class for object containers.
///
/// Object containers contain object files from one or more
/// architectures, and also can contain one or more named objects.
///
/// Typical object containers are static libraries (.a files) that
/// contain multiple named object files, and universal files that contain
/// multiple architectures.
//----------------------------------------------------------------------
class ObjectContainer : public PluginInterface, public ModuleChild {
public:
  //------------------------------------------------------------------
  /// Construct with a parent module, offset, and header data.
  ///
  /// Object files belong to modules and a valid module must be
  /// supplied upon construction. The file offset and length describe
  /// where this container lives within \a file, which matters for
  /// files that contain more than one architecture or object.
  //------------------------------------------------------------------
  ObjectContainer(const lldb::ModuleSP &module_sp, const FileSpec *file,
                  lldb::offset_t file_offset, lldb::offset_t length,
                  lldb::DataBufferSP &data_sp, lldb::offset_t data_offset)
      : ModuleChild(module_sp),
        m_file(), // This file can be different than the module's file spec
        m_offset(file_offset), m_length(length), m_data() {
    if (file)
      m_file = *file;
    if (data_sp)
      m_data.SetData(data_sp, data_offset, length);
  }

  //------------------------------------------------------------------
  /// Destructor.
  ///
  /// The destructor is virtual since this class is designed to be
  /// inherited from by the plug-in instance.
  //------------------------------------------------------------------
  ~ObjectContainer() override = default;

  //------------------------------------------------------------------
  /// Dump a description of this object to a Stream.
  ///
  /// Dump a description of the current contents of this object
  /// to the supplied stream \a s. The dumping should include the
  /// section list if it has been parsed, and the symbol table
  /// if it has been parsed.
  ///
  /// @param[in] s
  ///     The stream to which to dump the object description.
  //------------------------------------------------------------------
  virtual void Dump(Stream *s) const = 0;

  //------------------------------------------------------------------
  /// Gets the architecture given an index.
  ///
  /// Copies the architecture specification for index \a idx.
  /// The default implementation always fails; containers holding
  /// multiple architectures should override it.
  ///
  /// @param[in] idx
  ///     The architecture index to extract.
  ///
  /// @param[out] arch
  ///     An architecture object that will be filled in if \a idx is a
  ///     valid architecture index.
  ///
  /// @return
  ///     Returns \b true if \a idx is valid and \a arch has been
  ///     filled in, \b false otherwise.
  ///
  /// @see ObjectContainer::GetNumArchitectures() const
  //------------------------------------------------------------------
  virtual bool GetArchitectureAtIndex(uint32_t idx, ArchSpec &arch) const {
    return false;
  }

  //------------------------------------------------------------------
  /// Returns the offset into a file at which this object resides.
  ///
  /// Some files contain many object files, and this function allows
  /// access to an object's offset within the file.
  ///
  /// @return
  ///     The offset in bytes into the file. Defaults to zero for
  ///     simple object files that are represented by an entire file.
  //------------------------------------------------------------------
  virtual lldb::addr_t GetOffset() const { return m_offset; }

  /// Returns the size in bytes of this container within its file.
  virtual lldb::addr_t GetByteSize() const { return m_length; }

  //------------------------------------------------------------------
  /// Get the number of objects within this object file (archives).
  ///
  /// @return
  ///     Zero for object files that are not archives, or the number
  ///     of objects contained in the archive.
  //------------------------------------------------------------------
  virtual size_t GetNumObjects() const { return 0; }

  //------------------------------------------------------------------
  /// Get the number of architectures in this object file.
  ///
  /// The default implementation returns 0. ObjectContainer instances
  /// that contain one or more architectures (e.g. universal files)
  /// should override this function and return an appropriate value.
  /// NOTE(review): a previous version of this comment claimed the
  /// default was 1, which did not match the implementation below.
  ///
  /// @return
  ///     The number of architectures contained in this object file.
  //------------------------------------------------------------------
  virtual size_t GetNumArchitectures() const { return 0; }

  //------------------------------------------------------------------
  /// Attempts to parse the object header.
  ///
  /// This function is used as a test to see if a given plug-in
  /// instance can parse the header data already contained in
  /// ObjectContainer::m_data. If an object file parser does not
  /// recognize the magic bytes in a header, false should be returned
  /// and the next plug-in can attempt to parse an object file.
  ///
  /// @return
  ///     Returns \b true if the header was parsed successfully, \b
  ///     false otherwise.
  //------------------------------------------------------------------
  virtual bool ParseHeader() = 0;

  //------------------------------------------------------------------
  /// Get the object file corresponding to a file specification.
  ///
  /// NOTE(review): the previous comment here described selecting an
  /// architecture and did not match this method; rewritten to
  /// document the actual signature.
  ///
  /// @param[in] file
  ///     The file specification identifying the desired object file,
  ///     or nullptr for containers that hold a single object.
  ///
  /// @return
  ///     Returns a shared pointer to the object file of the requested
  ///     \a file. Returns an empty shared pointer if no such object
  ///     file exists in the container.
  //------------------------------------------------------------------
  virtual lldb::ObjectFileSP GetObjectFile(const FileSpec *file) = 0;

  /// Return true if the object at \a object_idx is itself a container.
  /// Default says no; archive-like containers should override.
  virtual bool ObjectAtIndexIsContainer(uint32_t object_idx) { return false; }

  /// Per-index accessors. The defaults below return nullptr; containers
  /// that expose multiple named objects (e.g. static libraries) should
  /// override the ones that apply.
  virtual ObjectFile *GetObjectFileAtIndex(uint32_t object_idx) {
    return nullptr;
  }

  virtual ObjectContainer *GetObjectContainerAtIndex(uint32_t object_idx) {
    return nullptr;
  }

  virtual const char *GetObjectNameAtIndex(uint32_t object_idx) const {
    return nullptr;
  }

protected:
  //------------------------------------------------------------------
  // Member variables.
  //------------------------------------------------------------------
  FileSpec m_file; ///< The file that represents this container objects (which
                   ///can be different from the module's file).
  lldb::addr_t
      m_offset; ///< The offset in bytes into the file, or the address in memory
  lldb::addr_t m_length; ///< The size in bytes if known (can be zero).
  DataExtractor
      m_data; ///< The data for this object file so things can be parsed lazily.

private:
  DISALLOW_COPY_AND_ASSIGN(ObjectContainer);
};
} // namespace lldb_private
#endif // liblldb_ObjectContainer_h_
|
jinahya/mysql-sakila-entities | src/test/java/com/github/jinahya/sakila/persistence/InventoryService.java | <filename>src/test/java/com/github/jinahya/sakila/persistence/InventoryService.java<gh_stars>1-10
package com.github.jinahya.sakila.persistence;
/*-
* #%L
* sakila-entities
* %%
* Copyright (C) 2019 Jinahya, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import lombok.extern.slf4j.Slf4j;
import org.jetbrains.annotations.Nullable;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Positive;
import javax.validation.constraints.PositiveOrZero;
import java.util.List;
/**
 * A service class for {@link Inventory}. All query methods are currently
 * unimplemented stubs that throw {@link UnsupportedOperationException}.
 *
 * @author <NAME> <onacit_at_gmail.com>
 */
@Slf4j
class InventoryService extends BaseEntityService<Inventory> {

    // -----------------------------------------------------------------------------------------------------------------

    /**
     * Creates a new instance.
     */
    InventoryService() {
        super(Inventory.class);
    }

    // -----------------------------------------------------------------------------------------------------------------

    /**
     * Returns the number of stores that specified film is available.
     *
     * @param film the film to check.
     * @return the number of stores that {@code film} is available.
     */
    @PositiveOrZero long countStores(@NotNull final Film film) {
        // TODO: 2019-07-22 implement!!!
        throw new UnsupportedOperationException("not implemented yet");
    }

    /**
     * List stores that specified film is available sorted by {@link Country#ATTRIBUTE_NAME_COUNTRY Country#country}
     * attribute, {@link City#ATTRIBUTE_NAME_CITY City#city} attribute, {@link Address#ATTRIBUTE_NAME_DISTRICT
     * Address#district} attribute, and {@link BaseEntity#ATTRIBUTE_NAME_ID Store#storeId} attribute in all ascending
     * order.
     *
     * @param film        the film to check.
     * @param firstResult first index of the result; {@code null} for an unspecified result.
     * @param maxResults  maximum results to retrieve; {@code null} for an unspecified result.
     * @return a list of stores.
     */
    @NotNull List<@NotNull Store> listStores(@NotNull final Film film,
                                             @PositiveOrZero @Nullable final Integer firstResult,
                                             @Positive @Nullable final Integer maxResults) {
        // TODO: 2019-07-22 implement!!!
        throw new UnsupportedOperationException("not implemented yet");
    }

    // -----------------------------------------------------------------------------------------------------------------

    /**
     * Counts distinct films available on specified store.
     * <p>
     * NOTE(review): the parameter is declared as {@code Film} but is named and documented as a store; it should
     * probably be {@link Store} — confirm with callers before implementing, since changing the type changes the
     * method signature.
     *
     * @param store the store whose distinct films are counted.
     * @return the number of films available in {@code store}.
     */
    @PositiveOrZero long countFilms(@NotNull final Film store) {
        // TODO: 2019-07-22 implement!!!
        throw new UnsupportedOperationException("not implemented yet");
    }

    /**
     * List distinct films available on specified store sorted by {@link Film#ATTRIBUTE_NAME_TITLE title} in ascending
     * order.
     *
     * @param store       the store whose films are listed.
     * @param firstResult first index of the result; {@code null} for an unspecified result.
     * @param maxResults  maximum results to retrieve; {@code null} for an unspecified result.
     * @return a list of films.
     */
    @NotNull List<@NotNull Film> listFilm(@NotNull final Store store,
                                          @PositiveOrZero @Nullable final Integer firstResult,
                                          @Positive @Nullable final Integer maxResults) {
        // TODO: 2019-07-22 implement!!!
        throw new UnsupportedOperationException("not implemented yet");
    }
}
|
LoveKino/my_start_page | webpack.config.js | <reponame>LoveKino/my_start_page
'use strict';
const webpack = require('webpack');
module.exports = {
mode: process.env.DEV ? 'development' : 'production',
entry: {
app: './lib/index.js'
},
devtool: 'source-map',
output: {
path: __dirname + '/asset',
filename: '[name].js',
publicPath: '/assets'
},
module: {
rules: [{
test: /\.jsx?$/,
exclude: /node_modules/,
use: {
loader: 'babel-loader',
options: {
presets: ['react', 'babel-preset-env'],
plugins: ['wildcard']
}
}
}, {
test: /\.(woff|woff2)(\?v=\d+\.\d+\.\d+)?$/,
loader: 'url?limit=10000&mimetype=application/font-woff'
}, {
test: /\.ttf(\?v=\d+\.\d+\.\d+)?$/,
loader: 'url?limit=10000&mimetype=application/octet-stream'
}, {
test: /\.eot(\?v=\d+\.\d+\.\d+)?$/,
loader: 'file'
}, {
test: /\.svg/,
loader: 'url-loader?limit=10000&mimetype=image/svg+xml'
}]
},
plugins: [
new webpack.HotModuleReplacementPlugin()
],
devServer: {
contentBase: __dirname + '/dist',
hot: true,
progress: true,
port: 8080,
proxy: {},
watchContentBase: true,
watchOptions: {
poll: true,
ignored: /node_modules/
}
}
};
|
ScalablyTyped/SlinkyTyped | r/react-native-elements/src/main/scala/typingsSlinky/reactNativeElements/anon/RecursivePartialPartialBu.scala | package typingsSlinky.reactNativeElements.anon
import slinky.core.ReactComponentClass
import slinky.core.SyntheticEvent
import typingsSlinky.reactNative.anon.Layout
import typingsSlinky.reactNative.anon.ReadonlyactionNamestring
import typingsSlinky.reactNative.mod.AccessibilityActionInfo
import typingsSlinky.reactNative.mod.AccessibilityRole
import typingsSlinky.reactNative.mod.AccessibilityState
import typingsSlinky.reactNative.mod.AccessibilityTrait
import typingsSlinky.reactNative.mod.AccessibilityValue
import typingsSlinky.reactNative.mod.ActivityIndicatorProperties
import typingsSlinky.reactNative.mod.BackgroundPropType
import typingsSlinky.reactNative.mod.Insets
import typingsSlinky.reactNative.mod.NativeTouchEvent
import typingsSlinky.reactNative.mod.NodeHandle
import typingsSlinky.reactNative.mod.StyleProp
import typingsSlinky.reactNative.mod.TVParallaxProperties
import typingsSlinky.reactNative.mod.TargetedEvent
import typingsSlinky.reactNative.mod.TextProperties
import typingsSlinky.reactNative.mod.TextStyle
import typingsSlinky.reactNative.mod.ViewStyle
import typingsSlinky.reactNativeElements.mod.IconNode
import typingsSlinky.reactNativeElements.mod.RecursivePartial
import typingsSlinky.reactNativeElements.reactNativeElementsStrings.`no-hide-descendants`
import typingsSlinky.reactNativeElements.reactNativeElementsStrings.assertive
import typingsSlinky.reactNativeElements.reactNativeElementsStrings.auto
import typingsSlinky.reactNativeElements.reactNativeElementsStrings.button
import typingsSlinky.reactNativeElements.reactNativeElementsStrings.clear
import typingsSlinky.reactNativeElements.reactNativeElementsStrings.no
import typingsSlinky.reactNativeElements.reactNativeElementsStrings.none
import typingsSlinky.reactNativeElements.reactNativeElementsStrings.outline
import typingsSlinky.reactNativeElements.reactNativeElementsStrings.polite
import typingsSlinky.reactNativeElements.reactNativeElementsStrings.radiobutton_checked
import typingsSlinky.reactNativeElements.reactNativeElementsStrings.radiobutton_unchecked
import typingsSlinky.reactNativeElements.reactNativeElementsStrings.solid
import typingsSlinky.reactNativeElements.reactNativeElementsStrings.yes
import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}
/* Inlined react-native-elements.react-native-elements.RecursivePartial<std.Partial<react-native-elements.react-native-elements.ButtonProps>> */
/** Generated ScalablyTyped facade: every field of the upstream
  * `ButtonProps` type re-expressed as an optional, recursively-partial
  * member. All members are `js.native` stubs backed by the underlying
  * JavaScript object; do not edit by hand — regenerate instead.
  */
@js.native
trait RecursivePartialPartialBu extends StObject {

  var TouchableComponent: js.UndefOr[RecursivePartial[js.UndefOr[ReactComponentClass[js.Object]]]] = js.native

  var ViewComponent: js.UndefOr[RecursivePartial[js.UndefOr[ReactComponentClass[_]]]] = js.native

  var accessibilityActions: js.UndefOr[RecursivePartial[js.UndefOr[js.Array[AccessibilityActionInfo]]]] = js.native

  var accessibilityComponentType: js.UndefOr[
    RecursivePartial[js.UndefOr[none | button | radiobutton_checked | radiobutton_unchecked]]
  ] = js.native

  var accessibilityElementsHidden: js.UndefOr[RecursivePartial[js.UndefOr[Boolean]]] = js.native

  var accessibilityHint: js.UndefOr[RecursivePartial[js.UndefOr[String]]] = js.native

  var accessibilityIgnoresInvertColors: js.UndefOr[RecursivePartial[js.UndefOr[Boolean]]] = js.native

  var accessibilityLabel: js.UndefOr[RecursivePartial[js.UndefOr[String]]] = js.native

  var accessibilityLiveRegion: js.UndefOr[RecursivePartial[js.UndefOr[none | polite | assertive]]] = js.native

  var accessibilityRole: js.UndefOr[RecursivePartial[js.UndefOr[AccessibilityRole]]] = js.native

  var accessibilityState: js.UndefOr[RecursivePartial[js.UndefOr[AccessibilityState]]] = js.native

  var accessibilityTraits: js.UndefOr[RecursivePartial[js.UndefOr[AccessibilityTrait | js.Array[AccessibilityTrait]]]] = js.native

  var accessibilityValue: js.UndefOr[RecursivePartial[js.UndefOr[AccessibilityValue]]] = js.native

  var accessibilityViewIsModal: js.UndefOr[RecursivePartial[js.UndefOr[Boolean]]] = js.native

  var accessible: js.UndefOr[RecursivePartial[js.UndefOr[Boolean]]] = js.native

  var activeOpacity: js.UndefOr[RecursivePartial[js.UndefOr[Double]]] = js.native

  var background: js.UndefOr[RecursivePartial[js.UndefOr[BackgroundPropType]]] = js.native

  var buttonStyle: js.UndefOr[RecursivePartial[js.UndefOr[StyleProp[ViewStyle]]]] = js.native

  var containerStyle: js.UndefOr[RecursivePartial[js.UndefOr[StyleProp[ViewStyle]]]] = js.native

  var delayLongPress: js.UndefOr[RecursivePartial[js.UndefOr[Double]]] = js.native

  var delayPressIn: js.UndefOr[RecursivePartial[js.UndefOr[Double]]] = js.native

  var delayPressOut: js.UndefOr[RecursivePartial[js.UndefOr[Double]]] = js.native

  var disabled: js.UndefOr[RecursivePartial[js.UndefOr[Boolean]]] = js.native

  var disabledStyle: js.UndefOr[RecursivePartial[js.UndefOr[StyleProp[ViewStyle]]]] = js.native

  var disabledTitleStyle: js.UndefOr[RecursivePartial[js.UndefOr[StyleProp[TextStyle]]]] = js.native

  var hasTVPreferredFocus: js.UndefOr[RecursivePartial[js.UndefOr[Boolean]]] = js.native

  var hitSlop: js.UndefOr[RecursivePartial[js.UndefOr[Insets]]] = js.native

  var icon: js.UndefOr[RecursivePartial[js.UndefOr[IconNode]]] = js.native

  var iconContainerStyle: js.UndefOr[RecursivePartial[js.UndefOr[StyleProp[ViewStyle]]]] = js.native

  var iconRight: js.UndefOr[RecursivePartial[js.UndefOr[Boolean]]] = js.native

  var importantForAccessibility: js.UndefOr[RecursivePartial[js.UndefOr[auto | yes | no | `no-hide-descendants`]]] = js.native

  var linearGradientProps: js.UndefOr[RecursivePartial[js.UndefOr[js.Object]]] = js.native

  var loading: js.UndefOr[RecursivePartial[js.UndefOr[Boolean]]] = js.native

  var loadingProps: js.UndefOr[RecursivePartial[js.UndefOr[ActivityIndicatorProperties]]] = js.native

  var loadingStyle: js.UndefOr[RecursivePartial[js.UndefOr[StyleProp[ViewStyle]]]] = js.native

  var onAccessibilityAction: js.UndefOr[
    RecursivePartial[
      js.UndefOr[js.Function1[SyntheticEvent[NodeHandle, ReadonlyactionNamestring], Unit]]
    ]
  ] = js.native

  var onAccessibilityEscape: js.UndefOr[RecursivePartial[js.UndefOr[js.Function0[Unit]]]] = js.native

  var onAccessibilityTap: js.UndefOr[RecursivePartial[js.UndefOr[js.Function0[Unit]]]] = js.native

  var onBlur: js.UndefOr[
    RecursivePartial[js.UndefOr[js.Function1[SyntheticEvent[NodeHandle, TargetedEvent], Unit]]]
  ] = js.native

  var onFocus: js.UndefOr[
    RecursivePartial[js.UndefOr[js.Function1[SyntheticEvent[NodeHandle, TargetedEvent], Unit]]]
  ] = js.native

  var onLayout: js.UndefOr[
    RecursivePartial[js.UndefOr[js.Function1[SyntheticEvent[NodeHandle, Layout], Unit]]]
  ] = js.native

  var onLongPress: js.UndefOr[
    RecursivePartial[js.UndefOr[js.Function1[SyntheticEvent[NodeHandle, NativeTouchEvent], Unit]]]
  ] = js.native

  var onMagicTap: js.UndefOr[RecursivePartial[js.UndefOr[js.Function0[Unit]]]] = js.native

  var onPress: js.UndefOr[
    RecursivePartial[js.UndefOr[js.Function1[SyntheticEvent[NodeHandle, NativeTouchEvent], Unit]]]
  ] = js.native

  var onPressIn: js.UndefOr[
    RecursivePartial[js.UndefOr[js.Function1[SyntheticEvent[NodeHandle, NativeTouchEvent], Unit]]]
  ] = js.native

  var onPressOut: js.UndefOr[
    RecursivePartial[js.UndefOr[js.Function1[SyntheticEvent[NodeHandle, NativeTouchEvent], Unit]]]
  ] = js.native

  var pressRetentionOffset: js.UndefOr[RecursivePartial[js.UndefOr[Insets]]] = js.native

  var raised: js.UndefOr[RecursivePartial[js.UndefOr[Boolean]]] = js.native

  var style: js.UndefOr[RecursivePartial[js.UndefOr[StyleProp[ViewStyle]]]] = js.native

  var testID: js.UndefOr[RecursivePartial[js.UndefOr[String]]] = js.native

  var title: js.UndefOr[RecursivePartial[js.UndefOr[String]]] = js.native

  var titleProps: js.UndefOr[RecursivePartial[js.UndefOr[TextProperties]]] = js.native

  var titleStyle: js.UndefOr[RecursivePartial[js.UndefOr[StyleProp[TextStyle]]]] = js.native

  var touchSoundDisabled: js.UndefOr[RecursivePartial[js.UndefOr[Boolean | Null]]] = js.native

  var tvParallaxProperties: js.UndefOr[RecursivePartial[js.UndefOr[TVParallaxProperties]]] = js.native

  // Backticks because `type` is a Scala keyword; values mirror the JS union.
  var `type`: js.UndefOr[RecursivePartial[js.UndefOr[solid | clear | outline]]] = js.native

  var useForeground: js.UndefOr[RecursivePartial[js.UndefOr[Boolean]]] = js.native
}
object RecursivePartialPartialBu {
@scala.inline
def apply(): RecursivePartialPartialBu = {
val __obj = js.Dynamic.literal()
__obj.asInstanceOf[RecursivePartialPartialBu]
}
@scala.inline
implicit class RecursivePartialPartialBuMutableBuilder[Self <: RecursivePartialPartialBu] (val x: Self) extends AnyVal {
@scala.inline
def setAccessibilityActions(value: RecursivePartial[js.UndefOr[js.Array[AccessibilityActionInfo]]]): Self = StObject.set(x, "accessibilityActions", value.asInstanceOf[js.Any])
@scala.inline
def setAccessibilityActionsUndefined: Self = StObject.set(x, "accessibilityActions", js.undefined)
@scala.inline
def setAccessibilityComponentType(value: RecursivePartial[js.UndefOr[none | button | radiobutton_checked | radiobutton_unchecked]]): Self = StObject.set(x, "accessibilityComponentType", value.asInstanceOf[js.Any])
@scala.inline
def setAccessibilityComponentTypeUndefined: Self = StObject.set(x, "accessibilityComponentType", js.undefined)
@scala.inline
def setAccessibilityElementsHidden(value: RecursivePartial[js.UndefOr[Boolean]]): Self = StObject.set(x, "accessibilityElementsHidden", value.asInstanceOf[js.Any])
@scala.inline
def setAccessibilityElementsHiddenUndefined: Self = StObject.set(x, "accessibilityElementsHidden", js.undefined)
@scala.inline
def setAccessibilityHint(value: RecursivePartial[js.UndefOr[String]]): Self = StObject.set(x, "accessibilityHint", value.asInstanceOf[js.Any])
@scala.inline
def setAccessibilityHintUndefined: Self = StObject.set(x, "accessibilityHint", js.undefined)
@scala.inline
def setAccessibilityIgnoresInvertColors(value: RecursivePartial[js.UndefOr[Boolean]]): Self = StObject.set(x, "accessibilityIgnoresInvertColors", value.asInstanceOf[js.Any])
@scala.inline
def setAccessibilityIgnoresInvertColorsUndefined: Self = StObject.set(x, "accessibilityIgnoresInvertColors", js.undefined)
@scala.inline
def setAccessibilityLabel(value: RecursivePartial[js.UndefOr[String]]): Self = StObject.set(x, "accessibilityLabel", value.asInstanceOf[js.Any])
@scala.inline
def setAccessibilityLabelUndefined: Self = StObject.set(x, "accessibilityLabel", js.undefined)
@scala.inline
def setAccessibilityLiveRegion(value: RecursivePartial[js.UndefOr[none | polite | assertive]]): Self = StObject.set(x, "accessibilityLiveRegion", value.asInstanceOf[js.Any])
@scala.inline
def setAccessibilityLiveRegionUndefined: Self = StObject.set(x, "accessibilityLiveRegion", js.undefined)
@scala.inline
def setAccessibilityRole(value: RecursivePartial[js.UndefOr[AccessibilityRole]]): Self = StObject.set(x, "accessibilityRole", value.asInstanceOf[js.Any])
@scala.inline
def setAccessibilityRoleUndefined: Self = StObject.set(x, "accessibilityRole", js.undefined)
@scala.inline
def setAccessibilityState(value: RecursivePartial[js.UndefOr[AccessibilityState]]): Self = StObject.set(x, "accessibilityState", value.asInstanceOf[js.Any])
@scala.inline
def setAccessibilityStateUndefined: Self = StObject.set(x, "accessibilityState", js.undefined)
@scala.inline
def setAccessibilityTraits(value: RecursivePartial[js.UndefOr[AccessibilityTrait | js.Array[AccessibilityTrait]]]): Self = StObject.set(x, "accessibilityTraits", value.asInstanceOf[js.Any])
@scala.inline
def setAccessibilityTraitsUndefined: Self = StObject.set(x, "accessibilityTraits", js.undefined)
@scala.inline
def setAccessibilityValue(value: RecursivePartial[js.UndefOr[AccessibilityValue]]): Self = StObject.set(x, "accessibilityValue", value.asInstanceOf[js.Any])
@scala.inline
def setAccessibilityValueUndefined: Self = StObject.set(x, "accessibilityValue", js.undefined)
@scala.inline
def setAccessibilityViewIsModal(value: RecursivePartial[js.UndefOr[Boolean]]): Self = StObject.set(x, "accessibilityViewIsModal", value.asInstanceOf[js.Any])
@scala.inline
def setAccessibilityViewIsModalUndefined: Self = StObject.set(x, "accessibilityViewIsModal", js.undefined)
@scala.inline
def setAccessible(value: RecursivePartial[js.UndefOr[Boolean]]): Self = StObject.set(x, "accessible", value.asInstanceOf[js.Any])
@scala.inline
def setAccessibleUndefined: Self = StObject.set(x, "accessible", js.undefined)
@scala.inline
def setActiveOpacity(value: RecursivePartial[js.UndefOr[Double]]): Self = StObject.set(x, "activeOpacity", value.asInstanceOf[js.Any])
@scala.inline
def setActiveOpacityUndefined: Self = StObject.set(x, "activeOpacity", js.undefined)
@scala.inline
def setBackground(value: RecursivePartial[js.UndefOr[BackgroundPropType]]): Self = StObject.set(x, "background", value.asInstanceOf[js.Any])
@scala.inline
def setBackgroundUndefined: Self = StObject.set(x, "background", js.undefined)
@scala.inline
def setButtonStyle(value: RecursivePartial[js.UndefOr[StyleProp[ViewStyle]]]): Self = StObject.set(x, "buttonStyle", value.asInstanceOf[js.Any])
@scala.inline
def setButtonStyleUndefined: Self = StObject.set(x, "buttonStyle", js.undefined)
@scala.inline
def setContainerStyle(value: RecursivePartial[js.UndefOr[StyleProp[ViewStyle]]]): Self = StObject.set(x, "containerStyle", value.asInstanceOf[js.Any])
@scala.inline
def setContainerStyleUndefined: Self = StObject.set(x, "containerStyle", js.undefined)
@scala.inline
def setDelayLongPress(value: RecursivePartial[js.UndefOr[Double]]): Self = StObject.set(x, "delayLongPress", value.asInstanceOf[js.Any])
@scala.inline
def setDelayLongPressUndefined: Self = StObject.set(x, "delayLongPress", js.undefined)
@scala.inline
def setDelayPressIn(value: RecursivePartial[js.UndefOr[Double]]): Self = StObject.set(x, "delayPressIn", value.asInstanceOf[js.Any])
@scala.inline
def setDelayPressInUndefined: Self = StObject.set(x, "delayPressIn", js.undefined)
@scala.inline
def setDelayPressOut(value: RecursivePartial[js.UndefOr[Double]]): Self = StObject.set(x, "delayPressOut", value.asInstanceOf[js.Any])
@scala.inline
def setDelayPressOutUndefined: Self = StObject.set(x, "delayPressOut", js.undefined)
@scala.inline
def setDisabled(value: RecursivePartial[js.UndefOr[Boolean]]): Self = StObject.set(x, "disabled", value.asInstanceOf[js.Any])
@scala.inline
def setDisabledStyle(value: RecursivePartial[js.UndefOr[StyleProp[ViewStyle]]]): Self = StObject.set(x, "disabledStyle", value.asInstanceOf[js.Any])
@scala.inline
def setDisabledStyleUndefined: Self = StObject.set(x, "disabledStyle", js.undefined)
@scala.inline
def setDisabledTitleStyle(value: RecursivePartial[js.UndefOr[StyleProp[TextStyle]]]): Self = StObject.set(x, "disabledTitleStyle", value.asInstanceOf[js.Any])
@scala.inline
def setDisabledTitleStyleUndefined: Self = StObject.set(x, "disabledTitleStyle", js.undefined)
@scala.inline
def setDisabledUndefined: Self = StObject.set(x, "disabled", js.undefined)
@scala.inline
def setHasTVPreferredFocus(value: RecursivePartial[js.UndefOr[Boolean]]): Self = StObject.set(x, "hasTVPreferredFocus", value.asInstanceOf[js.Any])
@scala.inline
def setHasTVPreferredFocusUndefined: Self = StObject.set(x, "hasTVPreferredFocus", js.undefined)
@scala.inline
def setHitSlop(value: RecursivePartial[js.UndefOr[Insets]]): Self = StObject.set(x, "hitSlop", value.asInstanceOf[js.Any])
@scala.inline
def setHitSlopUndefined: Self = StObject.set(x, "hitSlop", js.undefined)
@scala.inline
def setIcon(value: RecursivePartial[js.UndefOr[IconNode]]): Self = StObject.set(x, "icon", value.asInstanceOf[js.Any])
@scala.inline
def setIconContainerStyle(value: RecursivePartial[js.UndefOr[StyleProp[ViewStyle]]]): Self = StObject.set(x, "iconContainerStyle", value.asInstanceOf[js.Any])
@scala.inline
def setIconContainerStyleUndefined: Self = StObject.set(x, "iconContainerStyle", js.undefined)
@scala.inline
def setIconRight(value: RecursivePartial[js.UndefOr[Boolean]]): Self = StObject.set(x, "iconRight", value.asInstanceOf[js.Any])
@scala.inline
def setIconRightUndefined: Self = StObject.set(x, "iconRight", js.undefined)
@scala.inline
def setIconUndefined: Self = StObject.set(x, "icon", js.undefined)
@scala.inline
def setImportantForAccessibility(value: RecursivePartial[js.UndefOr[auto | yes | no | `no-hide-descendants`]]): Self = StObject.set(x, "importantForAccessibility", value.asInstanceOf[js.Any])
@scala.inline
def setImportantForAccessibilityUndefined: Self = StObject.set(x, "importantForAccessibility", js.undefined)
@scala.inline
def setLinearGradientProps(value: RecursivePartial[js.UndefOr[js.Object]]): Self = StObject.set(x, "linearGradientProps", value.asInstanceOf[js.Any])
@scala.inline
def setLinearGradientPropsUndefined: Self = StObject.set(x, "linearGradientProps", js.undefined)
@scala.inline
def setLoading(value: RecursivePartial[js.UndefOr[Boolean]]): Self = StObject.set(x, "loading", value.asInstanceOf[js.Any])
@scala.inline
def setLoadingProps(value: RecursivePartial[js.UndefOr[ActivityIndicatorProperties]]): Self = StObject.set(x, "loadingProps", value.asInstanceOf[js.Any])
@scala.inline
def setLoadingPropsUndefined: Self = StObject.set(x, "loadingProps", js.undefined)
@scala.inline
def setLoadingStyle(value: RecursivePartial[js.UndefOr[StyleProp[ViewStyle]]]): Self = StObject.set(x, "loadingStyle", value.asInstanceOf[js.Any])
@scala.inline
def setLoadingStyleUndefined: Self = StObject.set(x, "loadingStyle", js.undefined)
@scala.inline
def setLoadingUndefined: Self = StObject.set(x, "loading", js.undefined)
@scala.inline
def setOnAccessibilityAction(
value: RecursivePartial[
js.UndefOr[js.Function1[SyntheticEvent[NodeHandle, ReadonlyactionNamestring], Unit]]
]
): Self = StObject.set(x, "onAccessibilityAction", value.asInstanceOf[js.Any])
@scala.inline
def setOnAccessibilityActionUndefined: Self = StObject.set(x, "onAccessibilityAction", js.undefined)
@scala.inline
def setOnAccessibilityEscape(value: RecursivePartial[js.UndefOr[js.Function0[Unit]]]): Self = StObject.set(x, "onAccessibilityEscape", value.asInstanceOf[js.Any])
@scala.inline
def setOnAccessibilityEscapeUndefined: Self = StObject.set(x, "onAccessibilityEscape", js.undefined)
@scala.inline
def setOnAccessibilityTap(value: RecursivePartial[js.UndefOr[js.Function0[Unit]]]): Self = StObject.set(x, "onAccessibilityTap", value.asInstanceOf[js.Any])
@scala.inline
def setOnAccessibilityTapUndefined: Self = StObject.set(x, "onAccessibilityTap", js.undefined)
@scala.inline
def setOnBlur(value: RecursivePartial[js.UndefOr[js.Function1[SyntheticEvent[NodeHandle, TargetedEvent], Unit]]]): Self = StObject.set(x, "onBlur", value.asInstanceOf[js.Any])
@scala.inline
def setOnBlurUndefined: Self = StObject.set(x, "onBlur", js.undefined)
@scala.inline
def setOnFocus(value: RecursivePartial[js.UndefOr[js.Function1[SyntheticEvent[NodeHandle, TargetedEvent], Unit]]]): Self = StObject.set(x, "onFocus", value.asInstanceOf[js.Any])
@scala.inline
def setOnFocusUndefined: Self = StObject.set(x, "onFocus", js.undefined)
@scala.inline
def setOnLayout(value: RecursivePartial[js.UndefOr[js.Function1[SyntheticEvent[NodeHandle, Layout], Unit]]]): Self = StObject.set(x, "onLayout", value.asInstanceOf[js.Any])
@scala.inline
def setOnLayoutUndefined: Self = StObject.set(x, "onLayout", js.undefined)
@scala.inline
def setOnLongPress(
value: RecursivePartial[js.UndefOr[js.Function1[SyntheticEvent[NodeHandle, NativeTouchEvent], Unit]]]
): Self = StObject.set(x, "onLongPress", value.asInstanceOf[js.Any])
@scala.inline
def setOnLongPressUndefined: Self = StObject.set(x, "onLongPress", js.undefined)
@scala.inline
def setOnMagicTap(value: RecursivePartial[js.UndefOr[js.Function0[Unit]]]): Self = StObject.set(x, "onMagicTap", value.asInstanceOf[js.Any])
@scala.inline
def setOnMagicTapUndefined: Self = StObject.set(x, "onMagicTap", js.undefined)
@scala.inline
def setOnPress(
value: RecursivePartial[js.UndefOr[js.Function1[SyntheticEvent[NodeHandle, NativeTouchEvent], Unit]]]
): Self = StObject.set(x, "onPress", value.asInstanceOf[js.Any])
@scala.inline
def setOnPressIn(
value: RecursivePartial[js.UndefOr[js.Function1[SyntheticEvent[NodeHandle, NativeTouchEvent], Unit]]]
): Self = StObject.set(x, "onPressIn", value.asInstanceOf[js.Any])
@scala.inline
def setOnPressInUndefined: Self = StObject.set(x, "onPressIn", js.undefined)
@scala.inline
def setOnPressOut(
value: RecursivePartial[js.UndefOr[js.Function1[SyntheticEvent[NodeHandle, NativeTouchEvent], Unit]]]
): Self = StObject.set(x, "onPressOut", value.asInstanceOf[js.Any])
@scala.inline
def setOnPressOutUndefined: Self = StObject.set(x, "onPressOut", js.undefined)
@scala.inline
def setOnPressUndefined: Self = StObject.set(x, "onPress", js.undefined)
@scala.inline
def setPressRetentionOffset(value: RecursivePartial[js.UndefOr[Insets]]): Self = StObject.set(x, "pressRetentionOffset", value.asInstanceOf[js.Any])
@scala.inline
def setPressRetentionOffsetUndefined: Self = StObject.set(x, "pressRetentionOffset", js.undefined)
@scala.inline
def setRaised(value: RecursivePartial[js.UndefOr[Boolean]]): Self = StObject.set(x, "raised", value.asInstanceOf[js.Any])
@scala.inline
def setRaisedUndefined: Self = StObject.set(x, "raised", js.undefined)
@scala.inline
def setStyle(value: RecursivePartial[js.UndefOr[StyleProp[ViewStyle]]]): Self = StObject.set(x, "style", value.asInstanceOf[js.Any])
@scala.inline
def setStyleUndefined: Self = StObject.set(x, "style", js.undefined)
@scala.inline
def setTestID(value: RecursivePartial[js.UndefOr[String]]): Self = StObject.set(x, "testID", value.asInstanceOf[js.Any])
@scala.inline
def setTestIDUndefined: Self = StObject.set(x, "testID", js.undefined)
@scala.inline
def setTitle(value: RecursivePartial[js.UndefOr[String]]): Self = StObject.set(x, "title", value.asInstanceOf[js.Any])
@scala.inline
def setTitleProps(value: RecursivePartial[js.UndefOr[TextProperties]]): Self = StObject.set(x, "titleProps", value.asInstanceOf[js.Any])
@scala.inline
def setTitlePropsUndefined: Self = StObject.set(x, "titleProps", js.undefined)
@scala.inline
def setTitleStyle(value: RecursivePartial[js.UndefOr[StyleProp[TextStyle]]]): Self = StObject.set(x, "titleStyle", value.asInstanceOf[js.Any])
@scala.inline
def setTitleStyleUndefined: Self = StObject.set(x, "titleStyle", js.undefined)
@scala.inline
def setTitleUndefined: Self = StObject.set(x, "title", js.undefined)
@scala.inline
def setTouchSoundDisabled(value: RecursivePartial[js.UndefOr[Boolean | Null]]): Self = StObject.set(x, "touchSoundDisabled", value.asInstanceOf[js.Any])
@scala.inline
def setTouchSoundDisabledUndefined: Self = StObject.set(x, "touchSoundDisabled", js.undefined)
@scala.inline
def setTouchableComponent(value: RecursivePartial[js.UndefOr[ReactComponentClass[js.Object]]]): Self = StObject.set(x, "TouchableComponent", value.asInstanceOf[js.Any])
@scala.inline
def setTouchableComponentUndefined: Self = StObject.set(x, "TouchableComponent", js.undefined)
@scala.inline
def setTvParallaxProperties(value: RecursivePartial[js.UndefOr[TVParallaxProperties]]): Self = StObject.set(x, "tvParallaxProperties", value.asInstanceOf[js.Any])
@scala.inline
def setTvParallaxPropertiesUndefined: Self = StObject.set(x, "tvParallaxProperties", js.undefined)
@scala.inline
def setType(value: RecursivePartial[js.UndefOr[solid | clear | outline]]): Self = StObject.set(x, "type", value.asInstanceOf[js.Any])
@scala.inline
def setTypeUndefined: Self = StObject.set(x, "type", js.undefined)
@scala.inline
def setUseForeground(value: RecursivePartial[js.UndefOr[Boolean]]): Self = StObject.set(x, "useForeground", value.asInstanceOf[js.Any])
@scala.inline
def setUseForegroundUndefined: Self = StObject.set(x, "useForeground", js.undefined)
@scala.inline
def setViewComponent(value: RecursivePartial[js.UndefOr[ReactComponentClass[_]]]): Self = StObject.set(x, "ViewComponent", value.asInstanceOf[js.Any])
@scala.inline
def setViewComponentUndefined: Self = StObject.set(x, "ViewComponent", js.undefined)
}
}
|
caffebaby/_leetcode | src/_leetcode/IQ_test/_2029/Solution.java | package _leetcode.IQ_test._2029;
/**
 * LeetCode 2029 -- Stone Game IX.
 *
 * Alice and Bob alternately remove stones (Alice first). A player loses if,
 * after their removal, the total value of all removed stones is divisible by
 * 3; if the stones run out without that happening, Bob wins. Both play
 * optimally.
 */
public class Solution {
    /**
     * Decides the game purely from the residue-class counts mod 3.
     *
     * Reasoning: ignoring multiples of 3, any legal removal sequence must
     * look like 1,1,2,1,2,... or 2,2,1,2,1,...; multiples of 3 can be
     * inserted at any non-initial position and only flip whose turn it is.
     * With an even count of multiples of 3, Alice wins exactly when both
     * other residue classes are non-empty. With an odd count, she wins only
     * when the two class sizes differ by more than 2.
     *
     * @param stones values of the stones on the board
     * @return true when Alice wins with optimal play, false when Bob wins
     */
    public boolean stoneGameIX(int[] stones) {
        int[] countByResidue = new int[3];
        for (int value : stones) {
            countByResidue[value % 3]++;
        }
        int ones = countByResidue[1];
        int twos = countByResidue[2];
        if (countByResidue[0] % 2 == 0) {
            return ones > 0 && twos > 0;
        }
        // Odd number of multiples of 3: |ones - twos| > 2 is equivalent to
        // the original (ones - 2 > twos || twos - 2 > ones).
        return Math.abs(ones - twos) > 2;
    }
}
|
JackEasons/esl | test/moduleconf/cat.js | define( ['module'], function ( module ) {
return {
name: 'moduleconf/cat',
check: function () {
return module.config().desc === 'cat';
}
};
} ); |
KASTKING/KASTKING-CLOUD1 | kastking-salesforecast-server/src/main/java/com/kastking/salesforcastInfo/mapper/SalesforcastInfoMapper.java | package com.kastking.salesforcastInfo.mapper;
import com.kastking.salesforcastInfo.domain.SalesforcastDemanda;
import com.kastking.salesforcastInfo.domain.SalesforcastInfo;
import org.apache.ibatis.annotations.Param;
import java.util.Date;
import java.util.List;
/**
 * MyBatis mapper for sales forecast records (SalesforcastInfo).
 *
 * @author Michael
 * @date 2020-02-14
 */
public interface SalesforcastInfoMapper {

    /**
     * Review (approve) or un-review a sales forecast record.
     *
     * @param anti non-zero to revert (un-review) a previous approval
     * @param salesforcastInfoId id of the record to (un-)review
     * @param person name of the reviewer recorded on the row
     * @return number of rows affected
     */
    int review(@Param("anti") int anti, @Param("salesforcastInfoId") Integer salesforcastInfoId, @Param("person") String person);

    /**
     * Sales forecast revenue report.
     *
     * @param salesforcastDemanda filter/example object for the report query
     * @return matching report rows
     */
    List<SalesforcastDemanda> listSalesforcastSales(SalesforcastDemanda salesforcastDemanda);

    /**
     * Sales forecast demand-quantity report.
     *
     * @param salesforcastDemanda filter/example object for the report query
     * @return matching report rows
     */
    List<SalesforcastDemanda> listSalesforcastDemand(SalesforcastDemanda salesforcastDemanda);

    /**
     * Generate sales forecast rows for the given period.
     *
     * @param period forecast period to generate
     * @return number of rows affected
     */
    public int generateSalesEstimates(Date period);

    /**
     * Look up a single sales forecast record by id.
     *
     * @param salesforcastInfoId id of the sales forecast record
     * @return the record, or null when not found
     */
    public SalesforcastInfo selectSalesforcastInfoById(Long salesforcastInfoId);

    /**
     * Query sales forecast records matching the given example object.
     *
     * @param salesforcastInfo example object whose non-null fields filter the query
     * @return matching records
     */
    public List<SalesforcastInfo> selectSalesforcastInfoList(SalesforcastInfo salesforcastInfo);

    /**
     * Insert a new sales forecast record.
     *
     * @param salesforcastInfo record to insert
     * @return number of rows affected
     */
    public int insertSalesforcastInfo(SalesforcastInfo salesforcastInfo);

    /**
     * Update an existing sales forecast record.
     *
     * @param salesforcastInfo record to update
     * @return number of rows affected
     */
    public int updateSalesforcastInfo(SalesforcastInfo salesforcastInfo);

    /**
     * Delete a sales forecast record by id.
     *
     * @param salesforcastInfoId id of the record to delete
     * @return number of rows affected
     */
    public int deleteSalesforcastInfoById(Long salesforcastInfoId);

    /**
     * Batch-delete sales forecast records.
     *
     * @param salesforcastInfoIds ids of the records to delete
     * @return number of rows affected
     */
    public int deleteSalesforcastInfoByIds(String[] salesforcastInfoIds);
}
|
katka-h/google-ads-java | google-ads/src/main/java/com/google/ads/googleads/v2/resources/CampaignCriterionSimulationProto.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v2/resources/campaign_criterion_simulation.proto
package com.google.ads.googleads.v2.resources;
/**
 * Generated by the protocol buffer compiler (see file header) from
 * google/ads/googleads/v2/resources/campaign_criterion_simulation.proto.
 * Holds the file descriptor and reflection tables for the
 * CampaignCriterionSimulation resource message. Do not edit by hand;
 * regenerate from the .proto instead.
 */
public final class CampaignCriterionSimulationProto {
  // Not instantiable: static descriptor holder only.
  private CampaignCriterionSimulationProto() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistryLite registry) {
  }

  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
    registerAllExtensions(
        (com.google.protobuf.ExtensionRegistryLite) registry);
  }
  // Reflection descriptor and field-accessor table for the
  // CampaignCriterionSimulation message; populated by the static
  // initializer below.
  static final com.google.protobuf.Descriptors.Descriptor
    internal_static_google_ads_googleads_v2_resources_CampaignCriterionSimulation_descriptor;
  static final
    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_ads_googleads_v2_resources_CampaignCriterionSimulation_fieldAccessorTable;

  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  // Builds the file descriptor from the serialized proto below, registers
  // the custom options (field_behavior / resource) so they are readable via
  // reflection, and forces initialization of dependency descriptors.
  static {
    java.lang.String[] descriptorData = {
      "\nEgoogle/ads/googleads/v2/resources/camp" +
      "aign_criterion_simulation.proto\022!google." +
      "ads.googleads.v2.resources\032/google/ads/g" +
      "oogleads/v2/common/simulation.proto\032Bgoo" +
      "gle/ads/googleads/v2/enums/simulation_mo" +
      "dification_method.proto\0323google/ads/goog" +
      "leads/v2/enums/simulation_type.proto\032\037go" +
      "ogle/api/field_behavior.proto\032\031google/ap" +
      "i/resource.proto\032\036google/protobuf/wrappe" +
      "rs.proto\032\034google/api/annotations.proto\"\250" +
      "\006\n\033CampaignCriterionSimulation\022S\n\rresour" +
      "ce_name\030\001 \001(\tB<\340A\003\372A6\n4googleads.googlea" +
      "pis.com/CampaignCriterionSimulation\0225\n\013c" +
      "ampaign_id\030\002 \001(\0132\033.google.protobuf.Int64" +
      "ValueB\003\340A\003\0226\n\014criterion_id\030\003 \001(\0132\033.googl" +
      "e.protobuf.Int64ValueB\003\340A\003\022S\n\004type\030\004 \001(\016" +
      "2@.google.ads.googleads.v2.enums.Simulat" +
      "ionTypeEnum.SimulationTypeB\003\340A\003\022~\n\023modif" +
      "ication_method\030\005 \001(\0162\\.google.ads.google" +
      "ads.v2.enums.SimulationModificationMetho" +
      "dEnum.SimulationModificationMethodB\003\340A\003\022" +
      "5\n\nstart_date\030\006 \001(\0132\034.google.protobuf.St" +
      "ringValueB\003\340A\003\0223\n\010end_date\030\007 \001(\0132\034.googl" +
      "e.protobuf.StringValueB\003\340A\003\022f\n\027bid_modif" +
      "ier_point_list\030\010 \001(\0132>.google.ads.google" +
      "ads.v2.common.BidModifierSimulationPoint" +
      "ListB\003\340A\003H\000:\215\001\352A\211\001\n4googleads.googleapis" +
      ".com/CampaignCriterionSimulation\022Qcustom" +
      "ers/{customer}/campaignCriterionSimulati" +
      "ons/{campaign_criterion_simulation}B\014\n\np" +
      "oint_listB\215\002\n%com.google.ads.googleads.v" +
      "2.resourcesB CampaignCriterionSimulation" +
      "ProtoP\001ZJgoogle.golang.org/genproto/goog" +
      "leapis/ads/googleads/v2/resources;resour" +
      "ces\242\002\003GAA\252\002!Google.Ads.GoogleAds.V2.Reso" +
      "urces\312\002!Google\\Ads\\GoogleAds\\V2\\Resource" +
      "s\352\002%Google::Ads::GoogleAds::V2::Resource" +
      "sb\006proto3"
    };
    descriptor = com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
          com.google.ads.googleads.v2.common.SimulationProto.getDescriptor(),
          com.google.ads.googleads.v2.enums.SimulationModificationMethodProto.getDescriptor(),
          com.google.ads.googleads.v2.enums.SimulationTypeProto.getDescriptor(),
          com.google.api.FieldBehaviorProto.getDescriptor(),
          com.google.api.ResourceProto.getDescriptor(),
          com.google.protobuf.WrappersProto.getDescriptor(),
          com.google.api.AnnotationsProto.getDescriptor(),
        });
    internal_static_google_ads_googleads_v2_resources_CampaignCriterionSimulation_descriptor =
      getDescriptor().getMessageTypes().get(0);
    internal_static_google_ads_googleads_v2_resources_CampaignCriterionSimulation_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_google_ads_googleads_v2_resources_CampaignCriterionSimulation_descriptor,
        new java.lang.String[] { "ResourceName", "CampaignId", "CriterionId", "Type", "ModificationMethod", "StartDate", "EndDate", "BidModifierPointList", "PointList", });
    com.google.protobuf.ExtensionRegistry registry =
      com.google.protobuf.ExtensionRegistry.newInstance();
    registry.add(com.google.api.FieldBehaviorProto.fieldBehavior);
    registry.add(com.google.api.ResourceProto.resource);
    registry.add(com.google.api.ResourceProto.resourceReference);
    com.google.protobuf.Descriptors.FileDescriptor
      .internalUpdateFileDescriptor(descriptor, registry);
    com.google.ads.googleads.v2.common.SimulationProto.getDescriptor();
    com.google.ads.googleads.v2.enums.SimulationModificationMethodProto.getDescriptor();
    com.google.ads.googleads.v2.enums.SimulationTypeProto.getDescriptor();
    com.google.api.FieldBehaviorProto.getDescriptor();
    com.google.api.ResourceProto.getDescriptor();
    com.google.protobuf.WrappersProto.getDescriptor();
    com.google.api.AnnotationsProto.getDescriptor();
  }

  // @@protoc_insertion_point(outer_class_scope)
}
|
SenseException/instantsearch.js | src/components/Pagination/__tests__/Pagination-test.js | <gh_stars>1-10
import React from 'react';
import sinon from 'sinon';
import { RawPagination as Pagination } from '../Pagination';
import Paginator from '../../../connectors/pagination/Paginator';
import renderer from 'react-test-renderer';
describe('Pagination', () => {
const pager = new Paginator({
currentPage: 0,
total: 20,
padding: 3,
});
const defaultProps = {
cssClasses: {
root: 'root',
item: 'item',
page: 'page',
previous: 'previous',
next: 'next',
first: 'first',
last: 'last',
active: 'active',
disabled: 'disabled',
},
createURL: (...args) => JSON.stringify(args),
labels: { first: '', last: '', next: '', previous: '' },
currentPage: 0,
nbHits: 200,
pages: pager.pages(),
isFirstPage: pager.isFirstPage(),
isLastPage: pager.isLastPage(),
nbPages: 20,
padding: 3,
setCurrentPage: () => {},
};
it('should render five elements', () => {
const tree = renderer.create(<Pagination {...defaultProps} />).toJSON();
expect(tree).toMatchSnapshot();
});
it('should display the first/last link', () => {
const tree = renderer
.create(<Pagination {...defaultProps} showFirstLast />)
.toJSON();
expect(tree).toMatchSnapshot();
});
it('should disable last page if already on it', () => {
const tree = renderer
.create(
<Pagination
{...defaultProps}
showFirstLast
pages={[13, 14, 15, 16, 17, 18, 19]}
currentPage={19}
isFirstPage={false}
isLastPage={true}
/>
)
.toJSON();
expect(tree).toMatchSnapshot();
});
it('should handle special clicks', () => {
const props = {
setCurrentPage: sinon.spy(),
};
const preventDefault = sinon.spy();
const component = new Pagination(props);
['ctrlKey', 'shiftKey', 'altKey', 'metaKey'].forEach(e => {
const event = { preventDefault };
event[e] = true;
component.handleClick(42, event);
expect(props.setCurrentPage.called).toBe(
false,
'setCurrentPage never called'
);
expect(preventDefault.called).toBe(false, 'preventDefault never called');
});
component.handleClick(42, { preventDefault });
expect(props.setCurrentPage.calledOnce).toBe(
true,
'setCurrentPage called once'
);
expect(preventDefault.calledOnce).toBe(true, 'preventDefault called once');
});
it('should have all buttons disabled if there are no results', () => {
const tree = renderer
.create(
<Pagination
{...defaultProps}
showFirstLast
currentPage={0}
nbHits={0}
nbPages={0}
pages={[0]}
/>
)
.toJSON();
expect(tree).toMatchSnapshot();
});
});
|
Kermit95/Android_Media | aacencoder/src/main/java/com/todoroo/aacenc/Main.java | <reponame>Kermit95/Android_Media<filename>aacencoder/src/main/java/com/todoroo/aacenc/Main.java
package com.todoroo.aacenc;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import android.app.Activity;
import android.app.ProgressDialog;
import android.content.DialogInterface;
import android.content.DialogInterface.OnCancelListener;
import android.content.Intent;
import android.media.MediaPlayer;
import android.os.Bundle;
import android.speech.RecognitionListener;
import android.speech.RecognizerIntent;
import android.speech.SpeechRecognizer;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.TextView;
import android.widget.Toast;
public class Main extends Activity implements RecognitionListener {
private String AAC_FILE;
private String M4A_FILE = "/sdcard/audio.m4a";
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
File dir = getFilesDir();
AAC_FILE = dir.toString() + "/audio.aac";
findViewById(R.id.write).setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
write();
}
});
findViewById(R.id.play).setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
play();
}
});
sr = SpeechRecognizer.createSpeechRecognizer(this);
}
private void play() {
MediaPlayer mediaPlayer = new MediaPlayer();
try {
mediaPlayer.setDataSource(M4A_FILE);
mediaPlayer.prepare();
mediaPlayer.start();
} catch (IllegalArgumentException e) {
throw new RuntimeException(e);
} catch (IllegalStateException e) {
throw new RuntimeException(e);
} catch (IOException e) {
throw new RuntimeException(e);
}
Toast.makeText(Main.this, "Playing Audio", Toast.LENGTH_LONG).show();
}
private AACEncoder encoder = new AACEncoder();
private long speechStarted = 0;
private SpeechRecognizer sr;
private ProgressDialog pd;
private void write() {
sr.setRecognitionListener(this);
Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, "com.domain.app");
speechStarted = 0;
baos.reset();
pd = new ProgressDialog(this);
pd.setMessage("Speak now...");
pd.setIndeterminate(true);
pd.setCancelable(true);
pd.setOnCancelListener(new OnCancelListener() {
@Override
public void onCancel(DialogInterface dialog) {
sr.cancel();
onEndOfSpeech();
}
});
pd.show();
sr.startListening(intent);
speechStarted = System.currentTimeMillis();
}
@Override
public void onBeginningOfSpeech() {
System.err.println("beginning");
}
ByteArrayOutputStream baos = new ByteArrayOutputStream();
@Override
public void onBufferReceived(byte[] buffer) {
if(speechStarted > 0) {
try {
baos.write(buffer);
} catch (IOException e) {
//
}
}
}
@Override
protected void onStop() {
super.onStop();
sr.destroy();
}
@Override
public void onEndOfSpeech() {
pd.dismiss();
if(speechStarted == 0)
return;
long delta = System.currentTimeMillis() - speechStarted;
int sampleRate = (int) (baos.size() * 1000 / delta);
sampleRate = 8000; // THIS IS A MAGIC NUMBER@?!!?!?!
// can i has calculate?
System.err.println("computed sample rate: " + sampleRate);
encoder.init(64000, 1, sampleRate, 16, AAC_FILE);
encoder.encode(baos.toByteArray());
System.err.println("end");
encoder.uninit();
try {
new AACToM4A().convert(this, AAC_FILE, M4A_FILE);
Toast.makeText(Main.this, "File Saved!", Toast.LENGTH_LONG).show();
} catch (IOException e) {
Toast.makeText(Main.this, "Error :(", Toast.LENGTH_LONG).show();
Log.e("ERROR", "error converting", e);
}
}
@Override
public void onError(int error) {
Log.w("Speech Error", "Error code: " + error);
}
@Override
public void onEvent(int arg0, Bundle arg1) {
//
}
@Override
public void onPartialResults(Bundle partialResults) {
onResults(partialResults);
}
@Override
public void onReadyForSpeech(Bundle arg0) {
}
@Override
public void onResults(Bundle results) {
ArrayList<String> strings = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
((TextView)findViewById(R.id.text)).setText(
strings.size() == 0 ? "" : strings.get(0));
}
@Override
public void onRmsChanged(float arg0) {
}
} |
hungptit/ioutils | benchmark/read_data.cpp | <reponame>hungptit/ioutils
#include <fstream>
#include <iostream>
#include "boost_memmap.hpp"
#include "celero/Celero.h"
#include "fmt/format.h"
#include "ioutils.hpp"
#include <iostream>
// #include "utils/memchr.hpp"
#include "experiments.hpp"
#include "linestats.hpp"
namespace test {
    constexpr char EOL = '\n';

    // Read a whole file into a Container (e.g. std::string) via iostreams,
    // reserving the final size up front to avoid reallocation.
    template <typename Container> Container read_iostream(const std::string &afile) {
        std::ifstream t(afile);
        Container str;
        t.seekg(0, std::ios::end);
        str.reserve(t.tellg());
        t.seekg(0, std::ios::beg);
        str.assign((std::istreambuf_iterator<char>(t)), std::istreambuf_iterator<char>());
        return str;
    }

    // Count newline characters using buffered iostream reads.
    size_t iostream_linestats(const std::string &afile) {
        std::ifstream t(afile);
        size_t lines = 0;
        std::for_each(std::istreambuf_iterator<char>(t), std::istreambuf_iterator<char>(), [&lines](auto const item) {
            if (item == EOL) ++lines;
        });
        return lines;
    }

    // Count newline characters over a boost memory-mapped view of the file.
    size_t memmap_linestats(const std::string &afile) {
        boost::iostreams::mapped_file mmap(afile, boost::iostreams::mapped_file::readonly);
        auto begin = mmap.const_data();
        auto end = begin + mmap.size();
        size_t lines = 0; // BUG FIX: was uninitialized, yielding garbage counts.
        std::for_each(begin, end, [&lines](auto const item) {
            if (item == EOL) ++lines;
        });
        return lines;
    }
} // namespace test
// Benchmark configuration: small sample/operation counts keep runs short.
const int number_of_samples = 20;
const int number_of_operations = 2;
// Input file shared by all benchmarks; must exist in the working directory.
const std::string afile("3200.txt");

CELERO_MAIN

// --- Whole-file read benchmarks -------------------------------------------
// Baseline: slurp the file with std::ifstream; then boost memory mapping and
// a StreamReader buffer-size sweep from 2^10 to 2^20 bytes.

BASELINE(read, iostream, number_of_samples, number_of_operations) {
    celero::DoNotOptimizeAway(test::read_iostream<std::string>(afile));
}

BENCHMARK(read, boost_memmap, number_of_samples, number_of_operations) {
    celero::DoNotOptimizeAway(ioutils::read_memmap<std::string>(afile));
}

BENCHMARK(read, read_2_10, number_of_samples, number_of_operations) {
    using Reader = ioutils::StreamReader<ioutils::AppendPolicy<std::string>, 1 << 10>;
    Reader reader;
    reader(afile.c_str());
}

BENCHMARK(read, read_2_11, number_of_samples, number_of_operations) {
    using Reader = ioutils::StreamReader<ioutils::AppendPolicy<std::string>, 1 << 11>;
    Reader reader;
    reader(afile.c_str());
}

BENCHMARK(read, read_2_12, number_of_samples, number_of_operations) {
    using Reader = ioutils::StreamReader<ioutils::AppendPolicy<std::string>, 1 << 12>;
    Reader reader;
    reader(afile.c_str());
}

BENCHMARK(read, read_2_13, number_of_samples, number_of_operations) {
    using Reader = ioutils::StreamReader<ioutils::AppendPolicy<std::string>, 1 << 13>;
    Reader reader;
    reader(afile.c_str());
}

BENCHMARK(read, read_2_14, number_of_samples, number_of_operations) {
    using Reader = ioutils::StreamReader<ioutils::AppendPolicy<std::string>, 1 << 14>;
    Reader reader;
    reader(afile.c_str());
}

BENCHMARK(read, read_2_15, number_of_samples, number_of_operations) {
    using Reader = ioutils::StreamReader<ioutils::AppendPolicy<std::string>, 1 << 15>;
    Reader reader;
    reader(afile.c_str());
}

BENCHMARK(read, read_2_16, number_of_samples, number_of_operations) {
    using Reader = ioutils::StreamReader<ioutils::AppendPolicy<std::string>, 1 << 16>;
    Reader reader;
    reader(afile.c_str());
}

BENCHMARK(read, read_2_17, number_of_samples, number_of_operations) {
    using Reader = ioutils::StreamReader<ioutils::AppendPolicy<std::string>, 1 << 17>;
    Reader reader;
    reader(afile.c_str());
}

BENCHMARK(read, read_2_18, number_of_samples, number_of_operations) {
    using Reader = ioutils::StreamReader<ioutils::AppendPolicy<std::string>, 1 << 18>;
    Reader reader;
    reader(afile.c_str());
}

BENCHMARK(read, read_2_19, number_of_samples, number_of_operations) {
    using Reader = ioutils::StreamReader<ioutils::AppendPolicy<std::string>, 1 << 19>;
    Reader reader;
    reader(afile.c_str());
}

BENCHMARK(read, read_2_20, number_of_samples, number_of_operations) {
    using Reader = ioutils::StreamReader<ioutils::AppendPolicy<std::string>, 1 << 20>;
    Reader reader;
    reader(afile.c_str());
}

// Same buffer size (2^17), different output containers.
BENCHMARK(read, read_2_17_deque, number_of_samples, number_of_operations) {
    using Reader = ioutils::StreamReader<ioutils::AppendPolicy<std::deque<std::string>>, 1 << 17>;
    Reader reader;
    reader(afile.c_str());
}

BENCHMARK(read, read_2_17_vector, number_of_samples, number_of_operations) {
    using Reader = ioutils::StreamReader<ioutils::AppendPolicy<std::vector<std::string>>, 1 << 17>;
    Reader reader;
    reader(afile.c_str());
}

// --- Read-and-process (line counting) benchmarks --------------------------

BASELINE(linestats, iostream_linestats, number_of_samples, number_of_operations) {
    celero::DoNotOptimizeAway(test::iostream_linestats(afile));
    // std::cout << test::iostream_linestats(afile) << "\n";
}

BENCHMARK(linestats, memmap_linestats, number_of_samples, number_of_operations) {
    celero::DoNotOptimizeAway(test::memmap_linestats(afile));
    // std::cout << test::memmap_linestats(afile) << "\n";
}

using LineStatsStd = typename ioutils::experiments::LineStats_std<ioutils::experiments::LineStatsBase>;
using LineStats = typename ioutils::experiments::LineStats<ioutils::experiments::LineStatsBase>;

BENCHMARK(linestats, linestats_2_12, number_of_samples, number_of_operations) {
    using FastLineStats = ioutils::StreamReader<LineStatsStd, 1 << 12>;
    FastLineStats linestats;
    linestats(afile.c_str());
}

// NOTE(review): this case uses LineStats while the rest of the sweep uses
// LineStatsStd -- looks inconsistent; confirm whether that is intentional.
BENCHMARK(linestats, linestats_2_13, number_of_samples, number_of_operations) {
    using FastLineStats = ioutils::StreamReader<LineStats, 1 << 13>;
    FastLineStats linestats;
    linestats(afile.c_str());
}

BENCHMARK(linestats, linestats_2_14, number_of_samples, number_of_operations) {
    using FastLineStats = ioutils::StreamReader<LineStatsStd, 1 << 14>;
    FastLineStats linestats;
    linestats(afile.c_str());
}

BENCHMARK(linestats, linestats_2_15, number_of_samples, number_of_operations) {
    using FastLineStats = ioutils::StreamReader<LineStatsStd, 1 << 15>;
    FastLineStats linestats;
    linestats(afile.c_str());
}

BENCHMARK(linestats, linestats_2_16, number_of_samples, number_of_operations) {
    using FastLineStats = ioutils::StreamReader<LineStatsStd, 1 << 16>;
    FastLineStats linestats;
    linestats(afile.c_str());
}

BENCHMARK(linestats, linestats_2_17, number_of_samples, number_of_operations) {
    using FastLineStats = ioutils::StreamReader<LineStatsStd, 1 << 17>;
    FastLineStats linestats;
    linestats(afile.c_str());
}

BENCHMARK(linestats, linestats_2_18, number_of_samples, number_of_operations) {
    using FastLineStats = ioutils::StreamReader<LineStatsStd, 1 << 18>;
    FastLineStats linestats;
    linestats(afile.c_str());
}

BENCHMARK(linestats, linestats_2_19, number_of_samples, number_of_operations) {
    using FastLineStats = ioutils::StreamReader<LineStatsStd, 1 << 19>;
    FastLineStats linestats;
    linestats(afile.c_str());
}

BENCHMARK(linestats, linestats_2_20, number_of_samples, number_of_operations) {
    using FastLineStats = ioutils::StreamReader<LineStatsStd, 1 << 20>;
    FastLineStats linestats;
    linestats(afile.c_str());
}

BENCHMARK(linestats, memchr, number_of_samples, number_of_operations) {
    using Reader = ioutils::StreamReader<LineStats, 1 << 17>;
    Reader linestats;
    linestats(afile.c_str());
}

BENCHMARK(linestats, filestats, number_of_samples, number_of_operations) {
    using Reader = ioutils::StreamReader<ioutils::FileStats, 1 << 17>;
    Reader linestats;
    linestats(afile.c_str());
}
|
atish3/mig-website | mig_main/migrations/0014_auto_20160103_1812.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('mig_main', '0013_auto_20150927_1121'),
]
operations = [
migrations.AddField(
model_name='officerposition',
name='is_elected',
field=models.BooleanField(default=True),
preserve_default=True,
),
migrations.AddField(
model_name='officerposition',
name='term_length',
field=models.CharField(default=b'S', max_length=1, choices=[(b'S', b'Semester'), (b'A', b'Academic Year'), (b'C', b'Calendar year')]),
preserve_default=True,
),
]
|
christophe-rannou/ovh-api-services | src/api/cloud/price/cloud-price.service.js | <gh_stars>1-10
// This file is deprecated
angular.module('ovh-api-services').service('OvhApiCloudPrice', ($injector) => {
  // Thin indirection kept for backwards compatibility: callers resolve the
  // versioned service through .v6() instead of injecting it directly.
  return {
    v6() {
      return $injector.get('OvhApiCloudPriceV6');
    },
  };
});
|
ximingxing/Close-Look-Java | Code/1345-Jump-Game-IV/src/Solution.java | <reponame>ximingxing/Close-Look-Java<filename>Code/1345-Jump-Game-IV/src/Solution.java
import java.util.*;
/**
 * LeetCode 1345 -- Jump Game IV.
 *
 * From index i you may jump to i - 1, i + 1, or any index j with
 * arr[j] == arr[i]. Every move costs 1, so a breadth-first search over this
 * implicit graph yields the minimum number of jumps from index 0 to the
 * last index.
 */
public class Solution {
    /**
     * BFS shortest path over the implicit jump graph.
     *
     * Key optimisation: once any index of a value has been expanded, every
     * index holding that value has been enqueued, so the value's bucket is
     * dropped from the map -- revisiting it could only yield longer paths.
     *
     * @param arr the board values
     * @return fewest jumps needed to reach index arr.length - 1
     */
    public int minJumps(int[] arr) {
        final int n = arr.length;
        // value -> every index holding that value
        HashMap<Integer, List<Integer>> indicesByValue = new HashMap<>();
        // dist[i]: BFS distance from index 0; -1 marks "not yet reached"
        int[] dist = new int[n];
        for (int i = 0; i < n; i++) {
            indicesByValue.computeIfAbsent(arr[i], k -> new ArrayList<>()).add(i);
            dist[i] = -1;
        }

        Queue<Integer> frontier = new LinkedList<>();
        frontier.offer(0);
        dist[0] = 0;
        while (dist[n - 1] < 0) {
            int cur = frontier.poll();
            // step left
            if (cur > 0 && dist[cur - 1] < 0) {
                dist[cur - 1] = dist[cur] + 1;
                frontier.offer(cur - 1);
            }
            // step right
            if (cur + 1 < n && dist[cur + 1] < 0) {
                dist[cur + 1] = dist[cur] + 1;
                frontier.offer(cur + 1);
            }
            // teleport to all equal values, then retire this value's bucket
            List<Integer> sameValue = indicesByValue.remove(arr[cur]);
            if (sameValue != null) {
                for (int next : sameValue) {
                    if (dist[next] < 0) {
                        dist[next] = dist[cur] + 1; // note: a single jump
                        frontier.offer(next);
                    }
                }
            }
        }
        return dist[n - 1];
    }

    public static void main(String[] args) {
        int[] arr = new int[]{6, 1, 9};
        System.out.println(new Solution().minJumps(arr));
    }
}
|
mfkiwl/embox | project/pjsip/cmds/sip-nuklear/nuklear_rawfb_with_file_font.h | #ifndef NK_RAWFB_FILE_FONT_H_
#define NK_RAWFB_FILE_FONT_H_
/*
* It's analog of nk_rawfb_init() from nuklear_rawfb.h,
* but with support of external font.
*/
struct nk_font *rawfb_fonts[RAWFB_FONT_COUNT];
NK_API struct rawfb_context*
nk_rawfb_init_with_file_font(void *fb, void *tex_mem, const unsigned int w, const unsigned int h,
const unsigned int pitch, const rawfb_pl pl, const char *font_file,
float font_size[]) {
const void *tex;
struct rawfb_context *rawfb;
struct nk_font_config cfg = nk_font_config(0);
int i;
rawfb = malloc(sizeof(struct rawfb_context));
if (!rawfb) {
return NULL;
}
if (font_size[0] == 0) {
perror("No fonts passed\n");
return NULL;
}
NK_MEMSET(rawfb, 0, sizeof(struct rawfb_context));
rawfb->font_tex.pixels = tex_mem;
rawfb->font_tex.format = NK_FONT_ATLAS_ALPHA8;
rawfb->font_tex.w = rawfb->font_tex.h = 0;
rawfb->fb.pixels = fb;
rawfb->fb.w= w;
rawfb->fb.h = h;
rawfb->fb.pl = pl;
if (pl == PIXEL_LAYOUT_RGBX_8888 || pl == PIXEL_LAYOUT_XRGB_8888) {
rawfb->fb.format = NK_FONT_ATLAS_RGBA32;
rawfb->fb.pitch = pitch;
} else {
perror("nk_rawfb_init(): Unsupported pixel layout.\n");
free(rawfb);
return NULL;
}
nk_font_atlas_init_default(&rawfb->atlas);
nk_font_atlas_begin(&rawfb->atlas);
for (i = 0; i < RAWFB_FONT_COUNT; i++) {
if (!font_size[i]) {
break;
}
rawfb_fonts[i] = nk_font_atlas_add_from_file(&rawfb->atlas, font_file, font_size[i], &cfg);
}
tex = nk_font_atlas_bake(&rawfb->atlas, &rawfb->font_tex.w, &rawfb->font_tex.h, rawfb->font_tex.format);
if (!tex) {
free(rawfb);
return NULL;
}
if (0 == nk_init_default(&rawfb->ctx, &rawfb_fonts[0]->handle)) {
free(rawfb);
return NULL;
}
switch(rawfb->font_tex.format) {
case NK_FONT_ATLAS_ALPHA8:
rawfb->font_tex.pitch = rawfb->font_tex.w * 1;
break;
case NK_FONT_ATLAS_RGBA32:
rawfb->font_tex.pitch = rawfb->font_tex.w * 4;
break;
};
/* Store the font texture in tex scratch memory */
memcpy(rawfb->font_tex.pixels, tex, rawfb->font_tex.pitch * rawfb->font_tex.h);
nk_font_atlas_end(&rawfb->atlas, nk_handle_ptr(NULL), NULL);
if (rawfb->atlas.default_font)
nk_style_set_font(&rawfb->ctx, &rawfb->atlas.default_font->handle);
nk_style_load_all_cursors(&rawfb->ctx, rawfb->atlas.cursors);
nk_rawfb_scissor(rawfb, 0, 0, rawfb->fb.w, rawfb->fb.h);
return rawfb;
}
#endif /* NK_RAWFB_FILE_FONT_H_ */
|
GodIsWord/NewFindSecret | FindSecret/Classes/Native/protobufU2Dnet_ProtoBuf_Serializers_NetObjectSeria2339978381.h | <gh_stars>0
#pragma once
#include "il2cpp-config.h"
#ifndef _MSC_VER
# include <alloca.h>
#else
# include <malloc.h>
#endif
#include <stdint.h>
#include "mscorlib_System_Object3080106164.h"
#include "protobufU2Dnet_ProtoBuf_BclHelpers_NetObjectOption1875464664.h"
// System.Type
struct Type_t;
#ifdef __clang__
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Winvalid-offsetof"
#pragma clang diagnostic ignored "-Wunused-variable"
#endif
// ProtoBuf.Serializers.NetObjectSerializer
// IL2CPP-generated native layout for the managed type above (see the file's
// il2cpp includes). Field order matters: the runtime reads member positions
// via the offsetof() helpers below, so members must not be reordered.
struct NetObjectSerializer_t2339978381 : public Il2CppObject
{
public:
	// System.Int32 ProtoBuf.Serializers.NetObjectSerializer::key
	int32_t ___key_0;
	// System.Type ProtoBuf.Serializers.NetObjectSerializer::type
	Type_t * ___type_1;
	// ProtoBuf.BclHelpers/NetObjectOptions ProtoBuf.Serializers.NetObjectSerializer::options
	uint8_t ___options_2;

public:
	// Offset/get/set accessor trio for the 'key' field.
	inline static int32_t get_offset_of_key_0() { return static_cast<int32_t>(offsetof(NetObjectSerializer_t2339978381, ___key_0)); }
	inline int32_t get_key_0() const { return ___key_0; }
	inline int32_t* get_address_of_key_0() { return &___key_0; }
	inline void set_key_0(int32_t value)
	{
		___key_0 = value;
	}

	// Offset/get/set accessor trio for the 'type' field.
	inline static int32_t get_offset_of_type_1() { return static_cast<int32_t>(offsetof(NetObjectSerializer_t2339978381, ___type_1)); }
	inline Type_t * get_type_1() const { return ___type_1; }
	inline Type_t ** get_address_of_type_1() { return &___type_1; }
	inline void set_type_1(Type_t * value)
	{
		___type_1 = value;
		// GC write barrier: this field stores a managed reference.
		Il2CppCodeGenWriteBarrier(&___type_1, value);
	}

	// Offset/get/set accessor trio for the 'options' field.
	inline static int32_t get_offset_of_options_2() { return static_cast<int32_t>(offsetof(NetObjectSerializer_t2339978381, ___options_2)); }
	inline uint8_t get_options_2() const { return ___options_2; }
	inline uint8_t* get_address_of_options_2() { return &___options_2; }
	inline void set_options_2(uint8_t value)
	{
		___options_2 = value;
	}
};
#ifdef __clang__
#pragma clang diagnostic pop
#endif
|
avarun42/micronaut-data | data-mongodb/src/test/java/io/micronaut/data/document/mongodb/repositories/MongoZoneRepository.java | <filename>data-mongodb/src/test/java/io/micronaut/data/document/mongodb/repositories/MongoZoneRepository.java<gh_stars>100-1000
package io.micronaut.data.document.mongodb.repositories;
import io.micronaut.data.mongodb.annotation.MongoRepository;
import io.micronaut.data.document.tck.repositories.ZoneRepository;
/**
 * Binds the {@link ZoneRepository} contract to the MongoDB backend via
 * {@code @MongoRepository}; no additional queries are declared here.
 */
@MongoRepository
public interface MongoZoneRepository extends ZoneRepository {
}
|
rmake/cor-engine | libraries/scripts/gen_task/mruby_interface.rb | <reponame>rmake/cor-engine
# Configure the MRuby binding generator for the cor_mruby module.
MrubyBindingGen.name "cor_mruby"
MrubyBindingGen.namespace "mruby_interface"
# Generated bindings are emitted under the cor_mruby_interface sources.
MrubyBindingGen.output_path "../cor_mruby_interface/sources/basic_bind"
# cor-engine libraries whose types are exposed to MRuby.
MrubyBindingGen.add_cor_lib_list([
  'cor_type',
  'cor_data_structure',
  'cor_algorithm',
  'cor_system',
  'cor_mruby_interface',
])
# Pull in the per-module generation details.
load "gen_info/mruby_interface.rb"
|
EnGinners/TechReborn | src/main/java/techreborn/api/fluidreplicator/FluidReplicatorRecipeCrafter.java | /*
* This file is part of TechReborn, licensed under the MIT License (MIT).
*
* Copyright (c) 2018 TechReborn
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package techreborn.api.fluidreplicator;
import net.minecraft.item.ItemStack;
import net.minecraft.tileentity.TileEntity;
import net.minecraftforge.fluids.Fluid;
import net.minecraftforge.fluids.FluidStack;
import reborncore.common.recipes.RecipeCrafter;
import reborncore.common.util.Inventory;
import reborncore.common.util.Tank;
import techreborn.api.Reference;
import techreborn.init.ModItems;
import techreborn.tiles.multiblock.TileFluidReplicator;
/**
* @author drcrazy
*
*/
public class FluidReplicatorRecipeCrafter extends RecipeCrafter {
public FluidReplicatorRecipe currentRecipe;
int ticksSinceLastChange;
/**
* RecipeCrafter for Fluid Replicator
*
* @param parentTile TileEntity Reference to the tile having this crafter
* @param inventory Inventory reference to inventory used for crafting
* @param inputSlots This is the list of the slots that the crafting logic should look for the input UU-Matter.
* @param outputSlots This is the list of slots that the crafting logic should look for output fluid
*/
public FluidReplicatorRecipeCrafter(TileEntity parentTile, Inventory inventory, int[] inputSlots, int[] outputSlots) {
super(Reference.FLUID_REPLICATOR_RECIPE, parentTile, 1, 1, inventory, inputSlots, outputSlots);
}
/**
* FluidReplicatorRecipe version of hasAllInputs
*/
private boolean hasAllInputs(FluidReplicatorRecipe recipe) {
if (recipe == null) {
return false;
}
ItemStack inputStack = inventory.getStackInSlot(inputSlots[0]);
if (!inputStack.isItemEqual(new ItemStack(ModItems.UU_MATTER))) {
return false;
}
if (inputStack.getCount() < recipe.getInput()) {
return false;
}
return true;
}
private boolean canFit(Fluid fluid, Tank tank) {
if (tank.fill(new FluidStack(fluid, Fluid.BUCKET_VOLUME), false) != Fluid.BUCKET_VOLUME) {
return false;
}
return true;
}
public void setCurrentRecipe(FluidReplicatorRecipe recipe) {
try {
this.currentRecipe = (FluidReplicatorRecipe) recipe.clone();
} catch (CloneNotSupportedException e) {
e.printStackTrace();
}
}
// RecipeCrafter
@Override
public void updateEntity() {
if (parentTile.getWorld().isRemote) {
return;
}
ticksSinceLastChange++;
// Force a has changed every second
if (ticksSinceLastChange >= 20) {
setInvDirty(true);
ticksSinceLastChange = 0;
}
// It will now look for new recipes.
if (currentRecipe == null && isInvDirty()) {
updateCurrentRecipe();
}
if(currentRecipe != null) {
// If it doesn't have all the inputs reset
if (isInvDirty() && !hasAllInputs()) {
currentRecipe = null;
currentTickTime = 0;
setIsActive();
}
// If it has reached the recipe tick time
if (currentRecipe != null && currentTickTime >= currentNeededTicks && hasAllInputs()) {
TileFluidReplicator tileFluidReplicator = (TileFluidReplicator) parentTile;
// Checks to see if it can fit the output
// And fill tank with replicated fluid
if (canFit(currentRecipe.getFluid(), tileFluidReplicator.tank) && currentRecipe.onCraft(tileFluidReplicator)) {
tileFluidReplicator.tank.fill(new FluidStack(currentRecipe.getFluid(), Fluid.BUCKET_VOLUME), true);
// This uses all the inputs
useAllInputs();
// Reset
currentRecipe = null;
currentTickTime = 0;
updateCurrentRecipe();
//Update active state if the tile isnt going to start crafting again
if(currentRecipe == null){
setIsActive();
}
}
} else if (currentRecipe != null && currentTickTime < currentNeededTicks) {
// This uses the power
if (energy.canUseEnergy(getEuPerTick(currentRecipe.getEuTick()))) {
energy.useEnergy(getEuPerTick(currentRecipe.getEuTick()));
// Increase the ticktime
currentTickTime ++;
if(currentTickTime == 1 || currentTickTime % 20 == 0 && soundHanlder != null){
soundHanlder.playSound(false, parentTile);
}
}
}
}
setInvDirty(false);
}
@Override
public void updateCurrentRecipe() {
TileFluidReplicator tileFluidReplicator = (TileFluidReplicator) parentTile;
for (FluidReplicatorRecipe recipe : FluidReplicatorRecipeList.recipes) {
if (recipe.canCraft(tileFluidReplicator) && hasAllInputs(recipe)) {
if (!canFit(recipe.getFluid(), tileFluidReplicator.tank)) {
this.currentRecipe = null;
currentTickTime = 0;
setIsActive();
return;
}
setCurrentRecipe(recipe);
currentNeededTicks = Math.max((int) (currentRecipe.getTickTime()* (1.0 - getSpeedMultiplier())), 1);
currentTickTime = 0;
setIsActive();
return;
}
}
}
@Override
public boolean hasAllInputs() {
if (this.currentRecipe == null) {
return false;
}
else {
return hasAllInputs(this.currentRecipe);
}
}
@Override
public void useAllInputs() {
if (currentRecipe == null) {
return;
}
if (hasAllInputs(currentRecipe)) {
inventory.decrStackSize(inputSlots[0], currentRecipe.getInput());
}
}
@Override
public boolean canCraftAgain() {
TileFluidReplicator tileFluidReplicator = (TileFluidReplicator) parentTile;
for (FluidReplicatorRecipe recipe : FluidReplicatorRecipeList.recipes) {
if (recipe.canCraft(tileFluidReplicator) && hasAllInputs(recipe)) {
if (!canFit(recipe.getFluid(), tileFluidReplicator.tank)) {
return false;
}
if (energy.getEnergy() < recipe.getEuTick()) {
return false;
}
return true;
}
}
return false;
}
} |
maevagrondin/max_externals | source/matrix/jit.split/jit.split.c | <reponame>maevagrondin/max_externals<filename>source/matrix/jit.split/jit.split.c
/*
jit.split
Copyright 2001-2005 - Cycling '74
<NAME> <EMAIL>
*/
#include "jit.common.h"
#include "ext_strings.h"
typedef struct _jit_split
{
t_object ob;
char splitdim;
long splitpoint;
char autoclear;
} t_jit_split;
void *_jit_split_class;
t_jit_split *jit_split_new(void);
void jit_split_free(t_jit_split *x);
t_jit_err jit_split_matrix_calc(t_jit_split *x, void *inputs, void *outputs);
t_jit_err jit_split_init(void);
/* Prototype parameter names corrected to match the definition below: the
 * first output pair precedes the second. Types are identical, so this is a
 * naming-only consistency fix, but the old order mislabeled the arguments
 * passed at the call site in jit_split_matrix_calc. */
t_jit_err jit_split_calc_out_matrix(t_jit_split *x, t_jit_matrix_info *in_minfo, void *in_matrix,
	t_jit_matrix_info *out_minfo, void *out_matrix, t_jit_matrix_info *out2_minfo, void *out2_matrix);
/*
 * Class registration for jit.split: declares the matrix operator
 * (1 input, 2 outputs) and the splitdim/splitpoint/autoclear attributes.
 */
t_jit_err jit_split_init(void)
{
	long attrflags=0;
	t_jit_object *attr;
	t_jit_object *mop;

	_jit_split_class = jit_class_new("jit_split",(method)jit_split_new,(method)jit_split_free,
		sizeof(t_jit_split),0L);

	//add mop: 1 input, 2 outputs; outputs are "nolink" so their dims are
	//not forced to track the input's (each output holds one half).
	mop = jit_object_new(_jit_sym_jit_mop,1,2);
	jit_mop_output_nolink(mop,1);
	jit_mop_output_nolink(mop,2);
	jit_class_addadornment(_jit_split_class,mop);

	//add methods
	jit_class_addmethod(_jit_split_class, (method)jit_split_matrix_calc, "matrix_calc", A_CANT, 0L);

	//add attributes
	attrflags = JIT_ATTR_GET_DEFER_LOW | JIT_ATTR_SET_USURP_LOW;

	// splitdim: which dimension the input is split along
	attr = jit_object_new(_jit_sym_jit_attr_offset,"splitdim",_jit_sym_char,attrflags,
		(method)0L,(method)0L,calcoffset(t_jit_split,splitdim));
	jit_class_addattr(_jit_split_class,attr);
	object_addattr_parse(attr,"label",_jit_sym_symbol,0,"\"Split Dim\"");

	// autoclear: clear both outputs before each calculation
	attr = jit_object_new(_jit_sym_jit_attr_offset,"autoclear",_jit_sym_char,attrflags,
		(method)0L,(method)0L,calcoffset(t_jit_split,autoclear));
	jit_class_addattr(_jit_split_class,attr);
	object_addattr_parse(attr,"label",_jit_sym_symbol,0,"\"Auto Clear\"");

	// splitpoint: cell index along splitdim where the input is divided
	attr = jit_object_new(_jit_sym_jit_attr_offset,"splitpoint",_jit_sym_long,attrflags,
		(method)0L,(method)0L,calcoffset(t_jit_split,splitpoint));
	jit_class_addattr(_jit_split_class,attr);
	object_addattr_parse(attr,"label",_jit_sym_symbol,0,"\"Split Point\"");

	// register the finished class with Jitter
	jit_class_register(_jit_split_class);

	return JIT_ERR_NONE;
}
/*
 * matrix_calc: copies the input matrix into two outputs, divided at
 * `splitpoint` along dimension `splitdim`. Output 1 receives cells
 * [0, splitpoint-1] along that dim, output 2 receives the remainder;
 * all other dims are copied whole.
 */
t_jit_err jit_split_matrix_calc(t_jit_split *x, void *inputs, void *outputs)
{
	t_jit_err err=JIT_ERR_NONE;
	t_jit_matrix_info in_minfo,out_minfo,out2_minfo;
	long i;
	t_matrix_conv_info conv, conv2;
	char splitdim = CLAMP(x->splitdim,0,JIT_MATRIX_MAX_DIMCOUNT);
	long splitpoint = x->splitpoint;
	t_jit_matrix_info *a_minfo,*b_minfo;
	void *a_matrix,*b_matrix;
	void *in_matrix, *out_matrix, *out2_matrix;

	in_matrix = jit_object_method(inputs,_jit_sym_getindex,0);
	out_matrix = jit_object_method(outputs,_jit_sym_getindex,0);
	out2_matrix = jit_object_method(outputs,_jit_sym_getindex,1);

	if (x && in_matrix && out_matrix && out2_matrix) {
		jit_object_method(in_matrix,_jit_sym_getinfo,&in_minfo);
		jit_object_method(out_matrix,_jit_sym_getinfo,&out_minfo);
		jit_object_method(out2_matrix,_jit_sym_getinfo,&out2_minfo);

		// Resize/retype both outputs so each can receive its half of the input.
		if (jit_split_calc_out_matrix(x, &in_minfo, in_matrix, &out_minfo, out_matrix, &out2_minfo, out2_matrix)) {
			err=JIT_ERR_GENERIC;
			goto out;
		}

		//double check
		//compatible types?
		// NOTE(review): this only errors when *both* outputs mismatch the input
		// type (&&); || may have been intended. After calc_out_matrix above both
		// types normally equal the input's, so the check rarely fires — confirm.
		if ((in_minfo.type != out2_minfo.type) && (in_minfo.type != out_minfo.type)) {
			err=JIT_ERR_MISMATCH_TYPE;
			goto out;
		}

		//allow any planes, will simply wrap. i think that's okay - jkc
		memset(&conv,0,sizeof(t_matrix_conv_info));
		memset(&conv2,0,sizeof(t_matrix_conv_info));
		for (i=0; i<JIT_MATRIX_MAX_PLANECOUNT; i++) {
			conv.planemap[i] = i;
			conv2.planemap[i] = i;
		}
		// Honor both source and destination region specs during the copies.
		conv.flags = conv2.flags = JIT_MATRIX_CONVERT_SRCDIM | JIT_MATRIX_CONVERT_DSTDIM;

		// just for ease and later expandability
		a_minfo = &out_minfo;
		a_matrix = out_matrix;
		b_minfo = &out2_minfo;
		b_matrix = out2_matrix;

		// Build the region descriptions: along splitdim, conv covers the first
		// `splitpoint` cells and conv2 the rest; other dims are copied 1:1.
		for (i = 0; i < in_minfo.dimcount; i++) {
			if (i == splitdim) {
				conv.srcdimstart[i] = 0;
				conv.srcdimend[i] = CLAMP(splitpoint - 1, 0, in_minfo.dim[i] - 1);
				conv.dstdimstart[i] = 0;
				conv.dstdimend[i] = a_minfo->dim[i] - 1;

				conv2.srcdimstart[i] = CLAMP(splitpoint, 0, in_minfo.dim[i] - 1);
				conv2.srcdimend[i] = in_minfo.dim[i] - 1;
				conv2.dstdimstart[i] = 0;
				conv2.dstdimend[i] = b_minfo->dim[i] - 1;
			}
			else {
				conv.srcdimstart[i] = conv2.srcdimstart[i] = 0;
				conv.srcdimend[i] = conv2.srcdimend[i] = in_minfo.dim[i] - 1;
				conv.dstdimstart[i] = conv2.dstdimstart[i] = 0;
				conv.dstdimend[i] = conv2.dstdimend[i] = in_minfo.dim[i] - 1;
			}
		}

		// Perform the two region copies from the input into each output.
		jit_object_method(out_matrix, _jit_sym_frommatrix, in_matrix, &conv);
		jit_object_method(out2_matrix, _jit_sym_frommatrix, in_matrix, &conv2);
	}
	else return JIT_ERR_INVALID_PTR;

out:
	return err;
}
/*
 * Sizes/retypes both output matrices: along splitdim, `out` gets the first
 * `splitpoint` cells and `out2` the remaining in_dim - splitpoint cells;
 * planecount/type/dimcount mirror the input. setinfo is only invoked when
 * something actually differs from the outputs' current state, to avoid
 * needless reallocation. Returns 0 on success, 1 on setinfo failure.
 */
t_jit_err jit_split_calc_out_matrix(t_jit_split *x, t_jit_matrix_info *in_minfo, void *in_matrix,
	t_jit_matrix_info *out_minfo, void *out_matrix, t_jit_matrix_info *out2_minfo, void *out2_matrix)
{
	t_jit_matrix_info temp_minfo, temp2_minfo;
	long i,dimcount; /* NOTE(review): dimcount is declared but never used */
	char splitdim = CLAMP(x->splitdim,0,JIT_MATRIX_MAX_DIMCOUNT);
	long splitpoint = x->splitpoint;

	// Snapshot the outputs' current state for the change detection below.
	jit_object_method(out_matrix, _jit_sym_getinfo, &temp_minfo);
	jit_object_method(out2_matrix, _jit_sym_getinfo, &temp2_minfo);

	// later handle splitdim -1(planes)
	out_minfo->planecount = out2_minfo->planecount = in_minfo->planecount;
	out_minfo->type = out2_minfo->type = in_minfo->type;
	out_minfo->dimcount = out2_minfo->dimcount = in_minfo->dimcount;
	for (i = 0; i < in_minfo->dimcount; i++) {
		if (i==splitdim) {
			out_minfo->dim[i] = CLAMP(splitpoint, 0, in_minfo->dim[i]);
			out2_minfo->dim[i] = CLAMP(in_minfo->dim[i] - splitpoint, 0, in_minfo->dim[i]);
		} else {
			out_minfo->dim[i] = out2_minfo->dim[i] = in_minfo->dim[i];
		}
	}

	// check and change if necessary: the first detected difference triggers a
	// setinfo on *both* outputs (which applies every computed field at once),
	// so the remaining comparisons can be skipped via `goto out`.
	if ((out_minfo->type != temp_minfo.type) || (out2_minfo->type != temp2_minfo.type)) {
		if (jit_object_method(out_matrix, _jit_sym_setinfo, out_minfo) || jit_object_method(out2_matrix, _jit_sym_setinfo, out2_minfo))
			goto err;
		goto out;
	}
	if ((out_minfo->planecount != temp_minfo.planecount) || (out2_minfo->planecount != temp2_minfo.planecount)) {
		if (jit_object_method(out_matrix, _jit_sym_setinfo, out_minfo) || jit_object_method(out2_matrix, _jit_sym_setinfo, out2_minfo))
			goto err;
		goto out;
	}
	if ((out_minfo->dimcount != temp_minfo.dimcount) || (out2_minfo->dimcount != temp2_minfo.dimcount)) {
		if (jit_object_method(out_matrix, _jit_sym_setinfo, out_minfo) || jit_object_method(out2_matrix, _jit_sym_setinfo, out2_minfo))
			goto err;
		goto out;
	}
	for (i = 0; i < in_minfo->dimcount; i++) {
		if ((out_minfo->dim[i] != temp_minfo.dim[i]) || (out2_minfo->dim[i] != temp2_minfo.dim[i])) {
			if (jit_object_method(out_matrix, _jit_sym_setinfo, out_minfo) || jit_object_method(out2_matrix, _jit_sym_setinfo, out2_minfo))
				goto err;
			goto out;
		}
	}

out:
	// Optionally wipe both outputs so regions outside the copied halves
	// don't retain stale data.
	if (x->autoclear) {
		jit_object_method(out_matrix, _jit_sym_clear);
		jit_object_method(out2_matrix, _jit_sym_clear);
	}
	return 0;
err:
	return 1;
}
/*
 * Instance constructor: allocates a jit.split object and sets attribute
 * defaults. Returns the new instance, or NULL if allocation failed.
 * (Removed unused locals `info` and `i`, and the redundant else branch —
 * jit_object_alloc already yielded NULL in that case.)
 */
t_jit_split *jit_split_new(void)
{
	t_jit_split *x;

	if ((x = (t_jit_split *)jit_object_alloc(_jit_split_class))) {
		x->splitdim = 0;   /* split along dimension 0 by default */
		x->splitpoint = 1; /* first output gets one cell along the split dim */
		x->autoclear = 1;  /* clear both outputs before each calculation */
	}
	/* x is already NULL on allocation failure */
	return x;
}
/* Destructor: the object owns no extra resources, so nothing to release. */
void jit_split_free(t_jit_split *x)
{
	//nothing to free
}
|
planttheidea/inline-loops.macro | __tests__/__fixtures__/inlined-arrow-return/flatMapRight/code.js | import { flatMapRight } from '../../../../src/inline-loops.macro';
const flattened = flatMapRight(array, entry => {
return [entry[0]];
});
|
arjayW/auda-sa | html/class_o_d_task.js | var class_o_d_task =
[
[ "eODNone", "class_o_d_task.html#ad002643e170a8fbb283faa9da45c9a47a7650c98bb4a17c4201653e2cb41a86d8", null ],
[ "eODFLAC", "class_o_d_task.html#ad002643e170a8fbb283faa9da45c9a47affeff343487da8ae57703491f67161c9", null ],
[ "eODMP3", "class_o_d_task.html#ad002643e170a8fbb283faa9da45c9a47a52ad3a931a1b6ce3f08548b4befb30d8", null ],
[ "eODFFMPEG", "class_o_d_task.html#ad002643e170a8fbb283faa9da45c9a47ad88bca25826172c0537150b614462c0c", null ],
[ "eODPCMSummary", "class_o_d_task.html#ad002643e170a8fbb283faa9da45c9a47a36e0f57063d422bebdf739f3cd6a7acc", null ],
[ "eODOTHER", "class_o_d_task.html#ad002643e170a8fbb283faa9da45c9a47a70f494ea98b133cab297ee84953ce6a4", null ],
[ "ODTask", "class_o_d_task.html#a515b35bc13701a3565b8555ad22b7af2", null ],
[ "~ODTask", "class_o_d_task.html#a07e86f3711923ada39da0c20ad0aa606", null ],
[ "AddWaveTrack", "class_o_d_task.html#a3438e8f3f97a2ad2954913b23beaa0a6", null ],
[ "CalculatePercentComplete", "class_o_d_task.html#abbe5b9fc9d3dd4420876ac1364980ca2", null ],
[ "CanMergeWith", "class_o_d_task.html#a491d4a2787367322422f78433dd33e8e", null ],
[ "Clone", "class_o_d_task.html#a76bed17dad8a34f83fcde04c557c9345", null ],
[ "ComputeNextWorkUntilPercentageComplete", "class_o_d_task.html#a092be4b67467f5b5e717861bbc0b5e89", null ],
[ "DemandTrackUpdate", "class_o_d_task.html#a6dfeaa8283bb419c3584dd77f642d0d2", null ],
[ "DoAll", "class_o_d_task.html#a5beda32403ff5ab98820a76426a9168e", null ],
[ "DoSome", "class_o_d_task.html#af144a3997d1d3c9a26ee4d5fc7a36e3d", null ],
[ "DoSomeInternal", "class_o_d_task.html#a45888cf5d09536d7e600f5c6afeb65f1", null ],
[ "GetDemandSample", "class_o_d_task.html#aa0119161504026e46a215b82d4990595", null ],
[ "GetNeedsODUpdate", "class_o_d_task.html#ab2451ca624aa10332d5faeaaae994fa9", null ],
[ "GetNumWaveTracks", "class_o_d_task.html#afaadfdc39a36023fdf2d434240a71f92", null ],
[ "GetODType", "class_o_d_task.html#a1bbfd8894e7aae490eb41887596c8b31", null ],
[ "GetTaskName", "class_o_d_task.html#af7c7ff0cadb3ac71bf582d74731204af", null ],
[ "GetTaskNumber", "class_o_d_task.html#a66343a5bc3e8482459ef2419abe5157a", null ],
[ "GetTip", "class_o_d_task.html#ac3f49c9c387de5c5cad4b5457c3fe7f8", null ],
[ "GetWaveTrack", "class_o_d_task.html#a33106da71ce2e3e6c2124c38e211efd9", null ],
[ "IsComplete", "class_o_d_task.html#a8753fd6fa8de7ca45b8ba4cf88cd75d5", null ],
[ "IsRunning", "class_o_d_task.html#a42704b882937363771e1b9924e0f6c7a", null ],
[ "IsTaskAssociatedWithProject", "class_o_d_task.html#a0abf3d0247adbf354be5d207d0800cec", null ],
[ "ODUpdate", "class_o_d_task.html#a441c889eed01e475ca55b8a6e6d8e100", null ],
[ "PercentComplete", "class_o_d_task.html#a7b714f63d612ef0e18db8afcccd2223a", null ],
[ "RecalculatePercentComplete", "class_o_d_task.html#a5c6c1265c7f3abe2de27c77806cb6819", null ],
[ "ReplaceWaveTrack", "class_o_d_task.html#a008a48995f3a72c563e3a9c1a698a7d0", null ],
[ "ResetNeedsODUpdate", "class_o_d_task.html#a2dc57dc72cc1c8e057cee5d471d217e5", null ],
[ "SetDemandSample", "class_o_d_task.html#aee81f26145371152b579f16c90af78cb", null ],
[ "SetIsRunning", "class_o_d_task.html#ac018f7c48b1eb6a096dba290b9c0c7b2", null ],
[ "SetNeedsODUpdate", "class_o_d_task.html#a70cfd63300b8bcdbe42e224046f86932", null ],
[ "StopUsingWaveTrack", "class_o_d_task.html#a3ac4c171329ece3638adc06285e34b5c", null ],
[ "Terminate", "class_o_d_task.html#a80d76840ecaa7efb4b5f42aadd84884c", null ],
[ "TerminateAndBlock", "class_o_d_task.html#a5aced75c29548ca75bd363b223d02216", null ],
[ "Update", "class_o_d_task.html#a6348e90d6b64fd775ca020e69705d7aa", null ],
[ "UsesCustomWorkUntilPercentage", "class_o_d_task.html#a7a2b1b0c6d3d264424ebb543813a2c69", null ],
[ "mBlockUntilTerminateMutex", "class_o_d_task.html#a645d01cc507dfa49c3f7032753812bb2", null ],
[ "mDemandSample", "class_o_d_task.html#a2aea33f92a4eacdfbf88eab5b941a78a", null ],
[ "mDemandSampleMutex", "class_o_d_task.html#a24ad4b2b8c1b2d647200b86bc9aabf50", null ],
[ "mDoingTask", "class_o_d_task.html#ad90a5f35f8ec6c50357c9328781ee542", null ],
[ "mIsRunning", "class_o_d_task.html#a01b2cdf425a94cc4c1cb450a40e19f34", null ],
[ "mIsRunningMutex", "class_o_d_task.html#a56765162fb9180721fddf08e8993e9ec", null ],
[ "mPercentComplete", "class_o_d_task.html#a81842f8c6d99d72ce3cd215c113a261d", null ],
[ "mPercentCompleteMutex", "class_o_d_task.html#add5ff78b44107292569dfdeeb90c8118", null ],
[ "mTaskNumber", "class_o_d_task.html#aac8bea3092cb5afa53b6f529b54afb26", null ],
[ "mTaskStarted", "class_o_d_task.html#a0f4099666c884667eebb9433bcc3d6c7", null ],
[ "mTerminate", "class_o_d_task.html#a38dd3cdea39254e59c505f5bfc9127e5", null ],
[ "mTerminateMutex", "class_o_d_task.html#aa534f7033d7c3fc06a55f24aac50482a", null ],
[ "mWaveTrackMutex", "class_o_d_task.html#ae51c8c57331a0caea29ac0e26a295685", null ],
[ "mWaveTracks", "class_o_d_task.html#aba0bd65cc43443f5eb27bc5278334bf2", null ],
[ "ODTypeEnum", "class_o_d_task.html#a7b197c55a6ffa250f2804e182c0f826e", null ]
]; |
sbrow/wash | api/rql/internal/primary/meta/valueSchema_test.go | package meta
import (
"encoding/json"
"fmt"
"io/ioutil"
"path"
"testing"
"github.com/puppetlabs/wash/plugin"
"github.com/stretchr/testify/suite"
)
// ValueSchemaTestSuite groups the ValueSchema unit tests under a testify suite.
type ValueSchemaTestSuite struct {
	suite.Suite
}
// TestNewSchema verifies that NewValueSchema munges the raw fixture schema
// into the expected form by comparing both sides as plain JSON maps.
func (suite *ValueSchemaTestSuite) TestNewSchema() {
	var rawSchema *plugin.JSONSchema
	// TODO: Once https://github.com/alecthomas/jsonschema/issues/40
	// is (properly) resolved, we should dynamically generate the
	// schema from a struct so maintainers can see what our mock looks
	// like. Right now, the (hacky) fix in our jsonschema fork generates
	// duplicate definitions for anonymous structs (and this behavior's
	// unpredictable), so we store the JSON in a fixture. Note that
	// it still generates the right schema, there's just some redundancy
	// in the generated schema.
	readSchemaFixture(suite.Suite, "before_munging", &rawSchema)
	var expected map[string]interface{}
	readSchemaFixture(suite.Suite, "after_munging", &expected)

	schema := NewValueSchema(rawSchema)
	// Round-trip the munged schema through JSON so it can be compared as a
	// generic map against the "after_munging" fixture.
	actualBytes, err := json.Marshal(schema.loader.JsonSource())
	if err != nil {
		suite.FailNow("Failed to marshal the munged JSON schema: %v", err)
	}
	var actual map[string]interface{}
	if err := json.Unmarshal(actualBytes, &actual); err != nil {
		suite.FailNow("Failed to unmarshal the munged JSON schema: %v", err)
	}
	suite.Equal(expected, actual)
}
// TestSupports exercises ValueSchema.Supports against both satisfiable and
// unsatisfiable SatisfyingValueSchema chains built from the fixture schema.
func (s *ValueSchemaTestSuite) TestSupports() {
	var rawSchema *plugin.JSONSchema
	readSchemaFixture(s.Suite, "before_munging", &rawSchema)
	schema := NewValueSchema(rawSchema)

	// Test valid value schemas
	svs := (NewSatisfyingValueSchema()).
		AddObject("dp").
		AddObject("dcp").
		AddObject("dcap").
		EndsWithPrimitiveValue()
	s.True(schema.Supports(svs))

	svs = (NewSatisfyingValueSchema()).
		AddObject("cp").
		EndsWithArray()
	s.True(schema.Supports(svs))

	svs = (NewSatisfyingValueSchema()).
		AddObject("dp").
		EndsWithAnything()
	s.True(schema.Supports(svs))

	// Now test invalid value schemas

	// "DP" is the invalid value here with the invalid property
	// "Foo"
	svs = (NewSatisfyingValueSchema()).
		AddObject("dp").
		AddObject("foo").
		EndsWithPrimitiveValue()
	s.False(schema.Supports(svs))

	// "AP" is a primitive type, so its value must be "null".
	// Here, however, it is an object.
	svs = (NewSatisfyingValueSchema()).
		AddObject("ap").
		EndsWithObject()
	s.False(schema.Supports(svs))

	// "DDP" is not a valid property of "DCP"
	svs = (NewSatisfyingValueSchema()).
		AddObject("dp").
		AddObject("dcp").
		AddObject("ddp").
		EndsWithAnything()
	s.False(schema.Supports(svs))
}
// TestValueSchema wires the suite into the standard `go test` runner.
func TestValueSchema(t *testing.T) {
	suite.Run(t, new(ValueSchemaTestSuite))
}
// v is the value that the schema-fixture will be marshaled into.
// We keep it generic in case v is a map[string]interface{} object
// instead of a *plugin.JSONSchema value
// readSchemaFixture loads testdata/<name>.json and unmarshals it into v.
// v stays an interface{} because some fixtures decode into a
// map[string]interface{} rather than a *plugin.JSONSchema.
func readSchemaFixture(s suite.Suite, name string, v interface{}) {
	fixture := path.Join("testdata", name+".json")
	raw, readErr := ioutil.ReadFile(fixture)
	if readErr != nil {
		s.T().Fatal(fmt.Sprintf("Failed to read %v", fixture))
	}
	if unmarshalErr := json.Unmarshal(raw, v); unmarshalErr != nil {
		s.T().Fatal(fmt.Sprintf("Failed to unmarshal %v: %v", fixture, unmarshalErr))
	}
}
|
leoeco2000/TestJava8 | src/test/java/innerClass/Outer_this.java | <filename>src/test/java/innerClass/Outer_this.java<gh_stars>0
package test.java.innerClass;
// Demonstrates qualified-this: the inner class reads the outer instance's
// private field via OuterClass.this.
class OuterClass {
	private int testId = 1;

	public OuterClass() {
		System.out.println(testId); // prints 1
	}

	class InnerClass {
		private int myTestId = 0;

		public InnerClass() {
			// Qualified this reaches the enclosing OuterClass instance's field.
			this.myTestId = OuterClass.this.testId + 1;
			System.out.println(myTestId); // prints 2
		}
	}
}
/** Driver: shows that a (non-static) inner-class instance must be created
 *  through an existing outer instance via {@code outer.new Inner()}. */
public class Outer_this {
	public static void main(String[] args) {
		OuterClass out = new OuterClass();
		// The inner instance is bound to `out` as its enclosing instance.
		OuterClass.InnerClass in = out.new InnerClass();
	}
}
ParkinWu/leetcode | python/offer/62.py | # 0,1,,n-1这n个数字排成一个圆圈,从数字0开始,每次从这个圆圈里删除第m个数字。求出这个圆圈里剩下的最后一个数字。
#
# 例如,0、1、2、3、4这5个数字组成一个圆圈,从数字0开始每次删除第3个数字,则删除的前4个数字依次是2、0、4、1,因此最后剩下的数字是3。
#
#
#
# 示例 1:
#
# 输入: n = 5, m = 3
# 输出: 3
# 示例 2:
#
# 输入: n = 10, m = 17
# 输出: 2
#
#
# 限制:
#
# 1 <= n <= 10^5
# 1 <= m <= 10^6
#
# 来源:力扣(LeetCode)
# 链接:https://leetcode-cn.com/problems/yuan-quan-zhong-zui-hou-sheng-xia-de-shu-zi-lcof
# 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
class Solution:
    def lastRemaining(self, n: int, m: int) -> int:
        """Josephus problem: last survivor of 0..n-1 removing every m-th.

        Uses the classic recurrence f(1) = 0, f(k) = (f(k-1) + m) % k,
        evaluated iteratively in O(n) time and O(1) space.
        """
        survivor = 0
        ring = 2
        while ring <= n:
            survivor = (survivor + m) % ring
            ring += 1
        return survivor
hailongz/golang | features/vcode/vcode/table.go | <gh_stars>0
package vcode
import (
"crypto/md5"
"encoding/hex"
"fmt"
"math/rand"
"time"
"github.com/hailongz/golang/micro"
)
// Seed math/rand once at package load so NewCode doesn't repeat the same
// sequence across process restarts.
// NOTE(review): math/rand is not cryptographically secure, and rand.Seed is
// deprecated as of Go 1.20 — confirm whether crypto/rand should be used for
// verification codes.
func init() {
	rand.Seed(time.Now().UnixNano())
}
// NewCode returns a string of exactly `length` random decimal digits, built
// by concatenating successive rand.Int() values and truncating. The context
// parameter is not used by this implementation.
// NOTE(review): digits come from math/rand — confirm whether a
// cryptographically secure source is required here.
func NewCode(app micro.IContext, length int) string {
	code := fmt.Sprintf("%d", rand.Int())
	for len(code) < length {
		code = fmt.Sprintf("%s%d", code, rand.Int())
	}
	return code[:length]
}
func Hash(code string) string {
m := md5.New()
m.Write([]byte(fmt.Sprintf("*&^YTGBNM<L:P1kedmfsf,%s", code)))
return hex.EncodeToString(m.Sum(nil))
}
|
medismailben/llvm-project | clang/test/CodeGen/sret.c | <gh_stars>1000+
// RUN: %clang_cc1 %s -emit-llvm -o - | grep sret | grep -v 'sret.c' | count 4
/* Large aggregate: five longs cannot be returned in registers, so the
   compiler must return it indirectly through a hidden pointer argument,
   which is what the RUN line's grep count verifies. */
struct abc {
	long a;
	long b;
	long c;
	long d;
	long e;
};
/* One prototype with an explicit empty parameter list and one with an
   unspecified (K&R-style) list; both return the aggregate indirectly. */
struct abc foo1(void);
struct abc foo2();
/* Each call materializes a local temporary that the callee fills through the
   hidden return slot; the emitted IR attributes are counted by the RUN line. */
void bar() {
	struct abc dummy1 = foo1();
	struct abc dummy2 = foo2();
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.