repo_name
stringlengths 6
101
| path
stringlengths 4
300
| text
stringlengths 7
1.31M
|
|---|---|---|
rjakubco/jboss-migration
|
src/main/java/org/jboss/loom/migrators/connectionFactories/jaxb/ConnectionFactoryAS5Bean.java
|
<filename>src/main/java/org/jboss/loom/migrators/connectionFactories/jaxb/ConnectionFactoryAS5Bean.java<gh_stars>0
/**
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 .
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and limitations under the License.
*/
package org.jboss.loom.migrators.connectionFactories.jaxb;
import javax.xml.bind.annotation.*;
import org.jboss.loom.spi.ann.ConfigPartDescriptor;
/**
* JAXB bean for tx-connection-factory (AS5)
*
* @author <NAME>
*/
@ConfigPartDescriptor(
    name = "Connection factory ${jndiName}"
)
@XmlRootElement(name = "tx-connection-factory")
@XmlAccessorType(XmlAccessType.NONE)
@XmlType(name = "tx-connection-factory")
public class ConnectionFactoryAS5Bean extends AbstractConnectionFactoryAS5Bean {

    // Field declaration order is preserved from the original bean (JAXB element
    // handling may be sensitive to it); only the accessors were regrouped so
    // that each field sits next to its getter/setter pair.

    /** Content of the {@code <local-transaction/>} element, if present. */
    @XmlElement(name = "local-transaction")
    private String localTransaction;

    /** Content of the {@code <xa-transaction/>} element, if present. */
    @XmlElement(name = "xa-transaction")
    private String xaTransaction;

    /** Content of the {@code <no-tx-separate-pools/>} element, if present. */
    @XmlElement(name = "no-tx-separate-pools")
    private String noTxSeparatePools;

    /** Content of the {@code <xa-resource-timeout>} element, if present. */
    @XmlElement(name = "xa-resource-timeout")
    private String xaResourceTimeout;

    public String getLocalTransaction() {
        return localTransaction;
    }

    public void setLocalTransaction(String localTransaction) {
        this.localTransaction = localTransaction;
    }

    public String getXaTransaction() {
        return xaTransaction;
    }

    public void setXaTransaction(String xaTransaction) {
        this.xaTransaction = xaTransaction;
    }

    public String getNoTxSeparatePools() {
        return noTxSeparatePools;
    }

    public void setNoTxSeparatePools(String noTxSeparatePools) {
        this.noTxSeparatePools = noTxSeparatePools;
    }

    public String getXaResourceTimeout() {
        return xaResourceTimeout;
    }

    public void setXaResourceTimeout(String xaResourceTimeout) {
        this.xaResourceTimeout = xaResourceTimeout;
    }
}// class
|
compiler-tree-technologies/cil
|
clang/test/cml/output_tests/c_tests/basic/compound_assign.c
|
<filename>clang/test/cml/output_tests/c_tests/basic/compound_assign.c<gh_stars>10-100
#include <stdio.h>
// RUN: %cml %s -o %t && %t | FileCheck %s
int main() {
/* Exercises each C compound-assignment operator (+=, -=, *=, /=, %=) on a
 * single int in sequence. The "// CHECK:" comments below are FileCheck
 * directives (see the RUN line above) pinning the value printed after each
 * step — do not edit them. */
int a = 10;
// CHECK: 10
printf("%d\n", a);
a += 10;
// CHECK: 20
printf("%d\n", a);
a -= 10;
// CHECK: 10
printf("%d\n", a);
a *= 10;
// CHECK: 100
printf("%d\n", a);
a /= 10;
// CHECK: 10
printf("%d\n", a);
a %= 10;
/* 10 % 10 == 0 */
// CHECK: 0
printf("%d\n", a);
return 0;
}
|
GiulianaPola/select_repeats
|
venv/lib/python3.8/site-packages/charset_normalizer/utils.py
|
<reponame>GiulianaPola/select_repeats
/home/runner/.cache/pip/pool/ea/f7/dd/039f6ed15403dff7435dfd81f2f8ae5163e8f7121f2aae1bd275d7e8ca
|
Kazalev/SoftUni-ProgrammingFundamentals-JavaScript
|
Data Types and Variables/Exercise - Data Types and Variables/More Exercises/01. Digits with Words.js
|
<reponame>Kazalev/SoftUni-ProgrammingFundamentals-JavaScript<filename>Data Types and Variables/Exercise - Data Types and Variables/More Exercises/01. Digits with Words.js
// Prints the digit (1..10) corresponding to an English number word
// ("one" .. "ten"). Unrecognized words produce no output, exactly like the
// original switch statement with no default branch.
function solve(number) {
  const digitsByWord = {
    one: 1,
    two: 2,
    three: 3,
    four: 4,
    five: 5,
    six: 6,
    seven: 7,
    eight: 8,
    nine: 9,
    ten: 10,
  };
  // hasOwnProperty guard: avoid matching inherited keys like "constructor".
  if (Object.prototype.hasOwnProperty.call(digitsByWord, number)) {
    console.log(digitsByWord[number]);
  }
}
solve('four')
|
logan-wipro/splunk-ref-pas-code
|
apps/pas_ref_app/appserver/static/suspicious_activity.js
|
// Wires a custom cell renderer into the "suspicious_activity_table" dashboard
// table once the Splunk MVC components and SimpleXML dashboard are ready.
require([
"splunkjs/ready!",
"splunkjs/mvc/simplexml/ready!",
"splunkjs/mvc/tableview"
], function(
mvc,
ignored,
TableView
) {
// Renders the 'Code' column as a large colored dot instead of raw text.
var CustomIconCellRenderer = TableView.BaseCellRenderer.extend({
canRender: function(cell) {
return cell.field === 'Code'; // the color column
},
render: function($td, cell) {
// NOTE(review): cell.value is interpolated into the style attribute
// unescaped — presumably it is always a CSS color produced by the
// search; confirm it can never contain user-controlled markup.
$td.html('<span style="font-size: 3em; color: ' + cell.value + '">●</span>');
}
});
// Attach the renderer and re-render the existing table component.
var tableElement = mvc.Components.getInstance('suspicious_activity_table');
tableElement.getVisualization(function(tableView) {
tableView.table.addCellRenderer(new CustomIconCellRenderer());
tableView.table.render();
});
});
|
matoruru/purescript-react-material-ui-svgicon
|
src/MaterialUI/SVGIcon/Icon/BrushSharp.js
|
<reponame>matoruru/purescript-react-material-ui-svgicon
// PureScript FFI binding: re-export the Material-UI "BrushSharp" icon's
// default export under the name the .purs module expects.
exports.brushSharpImpl = require('@material-ui/icons/BrushSharp').default;
|
SekyuShin/C_Project
|
C_Study_Example/Part3/Part3/mymain.c
|
/* mymain.c
#include <stdio.h>
int main(void)
#include "myheader1.h"
#include "myheader2.h"
*/
|
zparnold/aws-sdk-go-v2
|
service/route53domains/api_op_TransferDomainToAnotherAwsAccount.go
|
// Code generated by smithy-go-codegen DO NOT EDIT.
package route53domains
import (
"context"
awsmiddleware "github.com/aws/aws-sdk-go-v2/aws/middleware"
"github.com/aws/aws-sdk-go-v2/aws/signer/v4"
smithy "github.com/awslabs/smithy-go"
"github.com/awslabs/smithy-go/middleware"
smithyhttp "github.com/awslabs/smithy-go/transport/http"
)
// Transfers a domain from the current AWS account to another AWS account. Note the
// following:
//
// * The AWS account that you're transferring the domain to must
// accept the transfer. If the other account doesn't accept the transfer within 3
// days, we cancel the transfer. See AcceptDomainTransferFromAnotherAwsAccount
// (https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_AcceptDomainTransferFromAnotherAwsAccount.html).
//
//
// * You can cancel the transfer before the other account accepts it. See
// CancelDomainTransferToAnotherAwsAccount
// (https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_CancelDomainTransferToAnotherAwsAccount.html).
//
//
// * The other account can reject the transfer. See
// RejectDomainTransferFromAnotherAwsAccount
// (https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_RejectDomainTransferFromAnotherAwsAccount.html).
//
//
// <important> <p>When you transfer a domain from one AWS account to another, Route
// 53 doesn't transfer the hosted zone that is associated with the domain. DNS
// resolution isn't affected if the domain and the hosted zone are owned by
// separate accounts, so transferring the hosted zone is optional. For information
// about transferring the hosted zone to another AWS account, see <a
// href="https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/hosted-zones-migrating.html">Migrating
// a Hosted Zone to a Different AWS Account</a> in the <i>Amazon Route 53 Developer
// Guide</i>.</p> </important> <p>Use either <a
// href="https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_ListOperations.html">ListOperations</a>
// or <a
// href="https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_GetOperationDetail.html">GetOperationDetail</a>
// to determine whether the operation succeeded. <a
// href="https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_GetOperationDetail.html">GetOperationDetail</a>
// provides additional information, for example, <code>Domain Transfer from Aws
// Account 111122223333 has been cancelled</code>. </p>
func (c *Client) TransferDomainToAnotherAwsAccount(ctx context.Context, params *TransferDomainToAnotherAwsAccountInput, optFns ...func(*Options)) (*TransferDomainToAnotherAwsAccountOutput, error) {
// Build a fresh middleware stack for this call and apply per-call option
// functions to a copy of the client options, so the shared client state is
// never mutated.
stack := middleware.NewStack("TransferDomainToAnotherAwsAccount", smithyhttp.NewStackRequest)
options := c.options.Copy()
for _, fn := range optFns {
fn(&options)
}
// Register serialization, endpoint resolution, signing, retry, validation,
// and bookkeeping middleware.
// NOTE(review): this generated code ignores any error values returned by
// these add* helpers; later versions of the code generator check them. Do
// not hand-edit — regenerate instead (file header says DO NOT EDIT).
addawsAwsjson11_serdeOpTransferDomainToAnotherAwsAccountMiddlewares(stack)
awsmiddleware.AddRequestInvocationIDMiddleware(stack)
smithyhttp.AddContentLengthMiddleware(stack)
addResolveEndpointMiddleware(stack, options)
v4.AddComputePayloadSHA256Middleware(stack)
addRetryMiddlewares(stack, options)
addHTTPSignerV4Middleware(stack, options)
awsmiddleware.AddAttemptClockSkewMiddleware(stack)
addClientUserAgent(stack)
smithyhttp.AddErrorCloseResponseBodyMiddleware(stack)
smithyhttp.AddCloseResponseBodyMiddleware(stack)
addOpTransferDomainToAnotherAwsAccountValidationMiddleware(stack)
stack.Initialize.Add(newServiceMetadataMiddleware_opTransferDomainToAnotherAwsAccount(options.Region), middleware.Before)
addRequestIDRetrieverMiddleware(stack)
addResponseErrorMiddleware(stack)
// Caller-supplied APIOptions may further customize the stack; their errors
// ARE propagated.
for _, fn := range options.APIOptions {
if err := fn(stack); err != nil {
return nil, err
}
}
// Execute the decorated handler; wrap any failure in an OperationError so
// callers can identify the service and operation that failed.
handler := middleware.DecorateHandler(smithyhttp.NewClientHandler(options.HTTPClient), stack)
result, metadata, err := handler.Handle(ctx, params)
if err != nil {
return nil, &smithy.OperationError{
ServiceID: ServiceID,
OperationName: "TransferDomainToAnotherAwsAccount",
Err: err,
}
}
out := result.(*TransferDomainToAnotherAwsAccountOutput)
out.ResultMetadata = metadata
return out, nil
}
// The TransferDomainToAnotherAwsAccount request includes the following elements.
// (Required members are presumably enforced by the operation's validation
// middleware — see addOpTransferDomainToAnotherAwsAccountValidationMiddleware.)
type TransferDomainToAnotherAwsAccountInput struct {
// The account ID of the AWS account that you want to transfer the domain to, for
// example, 111122223333.
//
// This member is required.
AccountId *string
// The name of the domain that you want to transfer from the current AWS account to
// another account.
//
// This member is required.
DomainName *string
}
// The TransferDomainToAnotherAwsAccount response includes the following elements.
type TransferDomainToAnotherAwsAccountOutput struct {
// Identifier for tracking the progress of the request. To query the operation
// status, use GetOperationDetail
// (https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_GetOperationDetail.html).
OperationId *string
// To finish transferring a domain to another AWS account, the account that the
// domain is being transferred to must submit an
// AcceptDomainTransferFromAnotherAwsAccount
// (https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_AcceptDomainTransferFromAnotherAwsAccount.html)
// request. The request must include the value of the Password element that was
// returned in the TransferDomainToAnotherAwsAccount response.
// NOTE(review): this value acts like a one-time credential — avoid logging it.
Password *string
// Metadata pertaining to the operation's result.
ResultMetadata middleware.Metadata
}
// Registers the awsjson1.1 request serializer and response deserializer for
// this operation on the given middleware stack.
func addawsAwsjson11_serdeOpTransferDomainToAnotherAwsAccountMiddlewares(stack *middleware.Stack) {
stack.Serialize.Add(&awsAwsjson11_serializeOpTransferDomainToAnotherAwsAccount{}, middleware.After)
stack.Deserialize.Add(&awsAwsjson11_deserializeOpTransferDomainToAnotherAwsAccount{}, middleware.After)
}
// Builds the static service metadata (region, service ID, signing name, and
// operation name) that the stack attaches to this operation's requests.
func newServiceMetadataMiddleware_opTransferDomainToAnotherAwsAccount(region string) awsmiddleware.RegisterServiceMetadata {
return awsmiddleware.RegisterServiceMetadata{
Region: region,
ServiceID: ServiceID,
SigningName: "route53domains",
OperationName: "TransferDomainToAnotherAwsAccount",
}
}
|
cuipy/tijian
|
src/main/java/com/thinkgem/jeesite/modules/wshbj/web/ExaminationPackageController.java
|
<reponame>cuipy/tijian
/**
* Copyright © 2012-2016 <a href="https://github.com/thinkgem/jeesite">JeeSite</a> All rights reserved.
*/
package com.thinkgem.jeesite.modules.wshbj.web;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.thinkgem.jeesite.common.utils.PinyinUtils;
import com.thinkgem.jeesite.modules.sys.utils.UserUtils;
import com.thinkgem.jeesite.modules.wshbj.bean.RequestResult;
import com.thinkgem.jeesite.modules.wshbj.entity.*;
import com.thinkgem.jeesite.modules.wshbj.service.ExaminationCategoryService;
import com.thinkgem.jeesite.modules.wshbj.service.ExaminationItemService;
import com.thinkgem.jeesite.modules.wshbj.service.PackageItemService;
import org.apache.shiro.authz.annotation.RequiresPermissions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.servlet.mvc.support.RedirectAttributes;
import com.thinkgem.jeesite.common.config.Global;
import com.thinkgem.jeesite.common.persistence.Page;
import com.thinkgem.jeesite.common.web.BaseController;
import com.thinkgem.jeesite.common.utils.StringUtils;
import com.thinkgem.jeesite.modules.wshbj.service.ExaminationPackageService;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
 * Controller for managing examination packages (体检套餐): listing, form
 * display, save, delete, and two AJAX endpoints used by the UI.
 *
 * @author zhxl
 * @version 2018-03-12
 */
@Controller
@RequestMapping(value = "${adminPath}/wshbj/examinationPackage")
public class ExaminationPackageController extends BaseController {

    @Autowired
    private ExaminationPackageService examinationPackageService;
    @Autowired
    private ExaminationItemService examinationItemService;
    @Autowired
    private ExaminationCategoryService examinationCategoryService;
    @Autowired
    private PackageItemService packageItemService;

    /**
     * Resolves the {@link ExaminationPackage} model attribute for every
     * request: loads it by id when one is supplied and found, otherwise
     * returns a fresh empty instance.
     *
     * @param id optional entity id taken from the request
     * @return the loaded entity, or a new one when id is blank/unknown
     */
    @ModelAttribute
    public ExaminationPackage get(@RequestParam(required=false) String id) {
        ExaminationPackage entity = null;
        if (StringUtils.isNotBlank(id)){
            entity = examinationPackageService.get(id);
        }
        if (entity == null){
            entity = new ExaminationPackage();
        }
        return entity;
    }

    /** Renders the paged package list. */
    @RequiresPermissions("wshbj:examinationPackage:view")
    @RequestMapping(value = {"list", ""})
    public String list(ExaminationPackage examinationPackage, HttpServletRequest request, HttpServletResponse response, Model model) {
        Page<ExaminationPackage> page = examinationPackageService.findPage(new Page<ExaminationPackage>(request, response), examinationPackage);
        model.addAttribute("page", page);
        return "modules/wshbj/examinationPackageList";
    }

    /**
     * Renders the edit/create form, populating the examination items and
     * categories of the current user's company, plus — for an existing
     * package — its already-selected items.
     */
    @RequiresPermissions("wshbj:examinationPackage:view")
    @RequestMapping(value = "form")
    public String form(ExaminationPackage examinationPackage, Model model) {
        model.addAttribute("examinationPackage", examinationPackage);
        // Examination items scoped to the current user's company, not deleted.
        ExaminationItem examinationItem = new ExaminationItem();
        examinationItem.setOwner(UserUtils.getUser().getCompany().getId());
        examinationItem.setDelFlag("0");
        List<ExaminationItem> examinationItemList = examinationItemService.findList(examinationItem);
        model.addAttribute("examinationItemList", examinationItemList);
        // Examination categories, same scoping.
        ExaminationCategory examinationCategory = new ExaminationCategory();
        examinationCategory.setOwner(UserUtils.getUser().getCompany().getId());
        examinationCategory.setDelFlag("0");
        List<ExaminationCategory> examinationCategoryList = examinationCategoryService.findList(examinationCategory);
        model.addAttribute("examinationCategoryList", examinationCategoryList);
        // Items already attached to an existing package.
        if(!StringUtils.isEmpty(examinationPackage.getId())) {
            PackageItem pi = new PackageItem();
            pi.setPackageId(examinationPackage.getId());
            List<PackageItem> pis = packageItemService.findList(pi);
            model.addAttribute("packageItems",pis);
        }
        return "modules/wshbj/examinationPackageForm";
    }

    /**
     * Validates and saves the package, defaulting the pinyin name (derived
     * from the display name) and the order number (100) when absent.
     */
    @RequiresPermissions("wshbj:examinationPackage:edit")
    @RequestMapping(value = "save")
    public String save(ExaminationPackage examinationPackage, Model model, RedirectAttributes redirectAttributes) {
        if (!beanValidator(model, examinationPackage)){
            return form(examinationPackage, model);
        }
        if(StringUtils.isEmpty(examinationPackage.getNamePinyin())){
            String py=PinyinUtils.getStringPinYin(examinationPackage.getName());
            examinationPackage.setNamePinyin(py);
        }
        if(null==examinationPackage.getOrderNumb()){
            examinationPackage.setOrderNumb(100);
        }
        examinationPackageService.save(examinationPackage);
        addMessage(redirectAttributes, "保存体检套餐成功");
        return "redirect:"+Global.getAdminPath()+"/wshbj/examinationPackage/?repage";
    }

    /** Deletes the package and redirects back to the list. */
    @RequiresPermissions("wshbj:examinationPackage:edit")
    @RequestMapping(value = "delete")
    public String delete(ExaminationPackage examinationPackage, RedirectAttributes redirectAttributes) {
        examinationPackageService.delete(examinationPackage);
        addMessage(redirectAttributes, "删除体检套餐成功");
        return "redirect:"+Global.getAdminPath()+"/wshbj/examinationPackage/?repage";
    }

    /**
     * AJAX endpoint feeding an autocompleter: returns up to {@code limit}
     * packages matching {@code query} as maps.
     * NOTE(review): a null {@code limit} would NPE at setPageSize — presumably
     * the UI always sends it; confirm before hardening.
     */
    @RequiresPermissions("wshbj:examinationPackage:view")
    @GetMapping(value = {"ajax_for_autocompleter"})
    @ResponseBody
    public List<Map<String,Object>> ajax_for_autocompleter(String query,Integer limit) {
        String strQuery=StringUtils.forSuperLikeQuery(query);
        ExaminationPackage ep=new ExaminationPackage();
        ep.setLikeField(strQuery);
        Page<ExaminationPackage> page=new Page<ExaminationPackage>();
        page.setPageNo(1);
        page.setPageSize(limit);
        Page<ExaminationPackage> pp = examinationPackageService.pageLike(page,ep);
        // Fixed: was a raw ArrayList; now typed and pre-sized to the result count.
        List<Map<String,Object>> result = new ArrayList<>(pp.getList().size());
        for(ExaminationPackage p:pp.getList()){
            result.add(p.getMap());
        }
        return result;
    }

    /** AJAX endpoint returning the items of the resolved package. */
    @RequiresPermissions("wshbj:examinationPackage:view")
    @GetMapping(value = {"ajax_get_items"})
    @ResponseBody
    public List<PackageItem> ajax_get_items(ExaminationPackage examinationPackage) {
        List<PackageItem> items = examinationPackage.getItems();
        return items;
    }
}
|
yaoyao1987/toy-wheel
|
toy-function/compose.js
|
<reponame>yaoyao1987/toy-wheel<filename>toy-function/compose.js
// Synchronous right-to-left function composition:
// compose(f, g, h)(...args) === f(g(h(...args))).
// With no functions at all it degenerates to the identity on a single value
// (previously `compose()` threw: reduce on an empty array with no seed).
const compose = (...funcs) =>
  funcs.length === 0
    ? (arg) => arg
    : funcs.reduce((a, b) => (...args) => a(b(...args)))
// ======================================================
/**
 * testing
 */
const init = (...args) => args.reduce((total, val) => total + val, 0)
const step2 = (val) => val + 2
const step3 = (val) => val + 3
const step4 = (val) => val + 4
const steps = [step4, step3, step2, init]
const composeFunc = compose(...steps)
// init(1,2,3) = 6, then +2, +3, +4 → 15
console.log(composeFunc(1, 2, 3))
// BUG FIX: the original file re-declared `composeFunc` with `let` (a
// SyntaxError that prevented the whole module from running) and then called
// `.then()` on a plain number — leftover from an async variant of compose.
// That broken demo has been removed; this `compose` is fully synchronous.
|
ma1uta/gene
|
src/main/java/io/github/ma1uta/matrix/gene/api/CasApi.java
|
/*
* Copyright <EMAIL>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.ma1uta.matrix.gene.api;
import io.github.ma1uta.matrix.gene.model.common.EmptyResponse;
import retrofit2.http.GET;
import retrofit2.http.Query;
/**
* Central Authentication Service (CAS) is a web-based single sign-on protocol.
* <br>
* An overview of the process, as used in Matrix, is as follows:
* <ul>
* <li>The Matrix client instructs the user's browser to navigate to the /login/cas/redirect endpoint on the user's homeserver.</li>
* <li>The homeserver responds with an HTTP redirect to the CAS user interface, which the browser follows.</li>
* <li>The CAS system authenticates the user.</li>
* <li>The CAS server responds to the user's browser with a redirect back to the /login/cas/ticket endpoint on the homeserver,
* which the browser follows. A 'ticket' identifier is passed as a query parameter in the redirect.</li>
* <li>The homeserver receives the ticket ID from the user's browser, and makes a request to the CAS server to validate the ticket.</li>
* <li>Having validated the ticket, the homeserver responds to the browser with a third HTTP redirect, back to the Matrix
* client application. A login token is passed as a query parameter in the redirect.</li>
* <li>The Matrix client receives the login token and passes it to the /login API.</li>
* </ul>
*/
public interface CasApi {
// NOTE(review): both endpoints actually answer with HTTP redirects (302);
// EmptyResponse as the declared return type is presumably a placeholder
// because Retrofit requires a body type — confirm how the configured HTTP
// client is set up to handle (or not follow) these redirects.
/**
 * A web-based Matrix client should instruct the user's browser to navigate to this endpoint in order to log in via CAS.
 * <br>
 * The server MUST respond with an HTTP redirect to the CAS interface. The URI MUST include a service parameter giving the path
 * of the /login/cas/ticket endpoint (including the redirectUrl query parameter).
 * <br>
 * For example, if the endpoint is called with redirectUrl=https://client.example.com/?q=p, it might redirect to
 * {@code https://cas.example.com/?service=https%3A%2F%2Fserver.example.com%2F_matrix%2Fclient%2Fr0%2Flogin%2Fcas%2Fticket%3Fredirect
 * Url%3Dhttps%253A%252F%252Fclient.example.com%252F%253Fq%253Dp}.
 *
 * @param redirectUrl Required. URI to which the user will be redirected after the homeserver has authenticated the user with CAS.
 * @return A redirect to the CAS interface.
 */
@GET("/_matrix/client/r0/login/cas/redirect")
EmptyResponse redirect(@Query("redirectUrl") String redirectUrl);
/**
 * Once the CAS server has authenticated the user, it will redirect the browser to this endpoint (assuming /login/cas/redirect
 * gave it the correct service parameter).
 * <br>
 * The server MUST call /proxyValidate on the CAS server, to validate the ticket supplied by the browser.
 * <br>
 * If validation is successful, the server must generate a Matrix login token. It must then respond with an HTTP redirect to the
 * URI given in the redirectUrl parameter, adding a loginToken query parameter giving the generated token.
 * <br>
 * If validation is unsuccessful, the server should respond with a 401 Unauthorized error, the body of which will be displayed
 * to the user.
 *
 * @param redirectUrl Required. The redirectUrl originally provided by the client to /login/cas/redirect.
 * @param ticket Required. CAS authentication ticket.
 * @return <ul>
 * <li>Status code 302: A redirect to the Matrix client.</li>
 * <li>Status code 401: The server was unable to validate the CAS ticket.</li>
 * </ul>
 */
@GET("/_matrix/client/r0/login/cas/ticket")
EmptyResponse ticket(@Query("redirectUrl") String redirectUrl, @Query("ticket") String ticket);
}
|
koraktor/osquery-site
|
src/components/Icon/svg/MacFinder.svg.js
|
<reponame>koraktor/osquery-site
import React, { Component } from 'react'
// SVG icon component for the macOS Finder logo, wrapped in a drop-shadow
// filter. Size and fill color are configurable via props; the viewBox is
// derived from width/height so the drawing scales with them.
class MacFinder extends Component {
// Defaults: dark-blue fill, 48x32 pixel box.
static defaultProps = {
fillColor: '#00125F',
height: 32,
width: 48,
}
render() {
const { className, fillColor, height, width } = this.props
// NOTE(review): the nested <g> transforms/offsets below look like they were
// exported verbatim from a design tool (Sketch-style layer names) —
// presumably they re-center the path inside the viewBox; verify visually
// before simplifying.
return (
<svg
className={className}
width={`${width}px`}
height={`${height}px`}
viewBox={`0 0 ${width} ${height}`}
version="1.1"
xmlns="http://www.w3.org/2000/svg"
>
<title>mac-finder-svg</title>
<defs>
<filter
x="-13.3%"
y="-15.8%"
width="126.7%"
height="142.1%"
filterUnits="objectBoundingBox"
id="filter-1"
>
<feOffset dx="0" dy="6" in="SourceAlpha" result="shadowOffsetOuter1" />
<feGaussianBlur stdDeviation="7" in="shadowOffsetOuter1" result="shadowBlurOuter1" />
<feColorMatrix
values="0 0 0 0 0.125490196 0 0 0 0 0.145098039 0 0 0 0 0.196078431 0 0 0 0.16 0"
type="matrix"
in="shadowBlurOuter1"
result="shadowMatrixOuter1"
/>
<feMerge>
<feMergeNode in="shadowMatrixOuter1" />
<feMergeNode in="SourceGraphic" />
</feMerge>
</filter>
</defs>
<g id="Page-1" stroke="none" strokeWidth="1" fill="none" fillRule="evenodd">
<g
id="osquery-io-again-again"
transform="translate(-490.000000, -589.000000)"
fillRule="nonzero"
fill={fillColor}
>
<g id="carousel-w-imac" transform="translate(0.000000, 455.000000)">
<g id="frame-overlay">
<g id="Group-7">
<g id="table-examples" transform="translate(-50.000000, 131.000000)">
<g
id="Group-25"
filter="url(#filter-1)"
transform="translate(541.000000, 0.000000)"
>
<g id="Group-9" transform="translate(12.000000, 9.000000)">
<path
d="M3,3 L10.89,3 C11.46,1.91 12.13,0.88 12.93,0 L14.04,1.11 C13.61,1.7 13.23,2.34 12.89,3 L19,3 C20.1045695,3 21,3.8954305 21,5 L21,18 C21,19.1045695 20.1045695,20 19,20 L13.93,20 L14.26,21.23 L12.43,21.95 L11.93,20 L3,20 C1.8954305,20 1,19.1045695 1,18 L1,5 C1,3.8954305 1.8954305,3 3,3 L3,3 Z M3,5 L3,18 L11.54,18 C11.5,17.67 11.44,17.34 11.4,17 C11.27,17 11.13,17 11,17 C8.25,17 5.78,16.5 4.13,15.76 L5.04,14.12 C6,14.64 8.17,15 11,15 C11.08,15 11.16,15 11.24,15 C11.21,14.33 11.22,13.66 11.27,13 L8,13 C8,13 8.4,8.97 10,5 L3,5 L3,5 Z M19,18 L19,5 L12,5 C11.1,7.22 10.58,9.46 10.3,11 L13.17,11 C13,12.28 12.97,13.62 13.06,14.93 C14.87,14.8 16.25,14.5 16.96,14.12 L17.87,15.76 C16.69,16.3 15.1,16.7 13.29,16.89 C13.35,17.27 13.41,17.64 13.5,18 L19,18 L19,18 Z M5,7 L7,7 L7,10 L5,10 L5,7 L5,7 Z M15,7 L17,7 L17,10 L15,10 L15,7 Z"
id="mac-finder-svg"
/>
</g>
</g>
</g>
</g>
</g>
</g>
</g>
</g>
</svg>
)
}
}
export default MacFinder
|
tobyatgithub/MVC_ImageFilterApp_WithCrossStitchFeatures
|
src/crossstitchmvc/model/imageeffect/ImageBlurFilter3d.java
|
package crossstitchmvc.model.imageeffect;
/**
 * Filter class of image blur effect.
 *
 * <p>Applies a fixed 3x3 weighted-average kernel to each RGB channel
 * independently and recombines them into HWC layout. Border pixels (the
 * outermost row/column) are left at 0, matching the original implementation.
 */
public class ImageBlurFilter3d extends AbstractFilter3d {

  /**
   * 3x3 blur kernel. Row/column order matches the hand-unrolled sum of the
   * original code (corners 1/16, edges 1/8, center 1/4), so the accumulation
   * order — and therefore the floating-point result — is identical.
   */
  private static final double[][] KERNEL = {
      {1 / 16d, 1 / 8d, 1 / 16d},
      {1 / 8d,  1 / 4d, 1 / 8d},
      {1 / 16d, 1 / 8d, 1 / 16d},
  };

  /**
   * Blurs all three channels of the given image.
   *
   * @param image source image; must not be null
   * @return blurred pixels in height-width-channel layout
   * @throws IllegalArgumentException if {@code image} is null
   */
  @Override public int[][][] filter(Image image) {
    if (image == null) {
      throw new IllegalArgumentException("Image can not be null.");
    }
    return rgbToHWC(
        blurChannel(image.getRedChannel()),
        blurChannel(image.getGreenChannel()),
        blurChannel(image.getBlueChannel()));
  }

  /**
   * Convolves one channel with {@link #KERNEL}, clamping each result to
   * [0, 255]. Interior pixels only; the 1-pixel border stays 0.
   *
   * @param matrix 2d matrix of one channel's values
   * @return new 2d matrix holding the blurred channel
   */
  private int[][] blurChannel(int[][] matrix) {
    final int rows = matrix.length;
    final int cols = matrix[0].length;
    int[][] blurred = new int[rows][cols];
    for (int r = 1; r < rows - 1; r++) {
      for (int c = 1; c < cols - 1; c++) {
        double acc = 0d;
        for (int dr = -1; dr <= 1; dr++) {
          for (int dc = -1; dc <= 1; dc++) {
            acc += matrix[r + dr][c + dc] * KERNEL[dr + 1][dc + 1];
          }
        }
        blurred[r][c] = clampDigit((int) acc, 0, 255);
      }
    }
    return blurred;
  }

  @Override public String toString() {
    return "Blur Filter";
  }
}
|
Joshb888/dd
|
dashboard/public/bower_components/ckeditor/plugins/sourcedialog/lang/fo.js
|
// Faroese ("fo") localisation for the CKEditor "sourcedialog" plugin.
// The values are runtime UI strings ("Kelda" — presumably Faroese for
// "Source") and must stay byte-identical.
CKEDITOR.plugins.setLang("sourcedialog", "fo", {
toolbar: "Kelda",
title: "Kelda",
});
|
Atom-me/xcEdu
|
xcEdu_service/xc-service-manage-order/src/main/java/com/xuecheng/order/dao/XcTaskHisRepository.java
|
package com.xuecheng.order.dao;
import com.xuecheng.framework.domain.task.XcTaskHis;
import org.springframework.data.jpa.repository.JpaRepository;
/**
 * Spring Data JPA repository for {@code XcTaskHis} entities (String primary
 * key) — presumably historical/completed task records, judging by the name.
 * No custom query methods are declared; all CRUD operations come from
 * {@link JpaRepository}.
 *
 * @author atom
 */
public interface XcTaskHisRepository extends JpaRepository<XcTaskHis, String> {
}
|
bluelovers/Danbooru
|
lib/dashboard.rb
|
<reponame>bluelovers/Danbooru
# Aggregates recent site activity (flagged/appealed posts, uploads, votes,
# tag/note/wiki/artist edits) for a moderation dashboard. All queries are
# bounded by +min_date+ (only newer activity) and, where a user is involved,
# by +max_level+ (only users at or below that level).
class Dashboard
# Wraps one comment-vote aggregate row: the Comment plus its vote count.
class CommentActivity
attr_reader :comment, :count
def initialize(hash)
@comment = Comment.find(hash["comment_id"])
@count = hash["count"]
end
end
# Wraps one post aggregate row: the Post plus its event count.
class PostActivity
attr_reader :post, :count
def initialize(hash)
@post = Post.find(hash["post_id"])
@count = hash["count"]
end
end
# Wraps one per-user aggregate row: the User plus their event count.
class UserActivity
attr_reader :user, :count
def initialize(hash)
@user = User.find(hash["user_id"])
@count = hash["count"]
end
end
# Wraps one appeal row: the appealed Post plus the stated reason.
class PostAppealActivity
attr_reader :post, :reason
def initialize(hash)
@post = Post.find(hash["post_id"])
@reason = hash["reason"]
end
end
attr_reader :min_date, :max_level
def initialize(min_date, max_level)
@min_date = min_date
@max_level = max_level
end
# Top 10 most-flagged non-deleted posts (auto "Unapproved in three days"
# flags are excluded).
def flagged_post_activity
ActiveRecord::Base.select_all_sql("SELECT flagged_post_details.post_id, count(*) FROM flagged_post_details JOIN posts ON posts.id = flagged_post_details.post_id WHERE flagged_post_details.created_at > ? AND flagged_post_details.reason <> ? AND posts.status <> 'deleted' GROUP BY flagged_post_details.post_id ORDER BY count(*) DESC LIMIT 10", min_date, "Unapproved in three days").map {|x| PostActivity.new(x)}
end
# 10 most recent appeals on posts that are not currently active.
def appealed_posts
PostAppeal.find(:all, :joins => "JOIN posts ON post_appeals.post_id = posts.id", :conditions => ["post_appeals.created_at > ? and posts.status <> ?", min_date, "active"], :order => "post_appeals.id desc", :limit => 10)
end
# Top 10 uploaders. Runs with timeouts disabled — presumably this aggregate
# can be slow on large posts tables; confirm before changing.
def upload_activity
ActiveRecord::Base.without_timeout do
@upload_activity = ActiveRecord::Base.select_all_sql("select posts.user_id, count(*) from posts join users on posts.user_id = users.id where posts.created_at > ? and users.level <= ? group by posts.user_id order by count(*) desc limit 10", min_date, max_level).map {|x| UserActivity.new(x)}
end
@upload_activity
end
# Top 10 most-voted comments with at least 3 votes; +positive+ selects
# comments with positive vs. negative score.
def comment_activity(positive = false)
if positive
ActiveRecord::Base.select_all_sql("SELECT comment_votes.comment_id, count(*) FROM comment_votes JOIN comments ON comments.id = comment_votes.comment_id JOIN users ON users.id = comments.user_id WHERE comment_votes.created_at > ? AND comments.score > 0 AND users.level <= ? GROUP BY comment_votes.comment_id HAVING count(*) >= 3 ORDER BY count(*) DESC LIMIT 10", min_date, max_level).map {|x| CommentActivity.new(x)}
else
ActiveRecord::Base.select_all_sql("SELECT comment_votes.comment_id, count(*) FROM comment_votes JOIN comments ON comments.id = comment_votes.comment_id JOIN users ON users.id = comments.user_id WHERE comment_votes.created_at > ? AND comments.score < 0 AND users.level <= ? GROUP BY comment_votes.comment_id HAVING count(*) >= 3 ORDER BY count(*) DESC LIMIT 10", min_date, max_level).map {|x| CommentActivity.new(x)}
end
end
# Top 10 most-voted posts with at least 3 votes; +positive+ selects posts
# with positive vs. negative score (negative branch also excludes deleted
# posts — the asymmetry matches the original queries).
def post_activity(positive = false)
ActiveRecord::Base.without_timeout do
if positive
@post_activity = ActiveRecord::Base.select_all_sql("SELECT post_votes.post_id, count(*) FROM post_votes JOIN posts ON posts.id = post_votes.post_id JOIN users ON users.id = posts.user_id WHERE post_votes.created_at > ? AND posts.score > 0 AND users.level <= ? GROUP BY post_votes.post_id HAVING count(*) >= 3 ORDER BY count(*) DESC LIMIT 10", min_date, max_level).map {|x| PostActivity.new(x)}
else
@post_activity = ActiveRecord::Base.select_all_sql("SELECT post_votes.post_id, count(*) FROM post_votes JOIN posts ON posts.id = post_votes.post_id JOIN users ON users.id = posts.user_id WHERE post_votes.created_at > ? AND posts.score < 0 AND users.level <= ? AND posts.status <> 'deleted' GROUP BY post_votes.post_id HAVING count(*) >= 3 ORDER BY count(*) DESC LIMIT 10", min_date, max_level).map {|x| PostActivity.new(x)}
end
end
@post_activity
end
# Top 10 tag editors by tag-history entries.
def tag_activity
ActiveRecord::Base.without_timeout do
@tag_activity = ActiveRecord::Base.select_all_sql("SELECT post_tag_histories.user_id, count(*) FROM post_tag_histories JOIN users ON users.id = post_tag_histories.user_id WHERE post_tag_histories.created_at > ? AND users.level <= ? GROUP BY post_tag_histories.user_id ORDER BY count(*) DESC LIMIT 10", min_date, max_level).map {|x| UserActivity.new(x)}
end
@tag_activity
end
# Top 10 note editors by note versions.
def note_activity
ActiveRecord::Base.select_all_sql("SELECT note_versions.user_id, count(*) FROM note_versions JOIN users ON users.id = note_versions.user_id WHERE note_versions.created_at > ? AND users.level <= ? GROUP BY note_versions.user_id ORDER BY count(*) DESC LIMIT 10", min_date, max_level).map {|x| UserActivity.new(x)}
end
# Top 10 wiki editors by wiki page versions.
def wiki_page_activity
ActiveRecord::Base.select_all_sql("SELECT wiki_page_versions.user_id, count(*) FROM wiki_page_versions JOIN users ON users.id = wiki_page_versions.user_id WHERE wiki_page_versions.created_at > ? AND users.level <= ? GROUP BY wiki_page_versions.user_id ORDER BY count(*) DESC LIMIT 10", min_date, max_level).map {|x| UserActivity.new(x)}
end
# Top 10 artist-entry editors by artist versions.
def artist_activity
ActiveRecord::Base.select_all_sql("SELECT artist_versions.updater_id AS user_id, count(*) FROM artist_versions JOIN users ON users.id = artist_versions.updater_id WHERE artist_versions.created_at > ? AND users.level <= ? GROUP BY artist_versions.updater_id ORDER BY count(*) DESC LIMIT 10", min_date, max_level).map {|x| UserActivity.new(x)}
end
end
|
Cellularhacker/JavaProgramming2017
|
Day07/ex03_14/InputException.java
|
<gh_stars>0
//2017-09-27 KST ex03_14/InputException.java
//Ex03-14
package ex03_14;
import java.util.Scanner;
import java.util.InputMismatchException;
/**
 * Console exercise: reads three integers from standard input and prints
 * their sum. A non-integer token is rejected with a message and the same
 * prompt index is asked again, exactly as in the original loop.
 */
public class InputException {
    public static void main(String[] args) {
        Scanner scanner = new Scanner(System.in);
        System.out.println("정수 3개를 입력하세요.");
        int sum = 0;
        int read = 0;
        while (read < 3) {
            System.out.print(read + ">>");
            try {
                sum += scanner.nextInt();
                read++; // count only successful reads
            } catch (InputMismatchException e) {
                System.out.println("정수가 아닙니다. 다시 입력하세요!");
                scanner.next(); // discard the offending token and re-prompt
            }
        }
        System.out.println("합은 " + sum);
        scanner.close();
    }
}
|
JRBonilla/Slate
|
Phoebe-core/src/ph/renderer/renderables/Sprite.cpp
|
<gh_stars>1-10
#include "Sprite.h"
namespace ph { namespace renderer {

	// Solid-color sprite: a 2D renderable at (x, y) with the given size and
	// a packed color value. Z coordinate is fixed at 0.
	Sprite::Sprite(float x, float y, float width, float height, uint color)
		: Renderable2D(math::Vec3(x, y, 0), math::Vec2(width, height), color) { }

	// Textured sprite: the color passed to Renderable2D is opaque white
	// (0xffffffff) so the texture is rendered unmodified.
	Sprite::Sprite(float x, float y, float width, float height, Texture2D* texture)
		: Renderable2D(math::Vec3(x, y, 0), math::Vec2(width, height), 0xffffffff) {
		m_Texture = texture;
	}

}}
|
Paouky/GrinPlusPlus
|
tests/src/BlockChain/Test_ReorgChain.cpp
|
#include <catch.hpp>
#include <TestServer.h>
#include <TestMiner.h>
#include <TestChain.h>
#include <TxBuilder.h>
#include <TestHelper.h>
#include <Consensus.h>
#include <BlockChain/BlockChain.h>
#include <Database/Database.h>
#include <Database/BlockDb.h>
#include <PMMR/TxHashSetManager.h>
#include <TxPool/TransactionPool.h>
#include <Core/Validation/TransactionValidator.h>
#include <Core/File/FileRemover.h>
#include <Core/Util/TransactionUtil.h>
//
// a - b - c
// \
// - b'
//
// Process in the following order -
// 1. block_a
// 2. header_b
// 3. header_b_fork
// 4. block_b_fork
// 5. block_b
// 6. block_c
//
TEST_CASE("REORG 1")
{
	// Fixtures: an in-memory node plus a keychain for building coinbase txs.
	TestServer::Ptr pTestServer = TestServer::Create();
	KeyChain keyChain = KeyChain::FromRandom(*pTestServer->GetConfig());
	TxBuilder txBuilder(keyChain);
	auto pBlockChain = pTestServer->GetBlockChain();

	// Build a - b - c on the helper chain, then rewind so the next mined
	// block (b') forks off block a at height 2.
	TestChain chain1(pTestServer);
	Test::Tx coinbase_a = txBuilder.BuildCoinbaseTx(KeyChainPath({ 0, 1 }));
	MinedBlock block_a = chain1.AddNextBlock({ coinbase_a });
	Test::Tx coinbase_b = txBuilder.BuildCoinbaseTx(KeyChainPath({ 0, 2 }));
	MinedBlock block_b = chain1.AddNextBlock({ coinbase_b });
	Test::Tx coinbase_c = txBuilder.BuildCoinbaseTx(KeyChainPath({ 0, 3 }));
	MinedBlock block_c = chain1.AddNextBlock({ coinbase_c });
	chain1.Rewind(2);
	Test::Tx coinbase_b_fork = txBuilder.BuildCoinbaseTx(KeyChainPath({ 1, 2 }));
	MinedBlock block_b_fork = chain1.AddNextBlock({ coinbase_b_fork });

	////////////////////////////////////////
	// 1. block_a
	////////////////////////////////////////
	// A full block advances both the confirmed and the candidate chain.
	REQUIRE(pBlockChain->AddBlock(block_a.block) == EBlockChainStatus::SUCCESS);
	REQUIRE(pBlockChain->GetHeight(EChainType::CONFIRMED) == 1);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CONFIRMED)->GetHash() == block_a.block.GetHash());
	REQUIRE(pBlockChain->GetHeight(EChainType::CANDIDATE) == 1);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CANDIDATE)->GetHash() == block_a.block.GetHash());

	////////////////////////////////////////
	// 2. header_b
	////////////////////////////////////////
	// A header alone advances only the candidate chain; confirmed stays at a.
	REQUIRE(pBlockChain->AddBlockHeader(block_b.block.GetHeader()) == EBlockChainStatus::SUCCESS);
	REQUIRE(pBlockChain->GetHeight(EChainType::CONFIRMED) == 1);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CONFIRMED)->GetHash() == block_a.block.GetHash());
	REQUIRE(pBlockChain->GetHeight(EChainType::CANDIDATE) == 2);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CANDIDATE)->GetHash() == block_b.block.GetHash());

	////////////////////////////////////////
	// 3. header_b_fork
	////////////////////////////////////////
	// The fork header is accepted but does not outrank b, so both tips are
	// unchanged.
	REQUIRE(pBlockChain->AddBlockHeader(block_b_fork.block.GetHeader()) == EBlockChainStatus::SUCCESS);
	REQUIRE(pBlockChain->GetHeight(EChainType::CONFIRMED) == 1);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CONFIRMED)->GetHash() == block_a.block.GetHash());
	REQUIRE(pBlockChain->GetHeight(EChainType::CANDIDATE) == 2);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CANDIDATE)->GetHash() == block_b.block.GetHash());

	////////////////////////////////////////
	// 4. block_b_fork
	////////////////////////////////////////
	// First full block at height 2 takes the confirmed tip, while the
	// candidate (header) chain still prefers b.
	REQUIRE(pBlockChain->AddBlock(block_b_fork.block) == EBlockChainStatus::SUCCESS);
	REQUIRE(pBlockChain->GetHeight(EChainType::CONFIRMED) == 2);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CONFIRMED)->GetHash() == block_b_fork.block.GetHash());
	REQUIRE(pBlockChain->GetHeight(EChainType::CANDIDATE) == 2);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CANDIDATE)->GetHash() == block_b.block.GetHash());

	////////////////////////////////////////
	// 5. block_b
	////////////////////////////////////////
	// b arrives but only ties b' at height 2, so the confirmed tip keeps b'.
	REQUIRE(pBlockChain->AddBlock(block_b.block) == EBlockChainStatus::SUCCESS);
	REQUIRE(pBlockChain->GetHeight(EChainType::CONFIRMED) == 2);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CONFIRMED)->GetHash() == block_b_fork.block.GetHash());
	REQUIRE(pBlockChain->GetHeight(EChainType::CANDIDATE) == 2);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CANDIDATE)->GetHash() == block_b.block.GetHash());

	////////////////////////////////////////
	// 6. block_c
	////////////////////////////////////////
	// c builds on b, so a - b - c outranks a - b' and both chains reorg to c.
	REQUIRE(pBlockChain->AddBlock(block_c.block) == EBlockChainStatus::SUCCESS);
	REQUIRE(pBlockChain->GetHeight(EChainType::CONFIRMED) == 3);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CONFIRMED)->GetHash() == block_c.block.GetHash());
	REQUIRE(pBlockChain->GetHeight(EChainType::CANDIDATE) == 3);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CANDIDATE)->GetHash() == block_c.block.GetHash());
}
//
// a - b - c
// \
// - b'
//
// Process in the following order -
// 1. block_a
// 2. header_b
// 3. header_c
// 4. block_b_fork
// 5. block_c
// 6. block_b
//
TEST_CASE("REORG 2")
{
	// Fixtures: an in-memory node plus a keychain for building coinbase txs.
	TestServer::Ptr pTestServer = TestServer::Create();
	KeyChain keyChain = KeyChain::FromRandom(*pTestServer->GetConfig());
	TxBuilder txBuilder(keyChain);
	auto pBlockChain = pTestServer->GetBlockChain();

	// Build a - b - c, then rewind so the next mined block (b') forks off a.
	TestChain chain1(pTestServer);
	Test::Tx coinbase_a = txBuilder.BuildCoinbaseTx(KeyChainPath({ 0, 1 }));
	MinedBlock block_a = chain1.AddNextBlock({ coinbase_a });
	Test::Tx coinbase_b = txBuilder.BuildCoinbaseTx(KeyChainPath({ 0, 2 }));
	MinedBlock block_b = chain1.AddNextBlock({ coinbase_b });
	Test::Tx coinbase_c = txBuilder.BuildCoinbaseTx(KeyChainPath({ 0, 3 }));
	MinedBlock block_c = chain1.AddNextBlock({ coinbase_c });
	chain1.Rewind(2);
	Test::Tx coinbase_b_fork = txBuilder.BuildCoinbaseTx(KeyChainPath({ 1, 2 }));
	MinedBlock block_b_fork = chain1.AddNextBlock({ coinbase_b_fork });

	////////////////////////////////////////
	// 1. block_a
	////////////////////////////////////////
	// Both chains advance to a.
	REQUIRE(pBlockChain->AddBlock(block_a.block) == EBlockChainStatus::SUCCESS);
	REQUIRE(pBlockChain->GetHeight(EChainType::CONFIRMED) == 1);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CONFIRMED)->GetHash() == block_a.block.GetHash());
	REQUIRE(pBlockChain->GetHeight(EChainType::CANDIDATE) == 1);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CANDIDATE)->GetHash() == block_a.block.GetHash());

	////////////////////////////////////////
	// 2. header_b
	////////////////////////////////////////
	// Headers only move the candidate chain.
	REQUIRE(pBlockChain->AddBlockHeader(block_b.block.GetHeader()) == EBlockChainStatus::SUCCESS);
	REQUIRE(pBlockChain->GetHeight(EChainType::CONFIRMED) == 1);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CONFIRMED)->GetHash() == block_a.block.GetHash());
	REQUIRE(pBlockChain->GetHeight(EChainType::CANDIDATE) == 2);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CANDIDATE)->GetHash() == block_b.block.GetHash());

	////////////////////////////////////////
	// 3. header_c
	////////////////////////////////////////
	// Candidate chain now runs ahead to c while confirmed still sits at a.
	REQUIRE(pBlockChain->AddBlockHeader(block_c.block.GetHeader()) == EBlockChainStatus::SUCCESS);
	REQUIRE(pBlockChain->GetHeight(EChainType::CONFIRMED) == 1);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CONFIRMED)->GetHash() == block_a.block.GetHash());
	REQUIRE(pBlockChain->GetHeight(EChainType::CANDIDATE) == 3);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CANDIDATE)->GetHash() == block_c.block.GetHash());

	////////////////////////////////////////
	// 4. block_b_fork
	////////////////////////////////////////
	// The fork block is the first full block at height 2, so it becomes the
	// confirmed tip; the candidate chain keeps the known-better header c.
	REQUIRE(pBlockChain->AddBlock(block_b_fork.block) == EBlockChainStatus::SUCCESS);
	REQUIRE(pBlockChain->GetHeight(EChainType::CONFIRMED) == 2);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CONFIRMED)->GetHash() == block_b_fork.block.GetHash());
	REQUIRE(pBlockChain->GetHeight(EChainType::CANDIDATE) == 3);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CANDIDATE)->GetHash() == block_c.block.GetHash());

	////////////////////////////////////////
	// 5. block_c
	////////////////////////////////////////
	// c cannot connect yet because its parent b has no full block on this
	// node, so it is held as an orphan and the tips stay put.
	REQUIRE(pBlockChain->AddBlock(block_c.block) == EBlockChainStatus::ORPHANED);
	REQUIRE(pBlockChain->GetHeight(EChainType::CONFIRMED) == 2);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CONFIRMED)->GetHash() == block_b_fork.block.GetHash());
	REQUIRE(pBlockChain->GetHeight(EChainType::CANDIDATE) == 3);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CANDIDATE)->GetHash() == block_c.block.GetHash());

	////////////////////////////////////////
	// 6. block_b
	////////////////////////////////////////
	// b only ties b' at height 2, so the confirmed tip keeps b' for now.
	REQUIRE(pBlockChain->AddBlock(block_b.block) == EBlockChainStatus::SUCCESS);
	REQUIRE(pBlockChain->GetHeight(EChainType::CONFIRMED) == 2);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CONFIRMED)->GetHash() == block_b_fork.block.GetHash());
	REQUIRE(pBlockChain->GetHeight(EChainType::CANDIDATE) == 3);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CANDIDATE)->GetHash() == block_c.block.GetHash());

	////////////////////////////////////////
	// 7. Process orphans
	////////////////////////////////////////
	// Connecting the orphaned c completes a - b - c, which outranks a - b',
	// so the confirmed chain reorgs to c.
	REQUIRE(pBlockChain->ProcessNextOrphanBlock());
	REQUIRE(pBlockChain->GetHeight(EChainType::CONFIRMED) == 3);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CONFIRMED)->GetHash() == block_c.block.GetHash());
	REQUIRE(pBlockChain->GetHeight(EChainType::CANDIDATE) == 3);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CANDIDATE)->GetHash() == block_c.block.GetHash());
}
//
// a - b - c
// \
// - b'
//
// Process in the following order -
// 1. block_a
// 2. block_b
// 3. block_c
// 4. block_b_fork - higher difficulty
//
TEST_CASE("REORG 3")
{
	// Fixtures: an in-memory node plus a keychain for building coinbase txs.
	TestServer::Ptr pTestServer = TestServer::Create();
	KeyChain keyChain = KeyChain::FromRandom(*pTestServer->GetConfig());
	TxBuilder txBuilder(keyChain);
	auto pBlockChain = pTestServer->GetBlockChain();

	// Build a - b - c, then fork with b' mined at extra difficulty (the
	// second AddNextBlock argument — TODO confirm it is a difficulty bump)
	// so the one-block fork outweighs the two blocks b - c.
	TestChain chain1(pTestServer);
	Test::Tx coinbase_a = txBuilder.BuildCoinbaseTx(KeyChainPath({ 0, 1 }));
	MinedBlock block_a = chain1.AddNextBlock({ coinbase_a });
	Test::Tx coinbase_b = txBuilder.BuildCoinbaseTx(KeyChainPath({ 0, 2 }));
	MinedBlock block_b = chain1.AddNextBlock({ coinbase_b });
	Test::Tx coinbase_c = txBuilder.BuildCoinbaseTx(KeyChainPath({ 0, 3 }));
	MinedBlock block_c = chain1.AddNextBlock({ coinbase_c });
	chain1.Rewind(2);
	Test::Tx coinbase_b_fork = txBuilder.BuildCoinbaseTx(KeyChainPath({ 1, 2 }));
	MinedBlock block_b_fork = chain1.AddNextBlock({ coinbase_b_fork }, 10);

	////////////////////////////////////////
	// Process block_a, block_b, block_c
	////////////////////////////////////////
	// The original chain is accepted in order; both tips end at c.
	REQUIRE(pBlockChain->AddBlock(block_a.block) == EBlockChainStatus::SUCCESS);
	REQUIRE(pBlockChain->AddBlock(block_b.block) == EBlockChainStatus::SUCCESS);
	REQUIRE(pBlockChain->AddBlock(block_c.block) == EBlockChainStatus::SUCCESS);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CONFIRMED)->GetHash() == block_c.block.GetHash());
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CANDIDATE)->GetHash() == block_c.block.GetHash());

	////////////////////////////////////////
	// Process forked block_b with higher difficulty
	////////////////////////////////////////
	// Despite being shorter, the heavier fork wins: both chains reorg from
	// c back to b'.
	REQUIRE(pBlockChain->AddBlock(block_b_fork.block) == EBlockChainStatus::SUCCESS);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CONFIRMED)->GetHash() == block_b_fork.block.GetHash());
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CANDIDATE)->GetHash() == block_b_fork.block.GetHash());
}
//
// a - b - c
// \
// - b'
//
// Process in the following order -
// 1. block_a
// 2. block_b_fork
// 3. block_b
// 4. block_c
//
TEST_CASE("REORG 4")
{
	// Fixtures: an in-memory node plus a keychain for building coinbase txs.
	TestServer::Ptr pTestServer = TestServer::Create();
	KeyChain keyChain = KeyChain::FromRandom(*pTestServer->GetConfig());
	TxBuilder txBuilder(keyChain);
	auto pBlockChain = pTestServer->GetBlockChain();

	// Build a - b - c, then rewind so the next mined block (b') forks off a.
	TestChain chain1(pTestServer);
	Test::Tx coinbase_a = txBuilder.BuildCoinbaseTx(KeyChainPath({ 0, 1 }));
	MinedBlock block_a = chain1.AddNextBlock({ coinbase_a });
	Test::Tx coinbase_b = txBuilder.BuildCoinbaseTx(KeyChainPath({ 0, 2 }));
	MinedBlock block_b = chain1.AddNextBlock({ coinbase_b });
	Test::Tx coinbase_c = txBuilder.BuildCoinbaseTx(KeyChainPath({ 0, 3 }));
	MinedBlock block_c = chain1.AddNextBlock({ coinbase_c });
	chain1.Rewind(2);
	Test::Tx coinbase_b_fork = txBuilder.BuildCoinbaseTx(KeyChainPath({ 1, 2 }));
	MinedBlock block_b_fork = chain1.AddNextBlock({ coinbase_b_fork });

	////////////////////////////////////////
	// 1. block_a
	////////////////////////////////////////
	// Both chains advance to a.
	REQUIRE(pBlockChain->AddBlock(block_a.block) == EBlockChainStatus::SUCCESS);
	REQUIRE(pBlockChain->GetHeight(EChainType::CONFIRMED) == 1);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CONFIRMED)->GetHash() == block_a.block.GetHash());
	REQUIRE(pBlockChain->GetHeight(EChainType::CANDIDATE) == 1);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CANDIDATE)->GetHash() == block_a.block.GetHash());

	////////////////////////////////////////
	// 2. block_b_fork
	////////////////////////////////////////
	// The fork block arrives first, so it takes both tips at height 2.
	REQUIRE(pBlockChain->AddBlock(block_b_fork.block) == EBlockChainStatus::SUCCESS);
	REQUIRE(pBlockChain->GetHeight(EChainType::CONFIRMED) == 2);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CONFIRMED)->GetHash() == block_b_fork.block.GetHash());
	REQUIRE(pBlockChain->GetHeight(EChainType::CANDIDATE) == 2);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CANDIDATE)->GetHash() == block_b_fork.block.GetHash());

	////////////////////////////////////////
	// 3. block_b
	////////////////////////////////////////
	// b only ties b' at height 2, so both tips keep b'.
	REQUIRE(pBlockChain->AddBlock(block_b.block) == EBlockChainStatus::SUCCESS);
	REQUIRE(pBlockChain->GetHeight(EChainType::CONFIRMED) == 2);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CONFIRMED)->GetHash() == block_b_fork.block.GetHash());
	REQUIRE(pBlockChain->GetHeight(EChainType::CANDIDATE) == 2);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CANDIDATE)->GetHash() == block_b_fork.block.GetHash());

	////////////////////////////////////////
	// 4. block_c
	////////////////////////////////////////
	// c extends b, making a - b - c the longest chain; both tips reorg to c.
	REQUIRE(pBlockChain->AddBlock(block_c.block) == EBlockChainStatus::SUCCESS);
	REQUIRE(pBlockChain->GetHeight(EChainType::CONFIRMED) == 3);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CONFIRMED)->GetHash() == block_c.block.GetHash());
	REQUIRE(pBlockChain->GetHeight(EChainType::CANDIDATE) == 3);
	REQUIRE(pBlockChain->GetTipBlockHeader(EChainType::CANDIDATE)->GetHash() == block_c.block.GetHash());
}
TEST_CASE("Reorg Chain")
{
	// Fixtures: in-memory node, miner helper, and a keychain for coinbases.
	TestServer::Ptr pTestServer = TestServer::Create();
	TestMiner miner(pTestServer);
	KeyChain keyChain = KeyChain::FromRandom(*pTestServer->GetConfig());
	TxBuilder txBuilder(keyChain);
	auto pBlockChain = pTestServer->GetBlockChain();

	////////////////////////////////////////
	// Mine a chain with 30 blocks (Coinbase maturity for tests is only 25)
	////////////////////////////////////////
	std::vector<MinedBlock> minedChain = miner.MineChain(keyChain, 30);
	REQUIRE(minedChain.size() == 30);

	// Create a transaction that spends the coinbase from block 1
	TransactionOutput outputToSpend = minedChain[1].block.GetOutputs().front();
	Test::Input input({
		{ outputToSpend.GetFeatures(), outputToSpend.GetCommitment() },
		minedChain[1].coinbasePath.value(),
		minedChain[1].coinbaseAmount
	});
	Test::Output newOutput({
		KeyChainPath({ 1, 0 }),
		(uint64_t)10'000'000
	});
	// Remainder of the spent coinbase goes back to us as change.
	Test::Output changeOutput({
		KeyChainPath({ 1, 1 }),
		(uint64_t)(minedChain[1].coinbaseAmount - 10'000'000)
	});
	Transaction spendTransaction = txBuilder.BuildTx(Fee(), { input }, { newOutput, changeOutput });

	// Create block 30a
	Test::Tx coinbaseTx30a = txBuilder.BuildCoinbaseTx(KeyChainPath({ 0, 30 }));

	// Combine the spend with the coinbase transaction for block 30a
	TransactionPtr pCombinedTx30a = TransactionUtil::Aggregate({
		coinbaseTx30a.pTransaction,
		std::make_shared<Transaction>(spendTransaction)
	});
	FullBlock block30a = miner.MineNextBlock(
		minedChain.back().block.GetHeader(),
		*pCombinedTx30a
	);
	REQUIRE(pBlockChain->AddBlock(block30a) == EBlockChainStatus::SUCCESS);

	////////////////////////////////////////
	// Create "reorg" chain with block 28b that spends an earlier coinbase, and blocks 29b, 30b, and 31b on top
	////////////////////////////////////////
	Test::Tx coinbaseTx28b = txBuilder.BuildCoinbaseTx(KeyChainPath({ 1, 28 }));

	// Combine the same spendTransaction with the coinbase for block 28b.
	// The same output is spent on both forks — valid, since only one fork
	// can be the confirmed chain at a time.
	TransactionPtr pCombinedTx28b = TransactionUtil::Aggregate({
		coinbaseTx28b.pTransaction,
		std::make_shared<Transaction>(spendTransaction)
	});

	// Create block 28b
	FullBlock block28b = miner.MineNextBlock(
		minedChain[27].block.GetHeader(),
		*pCombinedTx28b
	);

	// Create block 29b
	Test::Tx coinbaseTx29b = txBuilder.BuildCoinbaseTx(KeyChainPath({ 1, 29 }));
	FullBlock block29b = miner.MineNextBlock(
		minedChain[27].block.GetHeader(),
		*coinbaseTx29b.pTransaction,
		{ block28b }
	);

	// Create block 30b
	Test::Tx coinbaseTx30b = txBuilder.BuildCoinbaseTx(KeyChainPath({ 1, 30 }));
	FullBlock block30b = miner.MineNextBlock(
		minedChain[27].block.GetHeader(),
		*coinbaseTx30b.pTransaction,
		{ block28b, block29b }
	);

	// Create block 31b
	Test::Tx coinbaseTx31b = txBuilder.BuildCoinbaseTx(KeyChainPath({ 1, 31 }));
	FullBlock block31b = miner.MineNextBlock(
		minedChain[27].block.GetHeader(),
		*coinbaseTx31b.pTransaction,
		{ block28b, block29b, block30b }
	);

	////////////////////////////////////////
	// Verify the fork blocks 28b-31b are all accepted: the longer fork
	// reorgs out block30a, and block31b ends up as the block at height 31.
	////////////////////////////////////////
	REQUIRE(pBlockChain->AddBlock(block28b) == EBlockChainStatus::SUCCESS);
	REQUIRE(pBlockChain->AddBlock(block29b) == EBlockChainStatus::SUCCESS);
	REQUIRE(pBlockChain->AddBlock(block30b) == EBlockChainStatus::SUCCESS);
	REQUIRE(pBlockChain->AddBlock(block31b) == EBlockChainStatus::SUCCESS);
	REQUIRE(pBlockChain->GetBlockByHeight(31)->GetHash() == block31b.GetHash());

	// TODO: Assert unspent positions in leafset and in database.
}
|
Wassaf-Shahzad/micromasters
|
static/js/DashboardRouter.js
|
// @flow
/* global SETTINGS: false */
import React from "react"
import { Router } from "react-router"
import { Provider } from "react-redux"
import { MuiThemeProvider, createMuiTheme } from "@material-ui/core/styles"
import type { Store } from "redux"
/**
 * Root component for the dashboard SPA. Wraps the react-router <Router>
 * in the redux store <Provider> and a Material-UI theme provider so every
 * routed page has access to both.
 */
export default class DashboardRouter extends React.Component {
  props: {
    browserHistory: Object, // history implementation handed to react-router
    onRouteUpdate: () => void, // invoked by <Router> on every route change
    store: Store, // redux store shared with the whole component tree
    routes: Object // react-router route configuration
  }

  /**
   * Renders the router wrapped in the theme and store context providers.
   */
  render() {
    const { browserHistory, onRouteUpdate, store, routes } = this.props
    return (
      <div>
        <MuiThemeProvider theme={createMuiTheme()}>
          <Provider store={store}>
            <Router
              history={browserHistory}
              onUpdate={onRouteUpdate}
              routes={routes}
            />
          </Provider>
        </MuiThemeProvider>
      </div>
    )
  }
}
|
marcopeg/skill-matrix
|
app/src/lib/strings.test.js
|
// Unit tests for composeUrl(base, uri): plain concatenation, duplicate-slash
// cleanup at the join point, and passthrough of already-absolute URIs.
import { composeUrl } from "./strings";

describe("lib", () => {
  describe("strings", () => {
    describe("composeUrl", () => {
      test("it should concatenate base and uri", () => {
        const res = composeUrl("http://google.com", "foobar");
        expect(res).toBe("http://google.com/foobar");
      });
      // Trailing slash on base plus leading slash on uri must not yield "//".
      test("it should deduplicate slashes", () => {
        const res = composeUrl("http://google.com/", "/foobar");
        expect(res).toBe("http://google.com/foobar");
      });
      // A uri that is itself a full URL is returned as-is; base is dropped.
      test("it should ignore base for full urls", () => {
        const res = composeUrl("http://google.com/", "https://foobar");
        expect(res).toBe("https://foobar");
      });
    });
  });
});
|
pulumi/pulumi-yandex
|
sdk/python/pulumi_yandex/get_resourcemanager_cloud.py
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = [
'GetResourcemanagerCloudResult',
'AwaitableGetResourcemanagerCloudResult',
'get_resourcemanager_cloud',
'get_resourcemanager_cloud_output',
]
@pulumi.output_type
class GetResourcemanagerCloudResult:
    """
    A collection of values returned by getResourcemanagerCloud.
    """
    def __init__(__self__, cloud_id=None, created_at=None, description=None, id=None, name=None):
        # Each value is type-checked (when present) and stored through
        # pulumi.set so the @pulumi.getter properties below can read it back.
        if cloud_id and not isinstance(cloud_id, str):
            raise TypeError("Expected argument 'cloud_id' to be a str")
        pulumi.set(__self__, "cloud_id", cloud_id)
        if created_at and not isinstance(created_at, str):
            raise TypeError("Expected argument 'created_at' to be a str")
        pulumi.set(__self__, "created_at", created_at)
        if description and not isinstance(description, str):
            raise TypeError("Expected argument 'description' to be a str")
        pulumi.set(__self__, "description", description)
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)

    @property
    @pulumi.getter(name="cloudId")
    def cloud_id(self) -> str:
        """
        ID of the cloud.
        """
        return pulumi.get(self, "cloud_id")

    @property
    @pulumi.getter(name="createdAt")
    def created_at(self) -> str:
        """
        Cloud creation timestamp.
        """
        return pulumi.get(self, "created_at")

    @property
    @pulumi.getter
    def description(self) -> Optional[str]:
        """
        Description of the cloud.
        """
        return pulumi.get(self, "description")

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        The provider-assigned unique ID for this managed resource.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Name of the cloud.
        """
        return pulumi.get(self, "name")
class AwaitableGetResourcemanagerCloudResult(GetResourcemanagerCloudResult):
    """Awaitable wrapper around GetResourcemanagerCloudResult."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # The unreachable `yield` makes this method a generator, which is
        # what allows the result to be used with `await`; awaiting completes
        # immediately and returns a plain copy of the resolved values.
        if False:
            yield self
        return GetResourcemanagerCloudResult(
            cloud_id=self.cloud_id,
            created_at=self.created_at,
            description=self.description,
            id=self.id,
            name=self.name)
def get_resourcemanager_cloud(cloud_id: Optional[str] = None,
                              description: Optional[str] = None,
                              name: Optional[str] = None,
                              opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetResourcemanagerCloudResult:
    """
    Use this data source to get cloud details.
    For more information, see [Cloud](https://cloud.yandex.com/docs/resource-manager/concepts/resources-hierarchy#cloud).

    ## Example Usage

    ```python
    import pulumi
    import pulumi_yandex as yandex

    foo = yandex.get_resourcemanager_cloud(name="foo-cloud")
    pulumi.export("cloudCreateTimestamp", foo.created_at)
    ```

    :param str cloud_id: ID of the cloud.
    :param str description: Description of the cloud.
    :param str name: Name of the cloud.
    """
    # Translate the snake_case arguments to the camelCase keys the provider
    # invoke expects.
    __args__ = dict()
    __args__['cloudId'] = cloud_id
    __args__['description'] = description
    __args__['name'] = name
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        # Default to the plugin version pinned by this SDK build.
        opts.version = _utilities.get_version()
    __ret__ = pulumi.runtime.invoke('yandex:index/getResourcemanagerCloud:getResourcemanagerCloud', __args__, opts=opts, typ=GetResourcemanagerCloudResult).value

    return AwaitableGetResourcemanagerCloudResult(
        cloud_id=__ret__.cloud_id,
        created_at=__ret__.created_at,
        description=__ret__.description,
        id=__ret__.id,
        name=__ret__.name)
@_utilities.lift_output_func(get_resourcemanager_cloud)
def get_resourcemanager_cloud_output(cloud_id: Optional[pulumi.Input[Optional[str]]] = None,
                                     description: Optional[pulumi.Input[Optional[str]]] = None,
                                     name: Optional[pulumi.Input[Optional[str]]] = None,
                                     opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetResourcemanagerCloudResult]:
    """
    Use this data source to get cloud details.
    For more information, see [Cloud](https://cloud.yandex.com/docs/resource-manager/concepts/resources-hierarchy#cloud).

    ## Example Usage

    ```python
    import pulumi
    import pulumi_yandex as yandex

    foo = yandex.get_resourcemanager_cloud(name="foo-cloud")
    pulumi.export("cloudCreateTimestamp", foo.created_at)
    ```

    :param str cloud_id: ID of the cloud.
    :param str description: Description of the cloud.
    :param str name: Name of the cloud.
    """
    # Body intentionally empty: the lift_output_func decorator supplies the
    # implementation by wrapping get_resourcemanager_cloud so it accepts
    # pulumi Inputs and returns an Output.
    ...
|
mitraj/Gooru-Web
|
src/main/java/org/ednovo/gooru/shared/model/content/ResourceDo.java
|
<reponame>mitraj/Gooru-Web
/*******************************************************************************
* Copyright 2013 Ednovo d/b/a Gooru. All rights reserved.
*
* http://www.goorulearning.org/
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
* OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
******************************************************************************/
package org.ednovo.gooru.shared.model.content;
import java.util.ArrayList;
import java.util.TreeSet;
import org.ednovo.gooru.shared.util.ResourceImageUtil;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
/**
 * Data object for a single Gooru resource (web page, video, question, etc.).
 * <p>
 * Mirrors the JSON payload returned by the resource APIs; fields left null
 * are omitted from serialized output ({@code Include.NON_NULL}).
 */
@JsonInclude(Include.NON_NULL)
public class ResourceDo extends ContentDo {

	private static final long serialVersionUID = 3421728485329612613L;

	// --- Core metadata --------------------------------------------------
	private String id;
	private String assetURI;
	private String title;
	private String description;
	private String folder;
	private String category;
	private String explanation;
	private String goals;
	private String grade;
	private String mediaType;
	private String url;
	private String encodedUrl;
	private String label;     // Some APIs return the title in a "label" key
	private String nativeurl; // Some APIs return the url in a "nativeUrl" key

	// --- Associated objects ---------------------------------------------
	private ThumbnailDo thumbnails;
	private ResourceTypeDo resourceType;
	private ResourceSourceDo resourceSource;
	private ResourceFormatDo resourceFormat;
	private LicenseDo license;
	private TrackActivityDo trackActivity;

	// --- Question-type resources ----------------------------------------
	private Integer type;
	private String questionText;
	private TreeSet<QuestionAnswerDo> answers;
	private TreeSet<QuestionHintsDo> hints;
	private ArrayList<AssetsDo> assets;

	// --- Usage / community data -----------------------------------------
	private String views;
	private Integer votesUp;
	private Integer userRating;
	private Boolean hasFrameBreaker;

	public ResourceDo() {
	}

	public String getId() {
		return id;
	}

	public void setId(String id) {
		this.id = id;
	}

	public String getAssetURI() {
		return assetURI;
	}

	public void setAssetURI(String assetURI) {
		this.assetURI = assetURI;
	}

	public String getTitle() {
		return title;
	}

	public void setTitle(String title) {
		this.title = title;
	}

	public String getDescription() {
		return description;
	}

	public void setDescription(String description) {
		this.description = description;
	}

	public String getFolder() {
		return folder;
	}

	public void setFolder(String folder) {
		this.folder = folder;
	}

	public String getCategory() {
		return category;
	}

	public void setCategory(String category) {
		this.category = category;
	}

	public String getExplanation() {
		return explanation;
	}

	public void setExplanation(String explanation) {
		this.explanation = explanation;
	}

	public String getGoals() {
		return goals;
	}

	public void setGoals(String goals) {
		this.goals = goals;
	}

	public String getGrade() {
		return grade;
	}

	public void setGrade(String grade) {
		this.grade = grade;
	}

	public String getMediaType() {
		return mediaType;
	}

	public void setMediaType(String mediaType) {
		this.mediaType = mediaType;
	}

	public String getUrl() {
		return url;
	}

	public void setUrl(String url) {
		this.url = url;
	}

	public String getEncodedUrl() {
		return encodedUrl;
	}

	public void setEncodedUrl(String encodedUrl) {
		this.encodedUrl = encodedUrl;
	}

	public String getLabel() {
		return label;
	}

	public void setLabel(String label) {
		this.label = label;
	}

	public String getNativeurl() {
		return nativeurl;
	}

	public void setNativeurl(String nativeurl) {
		this.nativeurl = nativeurl;
	}

	public ThumbnailDo getThumbnails() {
		return thumbnails;
	}

	public void setThumbnails(ThumbnailDo thumbnails) {
		this.thumbnails = thumbnails;
	}

	/**
	 * Resolves the thumbnail URL for display. Video resources derive it
	 * from the YouTube video id embedded in {@link #getUrl()}; all other
	 * categories fall back to the thumbnail record, or {@code null} when
	 * no thumbnail exists.
	 */
	public String getThumbnailUrl() {
		if (getCategory() != null && getCategory().equalsIgnoreCase("Video")) {
			return ResourceImageUtil.youtubeImageLink(ResourceImageUtil.getYoutubeVideoId(getUrl()));
		} else if (getThumbnails() != null) {
			return getThumbnails().getUrl();
		}
		return null;
	}

	public ResourceTypeDo getResourceType() {
		return resourceType;
	}

	public void setResourceType(ResourceTypeDo resourceType) {
		this.resourceType = resourceType;
	}

	public ResourceSourceDo getResourceSource() {
		return resourceSource;
	}

	public void setResourceSource(ResourceSourceDo resourceSource) {
		this.resourceSource = resourceSource;
	}

	public ResourceFormatDo getResourceFormat() {
		return resourceFormat;
	}

	public void setResourceFormat(ResourceFormatDo resourceFormat) {
		this.resourceFormat = resourceFormat;
	}

	public LicenseDo getLicense() {
		return license;
	}

	public void setLicense(LicenseDo license) {
		this.license = license;
	}

	public TrackActivityDo getTrackActivity() {
		return trackActivity;
	}

	public void setTrackActivity(TrackActivityDo trackActivity) {
		this.trackActivity = trackActivity;
	}

	public Integer getType() {
		return type;
	}

	public void setType(Integer type) {
		this.type = type;
	}

	public String getQuestionText() {
		return questionText;
	}

	public void setQuestionText(String questionText) {
		this.questionText = questionText;
	}

	public TreeSet<QuestionAnswerDo> getAnswers() {
		return answers;
	}

	public void setAnswers(TreeSet<QuestionAnswerDo> answers) {
		this.answers = answers;
	}

	public TreeSet<QuestionHintsDo> getHints() {
		return hints;
	}

	public void setHints(TreeSet<QuestionHintsDo> hints) {
		this.hints = hints;
	}

	public ArrayList<AssetsDo> getAssets() {
		return assets;
	}

	public void setAssets(ArrayList<AssetsDo> assets) {
		this.assets = assets;
	}

	public String getViews() {
		return views;
	}

	public void setViews(String views) {
		this.views = views;
	}

	public Integer getVotesUp() {
		return votesUp;
	}

	public void setVotesUp(Integer votesUp) {
		this.votesUp = votesUp;
	}

	public Integer getUserRating() {
		return userRating;
	}

	public void setUserRating(Integer userRating) {
		this.userRating = userRating;
	}

	public Boolean getHasFrameBreaker() {
		return hasFrameBreaker;
	}

	public void setHasFrameBreaker(Boolean hasFrameBreaker) {
		this.hasFrameBreaker = hasFrameBreaker;
	}
}
|
juckele/Vivarium
|
vivarium-db/src/main/groovy/io/vivarium/persistence/RunSimulationJobModel.java
|
package io.vivarium.persistence;
import java.sql.Timestamp;
import java.util.Collection;
import java.util.Map;
import com.google.common.base.Preconditions;
import io.vivarium.util.UUID;
import lombok.EqualsAndHashCode;
import lombok.ToString;
/**
 * Persistence model for a "run simulation" job: a {@link JobModel} of type
 * {@code RUN_SIMULATION} with one extra property, the tick at which the
 * simulation should stop.
 */
@EqualsAndHashCode(callSuper = true)
@ToString
public class RunSimulationJobModel extends JobModel
{
    // Column names
    /** Key used for the end tick in the serialized properties map. */
    protected static final String END_TICK_PROPERTY = "end_tick";

    /** Tick at which the simulation should stop. Never null. */
    private final Long endTick;

    /**
     * Creates a model from individual field values, fixing the job type to
     * {@code RUN_SIMULATION}.
     */
    public RunSimulationJobModel(UUID jobID, JobStatus status, int priority, UUID checkoutOutByWorkerID,
            Timestamp checkoutOutTime, Timestamp completedTime, long endTick, Collection<UUID> inputResources,
            Collection<UUID> outputResources, Collection<UUID> jobDependencies)
    {
        super(jobID, JobType.RUN_SIMULATION, status, priority, checkoutOutByWorkerID, checkoutOutTime, completedTime,
                inputResources, outputResources, jobDependencies);
        this.endTick = endTick;
    }

    /**
     * Creates a model from a database relation plus a string properties map;
     * the end tick is required to be present in {@code properties} and is
     * parsed from its string form.
     */
    public RunSimulationJobModel(Map<String, Object> relation, Map<String, String> properties,
            Collection<UUID> inputResources, Collection<UUID> outputResources, Collection<UUID> jobDependencies)
    {
        super(relation, properties, inputResources, outputResources, jobDependencies);
        Preconditions.checkNotNull(properties.get(END_TICK_PROPERTY));
        this.endTick = Long.parseLong(properties.get(END_TICK_PROPERTY));
    }

    /**
     * Adds the end tick to the property map produced by the superclass.
     */
    @Override
    protected Map<String, String> buildProperties()
    {
        Map<String, String> properties = super.buildProperties();
        properties.put(END_TICK_PROPERTY, endTick.toString());
        return properties;
    }
}
|
miguelvazq/HPCC-Platform
|
esp/files/scripts/tree.js
|
/*##############################################################################
# HPCC SYSTEMS software Copyright (C) 2012 HPCC Systems®.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
############################################################################## */
//--------------------------------------
// All browser specific code goes here
// Creates a cross-browser XMLHttpRequest instance: the native object on
// standards-compliant browsers, ActiveX fallbacks on legacy IE. Alerts and
// returns null when no implementation is available.
function createXmlHttpRequestObject()
{
    var request = null;
    if (window.XMLHttpRequest) {
        // Mozilla, Safari, IE7+, etc.
        request = new XMLHttpRequest();
    }
    else if (window.ActiveXObject) {
        // Legacy IE: probe the newer ProgIDs first...
        var progIds = ["Msxml2.XMLHTTP.4.0", "Msxml2.XMLHTTP"];
        for (var k = 0; k < progIds.length && request == null; k++) {
            try {
                request = new ActiveXObject(progIds[k]);
            } catch (ignored) {
                // ...and fall through to the next one.
            }
        }
        // Final fallback; as in the original, a failure here propagates.
        if (request == null)
            request = new ActiveXObject("Microsoft.XMLHTTP");
    }
    if (request == null)
        alert("Can not create XMLHttpRequest object in your browser!");
    return request;
}
// Looks up a DOM element by id: document.all on legacy IE, getElementById elsewhere.
get_element = function (s_id) {
    if (document.all)
        return document.all[s_id];
    return document.getElementById(s_id);
};
// Creates an empty XML DOM document: Microsoft.XMLDOM under legacy IE,
// document.implementation.createDocument elsewhere. Alerts and returns null
// when neither API exists.
function createXmlDomObject()
{
    var domDoc = null;
    if (window.ActiveXObject) {
        domDoc = new ActiveXObject("Microsoft.XMLDOM");
    }
    else if (document.implementation && document.implementation.createDocument) {
        domDoc = document.implementation.createDocument("", "", null);
    }
    if (domDoc == null) {
        alert("Can not create XML DOM object in your browser!");
    }
    return domDoc;
}
// emulate IE selectNodes(), selectSingleNode()
// Emulate IE's selectNodes()/selectSingleNode() on standards-compliant
// browsers by layering them onto the W3C DOM XPath API (document.evaluate).
if(document.implementation && document.implementation.hasFeature("XPath", "3.0"))
{
    // Array-like wrapper mirroring IE's node list (length + item(i)).
    function XmlNodeList(i) {
        this.length = i;
    }
    XmlNodeList.prototype = new Array(0);
    XmlNodeList.prototype.constructor = Array;
    XmlNodeList.prototype.item = function(i) {
        // Out-of-range indexes yield null, matching IE semantics.
        return (i < 0 || i >= this.length)?null:this[i];
    };
    XmlNodeList.prototype.expr = ""; // the XPath expression that produced this list
    // IE-only API; a no-op stub is sufficient here.
    XMLDocument.prototype.setProperty = function(x,y){};
    // IE: XMLDocument.selectNodes()
    XMLDocument.prototype.selectNodes = function(sExpr, contextNode){
        var nsDoc = this; // NOTE(review): unused local, retained as-is
        var nsResolver = this.createNSResolver(this.documentElement);
        // Snapshot result so the list stays valid after further DOM changes.
        var oResult = this.evaluate(sExpr,
            (contextNode?contextNode:this),
            nsResolver,
            XPathResult.ORDERED_NODE_SNAPSHOT_TYPE, null);
        var nodeList = new XmlNodeList(oResult.snapshotLength);
        nodeList.expr = sExpr;
        for(var i=0;i<nodeList.length;i++)
            nodeList[i] = oResult.snapshotItem(i);
        return nodeList;
    }
    // IE: Element.selectNodes() — delegates to the owning document.
    Element.prototype.selectNodes = function(sExpr){
        var doc = this.ownerDocument;
        if(doc.selectNodes)
            return doc.selectNodes(sExpr, this);
        else
            throw "Method selectNodes is only supported by XML Elements";
    }
    // IE: XMLDocument.selectSingleNode() — first match of "(expr)[1]".
    XMLDocument.prototype.selectSingleNode = function(sExpr, contextNode){
        var ctx = contextNode?contextNode:null;
        sExpr = "("+sExpr+")[1]";
        var nodeList = this.selectNodes(sExpr, ctx);
        if(nodeList.length > 0)
            return nodeList.item(0);
        else
            return null;
    }
    // IE: Element.selectSingleNode() — delegates to the owning document.
    Element.prototype.selectSingleNode = function(sExpr){
        var doc = this.ownerDocument;
        if(doc.selectSingleNode)
            return doc.selectSingleNode(sExpr, this);
        else
            throw "Method selectNodes is only supported by XML Elements";
    }
}
function serializeDomNode(xmlDoc)
{
if (xmlDoc.xml) {
return xmlDoc.xml;
} else if(window.XMLSerializer) {
return (new XMLSerializer()).serializeToString(xmlDoc);
}
return null;
}
// END: browser specific code
//--------------------------------------
// Legacy browser-sniffing flags: ie=1 under IE (document.all),
// ns=1 under Netscape 4 (document.layers).
var ie = document.all ? 1 : 0;
var ns = document.layers ? 1 : 0;
/*
- multiple selection
- ability to dynamically add items
- context menu with both item and tree specific commands
- ability to add on_sel_changed, on_insert_child and on_expanding handlers
- find xpaths for items and find items with given xpaths
- demand loading of children
- custom menus for individual items
*/
/* Tree widget root object for one rendered tree. `a_items` is the nested
   item-configuration array and `a_template` maps icon keys to image URLs
   plus rendering options. Registers itself in the global `trees` array and
   renders into the element with id 'tree_<n_tid>'. */
function tree (a_items, a_template) {
    this.n_tid = trees.length; // slot in the global `trees` registry
    trees[this.n_tid] = this;
    this.a_tpl = a_template;
    this.a_config = a_items;
    this.o_root = this;
    this.a_index = []; // item id -> tree_item (null for deleted slots)
    this.a_selected = []; // currently selected tree_items
    this.a_deleted = []; // freed item ids available for reuse
    this.b_selected = false;
    this.n_depth = -1; // root sits one level above depth-0 items
    this.select_all = select_all;
    this.select_none= select_none;
    this.upstatus = function() {};//nothing
    this.o_parent = null;
    this.a_custom_menus = []; // menu index -> menu definition
    this.a_custom_menu_ids = []; // menu name -> menu index
    this.n_items = 0;
    this.b_cache_children = true;
    this.get_item = function(id) { return this.a_index[id]; };
    this.item_count = function() { return this.n_items; };
    this.get_first_item = get_first_item;
    this.get_next_item = get_next_item;
    this.deleteAllChildren = deleteAllChildren;
    this.action = null;
    this.border = null;
    this.a_columns= null;
    this.a_columnWidths = null;
    this.add_tree_column = add_tree_column;
    this.timeoutId = 0;
    // Preload the junction/line icons plus any numbered icons the template defines.
    var o_icone = new Image();
    var o_iconl = new Image();
    o_icone.src = a_template['icon_e'];
    o_iconl.src = a_template['icon_l'];
    a_template['im_e'] = o_icone;
    a_template['im_l'] = o_iconl;
    for (var i = 0; i < 64; i++)
        if (a_template['icon_' + i])
        {
            var o_icon = new Image();
            a_template['im_' + i] = o_icon;
            o_icon.src = a_template['icon_' + i];
        }
    // Item operations invoked from inline HTML handlers, dispatched by item id.
    this.expand = item_expand;
    this.toggle = toggle;
    this.select = function (n_id, e, treeid) { return this.get_item(n_id).select(e, treeid); };
    this.mover = mouse_over_item;
    this.mout = mouse_left_item;
    this.oncontextmenu = function (n_id, e, treeid) { return this.get_item(n_id).oncontextmenu(e, treeid) };
    this.show_context_menu = show_context_menu;
    this.handle_command = handle_command;
    // Optional application callbacks.
    this.on_sel_changed = null;
    this.on_expanding = null;
    this.on_context_menu= null;
    this.on_command = null;
    this.get_custom_menu = function(menu_name) {
        var index = this.a_custom_menu_ids[menu_name];
        return (index != undefined) ? this.a_custom_menus[index] : null;
    }
    // Registers a named menu once and returns its index.
    // NOTE(review): when the menu already exists, `index` is hoisted but
    // unassigned, so this returns undefined — confirm callers ignore it.
    this.add_custom_menu = function(menu_name, a_menu) {
        if (!this.get_custom_menu(menu_name))
        {
            var index = this.a_custom_menus.length;
            this.a_custom_menus[index] = a_menu;
            this.a_custom_menu_ids[menu_name] = index;
        }
        return index;
    }
    // Build the top-level items from the config, then render them.
    this.a_children = [];
    for (var i = 0; i < a_items.length; i++)
        new tree_item(this, i, this.a_config[i + (this.n_depth + 1 ? 2 : 0)]);
    var n_children = this.a_children.length;
    if (n_children) {
        var div = document.getElementById('tree_' + this.n_tid);
        var a_html = [];
        for (var i=0; i<n_children; i++) {
            var child = this.a_children[i];
            a_html[i] = child.init();
            child.expand();
        }
        div.innerHTML = a_html.join('');
    }
}
// Fetches `url` with a synchronous GET and returns the XMLHttpRequest object
// (or null when no XHR implementation exists). NOTE: synchronous XHR blocks
// the UI thread; retained for compatibility with existing callers.
function loadXMLDoc(url)
{
    var request = createXmlHttpRequestObject();
    if (request)
    {
        request.open("GET", url, false); // false => synchronous
        request.send(null);
    }
    return request;
}
/* One node of the tree. `config` is an array of the form
   [name, href, childConfig1, childConfig2, ...]; children are built
   recursively. The item registers itself in the root's a_index under a
   (possibly recycled) numeric id and appends itself to o_parent.a_children. */
function tree_item (o_parent, n_position, config) {
    this.n_depth = o_parent.n_depth + 1;
    this.a_config = config;
    if (!this.a_config) return;
    this.o_root = o_parent.o_root;
    this.o_parent = o_parent;
    this.n_position = n_position; // index within the parent's a_children
    this.b_expanded = !this.n_depth; // only depth-0 items start expanded
    this.b_selected= false;
    this.b_load_on_demand = false; // true => children fetched lazily from the server
    this.b_cache_children = this.o_root.b_cache_children;
    this.action = null;
    this.a_columns = null; // extra column cells (beyond the first, tree, column)
    this.add_column = add_column;
    this.b_checkbox = false;
    this.b_checked = false;
    if (this.o_root.a_deleted.length > 0)//reclaim a previously deleted id
        this.n_id = this.o_root.a_deleted.shift();
    else
        this.n_id = this.o_root.item_count(); //a_children is contiguous so pick next index
    this.o_root.a_index[this.n_id] = this;
    this.o_root.n_items++;
    o_parent.a_children[n_position] = this;
    // Recursively build children from the config entries after [name, href].
    this.a_children = [];
    for (var i = 0; i < this.a_config.length - 2; i++)
        new tree_item(this, i, this.a_config[i + (this.n_depth + 1 ? 2 : 0)]);
    this.get_icon = item_get_icon;
    this.expand = item_expand;
    this.select = handle_selection;
    this.init = item_init;
    this.upstatus = item_upstatus;
    this.oncontextmenu=context_menu;
    this.name = function () { return this.a_config[0]; }
    this.data = null;
    this.on_insert_child= null;
    this.is_last =
        function () {
            return this.n_position == this.o_parent.a_children.length - 1
        };
    // Let the parent react to the newly inserted child.
    if (o_parent.on_insert_child)
        o_parent.on_insert_child(this);
}
/* Renders this item as an HTML string: a one-row table holding the ancestor
   guide icons, the expand/collapse junction link, an optional checkbox, the
   item anchor (wired back to the owning tree's select/toggle/contextmenu
   handlers), any extra column cells, followed by a <div> containing the
   (pre-rendered when expanded) children.
   Fixes: the '_self' target fallback string was misspelled
   'javacript:void(0)'; long-disabled commented-out mouseover wiring removed. */
function item_init () {
    var o_tree = this.o_root;
    var treeId = o_tree.n_tid;
    var itemId = this.n_id;
    var tree = 'trees[' + treeId + ']'; // script prefix used in inline handlers
    var s = [];
    var i = 0;
    s[i++] = '<table cellpadding="0" cellspacing="0"'
    if (o_tree.border)
        s[i++] = ' style="border-bottom:1px groove lightgray"';
    s[i++] = '>';
    if (o_tree.a_columns)
        s[i++] = get_column_header_html(o_tree, false);
    s[i++] = '<tr><td nowrap valign="bottom">';
    if (this.n_depth)
    {
        // Guide icons for each ancestor level (blank or vertical line).
        var o_current_item = this.o_parent;
        for (var j = this.n_depth; j > 1; j--) {
            s[j+i] = '<img src="' + this.o_root.a_tpl[o_current_item.is_last() ? 'icon_e' : 'icon_l'] + '" border="0" align="absbottom">';
            o_current_item = o_current_item.o_parent;
        }
        i = s.length;
        if (this.b_load_on_demand || this.a_children.length)
        {
            // The junction image doubles as the expand/collapse link.
            s[i++] = '<a class="tree" href="javascript:';
            s[i++] = tree;
            s[i++] = '.toggle(';
            s[i++] = itemId;
            s[i++] = ')"><img src="';
            s[i++] = this.get_icon(true);
            s[i++] = '" border="0" align="absbottom" name="j_img';
            s[i++] = treeId;
            s[i++] = '_';
            s[i++] = itemId;
            s[i++] = '"></a>';
        }
        else
        {
            s[i++] = '<img src="';
            s[i++] = this.get_icon(true);
            s[i++] = '" border="0" align="absbottom">';
        }
    }
    if (this.b_checkbox)
    {
        s[i++] = '<input type="checkbox" style="zoom:0.8"';
        if (this.b_checked)
            s[i++] = ' checked="true"'
        s[i++] = '>';
    }
    // The item anchor: label + icon, with select/toggle/contextmenu wiring.
    s[i++] = '<a href="';
    s[i++] = this.a_config[1] == null ? 'javascript:void(0)' : this.a_config[1];
    s[i++] = '" target="';
    // was misspelled 'javacript:void(0)'
    s[i++] = this.o_root.a_tpl['target']=='_self' ? 'javascript:void(0)' : this.o_root.a_tpl['target'];
    s[i++] = '" onclick="return ';
    s[i++] = tree;
    s[i++] = '.select(';
    s[i++] = itemId;
    s[i++] = ', event, ';
    s[i++] = '\'i_txt';
    s[i++] = treeId;
    s[i++] = '_';
    s[i++] = itemId;
    s[i++] = '\')" ondblclick="';
    s[i++] = tree;
    s[i++] = '.toggle(';
    s[i++] = itemId;
    s[i++] = ')" oncontextmenu="return ';
    s[i++] = tree;
    s[i++] = '.oncontextmenu(';
    s[i++] = itemId;
    s[i++] = ', event, ';
    s[i++] = '\'i_txt';
    s[i++] = treeId;
    s[i++] = '_';
    s[i++] = itemId;
    s[i++] = '\')" class="t';
    s[i++] = treeId;
    s[i++] = 'i" id="i_txt';
    s[i++] = treeId;
    s[i++] = '_';
    s[i++] = itemId;
    s[i++] = '"';
    if (this.n_depth == 0)
        s[i++] = ' style="font-weight:bold"';
    s[i++] = '>';
    s[i++] = '<img src="';
    s[i++] = this.get_icon();
    s[i++] = '" border="0" align="absbottom" name="i_img';
    s[i++] = treeId;
    s[i++] = '_';
    s[i++] = itemId;
    s[i++] = '" class="t';
    s[i++] = treeId;
    s[i++] = 'im"/>';
    s[i++] = this.name();
    s[i++] = '</a></td>';
    if (o_tree.a_columns)
    {
        //the a_columns array of item only holds non-empty
        //column contents for columns after first column and
        //may not have any columns even though the tree may
        //have more columns defined
        var n_columns = this.a_columns ? this.a_columns.length : 0;
        for (var c=0; c<n_columns; c++)
        {
            s[i++] = '<td class="small">';
            s[i++] = this.a_columns[c];
            s[i++] = '</td>';
        }
        //now create table cells for empty columns
        n_columns = o_tree.a_columns.length;
        c++;
        for (; c<n_columns; c++)
            s[i++] = '<td/>';
    }
    s[i++] = '</tr></table>';
    // Container for the children; pre-rendered only when already expanded.
    s[i++] = '<div id="i_div';
    s[i++] = treeId;
    s[i++] = '_';
    s[i++] = itemId;
    s[i++] = '" style="display:';
    s[i++] = this.b_expanded ? 'block' : 'none';
    s[i++] = '">';
    if (this.b_expanded)
        for (var j = 0; j < this.a_children.length; j++)
            s[i++] = this.a_children[j].init();
    s[i++] = '</div>';
    return s.join('');
}
//add column for a tree
//
function add_tree_column(column)
{
if (this.a_columns == null)
{
this.a_columns = new Array();
this.a_columnWidths = new Array();
}
this.a_columns.push( column );
var width = column['width'];
this.a_columnWidths[ this.a_columnWidths.length ] = typeof width != 'undefined' ? width : 300;
}
//add column for a tree item
//
function add_column(innerHTML)
{
if (this.a_columns == null)
this.a_columns = new Array();
this.a_columns.push( innerHTML );
}
// Fetches an on-demand item's children from /esp/navdata (synchronous XHR)
// and inserts them under `item`. Returns true only when children were
// actually loaded; alerts on HTTP failure.
function load_children_on_demand(item)
{
    var rc = false;
    // Only fetch when lazy loading is enabled and nothing is cached yet.
    if (item.b_load_on_demand && item.a_children.length==0)
    {
        window.status = 'Loading child items ...';
        var url = '/esp/navdata';
        if (item.params)
            url += '?' + item.params;
        var xmlHttp = loadXMLDoc(url); // synchronous request — blocks the UI
        if (xmlHttp.status==200)
        {
            addChildItemsFromNavData(xmlHttp.responseXML.documentElement, item);
            rc = true;
        }
        else
            alert("Error in dynamically loading the children:\n" + xmlHttp.statusText);
    }
    window.status = '';
    return rc;//don't expand if error occurred
}
// Shows or hides this item's children div (rendering the children's HTML on
// first expand), updates the junction and item icons, and optionally recurses
// into all descendants.
function item_expand (b_expand, b_recursive) {
    if (this.a_children.length == 0)
        return;
    var treeId = this.o_root.n_tid;
    var o_idiv = get_element('i_div' + treeId + '_' + this.n_id);
    if (o_idiv)
    {
        // Children are rendered lazily the first time this node is expanded.
        if (!o_idiv.innerHTML) {
            var a_children = [];
            for (var i = 0; i < this.a_children.length; i++)
                a_children[i] = this.a_children[i].init();
            o_idiv.innerHTML = a_children.join('');
        }
        o_idiv.style.display = b_expand ? 'block' : 'none';
        this.b_expanded = b_expand;
        // Refresh the junction (+/-) image and the item icon to match state.
        var o_jicon = document.images['j_img' + treeId + '_' + this.n_id],
            o_iicon = document.images['i_img' + treeId + '_' + this.n_id];
        if (o_jicon)
            o_jicon.src = this.get_icon(true);
        if (o_iicon && this.get_icon)
            o_iicon.src = this.get_icon();
        if (this.upstatus)
            this.upstatus();
    }
    if (b_recursive)
        for (var i = 0; i < this.a_children.length; i++)
            this.a_children[i].expand(b_expand, b_recursive);
}
function get_column_header_html(table, bCaption)
{
var a_columns = table.a_columns;
var n_columns = a_columns ? a_columns.length : 0;
if (!n_columns)
return '';
var a_html = [];
a_html.push(bCaption ? '<table><thead class="sort-table"><tr>' : '<colgroup>');
for (var i=0; i<n_columns; i++)
{
var column = a_columns[i];
a_html.push(bCaption ? '<th class="sort-table"' : '<col');
for (attrib in column)
if (attrib != 'innerHTML')
a_html.push(' ' + attrib + '="' + column[attrib] + '"');
a_html.push('>');
if (bCaption)
{
var innerHTML = column['innerHTML'];
if (typeof innerHTML != 'undefined')
a_html.push(innerHTML);
}
a_html.push(bCaption ? '</th>' : '</col>');
}
a_html.push(bCaption ? '</tr></thead></table>' : '</colgroup>');
return a_html.join('');
}
// Re-renders an item's children in place. When `item` is the tree itself the
// whole tree div (including the optional column header) is rebuilt;
// otherwise only the item's child container div is refreshed.
function redo_item(item)
{
    var o_tree = item.o_root;
    if (item == o_tree)
    {
        var a_html = [];
        var n_children = item.a_children.length;
        for (var i = 0; i < n_children; i++)
        {
            var child = item.a_children[i];
            a_html[i] = child.init();
            child.expand();
        }
        var div = get_element('tree_'+item.n_tid);
        if (o_tree.a_columns)
        {
            // Prepend the caption row and size the div to the summed widths.
            a_html.unshift(get_column_header_html(item, true));
            var width = 0;
            var n_columns = o_tree.a_columns.length
            for (i=0; i<n_columns; i++)
                width += Number(o_tree.a_columnWidths[i]);
            div.style.width = width;
        }
        div.innerHTML = a_html.join('');
    }
    else
    {
        var o_idiv = get_element('i_div' + o_tree.n_tid + '_' + item.n_id);
        if (o_idiv)
        {
            var a_html = [];
            for (var i = 0; i < item.a_children.length; i++)
                a_html[i] = item.a_children[i].init();
            o_idiv.innerHTML = a_html.join('');
            return o_idiv.innerHTML;
        }
    }
}
// Creates a new child item named `item_name` under `parent`, re-renders the
// affected subtree, expands the parent if needed, and selects the new item.
// Returns the created tree_item.
function insert_item(parent, item_name, b_load_on_demand)
{
    if (!parent.a_children)
        parent.a_children = new Array();
    var pos = parent.a_children.length;
    var new_item = new tree_item(parent, pos, [item_name, null]);
    new_item.b_load_on_demand = b_load_on_demand;
    //window.status = '';
    // Redraw from the grandparent unless the parent is a top-level item.
    redo_item(parent.o_parent != parent.o_root ? parent.o_parent : parent);
    if (!parent.b_expanded)
        parent.expand(true);
    new_item.select(false);
    return new_item;
}
// Creates a new child item under parent_id (-1 = the tree root) with the
// given display name, optional tooltip, named custom context menu and
// request params. Returns the created tree_item (no re-render here).
// Fix: the original guarded the menu lookup with `menu_id != -1`, but
// missing entries in a_custom_menu_ids are undefined (never -1), so the
// guard always passed and stamped n_custom_menu = undefined; test for
// undefined instead.
function add_item(tree, parent_id, item_type, name, tooltip, menu, params)
{
    var parent = parent_id == -1 ? tree.o_root : tree.a_index[parent_id];
    var pos = parent.a_children.length;
    var new_item = new tree_item(parent, pos, [name, null]);
    if (tooltip != '')
        new_item.tooltip = tooltip;
    // 'DynamicFolder' items fetch their children lazily from the server.
    if (item_type == 'DynamicFolder')
        new_item.b_load_on_demand = true;
    if (menu != '')
    {
        var menu_id = tree.a_custom_menu_ids[menu];
        if (menu_id != undefined)
            new_item.n_custom_menu = menu_id;
    }
    if (params != '')
        new_item.params = params; //unescape(params);
    return new_item;
}
// Removes an item (and, recursively, its descendants) from the tree's
// indexes and selection, recycling its id via a_deleted. Only the top-most
// call (b_refresh defaulting to true) detaches the item from its parent's
// a_children and re-renders the affected subtree.
function delete_item(tree, item_id, b_refresh)
{
    if (typeof b_refresh == 'undefined')
        b_refresh = true;
    //remove children of this item first, if any
    //
    var item = tree.get_item(item_id);
    var children = item.a_children;
    var n_children = children.length;
    for (var i=0; i<n_children; i++)
        delete_item(tree, children[i].n_id, false);
    if (item.b_selected)
    {
        // NOTE(review): `selected`, `n_selected` (and `i` reuse below) are
        // missing `var` and leak as globals — confirm this is unintentional.
        selected = tree.a_selected;
        n_selected = selected.length;
        for (i=0; i<n_selected; i++)
            if (selected[i] == item)
            {
                selected.splice(i, 1);
                break;
            }
    }
    // Recycle the id and clear the index slot; a_index keeps a null hole.
    tree.a_deleted[ tree.a_deleted.length ] = item_id;
    tree.a_index[ item_id ] = null;
    tree.n_items--;
    //update this item's parent's a_children - only for top most item getting deleted
    //
    var o_parent = item.o_parent;
    if (b_refresh)
    {
        //remove this item from its parent's a_children array
        //
        children = o_parent.a_children;
        n_children = children.length;
        for (i=0; i<n_children; i++)
            if (children[i] == item)
            {
                children.splice(i, 1);
                break;
            }
        redo_item(o_parent.o_parent != o_parent.o_root ? o_parent.o_parent : o_parent);
    }
}
// Deletes every child of `item` (without re-rendering each deletion) and
// empties the child array in one go. Installed as a tree method, so `this`
// is the tree.
function deleteAllChildren(item)
{
    var children = item.a_children;
    var n_children = children.length;
    for (var i=0; i<n_children; i++)
        delete_item(this, children[i].n_id, false);
    children.length=0;
}
// Re-fetches a load-on-demand item's children: collapses it, drops the
// cached children, reloads from the server, and restores the expansion
// state. No-op for items that are not load-on-demand.
function refresh_item(tree, item_id)
{
    var item = tree.get_item(item_id);
    if (!item.b_load_on_demand)
        return;
    var b_expanded = item.b_expanded;
    if (b_expanded)
        item.expand(false);
    tree.deleteAllChildren(item);
    // Only re-expand when the reload actually succeeded.
    if (b_expanded && load_children_on_demand(item))
        item.expand(true);
    return true;
}
// Appends one child item per XML node in `objNodes` under `parent`, naming
// each from the node's text, then re-renders the affected subtree.
// NOTE(review): `.text` is an IE-only DOM property — confirm callers only
// pass MSXML nodes.
function import_xml_nodeset(parent, objNodes)
{
    var len = objNodes.length;
    for (var i=0; i<len; i++)
    {
        var pos = parent.a_children.length;
        var item_name = objNodes[i].text;
        new tree_item(parent, pos, [item_name, null]);
    }
    redo_item(parent.o_parent != parent.o_root ? parent.o_parent : parent);
}
function get_xml_attrib(xmlNode, attribName, defaultValue)
{
var attr = xmlNode.attributes.getNamedItem(attribName);
return attr ? attr.nodeValue : defaultValue;
}
// Builds tree items from an XML subtree: one child per element under
// `objNode` (named from its 'name' attribute), recursing into <Folder>
// elements, then re-renders the affected subtree.
function import_xml(parent, objNode)
{
    var a_nodes = objNode.childNodes;
    var n_nodes = a_nodes.length;
    for (var i=0; i<n_nodes; i++)
    {
        var name = get_xml_attrib(a_nodes[i], 'name', 'unknown')
        var pos = parent.a_children.length;
        var item = new tree_item(parent, pos, [name, null]);
        if (a_nodes[i].tagName == 'Folder')
            import_xml(item, a_nodes[i]);
    }
    redo_item(parent.o_parent != parent.o_root ? parent.o_parent : parent);
}
function get_first_item()
{
return get_next_item(-1);
}
function get_next_item(item_id)
{
var n_items = this.item_count();
if (n_items > 0)
{
var size = this.a_children.length;
for (var i = item_id+1; i<size; )
if (this.a_children[i])
return i;
}
return -1;
}
// Expands the whole tree and selects every reachable item, rebuilding
// a_selected from scratch and repainting each item's selection style.
function select_all()
{
    this.o_root.expand(true, true);
    this.o_root.a_selected.length = 0;
    var i=0;
    for ( var item_id = this.o_root.get_first_item();
          item_id != -1;
          item_id = this.o_root.get_next_item(item_id))
    {
        this.o_root.a_selected[i] = this.o_root.get_item(item_id);
        select_item(this.o_root.a_selected[i++], true);
    }
    this.o_root.upstatus();
}
// Clears the current selection: unpaints every selected item and empties
// the selection array.
function select_none()
{
    var n_selected = this.a_selected.length;
    for (var i=0; i<n_selected; i++)
        select_item(this.a_selected[i], false);
    this.a_selected.length = 0;
    this.o_root.upstatus();
}
// Click handler for a tree item (installed as item.select). Implements
// single- and multi-selection (Ctrl-click, or right-click on an already
// selected item), keeps the tree's a_selected array in sync, fires
// on_sel_changed, and runs the item/tree 'click' action for plain left
// clicks. Returns true when the item has a real href (lets the browser
// follow it).
function handle_selection(e)
{
    var n_selected = this.o_root.a_selected.length;
    var b_multiselect;
    var b_select;
    var o_tree = this.o_root;
    //if either the control key is pressed or an already selected item is clicked
    //upon with right mouse button then this is the case of multiple selection
    //
    if (!e) {
        e = window.event; // IE event model fallback
    }
    if ((e && e.ctrlKey) || (this.b_selected && o_tree.b_rightMouseButton))
    {
        b_multiselect = true;
        //select this item unless the control key is pressed without right mouse button
        //and object is already selected
        b_select = !(e.ctrlKey && this.b_selected);
    }
    else
    {
        b_multiselect = false;
        b_select = true;
        //if either multiple items are currently selected or some other item is then unselect all
        if (n_selected > 1 || (n_selected==1 && !this.b_selected))
            o_tree.select_none();
    }
    if (b_select != this.b_selected)
    {
        if (b_select)
            o_tree.a_selected.push(this);
        else
        {
            //delete this object from the root's a_selected array
            var n_selected = o_tree.a_selected.length;
            for (var i=0; i<n_selected; i++)
                if (o_tree.a_selected[i] == this)
                {
                    o_tree.a_selected.splice(i, 1);
                    break;
                }
        }
        select_item(this, b_select);
        if (o_tree.on_sel_changed)
            o_tree.on_sel_changed(o_tree);
        this.upstatus();
    }
    // Plain left click: run the item-specific action, else the tree's.
    if ((o_tree.action || this.action) && !o_tree.b_rightMouseButton && !b_multiselect)
        o_tree.handle_command(o_tree.n_tid, this, 'click', this.action ? this.action : o_tree.action);
    return Boolean(this.a_config[1]);
}
// Paints (or unpaints) one item's selected state: flips the b_selected flag,
// refreshes the item icon, and restyles the item's anchor text.
function select_item (item, b_select) {
    item.b_selected = b_select;
    var o_iicon = document.images['i_img' + item.o_root.n_tid + '_' + item.n_id];
    if (o_iicon)
        o_iicon.src = item.get_icon();
    var obj = get_element('i_txt' + item.o_root.n_tid + '_' + item.n_id);
    obj.style.backgroundColor = b_select ? 'highlight' : 'window';
    obj.style.color = b_select ? 'white' : 'black';
}
// Status-bar updater for an item — intentionally a no-op; the original
// window.status behavior is kept below for reference.
function item_upstatus (b_clear) {
    //window.setTimeout('window.status="' + (b_clear ? '' : this.name() +
    //  (this.a_config[1] ? ' ('+ this.a_config[1] + ')' : '')) + '"', 10);
}
function item_get_icon (b_junction) {
var b_has_children = this.b_load_on_demand || this.a_children.length;
var icon_id = this.n_depth ? 0 : 32;
if (b_has_children)
{
icon_id += 16;
if (this.b_expanded)
icon_id += 8;
}
if (b_junction)
{
icon_id += 2;
if (this.is_last())
icon_id++;
}
//else
// if (this.b_selected)
// icon_id += 4;
return this.o_root.a_tpl['icon_' + icon_id];
}
/* not used yet...
function get_xpath(item)
{
var xpath = null;
while (item != item.o_root)
{
xpath = xpath ? item.name() + '/' + xpath : item.name();
item = item.o_parent;
}
return xpath ? xpath : "";
}
*/
function get_items_with_name(parent, name)
{
var a_children = [];
var nChildren = parent.a_children.length;
for (var i = 0; i < nChildren; i++)
if (name == '*' || parent.a_children[i].name() == name)
a_children.push(parent.a_children[i]);
return a_children;
}
/* unused for now...
function get_items_at_xpath(parent, xpath)
{
var items = [];
var ndx = xpath.indexOf('/');
var prefix;
var suffix;
if (ndx == -1)// no slash found in xpath
{
prefix = xpath;
suffix = '';
}
else
{
prefix = xpath.substring(0, ndx);
suffix = xpath.substring(ndx+1);
}
if (prefix.length)
{
var found = get_items_with_name(parent, prefix);
if (suffix == '')
return found;
else
for (var i=0; i<found.length; i++)
{
var found2 = get_items_at_xpath(found[i], suffix);
for (var j=0; j<found2.length; j++)
items.push( found2[j] );
}
}
return items;
}
*/
// Dispatches a context-menu (or click) command on the tree: built-in
// selection/expansion commands and item New/Delete/Refresh are handled
// here; anything else is forwarded to the application's on_command hook.
// Installed as a tree method, so this.o_root is the tree itself.
function handle_command(tree_id, focus_item_id, cmd, action) {
    //hide_popup_menu();
    var tree = this.o_root;
    if (cmd == 'Select All')
    {
        tree.select_all();
        if (tree.on_sel_changed)
            tree.on_sel_changed(tree);
    }
    else if (cmd == 'Select None')
    {
        tree.select_none();
        if (tree.on_sel_changed)
            tree.on_sel_changed(tree);
    }
    else if (cmd == 'Expand All')
    {
        tree.expand(true, true);
    }
    else if (cmd == 'Collapse All')
    {
        tree.expand(false, true);
    }
    else if (cmd == 'Expand')
    {
        // Expand/Collapse operate on the current selection, recursively.
        for (var i=0; i<tree.a_selected.length; i++)
            tree.a_selected[i].expand(true, true);
    }
    else if (cmd == 'Collapse')
    {
        for (var i=0; i<tree.a_selected.length; i++)
            tree.a_selected[i].expand(false, true);
    }
    else if (cmd == 'New')
    {
        var item_name = prompt('Enter name of new item:', 'New Item');
        if (item_name)
        {
            var item = tree.get_item(focus_item_id);
            insert_item(item, item_name, true);
        }
    }
    else if (cmd == 'Delete')
    {
        delete_item(tree, focus_item_id);
    }
    else if (cmd == 'Refresh')
    {
        refresh_item(tree, focus_item_id);
    }
    else if (tree.on_command)
    {
        // Unknown command: delegate to the application hook.
        tree.on_command(tree, cmd, action);
    }
    return true;
}
function search_array(a_array, item)
{
var len = a_array.length;
for (var i=0; i<len; i++)
if (a_array[i] == item)
return i;
return -1;
}
// Right-click handler for a tree item (installed as item.oncontextmenu).
// Selects the clicked item (honoring multi-selection), computes the menu to
// show — the intersection of the selected items' custom menus, with a
// 'Refresh' entry appended when every selected item is a dynamic folder —
// lets the on_context_menu hook override it, and shows it. Returns false to
// suppress the browser's own menu when a menu was shown.
function context_menu(e, treeid)
{
    //document.all.menu.innerHTML = '';
    //document.all.src.value = '';
    //document.all.src.value = document.body.innerHTML;
    //debugger;
    //hack: the web page does not remember that right mouse button
    //is pressed to remember it so item selection can use that fact
    this.o_root.b_rightMouseButton = true;
    this.select(e);
    // Retry once: the first select may have toggled the item off.
    if (!this.b_selected)
        this.select(e);
    this.o_root.b_rightMouseButton = false;
    /* define a menu as an array of menu items. Each menu item is as follows:
       null - separator
       command [, action [, caption [, handler [, showCheckbox]]] //only command is required
       where:
       command: the command that is passed to the handler
       action : the parameter string needed to run the command like a url
       caption: the menu item text. If this is not supplied then command is shown as text
       handler: of the form function handler(item_id, command, action) {...} or null to disable
       showCheckbox: if null, no checkbox is shown, true/false indicate check states otherwise
       for instance:
       var menu=[["cmd_1", "action_1", "Caption 1", "menuHandler", true], //true for checked item
                 null, //separator
                 ["cmd_2", "action_2", "Caption 2", null, false]];//false for unchecked item
    */
    //do a dedup of all custom menus of selected items
    var a_custom_menu_ids = [];
    var n_custom_menu_id = -1; // single shared menu id, or -1 when mixed
    var b_all_dynamic_folders = true;
    for (var i=0; i<this.o_root.a_selected.length; i++)
    {
        var sel_item = this.o_root.a_selected[i];
        if (!sel_item.b_load_on_demand)
            b_all_dynamic_folders = false;
        var menu_id = sel_item.n_custom_menu;
        if (menu_id != undefined)
        {
            if (a_custom_menu_ids.length == 0)//none set yet
                n_custom_menu_id = menu_id;
            else
                if (n_custom_menu_id != -1)//multiple selected items already had different menus
                {
                    if (n_custom_menu_id != menu_id)
                        n_custom_menu_id = -1;
                }
            if (search_array(a_custom_menu_ids, menu_id) == -1)
                a_custom_menu_ids.push(menu_id);
        }
        else//some item does not have custom menu so disallow all custom menus
            break;
    }
    //define default menu...
    var default_menu = [
        /*
        ["New"],
        ["Delete"],
        ["Refresh"],
        null,
        ["Expand"],
        ["Collapse"],
        null,
        ["Select All"],
        ["Select None"],
        ["Expand All"],
        ["Collapse All"]
        */
    ];
    var menu;
    if (n_custom_menu_id != -1)//all selected items had same menu
    {
        //menu = this.o_root.a_custom_menus[n_custom_menu_id].concat([null], default_menu);
        menu = this.o_root.a_custom_menus[n_custom_menu_id].concat(default_menu);
    }
    else
    {
        if (a_custom_menu_ids.length == 0)//none of the selected items had any custom menus
            menu = default_menu;
        else
        {
            //the selected items had different custom menus
            //so make a menu with items that are intersection of these menus
            var n_first_id = a_custom_menu_ids[0];
            var a_first_menu = this.o_root.a_custom_menus[n_first_id];//start with first menu
            var new_custom_menu = [];
            for (var i=0; i<a_first_menu.length; i++)//enumerate first menu's items
            {
                var menu_item_cmd = a_first_menu[i][0];
                var b_add = true;
                // Keep this command only if every other selected menu has it too.
                for (var j=1; j<a_custom_menu_ids.length; j++)
                {
                    var a_menu = this.o_root.a_custom_menus[ a_custom_menu_ids[j] ];
                    var b_found = false;
                    for (var k=0; k<a_menu.length; k++)
                        if (a_menu[k][0] == menu_item_cmd)
                        {
                            b_found = true;
                            break;
                        }
                    if (!b_found)
                    {
                        b_add = false;
                        break;
                    }
                }
                if (b_add)
                    new_custom_menu.push( a_first_menu[i] );
            }
            menu = new_custom_menu;
        }
    }
    if (b_all_dynamic_folders)
    {
        // Every selected item can be reloaded: offer 'Refresh' if absent.
        var hasRefreshCmd = false;
        for (var i=0; i<menu.length; i++)//enumerate menu items
            if (menu[i][0] == 'Refresh')
            {
                hasRefreshCmd = true;
                break;
            }
        if (!hasRefreshCmd)
        {
            if (menu.length)
                menu.push(null);
            menu.push(['Refresh']);
        }
    }
    if (this.o_root.on_context_menu)
        this.o_root.on_context_menu(this.o_root, menu); //allow callback to override the default menu
    if (menu.length)
    {
        this.o_root.show_context_menu(this.n_id, menu, null, false, 1, treeid);
        return false; //don't let event propagate up to allow browser's menu
    }
    return true;//let event propagate to show browser's menu
}
// Shared YUI context-menu state: the currently rendered menu widget and the
// descriptors of its entries (consulted when an entry is clicked).
var oMenu;
var menuItems;
/* Displays the context menu for tree item `item_id` using the YUI menu
   widget, anchored at the tree element identified by `treeid`. Each non-null
   entry of `menu` ([command, action, caption?, handler?, checked?]) becomes
   one menu entry that dispatches back to the owning tree's handle_command.
   `popup_caption`, `addCloseButton` and `itemsPerLine` are retained for
   backward compatibility: they configured a legacy HTML-table menu whose
   markup was built but never rendered (it also contained a garbled
   'paddiyesng:0' style); that dead code has been removed. */
function show_context_menu(item_id, menu, popup_caption, addCloseButton, itemsPerLine, treeid)
{
    // Anchor the menu vertically at the tree element, flush to the left edge.
    var xypos = YAHOO.util.Dom.getXY(treeid);
    xypos[0] = 0;
    // Dispose of any previously shown menu before building a new one.
    if (oMenu) {
        oMenu.destroy();
    }
    oMenu = new YAHOO.widget.Menu("treemenu", { position: "dynamic", xy: xypos });
    oMenu.clearContent();
    menuItems = null;
    menuItems = new Array();
    for (var item in menu) {
        if (menu[item]) { // null entries are separators in the legacy format
            var menuItem = menu[item];
            var ocmd = menuItem[0];
            var oaction = menuItem[1];
            var ocaption = menuItem[2] ? menuItem[2] : menuItem[0];
            // NOTE(review): computed but unused below — the click always routes
            // through context_Menu_Select to the tree's handle_command.
            var ohandler = menuItem[3] ? menuItem[3] : 'trees[' + this.o_root.n_tid + '].handle_command';
            menuItems[menuItems.length] = { text: ocaption.toString(), onclick: { fn: function() { context_Menu_Select(this.index); } }, treeid: this.n_tid, itemid: item_id, command: ocmd, action: oaction };
        }
    }
    // Routes a click on menu entry Menu_Index back to the owning tree.
    function context_Menu_Select(Menu_Index) {
        trees[menuItems[Menu_Index].treeid].handle_command(menuItems[Menu_Index].treeid, menuItems[Menu_Index].itemid, menuItems[Menu_Index].command, menuItems[Menu_Index].action);
    }
    oMenu.addItems(menuItems);
    oMenu.render("menu");
    oMenu.show();
    return;
}
// Debug helper: pops an alert to confirm the script is loaded and callable.
function test ()
{
    alert('test');
}
/* this function takes a string of the form "prefix{javascript}suffix" and
returns prefixXYZsuffix, where XYZ is the result of execution of javascript
within braces after any 'this' occurrences have been substituted with 'item'.
Multiple {javascript} blocks may be embedded within the string.
*/
/* unused for now...
function process_embedded_scripts(url, item)
{
if (url.length == 0)
return '';
var open_brace = url.indexOf('{');
if (open_brace == -1)
return url;
var close_brace = url.indexOf('}', open_brace+1);
if (close_brace == -1)
close_brace = url.length+1;
var raw_script = url.substring(open_brace+1, close_brace);
var output;
if (raw_script.length)
{
var script = raw_script.replace('this.', 'item.');
var Fn = new Function('item', 'return ' + script);
output = Fn(item);
}
return url.substring(0, open_brace) + output + process_embedded_scripts( url.substring(close_brace+1), item );
}
*/
// Ensures a node for `item` (and, via recursion, all of its ancestors) exists
// in xmlDoc, creating missing nodes on the way down, and returns item's node.
// NOTE(review): selectSingleNode is MSXML (IE-only) — confirm target browsers.
function addToXmlDoc(xmlDoc, item)
{
    // Bug fix: `parentNode` was assigned without `var`, leaking an accidental
    // global. Keep it function-local (each recursive frame gets its own).
    var parentNode;
    if (item.o_parent == item.o_root)
        parentNode = xmlDoc.documentElement;
    else
        parentNode = addToXmlDoc(xmlDoc, item.o_parent);

    var name = item.name();
    var node = parentNode.selectSingleNode("*[@name='" + name + "']");
    if (!node)
    {
        // Element type mirrors the item's nature: load-on-demand items are
        // DynamicFolder, items with children are Folder, leaves are Link.
        var nodeName = item.b_load_on_demand ? 'DynamicFolder' : (item.a_children.length ? 'Folder' : 'Link');
        node = xmlDoc.createElement(nodeName);
        parentNode.appendChild(node);

        var attr = xmlDoc.createAttribute("name");
        attr.value = name;
        node.setAttributeNode(attr);

        if (item.params)
        {
            attr = xmlDoc.createAttribute("params");
            //attr.value = escape(item.params);
            attr.value = item.params;
            node.setAttributeNode(attr);
        }
    }
    return node;
}
// Builds an <EspNavigationData> XML document describing the tree's current
// selection: each selected item (plus its ancestor chain, via addToXmlDoc)
// becomes a node carrying selected="true".
function selectionToXmlDoc(tree)
{
    var xmlDoc = createXmlDomObject();
    var docElement = xmlDoc.createElement('EspNavigationData');
    xmlDoc.appendChild(docElement);
    // NOTE(review): assigns the string "false", not the boolean false — verify
    // this actually disables async loading on the XML DOM implementation used.
    xmlDoc.async="false";
    var nSelected = tree.o_root.a_selected.length;
    for (var i=0; i<nSelected; i++)
    {
        var item = tree.o_root.a_selected[i];
        var node = addToXmlDoc(xmlDoc, item);
        var attr = xmlDoc.createAttribute("selected");
        attr.value = 'true';
        node.setAttributeNode(attr);
    }
    return xmlDoc;
}
// Synchronously POSTs xmlDoc to url as text/xml and returns the response text,
// or '' after alerting the user on failure.
function postForm(xmlDoc, url)
{
    var xmlhttp = createXmlHttpRequestObject();
    xmlhttp.open("POST", url, false); // synchronous by design (legacy code path)
    // Bug fix: the standard XMLHttpRequest method is lower-case
    // setRequestHeader; the previous SetRequestHeader spelling only worked on
    // the case-insensitive IE ActiveX object and throws elsewhere.
    xmlhttp.setRequestHeader("Content-Type", "text/xml");
    xmlhttp.send(xmlDoc);
    //Confirm HTTP request succeeded.
    var resp = '';
    if ((xmlhttp.status == 200) || (xmlhttp.status == 300 /*multiple choices*/))
        resp = xmlhttp.responseText;
    else
        alert('Failed to post data to service!');
    return resp;
}
/*
<EspNavigationData>
<Folder name="Attribute Servers" tooltip="Attribute Servers">
<DynamicFolder menu="rcmenu1"
name="DefaultAttrServer"
params="type=repository&subtype=as&name=DefaultAttrServer&netAddress=http://10.150.29.202:8145"
tooltip="DefaultAttrServer"/>
<DynamicFolder menu="rcmenu1"
name="Configured Attribute Server"
params="type=repository&subtype=as&name=Configured Attribute Server&netAddress=http://10.150.29.202:8145"
tooltip="Configured Attribute Server"/>
<DynamicFolder menu="rcmenu1"
name="attrSvr1"
params="type=repository&subtype=as&name=attrSvr1&netAddress=http://10.150.64.208:8145"
tooltip="attrSvr1"/>
</Folder>
<Folder name="Roxie Clusters" tooltip="Roxie Clusters">
<DynamicFolder menu="rcmenu1"
name="roxie1"
params="type=repository&subtype=rc&name=roxie1&netAddress=roxieAddr1"
tooltip="roxie1"/>
</Folder>
<Menu name="rcmenu1">
<MenuItem action="/ws_roxieconfig/NavMenuPublishEvent?parm1=y" name="Publish" tooltip="Publish"/>
</Menu>
</EspNavigationData>
*/
// Populates tree item `parent` from an EspNavigationData XML fragment:
// 1) registers any <Menu> definitions on the tree,
// 2) creates a child item per Folder/DynamicFolder/Link element (recursing
//    into plain Folders, whose children arrive inline),
// 3) alerts the user about any embedded <Exception> elements.
// Returns the number of child XML nodes examined.
function addChildItemsFromNavData(xmlNode, parent)
{
    var tree = parent.o_root;

    // --- Register custom context menus declared in the navigation data. ---
    var a_nodes = xmlNode.getElementsByTagName('Menu');
    var n_nodes = a_nodes.length;
    for (var i = 0; i < n_nodes; i++)
    {
        var node = a_nodes[i];
        var menuName = get_xml_attrib(node, 'name', null);
        if (menuName)
        {
            var o_menu = new Array;
            var a_menu_items = node.getElementsByTagName('MenuItem');
            var n_menu_items = a_menu_items.length;
            for (var j = 0; j < n_menu_items; j++)
            {
                var menu_item = a_menu_items[j];
                var item_name = get_xml_attrib(menu_item, 'name', null);
                var item_tip = get_xml_attrib(menu_item, 'tooltip', null);
                var item_action = get_xml_attrib(menu_item, 'action', null);//handler
                if (item_tip == null)
                    item_tip = item_name;
                var o_menu_item = new Array; // was an accidental global (missing var)
                o_menu_item.push(item_name, item_action, item_tip);
                o_menu.push( o_menu_item );
            }
            // Bug fix: previously referenced undefined `menu_name` here, which
            // threw a ReferenceError whenever a <Menu> element was present.
            tree.add_custom_menu(menuName, o_menu);
        }
    }

    // --- Create child tree items for folder/link nodes. ---
    a_nodes = xmlNode.childNodes;
    n_nodes = a_nodes.length;
    var n_children = n_nodes;
    for (var i = 0; i < n_nodes; i++)
    {
        var node = a_nodes[i];
        if (node.nodeName == 'Folder' || node.nodeName == 'DynamicFolder' || node.nodeName == 'Link')
        {
            var name = get_xml_attrib(node, 'name', 'unknown');
            var tooltip = get_xml_attrib(node, 'tooltip', '');
            var params = get_xml_attrib(node, 'params', '');
            var menu_name = get_xml_attrib(node, 'menu', '');
            //item.nodeName can either be DynamicFolder, Folder or Link
            var item = add_item(tree, parent.n_id, node.nodeName, name, tooltip, menu_name, params);
            if (node.nodeName == 'Folder')
                addChildItemsFromNavData(node, item);
            if (menu_name)
            {
                var menu_id = tree.a_custom_menu_ids[menu_name];
                if (menu_id != undefined)
                    item.menu_id = menu_id;
            }
        }
    }//for

    redo_item(parent);

    // --- Surface any server-side exceptions embedded in the response. ---
    a_nodes = xmlNode.getElementsByTagName('Exception');
    n_nodes = a_nodes.length;
    if (n_nodes > 0)
    {
        var s = new Array();
        var j = 0;
        s[j++] = 'Exception';
        if (n_nodes > 1)
            s[j++] = 's';
        s[j++] = ' encountered:\n';
        for (var i = 0; i < n_nodes; i++)
        {
            var node = a_nodes[i];
            var msg = get_xml_attrib(node, 'message', null);
            if (msg)
            {
                var source = get_xml_attrib(node, 'source', null);
                if (source)
                {
                    s[j++] = source;
                    s[j++] = ': ';
                }
                s[j++] = msg;
                s[j++] = '\n';
            }
        }
        alert(s.join(''));
    }
    return n_children;
}
// Expands or collapses tree item n_id. The optional on_expanding callback may
// veto the change. For load-on-demand items being expanded, stale children
// (unless flagged cacheable) are discarded and re-fetched first.
function toggle(n_id)
{
    var o_item = this.get_item(n_id);
    var b_expand = !o_item.b_expanded;
    if (this.on_expanding && !this.on_expanding(o_item, b_expand))
        return; // callback vetoed the expand/collapse
    if (b_expand && o_item.b_load_on_demand)
    {
        // Refresh children unless they are flagged as cacheable.
        if (o_item.a_children.length && !o_item.b_cache_children)
            this.deleteAllChildren(o_item);
        load_children_on_demand(o_item);
    }
    o_item.expand(b_expand);
    //document.all['textarea'].value = document.all['tree_0'].outerHTML;
}
// Arms the auto-hide timer for the popup menu. The implementation is
// intentionally commented out, so this is currently a no-op; callers remain
// in place should the timer be re-enabled.
function set_popup_timer()
{
    /*
    if (this.timeoutId)
        clearTimeout(this.timeoutId);
    this.timeoutId = window.setTimeout('hide_popup_menu()', 2000);
    */
}
// Cancels the popup auto-hide timer. Body intentionally commented out,
// matching set_popup_timer above — currently a no-op.
function clear_popup_timer()
{
    /*
    if (this.timeoutId)
        clearTimeout(this.timeoutId);
    */
}
// Hides the context menu, handling browser differences: on IE the menu may be
// hosted on the parent frame, on NS it is hidden via CSS visibility.
// NOTE(review): relies on globals `ie` and `ns` defined elsewhere in the file.
function hide_popup_menu()
{
    if (ie)
    {
        // Prefer the parent frame's menu if one is open there.
        if (parent.contextMenu && parent.contextMenu.isOpen)
            parent.contextMenu.hide();
        else
        if (contextMenu && contextMenu.isOpen)
            contextMenu.hide();
    }
    else
    if (ns)
        contextMenu.style.visibility = 'hidden';
}
// Accumulates the screen-space position of `obj` into xy ([x, y]): the sum of
// offsetLeft/offsetTop up the parentNode chain to document.body, plus the
// window's own screen origin.
function get_screen_coords(obj, xy)
{
    // Iterative form of the original recursion; addition order is irrelevant.
    var node = obj;
    while (node != document.body)
    {
        xy[0] += node.offsetLeft;
        xy[1] += node.offsetTop;
        node = node.parentNode;
    }
    xy[0] += window.screenLeft;
    xy[1] += window.screenTop;
}
// Hover handler for tree item n_id: when the item's text overflows its
// container, shows a tooltip with the full text at the item's screen
// position; otherwise hides any active tooltip. Finally refreshes the
// item's status text.
// NOTE(review): bails out unless window.event exists, so this path is
// effectively IE-only.
function mouse_over_item(n_id)
{
    if (!window.event)
        return;
    var item = this.get_item(n_id);
    var img = document.getElementById('i_img' + this.n_tid + '_' + n_id);
    var link = document.getElementById('i_txt' + this.n_tid + '_' + n_id);
    var tree_div = document.getElementById('tree_' + this.n_tid);
    var parentOfTree = tree_div.parentNode;
    // Text wider than the visible container => show the full text as a tooltip.
    if (link.offsetLeft + link.offsetWidth > parentOfTree.offsetWidth)
    {
        var xy = [0, 0];
        get_screen_coords(link, xy)
        EnterContent('ToolTip',null, link.innerText, true);
        Activate(null, xy[0], xy[1]);
    }
    else
        deActivate();
    /*
    if (!contextMenu || !contextMenu.isOpen)
        this.oncontextmenu(n_id);
    else
    {
        this.select(n_id, e);
        var popupBody = contextMenu.document.body;
        contextMenu.show(link.offsetWidth, 0, popupBody.offsetWidth, popupBody.offsetHeight, link);
    }
    set_popup_timer();
    */
    item.upstatus(true);
}
// Mouse-out handler for tree item n_id: hides the tooltip and refreshes the
// item's status text. Like mouse_over_item, requires window.event (IE).
function mouse_left_item(n_id)
{
    if (!window.event)
        return;
    deActivate()
    set_popup_timer();
    this.get_item(n_id).upstatus();
}
var contextMenu = null; // popup/context menu instance, created on demand elsewhere
var trees = [];         // tree objects on the page; menu command handlers index this by tree id (n_tid)
|
silenc3502/MYSQL-Arch-Doc-Summary
|
mysql-server/plugin/udf_services/test_udf_services.cc
|
<reponame>silenc3502/MYSQL-Arch-Doc-Summary<gh_stars>0
/* Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License, version 2.0,
as published by the Free Software Foundation.
This program is also distributed with certain software (including
but not limited to OpenSSL) that is licensed under separate terms,
as designated in a particular file or component or in included license
documentation. The authors of MySQL hereby grant you an additional
permission to link the program and your derivative works with the
separately licensed software that they have included with MySQL.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License, version 2.0, for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA */
#include <mysql/plugin.h>
#include <mysql_com.h>
#include <mysql_version.h>
#include <stddef.h>
#include "my_compiler.h"
#include "my_inttypes.h"
static int test_udf_registration_init(MYSQL_PLUGIN p);
static int test_udf_registration_deinit(MYSQL_PLUGIN p);
static int test_udf_extension_init(MYSQL_PLUGIN p);
static int test_udf_extension_deinit(MYSQL_PLUGIN p);
/**
@file test_udf_services.cc
This is a test suite plugin to verify :
(1) Plugins can co-exist with UDFs
The file defines one DAEMON plugin @ref test_udf_services_plugin and one
UDF function: @ref test_udf_services_udf.
The test then checks if the plugin can be unloaded and loaded while the
UDF is defined.
(2) UDF extension attributes
The file defines one DAEMON plugin @ref test_udf_extension_services and
a few UDF functions to test the UDF extension arguments. UDF functions
tests for character set and collation extension arguments right now.
No user-facing functionality in this plugin. Just test material !
*/
/* Daemon-plugin descriptors for the three plugins declared below. They carry
   no callbacks of their own; behavior lives in the init/deinit hooks. */
static struct st_mysql_daemon test_udf_services_plugin = {
    MYSQL_DAEMON_INTERFACE_VERSION};
static struct st_mysql_daemon test_udf_registration_plugin = {
    MYSQL_DAEMON_INTERFACE_VERSION};
static struct st_mysql_daemon test_udf_extension_services_plugin = {
    MYSQL_DAEMON_INTERFACE_VERSION};
/* Plugin library declaration: three daemon plugins in one library. The first
   installs no hooks; the other two wire up their init/deinit functions. */
mysql_declare_plugin(test_udf_services){
    MYSQL_DAEMON_PLUGIN,
    &test_udf_services_plugin,
    "test_udf_services",
    PLUGIN_AUTHOR_ORACLE,
    "MySQL mtr test framework",
    PLUGIN_LICENSE_GPL,
    nullptr, /* Plugin Init */
    nullptr, /* Plugin Check uninstall */
    nullptr, /* Plugin Deinit */
    0x0100,  /* Plugin version: 1.0 */
    nullptr, /* status variables */
    nullptr, /* system variables */
    nullptr, /* config options */
    0,       /* flags */
},
    {
        MYSQL_DAEMON_PLUGIN,
        &test_udf_registration_plugin,
        "test_udf_registration",
        PLUGIN_AUTHOR_ORACLE,
        "MySQL mtr test framework",
        PLUGIN_LICENSE_GPL,
        test_udf_registration_init,   /* Plugin Init */
        nullptr,                      /* Plugin Check uninstall */
        test_udf_registration_deinit, /* Plugin Deinit */
        0x0100,                       /* Plugin version: 1.0 */
        nullptr,                      /* status variables */
        nullptr,                      /* system variables */
        nullptr,                      /* config options */
        0,                            /* flags */
    },
    {
        MYSQL_DAEMON_PLUGIN,
        &test_udf_extension_services_plugin,
        "test_udf_extension_services",
        PLUGIN_AUTHOR_ORACLE,
        "MySQL mtr test framework",
        PLUGIN_LICENSE_GPL,
        test_udf_extension_init,   /* Plugin Init */
        nullptr,                   /* Plugin Check uninstall */
        test_udf_extension_deinit, /* Plugin Deinit */
        0x0100,                    /* Plugin version: 1.0 */
        nullptr,                   /* status variables */
        nullptr,                   /* system variables */
        nullptr,                   /* config options */
        0,                         /* flags */
    } mysql_declare_plugin_end;
#ifdef WIN32
#define PLUGIN_EXPORT extern "C" __declspec(dllexport)
#else
#define PLUGIN_EXPORT extern "C"
#endif
/**
Initialization function for @ref test_udf_services_udf
Must be present otherwise the server refuses to load
@param initid Return value from xxxx_init
@param args Array of arguments
@param[out] message Error message in case of error.
@retval false success
@retval true Failure. Error in the message argument
*/
PLUGIN_EXPORT bool test_udf_services_udf_init(
    UDF_INIT *initid MY_ATTRIBUTE((unused)),
    UDF_ARGS *args MY_ATTRIBUTE((unused)),
    char *message MY_ATTRIBUTE((unused))) {
  // No argument validation or per-call state needed: always report success.
  return false;
}
/**
A UDF function returning 0.
@param initid Return value from xxxx_init
@param args Array of arguments
@param[out] is_null If the result is null, store 1 here
@param[out] error On error store 1 here
*/
PLUGIN_EXPORT longlong
test_udf_services_udf(UDF_INIT *initid MY_ATTRIBUTE((unused)),
                      UDF_ARGS *args MY_ATTRIBUTE((unused)),
                      unsigned char *is_null MY_ATTRIBUTE((unused)),
                      unsigned char *error MY_ATTRIBUTE((unused))) {
  char buffer[10];
  *is_null = 0;
  *error = 0;
  /* use a plugin service function */
  // NOTE(review): buffer is written but never read; this call appears to exist
  // only to exercise the snprintf plugin service — confirm that's intentional.
  snprintf(buffer, sizeof(buffer), "test");
  return 0;
}
#include <mysql/components/my_service.h>
#include <mysql/components/services/udf_registration.h>
#include <mysql/service_plugin_registry.h>
using udf_registration_t = SERVICE_TYPE_NO_CONST(udf_registration);
/**
  Sample plugin init function that registers a UDF via the registry's
  udf_registration service.

  @return 0 on success, 1 on failure.
*/
static int test_udf_registration_init(MYSQL_PLUGIN /*p */) {
  SERVICE_TYPE(registry) *reg = nullptr;
  // Bug fix: `udf` was previously left uninitialized; if reg->acquire() fails
  // without touching the out-pointer, the `if (!udf)` below read indeterminate
  // memory (undefined behavior). The deinit counterpart already initializes it.
  SERVICE_TYPE(udf_registration) *udf = nullptr;
  bool ret = false;
  reg = mysql_plugin_registry_acquire();
  if (!reg) {
    ret = true;
    goto end;
  }
  reg->acquire("udf_registration",
               reinterpret_cast<my_h_service *>(
                   const_cast<udf_registration_t **>(&udf)));
  if (!udf) {
    ret = true;
    goto end;
  }
  ret = udf->udf_register("test_udf_registration_udf", INT_RESULT,
                          (Udf_func_any)test_udf_services_udf,
                          test_udf_services_udf_init, nullptr);
  reg->release(
      reinterpret_cast<my_h_service>(const_cast<udf_registration_t *>(udf)));
end:
  if (reg) mysql_plugin_registry_release(reg);
  return ret ? 1 : 0;
}
/** Sample plugin deinit function that unregisters the UDF installed by
    test_udf_registration_init. Returns 0 on success, 1 on failure. */
static int test_udf_registration_deinit(MYSQL_PLUGIN /* p */) {
  SERVICE_TYPE(registry) * reg;
  SERVICE_TYPE(udf_registration) *udf = nullptr;
  bool ret = false;
  int was_present;
  reg = mysql_plugin_registry_acquire();
  if (!reg) {
    ret = true;
    goto end;
  }
  reg->acquire("udf_registration",
               reinterpret_cast<my_h_service *>(
                   const_cast<udf_registration_t **>(&udf)));
  if (!udf) {
    ret = true;
    goto end;
  }
  // was_present is deliberately ignored: absence of the UDF is not an error.
  ret = udf->udf_unregister("test_udf_registration_udf", &was_present);
end:
  if (reg) {
    if (udf)
      reg->release(reinterpret_cast<my_h_service>(
          const_cast<udf_registration_t *>(udf)));
    mysql_plugin_registry_release(reg);
  }
  return ret ? 1 : 0;
}
#include "services_required.h"
#include "udf_extension_test_functions.h"
/**
Plugin init function that registers a UDF.
A newly created UDF must be registered here.
@retval false UDF registered successfully.
@retval true Otherwise.
*/
static int test_udf_extension_init(MYSQL_PLUGIN /*p */) {
  bool ret = true;
  // Both service wrappers must be acquired before any UDF can be registered.
  if (Registry_service::acquire() || Udf_registration::acquire()) {
    goto end;
  }
  /*
    Demonstrates how to set and get the charset extension argument of
    return value. It also demonstrates how to perform the charset
    conversion on return value.
    This UDF takes two STRING arguments. It returns the value of first
    argument. But before returning the value, it converts the return
    value into the character set of the second argument.
  */
  if (Udf_registration::add("test_result_charset", STRING_RESULT,
                            (Udf_func_any)test_result_charset,
                            test_result_charset_init,
                            test_result_charset_deinit)) {
    goto end;
  }
  /*
    Demonstrates how to set the expected charset of a UDF argument.
    User sets the charset of a UDF argument at the init() time, server
    detects that and provides the converted value at the UDF() time.
    This UDF takes two STRING arguments. It sets the charset of first UDF
    argument as charset of second argument.
  */
  if (Udf_registration::add("test_args_charset", STRING_RESULT,
                            (Udf_func_any)test_args_charset,
                            test_args_charset_init, test_args_charset_deinit)) {
    goto end;
  }
  /*
    Demonstrates how to set and get the collation extension argument of
    return value. It also demonstrates how to perform the charset
    conversion on return value.
    This UDF takes two STRING arguments. It returns the value of first
    argument. But before returning the value, it converts the return
    value into the character set of the second argument. It determines
    the charset of first argument from the collation name as it was set
    during init() time.
  */
  if (Udf_registration::add("test_result_collation", STRING_RESULT,
                            (Udf_func_any)test_result_collation,
                            test_result_collation_init,
                            test_result_collation_deinit)) {
    goto end;
  }
  /*
    Demonstrates how to set the expected collation of a UDF argument.
    User sets the collation of a UDF argument at the init() time, server
    detects that and provides the converted value at the UDF() time.
    This UDF takes two STRING arguments. It sets the collation of first UDF
    argument as collation of second argument.
  */
  if (Udf_registration::add("test_args_collation", STRING_RESULT,
                            (Udf_func_any)test_args_collation,
                            test_args_collation_init,
                            test_args_collation_deinit)) {
    goto end;
  }
  /*
    Demonstrates how to set and get the charset extension argument of
    return value. It also demonstrates how to perform the charset conversion
    on return value.
    This UDF takes two STRING arguments. It returns the value of first
    argument. But before returning the value, it converts the return
    value into the character set as it was specified by the user in the second
    argument.
  */
  if (Udf_registration::add("test_result_charset_with_value", STRING_RESULT,
                            (Udf_func_any)test_result_charset_with_value,
                            test_result_charset_with_value_init,
                            test_result_charset_with_value_deinit)) {
    goto end;
  }
  /*
    Demonstrates how to set the expected charset of a UDF argument.
    User sets the charset of a UDF argument at the init() time, server
    detects that and provides the converted value at the UDF() time.
    This UDF takes two STRING arguments. It sets the charset of first UDF
    argument as charset provided by the user in the second argument.
  */
  if (Udf_registration::add("test_args_charset_with_value", STRING_RESULT,
                            (Udf_func_any)test_args_charset_with_value,
                            test_args_charset_with_value_init,
                            test_args_charset_with_value_deinit)) {
    goto end;
  }
  /*
    Demonstrates how to set and get the collation extension argument of
    return value. It also demonstrates how to perform the charset
    conversion on return value.
    This UDF takes two STRING arguments. It returns the value of first
    argument. But before returning the value, it converts the return
    value into the character set of the second argument. It determines
    the charset of first argument from the collation name as provided
    by the user in the second argument.
  */
  if (Udf_registration::add("test_result_collation_with_value", STRING_RESULT,
                            (Udf_func_any)test_result_collation_with_value,
                            test_result_collation_with_value_init,
                            test_result_collation_with_value_deinit)) {
    goto end;
  }
  /*
    Demonstrates how to set the expected collation of a UDF argument.
    User sets the collation of a UDF argument at the init() time, server
    detects that and provides the converted value at the UDF() time.
    This UDF takes two STRING arguments. It sets the collation of first UDF
    argument as collation provided by the user in the second argument.
  */
  if (Udf_registration::add("test_args_collation_with_value", STRING_RESULT,
                            (Udf_func_any)test_args_collation_with_value,
                            test_args_collation_with_value_init,
                            test_args_collation_with_value_deinit)) {
    goto end;
  }
  ret = false;  // Successfully initialized the plugin
end:
  // On any failure path, drop whatever services were acquired.
  if (ret) {
    Udf_registration::release();
    Registry_service::release();
  }
  return ret ? 1 : 0;
}
/**
Plugin deinit function that unregisters a UDF
@retval false UDF unregistered successfully.
@retval true Otherwise.
*/
static int test_udf_extension_deinit(MYSQL_PLUGIN /* p */) {
  bool ret = true;
  int was_present;
  if (Registry_service::acquire() || Udf_registration::acquire()) {
    goto end;
  }
  // Unregister every UDF installed by test_udf_extension_init; was_present is
  // reused and deliberately ignored for each removal.
  if (Udf_registration::remove("test_result_charset", &was_present) ||
      Udf_registration::remove("test_args_charset", &was_present) ||
      Udf_registration::remove("test_result_collation", &was_present) ||
      Udf_registration::remove("test_args_collation", &was_present) ||
      Udf_registration::remove("test_result_charset_with_value",
                               &was_present) ||
      Udf_registration::remove("test_args_charset_with_value", &was_present) ||
      Udf_registration::remove("test_result_collation_with_value",
                               &was_present) ||
      Udf_registration::remove("test_args_collation_with_value",
                               &was_present)) {
    goto end;
  }
  ret = false;
end:
  // Unlike init, services are always released here — deinit must not leak.
  Udf_registration::release();
  Registry_service::release();
  return ret ? 1 : 0;
}
|
PingHuskar/hackerrank
|
algorithms/arrays-and-sorting/correctness-invariant.py
|
# Algorithms > Sorting > Correctness and the Loop Invariant
# How do you demonstrate the correctness of an algorithm? You can use the loop invariant.
#
# https://www.hackerrank.com/challenges/correctness-invariant/problem
#
def insertion_sort(l):
    """Sort the list ``l`` in place, ascending, using insertion sort.

    Stable, O(n^2) worst case; returns None (mutates its argument).
    """
    for pos in range(1, len(l)):
        value = l[pos]
        insert_at = pos
        # Shift larger elements one slot right until value's position is found.
        while insert_at > 0 and l[insert_at - 1] > value:
            l[insert_at] = l[insert_at - 1]
            insert_at -= 1
        l[insert_at] = value
# (template_tail) ----------------------------------------------------------------------
# Reads the declared element count (consumed but otherwise unused), then the
# array itself, sorts it in place, and prints it space-separated.
m = int(input().strip())
ar = [int(i) for i in input().strip().split()]
insertion_sort(ar)
print(" ".join(map(str,ar)))
|
hariPrasad525/Nitya_Annaccounting
|
src/com/nitya/accounter/mobile/commands/VendorGroupListCommand.java
|
<filename>src/com/nitya/accounter/mobile/commands/VendorGroupListCommand.java
package com.nitya.accounter.mobile.commands;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import com.nitya.accounter.core.VendorGroup;
import com.nitya.accounter.mobile.CommandList;
import com.nitya.accounter.mobile.Context;
import com.nitya.accounter.mobile.Record;
import com.nitya.accounter.mobile.Requirement;
import com.nitya.accounter.mobile.requirements.ShowListRequirement;
/**
 * Mobile command that shows the list of vendor groups for the current company.
 * Selecting an entry chains into the {@code updateVendorGroup} command; the
 * list also exposes a {@code newVendorGroup} create action.
 */
public class VendorGroupListCommand extends AbstractCommand {

    @Override
    protected String initObject(Context context, boolean isUpdate) {
        // Nothing to pre-load for a read-only list command.
        return null;
    }

    @Override
    protected String getWelcomeMessage() {
        // No welcome message for this command.
        return null;
    }

    @Override
    protected String getDetailsMessage() {
        // No details message for this command.
        return null;
    }

    @Override
    protected void setDefaultValues(Context context) {
        // No default values required.
    }

    @Override
    public String getSuccessMessage() {
        return getMessages().success();
    }

    @Override
    public String getId() {
        // This command has no persistent id.
        return null;
    }

    @Override
    protected void addRequirements(List<Requirement> list) {
        // Single requirement: a list of vendor groups (20 per page — see the
        // ShowListRequirement constructor's last argument).
        list.add(new ShowListRequirement<VendorGroup>(getMessages().Vendor()
                + " " + getMessages().group(), "", 20) {

            @Override
            protected String onSelection(VendorGroup value) {
                // Chain into the update command for the chosen group.
                return "updateVendorGroup " + value.getName();
            }

            @Override
            protected String getShowMessage() {
                return getMessages().payeeList(
                        getMessages().Vendor() + " " + getMessages().group());
            }

            @Override
            protected String getEmptyString() {
                return getMessages().youDontHaveAny(
                        getMessages().Vendor() + " " + getMessages().group());
            }

            @Override
            protected Record createRecord(VendorGroup value) {
                // Each row shows just the group's name.
                Record vendorGroupRec = new Record(value);
                vendorGroupRec.add(getMessages().name(), value.getName());
                return vendorGroupRec;
            }

            @Override
            protected void setCreateCommand(CommandList list) {
                list.add("newVendorGroup");
            }

            @Override
            protected boolean filter(VendorGroup e, String name) {
                // No text filtering: every row is always shown.
                return false;
            }

            @Override
            protected List<VendorGroup> getLists(Context context) {
                return getVendorGroups(context);
            }
        });
    }

    /**
     * get vendor groups
     *
     * @param context active mobile session context
     * @return the current company's vendor groups copied into a list
     */
    private List<VendorGroup> getVendorGroups(Context context) {
        Set<VendorGroup> vendorGroups = context.getCompany().getVendorGroups();
        List<VendorGroup> result = new ArrayList<VendorGroup>(vendorGroups);
        return result;
    }
}
|
camment/sdk-ios
|
CammentSDK/Classes/Core/Store/CMStore.h
|
<reponame>camment/sdk-ios
//
// Created by <NAME> on 16.05.17.
// Copyright (c) 2017 Sportacam. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <ReactiveObjC/ReactiveObjC.h>
#import "CMUser.h"
#import "CMUsersGroup.h"
#import <Tweaks/FBTweak.h>
#import "CMIdentityProvider.h"
@class CMShow;
@class CMShowMetadata;
@class CMInvitation;
@class CMCammentOverlayLayoutConfig;
@class CMAuthStatusChangedEventContext;
@class CMServerMessage;
@protocol CMCammentOverlayControllerDelegate;
@class CMAppConfig;
// Camment id constant used when nothing is playing (per its name) — value is
// defined in the implementation file.
extern NSString *kCMStoreCammentIdIfNotPlaying;

// Lifecycle states of a camment recording.
typedef NS_ENUM(NSInteger, CMCammentRecordingState) {
    CMCammentRecordingStateNotRecording,
    CMCammentRecordingStateRecording,
    CMCammentRecordingStateFinished,
    CMCammentRecordingStateCancelled
};
// Shared SDK state container, accessed via +instance.
// NOTE(review): thread-safety of this mutable state is not visible in this
// header — confirm before mutating from background threads.
@interface CMStore: NSObject <FBTweakObserver>

// Configuration and current playback state.
@property (nonatomic, strong) CMAppConfig *appConfig;
@property (nonatomic, assign) NSTimeInterval currentShowTimeInterval;
@property (nonatomic, copy) NSString *playingCammentId;
@property (nonatomic, assign) CMCammentRecordingState cammentRecordingState;

// Active group and its members.
@property (nonatomic, copy) CMUsersGroup *activeGroup;
@property (nonatomic, strong) NSArray<CMUser *> *activeGroupUsers;

// Onboarding and fetch-progress flags.
@property(nonatomic) BOOL isOnboardingFinished;
@property(nonatomic) BOOL isOnboardingSkipped;
@property(nonatomic) BOOL isFetchingGroupUsers;
@property(nonatomic) BOOL isFetchingGroupList;
@property(nonatomic) CMShowMetadata *currentShowMetadata;

// ReactiveObjC subjects used as SDK-wide event streams.
// NOTE(review): "authentification" is a typo, but the name is public API —
// renaming would break SDK consumers, so it is left as-is.
@property RACSubject<CMAuthStatusChangedEventContext *> *authentificationStatusSubject;
@property RACSubject<CMServerMessage *> *serverMessagesSubject;
@property RACSubject<NSNumber *> *reloadActiveGroupSubject;
@property RACSubject<NSNumber *> *inviteFriendsActionSubject;
@property RACSubject<NSNumber *> *startTutorial;
@property RACSubject<NSNumber *> *userHasJoinedSignal;
@property RACSubject<NSNumber *> *cleanUpSignal;
@property RACSubject<NSNumber *> *fetchUpdatesSubject;
@property RACSubject<NSNumber *> *requestPlayerStateFromHostAppSignal;

// Connectivity / environment flags and miscellany.
@property(nonatomic) BOOL isOfflineMode;
@property(nonatomic) BOOL awsServicesConfigured;
@property(nonatomic, strong) NSArray *avoidTouchesInViews;
@property(nonatomic) BOOL connectionAvailable;
@property(nonatomic, strong) NSDate *lastTimestampUploaded;
@property(nonatomic, weak) id <CMCammentOverlayControllerDelegate> overlayDelegate;
// NOTE(review): "shoudForceSynced" is also a typo kept for API compatibility.
@property(nonatomic) BOOL shoudForceSynced;
@property(nonatomic) NSTimeInterval showTimestamp;

+ (CMStore *)instance;
- (void)setupTweaks;
- (void)cleanUp;
- (void)updateUserDataOnIdentityChangeOldIdentity:(NSString *)oldIdentity newIdentity:(NSString *)newIdentity;
- (void)cleanUpCurrentChatGroup;
- (void)refetchUsersInActiveGroup;
@end
|
Ewpratten/frc_971_mirror
|
motors/print/itm.cc
|
<gh_stars>0
#include "motors/print/itm.h"
#include "motors/core/itm.h"
namespace frc971 {
namespace motors {
namespace {
// Streams `buffer` to ITM stimulus port kPort, lazily initializing the ITM
// on first use. Writes are chunked to align the source address: 8-/16-bit
// writes until 4-byte aligned, 32-bit writes for the bulk, then 16-/8-bit
// writes for the tail. memcpy is used for each load to avoid unaligned
// access / strict-aliasing issues.
template<int kPort> void WriteToPort(gsl::span<const char> buffer) {
  // This ignores memory barriers etc, because it will be called by
  // CreatePrinting which must be called before any interrupts are enabled. That
  // means the only thing we need to worry about is actually getting it
  // initialized with a minimal number of cycles.
  static bool is_initialized = false;
  if (__builtin_expect(!is_initialized, false)) {
    is_initialized = true;
    itm::Initialize();
  }
  const char *next_address = buffer.data();
  int remaining_bytes = buffer.size();
  // Write small chunks to make the address even.
  if (remaining_bytes >= 1 && (reinterpret_cast<uintptr_t>(next_address) & 1)) {
    uint8_t value;
    memcpy(&value, next_address, 1);
    itm::Write8(kPort, value);
    next_address += 1;
    remaining_bytes -= 1;
  }
  if (remaining_bytes >= 2 && (reinterpret_cast<uintptr_t>(next_address) & 2)) {
    uint16_t value;
    memcpy(&value, next_address, 2);
    itm::Write16(kPort, value);
    next_address += 2;
    remaining_bytes -= 2;
  }
  // Write big chunks while we can.
  while (remaining_bytes >= 4) {
    uint32_t value;
    memcpy(&value, next_address, 4);
    itm::Write32(kPort, value);
    next_address += 4;
    remaining_bytes -= 4;
  }
  // Write out any remaining uneven bytes on the end.
  if (remaining_bytes >= 2) {
    uint16_t value;
    memcpy(&value, next_address, 2);
    itm::Write16(kPort, value);
    next_address += 2;
    remaining_bytes -= 2;
  }
  if (remaining_bytes >= 1) {
    uint8_t value;
    memcpy(&value, next_address, 1);
    itm::Write8(kPort, value);
    next_address += 1;
    remaining_bytes -= 1;
  }
}
} // namespace
// Factory for the printing implementation; ITM printing needs nothing from
// the parameters, so they are deliberately unused.
::std::unique_ptr<PrintingImplementation> CreatePrinting(
    const PrintingParameters & /*parameters*/) {
  return ::std::unique_ptr<PrintingImplementation>(new ItmPrinting());
}
// C library _write hook (presumably the newlib syscall stub — confirm): routes
// all file-descriptor writes to ITM port 0 and reports the full length.
extern "C" int _write(const int /*file*/, char *const ptr, const int len) {
  WriteToPort<0>(gsl::span<const char>(ptr, len));
  return len;
}
ItmPrinting::ItmPrinting() {
  // Make sure we run the one-time initialization. It's important to do it here
  // to ensure it's complete before interrupts are enabled, because it's not
  // interrupt-safe.
  // A zero-length write is otherwise a no-op; it only triggers WriteToPort's
  // lazy itm::Initialize().
  _write(0, nullptr, 0);
}
// Stdout goes to ITM stimulus port 0; always accepts the whole buffer.
int ItmPrinting::WriteStdout(gsl::span<const char> buffer) {
  WriteToPort<0>(buffer);
  return buffer.size();
}
// Debug output goes to ITM stimulus port 1, kept separate from stdout's port 0.
int ItmPrinting::WriteDebug(gsl::span<const char> buffer) {
  WriteToPort<1>(buffer);
  return buffer.size();
}
} // namespace motors
} // namespace frc971
|
jay-most/azure-sdk-for-go
|
profiles/preview/cognitiveservices/customsearch/models.go
|
//go:build go1.9
// +build go1.9
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
// This code was auto-generated by:
// github.com/Azure/azure-sdk-for-go/tools/profileBuilder
package customsearch
import original "github.com/Azure/azure-sdk-for-go/services/cognitiveservices/v1.0/customsearch"
const (
DefaultEndpoint = original.DefaultEndpoint
)
type ErrorCode = original.ErrorCode
const (
InsufficientAuthorization ErrorCode = original.InsufficientAuthorization
InvalidAuthorization ErrorCode = original.InvalidAuthorization
InvalidRequest ErrorCode = original.InvalidRequest
None ErrorCode = original.None
RateLimitExceeded ErrorCode = original.RateLimitExceeded
ServerError ErrorCode = original.ServerError
)
// ErrorSubCode aliases the error sub-code enumeration of the underlying
// (versioned) package; the constants below re-export its values so that
// consumers of this profile never import the original package directly.
type ErrorSubCode = original.ErrorSubCode

const (
	AuthorizationDisabled ErrorSubCode = original.AuthorizationDisabled
	AuthorizationExpired ErrorSubCode = original.AuthorizationExpired
	AuthorizationMissing ErrorSubCode = original.AuthorizationMissing
	AuthorizationRedundancy ErrorSubCode = original.AuthorizationRedundancy
	Blocked ErrorSubCode = original.Blocked
	HTTPNotAllowed ErrorSubCode = original.HTTPNotAllowed
	NotImplemented ErrorSubCode = original.NotImplemented
	ParameterInvalidValue ErrorSubCode = original.ParameterInvalidValue
	ParameterMissing ErrorSubCode = original.ParameterMissing
	ResourceError ErrorSubCode = original.ResourceError
	UnexpectedError ErrorSubCode = original.UnexpectedError
)

// SafeSearch aliases the safe-search filtering level enumeration.
type SafeSearch = original.SafeSearch

const (
	Moderate SafeSearch = original.Moderate
	Off SafeSearch = original.Off
	Strict SafeSearch = original.Strict
)

// TextFormat aliases the response text-format enumeration.
type TextFormat = original.TextFormat

const (
	HTML TextFormat = original.HTML
	Raw TextFormat = original.Raw
)

// Type aliases the polymorphic response-type discriminator enumeration.
type Type = original.Type

const (
	TypeAnswer Type = original.TypeAnswer
	TypeCreativeWork Type = original.TypeCreativeWork
	TypeErrorResponse Type = original.TypeErrorResponse
	TypeIdentifiable Type = original.TypeIdentifiable
	TypeResponse Type = original.TypeResponse
	TypeResponseBase Type = original.TypeResponseBase
	TypeSearchResponse Type = original.TypeSearchResponse
	TypeSearchResultsAnswer Type = original.TypeSearchResultsAnswer
	TypeThing Type = original.TypeThing
	TypeWebPage Type = original.TypeWebPage
	TypeWebWebAnswer Type = original.TypeWebWebAnswer
)

// Type aliases for every model and client type of the underlying package,
// re-exported one-to-one under this profile's package path.
type Answer = original.Answer
type BaseClient = original.BaseClient
type BasicAnswer = original.BasicAnswer
type BasicCreativeWork = original.BasicCreativeWork
type BasicIdentifiable = original.BasicIdentifiable
type BasicResponse = original.BasicResponse
type BasicResponseBase = original.BasicResponseBase
type BasicSearchResultsAnswer = original.BasicSearchResultsAnswer
type BasicThing = original.BasicThing
type CreativeWork = original.CreativeWork
type CustomInstanceClient = original.CustomInstanceClient
type Error = original.Error
type ErrorResponse = original.ErrorResponse
type Identifiable = original.Identifiable
type Query = original.Query
type QueryContext = original.QueryContext
type Response = original.Response
type ResponseBase = original.ResponseBase
type SearchResponse = original.SearchResponse
type SearchResultsAnswer = original.SearchResultsAnswer
type Thing = original.Thing
type WebMetaTag = original.WebMetaTag
type WebPage = original.WebPage
type WebWebAnswer = original.WebWebAnswer
// New delegates to the underlying package's default-endpoint constructor.
func New() BaseClient {
	return original.New()
}

// NewCustomInstanceClient delegates to the underlying package's constructor
// for custom (self-hosted) endpoint instances.
func NewCustomInstanceClient() CustomInstanceClient {
	return original.NewCustomInstanceClient()
}

// NewWithoutDefaults constructs a client against the given endpoint,
// delegating to the underlying package.
func NewWithoutDefaults(endpoint string) BaseClient {
	return original.NewWithoutDefaults(endpoint)
}
// PossibleErrorCodeValues returns every defined ErrorCode value.
func PossibleErrorCodeValues() []ErrorCode {
	return original.PossibleErrorCodeValues()
}

// PossibleErrorSubCodeValues returns every defined ErrorSubCode value.
func PossibleErrorSubCodeValues() []ErrorSubCode {
	return original.PossibleErrorSubCodeValues()
}

// PossibleSafeSearchValues returns every defined SafeSearch value.
func PossibleSafeSearchValues() []SafeSearch {
	return original.PossibleSafeSearchValues()
}

// PossibleTextFormatValues returns every defined TextFormat value.
func PossibleTextFormatValues() []TextFormat {
	return original.PossibleTextFormatValues()
}

// PossibleTypeValues returns every defined Type value.
func PossibleTypeValues() []Type {
	return original.PossibleTypeValues()
}
// UserAgent returns the underlying package's user-agent string with the
// profile identifier appended, so requests are attributable to this profile.
func UserAgent() string {
	return original.UserAgent() + " profiles/preview"
}

// Version reports the underlying package's version string unchanged.
func Version() string {
	return original.Version()
}
|
clouthink-in/nextoa-backend
|
audit/impl/src/main/java/in/clouthink/synergy/audit/domain/model/AggregationType.java
|
<gh_stars>1-10
package in.clouthink.synergy.audit.domain.model;
/**
 * Granularity options for aggregating audit data.
 * NOTE(review): semantics inferred from the constant names (per-month vs.
 * per-day buckets) — confirm against the aggregation service that consumes
 * this enum.
 *
 * @author dz
 */
public enum AggregationType {
    // MONTH = one bucket per calendar month; DAY = one bucket per day.
    MONTH, DAY
}
|
sriThariduSangeeth/kore-chatbot-modernization-mobile
|
BotsSDK/korebotsdklib/src/main/java/kore/botssdk/models/BotTableListTextModel.java
|
package kore.botssdk.models;
/**
 * Plain data holder for a single bot table-list row: a title plus an
 * optional subtitle. Both fields default to {@code null} until set.
 */
public class BotTableListTextModel
{
    /** Primary text of the row. */
    private String title;
    /** Secondary text shown with the title. */
    private String subtitle;

    /** Returns the row title, or {@code null} if none was set. */
    public String getTitle()
    {
        return title;
    }

    /** Sets the row title. */
    public void setTitle(String title)
    {
        this.title = title;
    }

    /** Returns the row subtitle, or {@code null} if none was set. */
    public String getSubtitle()
    {
        return subtitle;
    }

    /** Sets the row subtitle. */
    public void setSubtitle(String subtitle)
    {
        this.subtitle = subtitle;
    }
}
|
aTiKhan/nuxeo
|
modules/platform/nuxeo-automation/nuxeo-automation-core/src/main/java/org/nuxeo/ecm/automation/core/operations/document/GetLiveDocument.java
|
<reponame>aTiKhan/nuxeo
/*
* (C) Copyright 2006-2013 Nuxeo SA (http://nuxeo.com/) and others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Contributors:
* <NAME> <<EMAIL>>
*/
package org.nuxeo.ecm.automation.core.operations.document;
import org.nuxeo.ecm.automation.core.Constants;
import org.nuxeo.ecm.automation.core.annotations.Context;
import org.nuxeo.ecm.automation.core.annotations.Operation;
import org.nuxeo.ecm.automation.core.annotations.OperationMethod;
import org.nuxeo.ecm.automation.core.collectors.DocumentModelCollector;
import org.nuxeo.ecm.core.api.CoreSession;
import org.nuxeo.ecm.core.api.DocumentModel;
/**
* @author <a href="mailto:<EMAIL>"><NAME></a>
* @since 5.7
*/
@Operation(id = GetLiveDocument.ID, category = Constants.CAT_DOCUMENT, label = "Get Live Document", description = "Get the live document even if this is a Proxy or Version Document.", aliases = { "GetLiveDocument" })
public class GetLiveDocument {

    public static final String ID = "Proxy.GetSourceDocument";

    /**
     * Upper bound on proxy/version hops when chasing the live document.
     * Declared {@code final}: it is a constant, and as a mutable static it
     * could previously be reassigned at runtime.
     */
    private static final int MAX_ITERATION = 5;

    @Context
    protected CoreSession session;

    /**
     * Resolves the live (neither proxy nor version) document behind
     * {@code input}, following source links up to {@link #MAX_ITERATION}
     * times. If the chain is longer than that, the last resolved document
     * is returned even if it is still a proxy or a version.
     *
     * @param input the document (possibly a proxy or version) to resolve
     * @return the live document, or the last hop if the bound was reached
     */
    @OperationMethod(collector = DocumentModelCollector.class)
    public DocumentModel run(DocumentModel input) {
        DocumentModel doc = session.getSourceDocument(input.getRef());
        for (int i = 0; i < MAX_ITERATION && !isLive(doc); i++) {
            doc = session.getSourceDocument(doc.getRef());
        }
        return doc;
    }

    /** A document is "live" when it is neither a version nor a proxy. */
    private boolean isLive(DocumentModel doc) {
        return !doc.isVersion() && !doc.isProxy();
    }
}
|
Jiaweihu08/qbeast-spark
|
src/test/scala/io/qbeast/spark/keeper/ProtocolMockTest.scala
|
package io.qbeast.spark.keeper
import io.qbeast.core.keeper.{Keeper, LocalKeeper}
/**
 * Scenario tests for the qbeast-spark keeper protocol. Each scenario wires
 * up cooperating processes (init, announcer, writer, optimizer) against a
 * shared context and asserts whether the write transaction is expected to
 * succeed or to be rejected as conflicting.
 *
 * NOTE(review): statement order within each scenario is the test — the
 * interleaving of start/finish calls is what creates (or avoids) the
 * conflict. Do not reorder.
 */
class ProtocolMockTest extends ProtocolMockTestSpec {

  // A "bad" optimizer announces cubes ("gA", "g") that conflict with the
  // concurrent write, so the writer must detect the conflict and fail.
  // NOTE(review): "execution"/"inconstant" in the scenario name look like
  // typos for "exception"/"inconsistent" — confirm before renaming, the
  // name is a test identifier.
  "the qbeast-spark client" should
    "throw an execution when an inconstant state is found" in withContext(LocalKeeper) {
      context =>
        implicit val keeper: Keeper = LocalKeeper
        val initProcess = new InitProcess(context)
        val announcer = new AnnouncerProcess(context, Seq("", "A", "AA", "AAA"))
        val writer = new WritingProcess(context)
        val badOptimizer = new OptimizingProcessBad(context, Seq("gA", "g"))
        initProcess.startTransactionAndWait()
        initProcess.finishTransaction()
        announcer.start()
        announcer.join()
        writer.startTransactionAndWait()
        badOptimizer.startTransactionAndWait()
        badOptimizer.finishTransaction()
        writer.finishTransaction()
        // The optimization finished inside the writer's transaction window,
        // so the write must be rejected.
        writer.succeeded shouldBe Some(false)
    }

  // Same interleaving as above but with a well-behaved optimizer and a
  // faulty (random) keeper: the conflict must still be detected.
  "A faulty keeper" should "not cause inconsistency with conflicts" in withContext(RandomKeeper) {
    context =>
      implicit val keeper: Keeper = RandomKeeper
      val initProcess = new InitProcess(context)
      val announcer = new AnnouncerProcess(context, Seq("", "A", "AA", "AAA"))
      val writer = new WritingProcess(context)
      val optim = new OptimizingProcessGood(context)
      initProcess.startTransactionAndWait()
      initProcess.finishTransaction()
      announcer.start()
      announcer.join()
      writer.startTransactionAndWait()
      optim.startTransactionAndWait()
      optim.finishTransaction()
      writer.finishTransaction()
      writer.succeeded shouldBe Some(false)
  }

  // Write completes BEFORE the optimization starts, so there is no overlap
  // and the write must succeed even with the faulty keeper.
  it should "not cause inconsistency when there are not conflicts" in withContext(RandomKeeper) {
    context =>
      implicit val keeper: Keeper = RandomKeeper
      val initProcess = new InitProcess(context)
      val announcer = new AnnouncerProcess(context, Seq("", "A", "AA", "AAA"))
      val writer = new WritingProcess(context)
      val optim = new OptimizingProcessGood(context)
      initProcess.startTransactionAndWait()
      initProcess.finishTransaction()
      announcer.start()
      announcer.join()
      writer.startTransactionAndWait()
      writer.finishTransaction()
      optim.startTransactionAndWait()
      optim.finishTransaction()
      writer.succeeded shouldBe Some(true)
  }

  // Non-overlapping write and optimization under the local keeper: the
  // happy path, the write must succeed.
  "A crashed with timeouts" should "not cause inconsistency in normal scenario" in withContext(
    LocalKeeper) { context =>
    implicit val keeper: Keeper = LocalKeeper
    val initProcess = new InitProcess(context)
    val announcer = new AnnouncerProcess(context, Seq("", "A", "AA"))
    val writer = new WritingProcess(context)
    val optim = new OptimizingProcessGood(context)
    initProcess.startTransactionAndWait()
    initProcess.finishTransaction()
    announcer.start()
    announcer.join()
    writer.startTransactionAndWait()
    writer.finishTransaction()
    optim.startTransactionAndWait()
    optim.finishTransaction()
    writer.succeeded shouldBe Some(true)
  }

  // The keeper times the write out (the 1s sleep) before the announcement,
  // so the optimizer is unaware of the in-flight write; the writer itself
  // must still detect the interference and fail.
  "A write timout" should
    "not cause inconsistency when a a timeout may interfere with an optimization" in withContext(
      LocalKeeper) { context =>
      implicit val keeper = LocalKeeper
      val initProcess = new InitProcess(context)
      val announcer = new AnnouncerProcess(context, Seq("", "A", "AA"))
      val writer = new WritingProcess(context)
      val optim = new OptimizingProcessGood(context)
      initProcess.startTransactionAndWait()
      initProcess.finishTransaction()
      writer.startTransactionAndWait()
      Thread.sleep(1000) // We make sure the keeper forgot about this write operations
      announcer.start() // so that when we announce, we are not aware of a running write operation
      announcer.join()
      // which should lead the optim to optimize something it should not be touched.
      optim.startTransactionAndWait()
      optim.finishTransaction()
      // But the write should detect it and fail
      writer.finishTransaction()
      writer.succeeded shouldBe Some(false)
    }

  // The optimizer is killed mid-transaction; after the cleanup window (the
  // 1s sleep) the abandoned optimization must not block or fail the write.
  "A crashed optimization" should "not caused problems" in withContext(LocalKeeper) { context =>
    implicit val keeper = LocalKeeper
    val initProcess = new InitProcess(context)
    val announcer = new AnnouncerProcess(context, Seq("", "A", "AA"))
    val writer = new WritingProcess(context)
    val optim1 = new OptimizingProcessGood(context)
    initProcess.startTransactionAndWait()
    initProcess.finishTransaction()
    writer.startTransactionAndWait()
    announcer.start() // so that when we announce, we are not aware of a running write operation
    announcer.join()
    // which should lead the optim to optimize something it should not be touched.
    optim1.startTransactionAndWait()
    optim1.killMe()
    Thread.sleep(1000) // this should ensure the client cleans the pending optimization
    writer.finishTransaction()
    writer.succeeded shouldBe Some(true)
  }
}
|
PrivacyAmp/cardinality_estimation_evaluation_framework
|
tests/interoperability_test.py
|
# Copyright 2020 The Private Cardinality Estimation Framework Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for wfa_cardinality_estimation_evaluation_framework.
The goals are to make sure that (1) all of the estimators defined in estimators/
work with the simulation and set generation code in simulations/, and (2) the
estimator, set generator and simulator works with the evaluator, analyzer and
report generator.
"""
import math
from absl.testing import absltest
import numpy as np
from wfa_cardinality_estimation_evaluation_framework.estimators.bloom_filters import BlipNoiser
from wfa_cardinality_estimation_evaluation_framework.estimators.bloom_filters import BloomFilter
from wfa_cardinality_estimation_evaluation_framework.estimators.bloom_filters import ExponentialBloomFilter
from wfa_cardinality_estimation_evaluation_framework.estimators.bloom_filters import FirstMomentEstimator
from wfa_cardinality_estimation_evaluation_framework.estimators.bloom_filters import GeometricBloomFilter
from wfa_cardinality_estimation_evaluation_framework.estimators.bloom_filters import LogarithmicBloomFilter
from wfa_cardinality_estimation_evaluation_framework.estimators.bloom_filters import SurrealDenoiser
from wfa_cardinality_estimation_evaluation_framework.estimators.bloom_filters import UnionEstimator
from wfa_cardinality_estimation_evaluation_framework.estimators.cascading_legions import CascadingLegions
from wfa_cardinality_estimation_evaluation_framework.estimators.cascading_legions import Estimator
from wfa_cardinality_estimation_evaluation_framework.estimators.cascading_legions import Noiser
from wfa_cardinality_estimation_evaluation_framework.estimators.estimator_noisers import GeometricEstimateNoiser
from wfa_cardinality_estimation_evaluation_framework.estimators.exact_set import AddRandomElementsNoiser
from wfa_cardinality_estimation_evaluation_framework.estimators.exact_set import ExactMultiSet
from wfa_cardinality_estimation_evaluation_framework.estimators.exact_set import LosslessEstimator
from wfa_cardinality_estimation_evaluation_framework.estimators.hyper_log_log import HllCardinality
from wfa_cardinality_estimation_evaluation_framework.estimators.hyper_log_log import HyperLogLogPlusPlus
from wfa_cardinality_estimation_evaluation_framework.estimators.vector_of_counts import LaplaceNoiser
from wfa_cardinality_estimation_evaluation_framework.estimators.vector_of_counts import SequentialEstimator
from wfa_cardinality_estimation_evaluation_framework.estimators.vector_of_counts import VectorOfCounts
from wfa_cardinality_estimation_evaluation_framework.evaluations import run_evaluation
from wfa_cardinality_estimation_evaluation_framework.evaluations.configs import SketchEstimatorConfig
from wfa_cardinality_estimation_evaluation_framework.evaluations.data import evaluation_configs
from wfa_cardinality_estimation_evaluation_framework.simulations import set_generator
from wfa_cardinality_estimation_evaluation_framework.simulations.simulator import Simulator
class InteroperabilityTest(absltest.TestCase):
  """End-to-end compatibility tests for the evaluation framework.

  Checks that (1) every estimator works with every set generator through the
  Simulator, in both noised and non-noised configurations, and (2) the
  evaluator / analyzer / report-generator pipeline runs end to end.

  Fixes vs. previous revision:
  - test_with_sequentially_correlated_one_set_generator_non_noised used the
    NOISED config dict despite its name (and despite the parallel
    CORRELATED_SETS_ALL test using the non-noised dict); it now uses the
    non-noised configs.
  - The global-DP ExpADBF config name was missing its trailing 'p'
    ('..._global_d'); renamed to match its variable.
  """

  def setUp(self):
    super(InteroperabilityTest, self).setUp()
    # Keep sizes and trial counts tiny: this suite only checks wiring, not
    # statistical accuracy.
    self.number_of_trials = 2
    self.universe_size = 2000
    self.set_size_list = [5, 7, 9]
    self.large_set_size = 6
    self.small_set_size = 3
    self.sketch_size = 128
    self.number_of_sets = 3
    self.set_size = 50
    self.num_large_sets = 1
    self.num_small_sets = 3
    self.order = set_generator.ORDER_RANDOM
    self.user_activity_association = (
        set_generator.USER_ACTIVITY_ASSOCIATION_INDEPENDENT)
    self.shared_prop = 0.2
    self.num_bloom_filter_hashes = 2
    self.exponential_bloom_filter_decay_rate = 10
    self.geometic_bloom_filter_probability = 0.08
    self.noiser_epsilon = np.log(3)
    self.noiser_flip_probability = .25

    # Fixed seeds so the interoperability runs are reproducible.
    self.set_random_state = np.random.RandomState(42)
    self.sketch_random_state = np.random.RandomState(137)
    self.noise_random_state = np.random.RandomState(3)

    # --- Non-noised estimator configs, one per sketch family. ---
    estimator_config_cascading_legions = SketchEstimatorConfig(
        name='cascading_legions',
        sketch_factory=CascadingLegions.get_sketch_factory(
            self.sketch_size, self.sketch_size),
        estimator=Estimator())

    estimator_config_bloom_filter = SketchEstimatorConfig(
        name='bloom_filter-union_estimator',
        sketch_factory=BloomFilter.get_sketch_factory(
            self.sketch_size, self.num_bloom_filter_hashes),
        estimator=UnionEstimator())

    estimator_config_geometric_bloom_filter = SketchEstimatorConfig(
        name='geo_bloom_filter-first_moment_geo',
        sketch_factory=GeometricBloomFilter.get_sketch_factory(
            self.sketch_size, self.geometic_bloom_filter_probability),
        estimator=FirstMomentEstimator(method='geo'))

    estimator_config_logarithmic_bloom_filter = SketchEstimatorConfig(
        name='log_bloom_filter-first_moment_log',
        sketch_factory=LogarithmicBloomFilter.get_sketch_factory(
            self.sketch_size),
        estimator=FirstMomentEstimator(method='log'))

    estimator_config_exponential_bloom_filter = SketchEstimatorConfig(
        name='exp_bloom_filter-first_moment_exp',
        sketch_factory=ExponentialBloomFilter.get_sketch_factory(
            self.sketch_size, self.exponential_bloom_filter_decay_rate),
        estimator=FirstMomentEstimator(method='exp'))

    estimator_config_voc = SketchEstimatorConfig(
        name='vector_of_counts-sequential',
        sketch_factory=VectorOfCounts.get_sketch_factory(self.sketch_size),
        estimator=SequentialEstimator())

    estimator_config_exact = SketchEstimatorConfig(
        name='exact_set-lossless',
        sketch_factory=ExactMultiSet.get_sketch_factory(),
        estimator=LosslessEstimator())

    estimator_config_hll = SketchEstimatorConfig(
        name='hyper_log_log',
        sketch_factory=HyperLogLogPlusPlus.get_sketch_factory(self.sketch_size),
        estimator=HllCardinality())

    # Global-DP variant: noise is applied to the estimate, not the sketch.
    estimator_config_expadbf_first_moment_global_dp = SketchEstimatorConfig(
        name='estimator_config_expadbf_first_moment_global_dp',
        sketch_factory=ExponentialBloomFilter.get_sketch_factory(
            length=10**5, decay_rate=10),
        estimator=FirstMomentEstimator(
            method=FirstMomentEstimator.METHOD_EXP,
            noiser=GeometricEstimateNoiser(epsilon=math.log(3))))

    config_list = [
        estimator_config_exact,
        estimator_config_cascading_legions,
        estimator_config_bloom_filter,
        estimator_config_logarithmic_bloom_filter,
        estimator_config_exponential_bloom_filter,
        estimator_config_geometric_bloom_filter,
        estimator_config_voc,
        estimator_config_hll,
        estimator_config_expadbf_first_moment_global_dp,
    ]

    self.name_to_non_noised_estimator_config = {
        config.name: config for config in config_list
    }

    # --- Noised estimator configs (sketch-level noisers/denoisers). ---
    noised_estimator_config_cascading_legions = SketchEstimatorConfig(
        name='cascading_legions',
        sketch_factory=CascadingLegions.get_sketch_factory(
            self.sketch_size, self.sketch_size),
        estimator=Estimator(),
        sketch_noiser=Noiser(self.noiser_flip_probability))

    noised_estimator_config_bloom_filter = SketchEstimatorConfig(
        name='bloom_filter-union_estimator',
        sketch_factory=BloomFilter.get_sketch_factory(
            self.sketch_size, self.num_bloom_filter_hashes),
        estimator=UnionEstimator(),
        sketch_noiser=BlipNoiser(self.noiser_epsilon, self.noise_random_state))

    noised_estimator_config_geometric_bloom_filter = SketchEstimatorConfig(
        name='geo_bloom_filter-first_moment_geo',
        sketch_factory=GeometricBloomFilter.get_sketch_factory(
            self.sketch_size, self.geometic_bloom_filter_probability),
        estimator=FirstMomentEstimator(
            method='geo',
            denoiser=SurrealDenoiser(epsilon=math.log(3))),
        sketch_noiser=BlipNoiser(self.noiser_epsilon, self.noise_random_state))

    noised_estimator_config_logarithmic_bloom_filter = SketchEstimatorConfig(
        name='log_bloom_filter-first_moment_log',
        sketch_factory=LogarithmicBloomFilter.get_sketch_factory(
            self.sketch_size),
        estimator=FirstMomentEstimator(
            method='log',
            denoiser=SurrealDenoiser(epsilon=math.log(3))),
        sketch_noiser=BlipNoiser(self.noiser_epsilon, self.noise_random_state))

    noised_estimator_config_exponential_bloom_filter = SketchEstimatorConfig(
        name='exp_bloom_filter-first_moment_exp',
        sketch_factory=ExponentialBloomFilter.get_sketch_factory(
            self.sketch_size, self.exponential_bloom_filter_decay_rate),
        estimator=FirstMomentEstimator(
            method='exp',
            denoiser=SurrealDenoiser(epsilon=math.log(3))),
        sketch_noiser=BlipNoiser(self.noiser_epsilon, self.noise_random_state))

    noised_estimator_config_voc = SketchEstimatorConfig(
        name='vector_of_counts-sequential',
        sketch_factory=VectorOfCounts.get_sketch_factory(self.sketch_size),
        estimator=SequentialEstimator(),
        sketch_noiser=LaplaceNoiser())

    noised_estimator_config_exact = SketchEstimatorConfig(
        name='exact_set-lossless',
        sketch_factory=ExactMultiSet.get_sketch_factory(),
        estimator=LosslessEstimator(),
        sketch_noiser=AddRandomElementsNoiser(1, self.noise_random_state))

    noised_config_list = [
        noised_estimator_config_exact,
        noised_estimator_config_cascading_legions,
        noised_estimator_config_bloom_filter,
        noised_estimator_config_logarithmic_bloom_filter,
        noised_estimator_config_exponential_bloom_filter,
        noised_estimator_config_geometric_bloom_filter,
        noised_estimator_config_voc,
    ]

    self.name_to_noised_estimator_config = {
        config.name: config for config in noised_config_list
    }

  def simulate_with_set_generator(self, set_generator_factory, config_dict):
    """Runs the Simulator once per estimator config in config_dict."""
    for _, estimator_method_config in config_dict.items():
      simulator = Simulator(
          num_runs=self.number_of_trials,
          set_generator_factory=set_generator_factory,
          sketch_estimator_config=estimator_method_config,
          set_random_state=self.set_random_state,
          sketch_random_state=self.sketch_random_state)
      # Results are discarded: this suite only asserts that the run does
      # not raise.
      _, _ = simulator.run_all_and_aggregate()

  def test_with_independent_set_generator_non_noised(self):
    set_generator_factory = (
        set_generator.IndependentSetGenerator.
        get_generator_factory_with_num_and_size(
            universe_size=self.universe_size,
            num_sets=self.number_of_sets,
            set_size=self.set_size))
    self.simulate_with_set_generator(set_generator_factory,
                                     self.name_to_non_noised_estimator_config)

  def test_with_independent_set_generator_non_noised_different_sizes(self):
    set_generator_factory = (
        set_generator.IndependentSetGenerator.
        get_generator_factory_with_set_size_list(
            universe_size=self.universe_size,
            set_size_list=self.set_size_list))
    self.simulate_with_set_generator(set_generator_factory,
                                     self.name_to_non_noised_estimator_config)

  def test_with_independent_set_generator_noised(self):
    set_generator_factory = (
        set_generator.IndependentSetGenerator.
        get_generator_factory_with_num_and_size(
            universe_size=self.universe_size,
            num_sets=self.number_of_sets,
            set_size=self.set_size))
    self.simulate_with_set_generator(set_generator_factory,
                                     self.name_to_noised_estimator_config)

  def test_with_independent_set_generator_noised_different_sizes(self):
    set_generator_factory = (
        set_generator.IndependentSetGenerator.
        get_generator_factory_with_set_size_list(
            universe_size=self.universe_size,
            set_size_list=self.set_size_list))
    self.simulate_with_set_generator(set_generator_factory,
                                     self.name_to_noised_estimator_config)

  def test_with_exponential_bow_set_generator_non_noised(self):
    # Choose special sizes here because Exponential Bow requires minimum size
    # See set_generator.ExponentialBowSetGenerator for details
    set_generator_factory = (
        set_generator.ExponentialBowSetGenerator.
        get_generator_factory_with_num_and_size(
            user_activity_association=self.user_activity_association,
            universe_size=200, num_sets=2, set_size=50))
    self.simulate_with_set_generator(set_generator_factory,
                                     self.name_to_non_noised_estimator_config)

  def test_with_exponential_bow_set_generator_non_noised_different_sizes(self):
    # Choose special sizes here because Exponential Bow requires minimum size
    # See set_generator.ExponentialBowSetGenerator for details
    set_generator_factory = (
        set_generator.ExponentialBowSetGenerator.
        get_generator_factory_with_set_size_list(
            user_activity_association=self.user_activity_association,
            universe_size=200, set_size_list=[50, 60, 70]))
    self.simulate_with_set_generator(set_generator_factory,
                                     self.name_to_non_noised_estimator_config)

  def test_with_exponential_bow_set_generator_noised(self):
    set_generator_factory = (
        set_generator.ExponentialBowSetGenerator.
        get_generator_factory_with_num_and_size(
            user_activity_association=self.user_activity_association,
            universe_size=200, num_sets=2, set_size=50))
    self.simulate_with_set_generator(set_generator_factory,
                                     self.name_to_noised_estimator_config)

  def test_with_exponential_bow_set_generator_noised_different_sizes(self):
    set_generator_factory = (
        set_generator.ExponentialBowSetGenerator.
        get_generator_factory_with_set_size_list(
            user_activity_association=self.user_activity_association,
            universe_size=200, set_size_list=[50, 60, 70]))
    self.simulate_with_set_generator(set_generator_factory,
                                     self.name_to_noised_estimator_config)

  def test_with_fully_overlap_set_generator_non_noised(self):
    set_generator_factory = (
        set_generator.FullyOverlapSetGenerator.
        get_generator_factory_with_num_and_size(
            universe_size=self.universe_size,
            num_sets=self.number_of_sets,
            set_size=self.set_size))
    self.simulate_with_set_generator(set_generator_factory,
                                     self.name_to_non_noised_estimator_config)

  def test_with_fully_overlap_set_generator_noised(self):
    set_generator_factory = (
        set_generator.FullyOverlapSetGenerator.
        get_generator_factory_with_num_and_size(
            universe_size=self.universe_size,
            num_sets=self.number_of_sets,
            set_size=self.set_size))
    self.simulate_with_set_generator(set_generator_factory,
                                     self.name_to_noised_estimator_config)

  def test_with_sub_set_generator_non_noised(self):
    set_generator_factory = (
        set_generator.SubSetGenerator.get_generator_factory_with_num_and_size(
            order=self.order,
            universe_size=self.universe_size,
            num_large_sets=self.num_large_sets,
            num_small_sets=self.num_small_sets,
            large_set_size=self.large_set_size,
            small_set_size=self.small_set_size))
    self.simulate_with_set_generator(set_generator_factory,
                                     self.name_to_non_noised_estimator_config)

  def test_with_sub_set_generator_noised(self):
    set_generator_factory = (
        set_generator.SubSetGenerator.get_generator_factory_with_num_and_size(
            order=self.order,
            universe_size=self.universe_size,
            num_large_sets=self.num_large_sets,
            num_small_sets=self.num_small_sets,
            large_set_size=self.large_set_size,
            small_set_size=self.small_set_size))
    self.simulate_with_set_generator(set_generator_factory,
                                     self.name_to_noised_estimator_config)

  def test_with_sequentially_correlated_all_set_generator_non_noised(self):
    set_generator_factory = (
        set_generator.SequentiallyCorrelatedSetGenerator.
        get_generator_factory_with_num_and_size(
            order=self.order,
            correlated_sets=set_generator.CORRELATED_SETS_ALL,
            shared_prop=self.shared_prop,
            num_sets=self.number_of_sets,
            set_size=self.set_size))
    self.simulate_with_set_generator(set_generator_factory,
                                     self.name_to_non_noised_estimator_config)

  # Using abbreviation seq_corr here, otherwise the function name is too long.
  def test_with_seq_corr_all_set_generator_non_noised_different_sizes(self):
    set_generator_factory = (
        set_generator.SequentiallyCorrelatedSetGenerator.
        get_generator_factory_with_set_size_list(
            order=self.order,
            correlated_sets=set_generator.CORRELATED_SETS_ALL,
            shared_prop=self.shared_prop,
            set_size_list=self.set_size_list))
    self.simulate_with_set_generator(set_generator_factory,
                                     self.name_to_non_noised_estimator_config)

  def test_with_sequentially_correlated_one_set_generator_non_noised(self):
    set_generator_factory = (
        set_generator.SequentiallyCorrelatedSetGenerator.
        get_generator_factory_with_num_and_size(
            order=self.order,
            correlated_sets=set_generator.CORRELATED_SETS_ONE,
            shared_prop=self.shared_prop,
            num_sets=self.number_of_sets,
            set_size=self.set_size))
    # Bug fix: this test previously passed the NOISED config dict, so the
    # non-noised CORRELATED_SETS_ONE combination was never exercised.
    self.simulate_with_set_generator(set_generator_factory,
                                     self.name_to_non_noised_estimator_config)

  def test_with_seq_corr_one_set_generator_non_noised_different_sizes(self):
    set_generator_factory = (
        set_generator.SequentiallyCorrelatedSetGenerator.
        get_generator_factory_with_set_size_list(
            order=self.order,
            correlated_sets=set_generator.CORRELATED_SETS_ONE,
            shared_prop=self.shared_prop,
            set_size_list=self.set_size_list))
    self.simulate_with_set_generator(set_generator_factory,
                                     self.name_to_noised_estimator_config)

  def test_run_evaluation_for_cardinality_estimator_workflow_compatible(self):
    """Test the compatibility of evaluator, analyzer and report_generator.

    This is a test to check if the evaluator, the analyzer and the
    report_generator is compatible with the rest of the evaluation codebase,
    eg, the estimators, the set generators, and the simulator. The test runs
    the evaluation, analyzes results, and generates a report, which should not
    run into any error.
    """
    sketch_estimator_configs = [conf.name for conf in (
        evaluation_configs._generate_cardinality_estimator_configs())]
    run_evaluation._run(
        run_evaluation=True,
        run_analysis=True,
        generate_html_report=True,
        evaluation_out_dir=self.create_tempdir('evaluator').full_path,
        analysis_out_dir=self.create_tempdir('analyzer').full_path,
        report_out_dir=self.create_tempdir('report').full_path,
        evaluation_config='smoke_test',
        sketch_estimator_configs=sketch_estimator_configs,
        evaluation_run_name='interoperability_test_for_evaluator_cardinality',
        num_runs=1,
        universe_size=1000,
        num_workers=0,
        error_margin=[0.05],
        proportion_of_runs=[0.95],
        boxplot_xlabel_rotate=90,
        boxplot_size_width_inch=6,
        boxplot_size_height_inch=4,
        analysis_type='cardinality',
        max_frequency=10
    )

  def test_run_evaluation_for_frequency_estimator_workflow_compatible(self):
    """Test the compatibility of evaluator, analyzer and report_generator.

    This is a test to check if the evaluator, the analyzer and the
    report_generator is compatible with the rest of the evaluation codebase,
    eg, the estimators, the set generators, and the simulator. The test runs
    the evaluation, analyzes results, and generates a report, which should not
    run into any error.
    """
    max_frequency = 3
    sketch_estimator_configs = [conf.name for conf in (
        evaluation_configs._generate_frequency_estimator_configs(max_frequency)
    )]
    run_evaluation._run(
        run_evaluation=True,
        run_analysis=True,
        generate_html_report=True,
        evaluation_out_dir=self.create_tempdir('evaluator').full_path,
        analysis_out_dir=self.create_tempdir('analyzer').full_path,
        report_out_dir=self.create_tempdir('report').full_path,
        evaluation_config='frequency_end_to_end_test',
        sketch_estimator_configs=sketch_estimator_configs,
        evaluation_run_name='interoperability_test_for_evaluator_frequency',
        num_runs=1,
        universe_size=1000,
        num_workers=0,
        error_margin=[0.05],
        proportion_of_runs=[0.95],
        boxplot_xlabel_rotate=90,
        boxplot_size_width_inch=6,
        boxplot_size_height_inch=4,
        barplot_size_width_inch=6,
        barplot_size_height_inch=4,
        analysis_type='frequency',
        max_frequency=max_frequency,
    )
# Run the suite via absltest when invoked directly.
if __name__ == '__main__':
  absltest.main()
|
danneamtu/joblify
|
client/src/components/Jobs/JobDescription/PieChart/TheChart.js
|
<gh_stars>0
import React from 'react'
import { Doughnut } from 'react-chartjs-2'
import { accentGreen, lightDarker } from '../../../../styled-components/typography/colors'
const TheChart = ({ scoreFormula }) => {
const data = {
datasets: [
{
data: [scoreFormula, scoreFormula - 100],
backgroundColor: [accentGreen, '#292A33'],
borderWidth: 0,
},
],
labels: ['Your Skills', 'Diference'],
}
return (
<>
<Doughnut data={data} />
</>
)
}
export const TheChartMemoized = React.memo(TheChart)
|
vdmeer/skb-java-base
|
src/main/java/de/vandermeer/skb/base/shell/AbstractShellCommand.java
|
<reponame>vdmeer/skb-java-base<filename>src/main/java/de/vandermeer/skb/base/shell/AbstractShellCommand.java
/* Copyright 2014 <NAME> <<EMAIL>>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.vandermeer.skb.base.shell;
import java.util.Map;
import java.util.TreeMap;
import de.vandermeer.skb.interfaces.FormattingTupleWrapper;
/**
* An abstract, default implementation of a shell command, use the {@link SkbShellFactory} to create a new object.
*
* @author <NAME> <<EMAIL>>
* @version v0.2.0 build 170404 (04-Apr-17) for Java 1.8
* @since v0.0.10
*/
public class AbstractShellCommand implements SkbShellCommand {

	/** The command, cannot be null. */
	private final String command;

	/** The command category, can be null. */
	private final SkbShellCommandCategory category;

	/** The command's arguments, can be null. */
	private final SkbShellArgument[] arguments;

	/** The command's description. */
	private final String description;

	/** Additional help if any set. */
	private final String addedHelp;

	/**
	 * Returns a new shell command, use the {@link SkbShellFactory} to create a new object.
	 * @param command the actual command
	 * @param arguments the command's arguments, can be null
	 * @param category the command's category, can be null
	 * @param description the command's description
	 * @param addedHelp additional help, can be null
	 * @throws IllegalArgumentException if command or description was null
	 */
	AbstractShellCommand(String command, SkbShellArgument[] arguments, SkbShellCommandCategory category, String description, String addedHelp){
		if(command==null){
			throw new IllegalArgumentException("command cannot be null");
		}
		if(description==null){
			throw new IllegalArgumentException("description cannot be null");
		}
		this.command = command;
		this.arguments = arguments;
		this.category = category;
		this.description = description;
		this.addedHelp = addedHelp;
	}

	@Override
	public String getCommand() {
		return this.command;
	}

	@Override
	public SkbShellArgument[] getArguments() {
		return this.arguments;
	}

	@Override
	public String getDescription(){
		return this.description;
	}

	@Override
	public SkbShellCommandCategory getCategory() {
		return this.category;
	}

	@Override
	public String addedHelp(){
		return this.addedHelp;
	}

	@Override
	public String toString(){
		Map<String, SkbShellArgument> args = new TreeMap<>();
		// Bug fix: arguments is documented (and constructed) as nullable,
		// but the previous implementation iterated it unconditionally,
		// throwing NullPointerException from toString() for commands
		// created without arguments.
		if(this.getArguments()!=null){
			for(SkbShellArgument arg : this.getArguments()){
				args.put(arg.getKey(), arg);
			}
		}
		FormattingTupleWrapper ftw = FormattingTupleWrapper.create(
				"<{}> cat <{}> args {} descr <{}>",
				new Object[]{
						this.getCommand(), this.getCategory(), args.values(), this.getDescription()
				}
		);
		return ftw.getMessage();
	}
}
|
wfxxy/ARMv8-M
|
secure_fw/services/secure_storage/sst_core_interface.c
|
/*
* Copyright (c) 2017, Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*
*/
#include <string.h>
#include "sst_core_interface.h"
#include "assets/sst_asset_defs.h"
#include "sst_core.h"
#include "sst_utils.h"
/* Sentinel for an invalid/unknown asset identifier. */
#define ASSET_ID_ERROR 0xFFFF
/* Values for the sst_system_ready flag below. */
#define SST_SYSTEM_READY 1
#define SST_SYSTEM_NOT_READY 0
/* Set to 1 once sst_object_prepare has been called */
static uint8_t sst_system_ready = SST_SYSTEM_NOT_READY;
/**
 * \brief Prepares the SST core and, on success, marks this interface as
 *        ready. Every other entry point here (except sst_object_wipe_all)
 *        refuses to run until this has succeeded.
 *
 * \return TFM_SST_ERR_SUCCESS on success, otherwise the error returned by
 *         sst_core_prepare()
 */
enum tfm_sst_err_t sst_object_prepare(void)
{
    enum tfm_sst_err_t err;
    /* Serialize against all other SST entry points. */
    sst_global_lock();
    err = sst_core_prepare();
    if (err == TFM_SST_ERR_SUCCESS) {
        sst_system_ready = SST_SYSTEM_READY;
    }
    sst_global_unlock();
    return err;
}
/**
 * \brief Looks up the handle for the given asset UUID.
 *        Returns TFM_SST_ERR_SYSTEM_ERROR until sst_object_prepare has
 *        succeeded; otherwise forwards the core lookup result.
 */
enum tfm_sst_err_t sst_object_handle(uint16_t asset_uuid, uint32_t *handle)
{
    enum tfm_sst_err_t err = TFM_SST_ERR_SYSTEM_ERROR;
    if (sst_system_ready == SST_SYSTEM_READY) {
        sst_global_lock();
        err = sst_core_object_handle(asset_uuid, handle);
        sst_global_unlock();
    }
    return err;
}
/**
 * \brief Reads `size` bytes at `offset` from the object into `data`.
 *        Holds the global SST lock for the duration of the core call.
 */
enum tfm_sst_err_t sst_object_read(uint32_t asset_handle, uint8_t *data,
                                   uint32_t offset, uint32_t size)
{
    enum tfm_sst_err_t err = TFM_SST_ERR_SYSTEM_ERROR;
    if (sst_system_ready == SST_SYSTEM_READY) {
        sst_global_lock();
        err = sst_core_object_read(asset_handle, data, offset, size);
        sst_global_unlock();
    }
    return err;
}
/**
 * \brief Creates an object for `uuid` with capacity `size`, unless an
 *        object for that UUID already exists.
 *
 * NOTE(review): when the asset already exists this returns the lookup
 * result (i.e. success) without creating anything, making create
 * effectively idempotent — confirm this is the intended contract.
 */
enum tfm_sst_err_t sst_object_create(uint16_t uuid, uint32_t size)
{
    /* Check if it already exists */
    uint32_t hdl = 0;
    enum tfm_sst_err_t err = TFM_SST_ERR_SYSTEM_ERROR;
    if (sst_system_ready == SST_SYSTEM_READY) {
        sst_global_lock();
        err = sst_core_object_handle(uuid, &hdl);
        if (err == TFM_SST_ERR_ASSET_NOT_FOUND) {
            /* Find free space */
            err = sst_core_object_create(uuid, size);
        }
        sst_global_unlock();
    }
    return err;
}
/**
 * \brief Writes `size` bytes from `data` at `offset` into the object.
 *        Holds the global SST lock for the duration of the core call.
 */
enum tfm_sst_err_t sst_object_write(uint32_t asset_handle, const uint8_t *data,
                                    uint32_t offset, uint32_t size)
{
    enum tfm_sst_err_t err = TFM_SST_ERR_SYSTEM_ERROR;
    if (sst_system_ready == SST_SYSTEM_READY) {
        sst_global_lock();
        err = sst_core_object_write(asset_handle, data, offset, size);
        sst_global_unlock();
    }
    return err;
}
/**
 * \brief Populates `attributes` (max and current size) for the object
 *        behind `asset_handle`.
 *
 * The handle encodes both a metadata-table index and the asset UUID; after
 * reading the metadata the UUID is re-checked so a handle that outlived a
 * delete in another context is rejected (TFM_SST_ERR_INVALID_HANDLE)
 * instead of leaking another asset's attributes.
 */
enum tfm_sst_err_t sst_object_get_attributes(uint32_t asset_handle,
                                             struct tfm_sst_attribs_t *attributes)
{
    enum tfm_sst_err_t err = TFM_SST_ERR_SYSTEM_ERROR;
    struct sst_assetmeta tmp_metadata;
    uint32_t object_index;
    uint16_t uuid;
    if (sst_system_ready == SST_SYSTEM_READY) {
        sst_global_lock();
        /* Get the meta data index */
        object_index = sst_utils_extract_index_from_handle(asset_handle);
        /* Read object metadata */
        err = sst_meta_read_object_meta(object_index, &tmp_metadata);
        /* Compare against the named success value rather than the bare
         * literal 0, consistent with the rest of this file. */
        if (err == TFM_SST_ERR_SUCCESS) {
            /* Check if index is still referring to same asset */
            uuid = sst_utils_extract_uuid_from_handle(asset_handle);
            if (uuid != tmp_metadata.unique_id) {
                /* Likely the asset has been deleted in another context
                 * this handle isn't valid anymore.
                 */
                err = TFM_SST_ERR_INVALID_HANDLE;
            } else {
                attributes->size_max = tmp_metadata.max_size;
                attributes->size_current = tmp_metadata.cur_size;
            }
        }
        sst_global_unlock();
    }
    return err;
}
/**
 * \brief Deletes the object referenced by the handle, under the global lock.
 */
enum tfm_sst_err_t sst_object_delete(uint32_t asset_handle)
{
    enum tfm_sst_err_t err = TFM_SST_ERR_SYSTEM_ERROR;
    if (sst_system_ready == SST_SYSTEM_READY) {
        sst_global_lock();
        err = sst_core_object_delete(asset_handle);
        sst_global_unlock();
    }
    return err;
}
/**
 * \brief Erases all SST content. Deliberately takes neither the global lock
 *        nor the sst_system_ready check (rationale below).
 */
enum tfm_sst_err_t sst_object_wipe_all(void)
{
    enum tfm_sst_err_t err;
    /* This function may get called as a corrective action
     * if a system level security violation is detected.
     * This could be asynchronous to normal system operation
     * and state of the sst system lock is unknown. Hence
     * this function doesn't block on the lock and directly
     * moves to erasing the flash instead.
     */
    err = sst_core_wipe_all();
    return err;
}
|
ChuanleiGuo/AlgorithmsPlayground
|
Recursion/NQueens.java
|
<gh_stars>1-10
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
/** Backtracking solver for the N-Queens puzzle. */
public class NQueens {

    /**
     * Returns every placement of {@code n} non-attacking queens.
     * Each solution lists, per row, the column its queen occupies.
     */
    public static List<List<Integer>> nQueens(int n) {
        List<List<Integer>> solutions = new ArrayList<>();
        placeRow(n, 0, new ArrayList<Integer>(), solutions);
        return solutions;
    }

    /** Tries each column of the current row, recursing while the partial placement stays conflict-free. */
    private static void placeRow(int n, int row, List<Integer> placement, List<List<Integer>> solutions) {
        if (row == n) {
            solutions.add(new ArrayList<>(placement));
            return;
        }
        for (int col = 0; col < n; col++) {
            placement.add(col);
            if (isValid(placement)) {
                placeRow(n, row + 1, placement, solutions);
            }
            placement.remove(placement.size() - 1);
        }
    }

    /** Checks only the newest queen against all earlier rows (same column or same diagonal). */
    private static boolean isValid(List<Integer> placement) {
        int newestRow = placement.size() - 1;
        int newestCol = placement.get(newestRow);
        for (int row = 0; row < newestRow; row++) {
            int delta = Math.abs(placement.get(row) - newestCol);
            if (delta == 0 || delta == newestRow - row) {
                return false;
            }
        }
        return true;
    }

    /** Renders a solution as one line per row with 'Q' in the queen's column and '.' elsewhere. */
    private static List<String> toTextRepresentation(List<Integer> placement) {
        List<String> board = new ArrayList<>();
        int n = placement.size();
        for (int queenCol : placement) {
            StringBuilder line = new StringBuilder();
            for (int col = 0; col < n; col++) {
                line.append(col == queenCol ? 'Q' : '.');
            }
            board.add(line.toString());
        }
        return board;
    }

    /** Smoke test (needs -ea): n=2 and n=3 have no solutions, n=4 has exactly two mirrored ones. */
    private static void simpleTest() {
        List<List<Integer>> result = nQueens(2);
        assert result.isEmpty();
        result = nQueens(3);
        assert result.isEmpty();
        result = nQueens(4);
        assert result.size() == 2;
        List<Integer> first = Arrays.asList(1, 3, 0, 2);
        List<Integer> second = Arrays.asList(2, 0, 3, 1);
        assert result.get(0).equals(first) || result.get(0).equals(second);
        assert result.get(1).equals(first) || result.get(1).equals(second);
        assert !result.get(0).equals(result.get(1));
    }

    public static void main(String[] args) {
        simpleTest();
        int n = (args.length == 1) ? Integer.parseInt(args[0]) : new Random().nextInt(10) + 1;
        System.out.println("n = " + n);
        for (List<Integer> solution : nQueens(n)) {
            for (String line : toTextRepresentation(solution)) {
                System.out.println(line);
            }
            System.out.println();
        }
    }
}
|
alloytech/pubsubbeat
|
vendor/github.com/elastic/beats/libbeat/common/flags.go
|
<gh_stars>10-100
package common
import (
"flag"
"strings"
ucfg "github.com/elastic/go-ucfg"
cfgflag "github.com/elastic/go-ucfg/flag"
)
// StringsFlag collects multiple usages of the same flag into an array of strings.
// Duplicate values will be ignored.
type StringsFlag struct {
	// list is the backing storage, shared with the caller that supplied it.
	list *[]string
	// isDefault is true while list holds only the default value; the first
	// real Set replaces the default instead of appending to it.
	isDefault bool
	// flag is the registered flag.Flag; nil until Register has been called.
	flag *flag.Flag
}

// SettingsFlag captures key/value pairs into a Config object.
// The flag backed by SettingsFlag can be used multiple times.
// Values are overwritten by the last usage of a key.
type SettingsFlag cfgflag.FlagValue

// flagOverwrite provides a flag value, which always overwrites the same setting
// in an Config object.
type flagOverwrite struct {
	// config receives the value on every Set.
	config *ucfg.Config
	// path is the full setting key the value is written to.
	path string
	// value caches the last value set (or the default).
	value string
}
// StringArrFlag creates and registers a new StringsFlag with the given FlagSet.
// If no FlagSet is passed, flag.CommandLine will be used as target FlagSet.
func StringArrFlag(fs *flag.FlagSet, name, def, usage string) *StringsFlag {
	// A non-empty default becomes the initial (and only) backing element;
	// StringArrVarFlag treats that first element as the flag's default value.
	var arr *[]string
	if def != "" {
		arr = &[]string{def}
	} else {
		arr = &[]string{}
	}
	return StringArrVarFlag(fs, arr, name, usage)
}

// StringArrVarFlag creates and registers a new StringsFlag with the given
// FlagSet. Results of the flag usage will be appended to `arr`. If the slice
// is not initially empty, its first value will be used as default. If the flag
// is used, the slice will be emptied first. If no FlagSet is passed,
// flag.CommandLine will be used as target FlagSet.
func StringArrVarFlag(fs *flag.FlagSet, arr *[]string, name, usage string) *StringsFlag {
	if fs == nil {
		fs = flag.CommandLine
	}
	f := NewStringsFlag(arr)
	f.Register(fs, name, usage)
	return f
}

// NewStringsFlag creates a new, but unregistered StringsFlag instance.
// Results of the flag usage will be appended to `arr`. If the slice is not
// initially empty, its first value will be used as default. If the flag is
// used, the slice will be emptied first.
func NewStringsFlag(arr *[]string) *StringsFlag {
	// A nil target is a programming error; fail loudly rather than silently
	// dropping collected values.
	if arr == nil {
		panic("No target array")
	}
	return &StringsFlag{list: arr, isDefault: true}
}
// Register registers the StringsFlag instance with a FlagSet.
// A valid FlagSet must be used.
// Register panics if the flag is already registered.
func (f *StringsFlag) Register(fs *flag.FlagSet, name, usage string) {
	if f.flag != nil {
		panic("StringsFlag is already registered")
	}
	fs.Var(f, name, usage)
	f.flag = fs.Lookup(name)
	if f.flag == nil {
		panic("Failed to lookup registered flag")
	}
	// Surface the pre-seeded first element as the default in usage output.
	if len(*f.list) > 0 {
		f.flag.DefValue = (*f.list)[0]
	}
}
// String renders the currently collected values as a single
// comma-separated string. It is nil-safe so the flag package can call it
// on a zero value while building usage output.
func (f *StringsFlag) String() string {
	if f == nil || f.list == nil {
		return ""
	}
	return strings.Join(*f.list, ", ")
}
// SetDefault sets the flags new default value.
// This overwrites the contents in the backing array.
func (f *StringsFlag) SetDefault(v string) {
	if f.flag != nil {
		f.flag.DefValue = v
	}
	// Reset the backing slice so the next Set call replaces the default
	// instead of appending to it.
	*f.list = []string{v}
	f.isDefault = true
}

// Set is used to pass usage of the flag to StringsFlag. Set adds the new value
// to the backing array. The array will be emptied on Set, if the backing array
// still contains the default value.
func (f *StringsFlag) Set(v string) error {
	// Ignore duplicates, can be caused by multiple flag parses
	if f.isDefault {
		*f.list = []string{v}
	} else {
		for _, old := range *f.list {
			if old == v {
				return nil
			}
		}
		*f.list = append(*f.list, v)
	}
	f.isDefault = false
	return nil
}

// Get returns the backing slice its contents as interface{}. The type used is
// `[]string`.
func (f *StringsFlag) Get() interface{} {
	return f.List()
}

// List returns the current set values.
func (f *StringsFlag) List() []string {
	return *f.list
}

// Type reports the type of contents (string) expected to be parsed by Set.
// It is used to build the CLI usage string.
func (f *StringsFlag) Type() string {
	return "string"
}
// SettingFlag defines a setting flag, name and it's usage. The return value is
// the Config object settings are applied to.
func SettingFlag(fs *flag.FlagSet, name, usage string) *Config {
	cfg := NewConfig()
	SettingVarFlag(fs, cfg, name, usage)
	return cfg
}

// SettingVarFlag defines a setting flag, name and it's usage.
// Settings are applied to the Config object passed.
func SettingVarFlag(fs *flag.FlagSet, def *Config, name, usage string) {
	if fs == nil {
		fs = flag.CommandLine
	}
	f := NewSettingsFlag(def)
	fs.Var(f, name, usage)
}

// NewSettingsFlag creates a new SettingsFlag instance, not registered with any
// FlagSet.
func NewSettingsFlag(def *Config) *SettingsFlag {
	// Tag values parsed from this flag as coming from the command line, in
	// addition to the package-wide configOpts.
	opts := append(
		[]ucfg.Option{
			ucfg.MetaData(ucfg.Meta{Source: "command line flag"}),
		},
		configOpts...,
	)
	tmp := cfgflag.NewFlagKeyValue(def.access(), true, opts...)
	return (*SettingsFlag)(tmp)
}

// access converts the SettingsFlag back to the underlying ucfg flag value.
func (f *SettingsFlag) access() *cfgflag.FlagValue {
	return (*cfgflag.FlagValue)(f)
}

// Config returns the config object the SettingsFlag stores applied settings to.
func (f *SettingsFlag) Config() *Config {
	return fromConfig(f.access().Config())
}

// Set sets a settings value in the Config object. The input string must be a
// key-value pair like `key=value`. If the value is missing, the value is set
// to the boolean value `true`.
func (f *SettingsFlag) Set(s string) error {
	return f.access().Set(s)
}

// Get returns the Config object used to store values.
func (f *SettingsFlag) Get() interface{} {
	return f.Config()
}

// String always returns an empty string. It is required to fulfil
// the flag.Value interface.
func (f *SettingsFlag) String() string {
	return ""
}

// Type reports the type of contents (setting=value) expected to be parsed by Set.
// It is used to build the CLI usage string.
func (f *SettingsFlag) Type() string {
	return "setting=value"
}
// ConfigOverwriteFlag defines a new flag updating a setting in an Config
// object. The name is used as the flag its name the path parameter is the
// full setting name to be used when the flag is set.
func ConfigOverwriteFlag(
	fs *flag.FlagSet,
	config *Config,
	name, path, def, usage string,
) *string {
	if config == nil {
		panic("Missing configuration")
	}
	if path == "" {
		panic("empty path")
	}
	if fs == nil {
		fs = flag.CommandLine
	}
	// Seed the configuration with the default so it is present even when the
	// flag never appears on the command line.
	if def != "" {
		err := config.SetString(path, -1, def)
		if err != nil {
			panic(err)
		}
	}
	f := newOverwriteFlag(config, path, def)
	fs.Var(f, name, usage)
	// Callers observe the latest value through the returned pointer.
	return &f.value
}

// newOverwriteFlag builds the flag.Value implementation backing ConfigOverwriteFlag.
func newOverwriteFlag(config *Config, path, def string) *flagOverwrite {
	return &flagOverwrite{config: config.access(), path: path, value: def}
}

// String returns the most recently set value (or the default).
func (f *flagOverwrite) String() string {
	return f.value
}

// Set writes the value into the configuration at the configured path,
// tagging it as coming from the command line.
func (f *flagOverwrite) Set(v string) error {
	opts := append(
		[]ucfg.Option{
			ucfg.MetaData(ucfg.Meta{Source: "command line flag"}),
		},
		configOpts...,
	)
	err := f.config.SetString(f.path, -1, v, opts...)
	if err != nil {
		return err
	}
	f.value = v
	return nil
}

// Get returns the current value as interface{} (always a string).
func (f *flagOverwrite) Get() interface{} {
	return f.value
}

// Type reports the type of contents (string) expected to be parsed by Set.
func (f *flagOverwrite) Type() string {
	return "string"
}
|
stevedonovan/llib
|
llib-p/select.h
|
#ifndef LLIB_SELECT_H
#define LLIB_SELECT_H

/* Required for the `bool` used throughout this header; previously the header
 * only compiled if the includer had already pulled in <stdbool.h> itself. */
#include <stdbool.h>

/* Timer callback; `data` is the user pointer registered with the timer.
 * NOTE(review): the bool return presumably controls whether a repeating
 * timer keeps firing — confirm against select.c. */
typedef bool (*SelectTimerProc)(void *data);
/* Line-reader callback with printf-like trailing arguments. */
typedef int (*SelectReadProc)(const char *line,...);

/* Opaque handle types; definitions live in the implementation. */
typedef struct Select_ Select;
typedef struct SelectTimer_ SelectTimer;
typedef struct SelectFile_ SelectFile;
typedef struct SelectChan_ SelectChan;

/* Mode flags for select_open(). */
enum {
    SelectRead = 1,
    SelectWrite = 2,
    SelectReadWrite = 4,
    SelectNonBlock = 8,
    SelectReOpen = 16
};

Select *select_new();
int select_thread(SelectTimerProc callback, void *data);
void select_sleep(int msec);
SelectFile *select_add_read(Select *s, int fd);
int select_open(Select *s, const char *str, int flags);
bool select_remove_read(Select *s, int fd);
bool select_can_read(Select *s, int fd);
bool select_can_write(Select *s, int fd);
bool select_can_read_chan(Select *s, SelectChan *chan);
SelectFile *select_add_write(Select *s, int fd);
SelectFile *select_add_write_chan(Select *s, SelectChan *chan);
bool select_can_write_chan(Select *s, SelectChan *chan);
int *select_read_fds(Select *s);
void select_add_reader(Select *s, int fd, bool close, SelectReadProc reader, void *data);
void select_timer_kill(SelectTimer *st);
void select_set_timeout(Select *s, int msecs);
SelectTimer *select_add_timer(Select *s, int secs, SelectTimerProc callback, void *data);
bool select_do_later(Select *s, int msec, SelectTimerProc callback, void *data);
bool select_do_later_again(Select *s, int msecs, SelectTimerProc callback, void *data, bool once_off);
int select_select(Select *s);

/* Channels: pipe-like objects usable with the select loop. */
SelectChan *chan_new();
SelectFile *select_add_read_chan(Select *s, SelectChan *chan);
int chan_write(SelectChan *chan, void *buff, int sz);
int chan_read(SelectChan *chan, void *buff, int sz);
int chan_close_read(SelectChan *chan);
int chan_close_write(SelectChan *chan);
#endif
|
logginghub/core
|
logginghub-integrationtests/src/test/java/com/logginghub/logging/modules/TestLoggingBridgeModule.java
|
package com.logginghub.logging.modules;
import com.logginghub.integrationtests.logging.HubTestFixture;
import com.logginghub.integrationtests.logging.HubTestFixture.HubFixture;
import com.logginghub.logging.LogEvent;
import com.logginghub.logging.LogEventBuilder;
import com.logginghub.logging.exceptions.ConnectorException;
import com.logginghub.logging.exceptions.LoggingMessageSenderException;
import com.logginghub.logging.hub.configuration.FilterConfiguration;
import com.logginghub.logging.messages.LogEventMessage;
import com.logginghub.logging.messaging.SocketClient;
import com.logginghub.logging.servers.SocketHub;
import com.logginghub.utils.Bucket;
import com.logginghub.utils.ThreadUtils;
import com.logginghub.utils.logging.Logger;
import org.junit.Ignore;
import org.junit.Test;
import java.io.IOException;
import java.util.EnumSet;
import java.util.concurrent.Callable;
import static org.hamcrest.MatcherAssert.*;
import static org.hamcrest.Matchers.*;
// TODO : fix the race conditions in the socket connector that mean we leak threads
//@RunWith(CustomRunner.class)
/**
 * Integration tests for the logging bridge module: hub B runs a bridge that
 * connects out to hub A, and each test verifies which direction log events
 * flow for a given import/export configuration.
 */
public class TestLoggingBridgeModule extends BaseHub {
    /** Default bridge configuration: events published to hub A are imported into hub B. */
    @Test public void test_default_configuration_import() throws IOException, ConnectorException, LoggingMessageSenderException {
        final HubFixture hubAFixture = fixture.createSocketHub(EnumSet.noneOf(HubTestFixture.Features.class));
        HubFixture hubBFixture = fixture.createSocketHub(EnumSet.of(HubTestFixture.Features.Bridge));
        // Point hub B's bridge at hub A's socket port
        hubBFixture.getLoggingBridgeConfiguration().setPort(hubAFixture.getSocketHubConfiguration().getPort());
        final SocketHub hubA = hubAFixture.start();
        final SocketHub hubB = hubBFixture.start();
        // Wait until the bridge (hub B) has connected to hub A
        ThreadUtils.repeatUntilTrue(new Callable<Boolean>() {
            @Override public Boolean call() throws Exception {
                return hubA.getConnectionsList().size() == 1;
            }
        });
        SocketClient clientA = fixture.createClient("clientA", hubA);
        SocketClient clientB = fixture.createClientAutoSubscribe("clientB", hubB);
        clientA.send(new LogEventMessage(LogEventBuilder.create(0, Logger.info, "Test message")));
        Bucket<LogEvent> events = fixture.createEventBucketFor(clientB);
        events.waitForMessages(1);
        assertThat(events.get(0).getMessage(), is("Test message"));
    }
    /** Import only: hub A events reach hub B subscribers, but nothing flows back to hub A. */
    @Test public void test_import() throws IOException, ConnectorException, LoggingMessageSenderException {
        final HubFixture hubAFixture = fixture.createSocketHub(EnumSet.noneOf(HubTestFixture.Features.class));
        HubFixture hubBFixture = fixture.createSocketHub(EnumSet.of(HubTestFixture.Features.Bridge));
        hubBFixture.getLoggingBridgeConfiguration().setImportEvents(true);
        hubBFixture.getLoggingBridgeConfiguration().setExportEvents(false);
        hubBFixture.getLoggingBridgeConfiguration().setPort(hubAFixture.getSocketHubConfiguration().getPort());
        final SocketHub hubA = hubAFixture.start();
        final SocketHub hubB = hubBFixture.start();
        // Wait until the bridge (hub B) has connected to hub A
        ThreadUtils.repeatUntilTrue(new Callable<Boolean>() {
            @Override public Boolean call() throws Exception {
                return hubA.getConnectionsList().size() == 1;
            }
        });
        SocketClient clientA = fixture.createClientAutoSubscribe("clientA", hubA);
        SocketClient clientB = fixture.createClientAutoSubscribe("clientB", hubB);
        clientA.send(new LogEventMessage(LogEventBuilder.create(0, Logger.info, "Test message from client A to hub A")));
        clientB.send(new LogEventMessage(LogEventBuilder.create(0, Logger.info, "Test message from client B to hub B")));
        Bucket<LogEvent> eventsA = fixture.createEventBucketFor(clientA);
        Bucket<LogEvent> eventsB = fixture.createEventBucketFor(clientB);
        eventsB.waitForMessages(1);
        assertThat(eventsB.get(0).getMessage(), is("Test message from client A to hub A"));
        // Export is disabled, so hub A must not see hub B's event
        assertThat(eventsA.size(), is(0));
    }
    /** Export only: hub B events reach hub A subscribers, but nothing flows into hub B. */
    @Test public void test_export() throws IOException, ConnectorException, LoggingMessageSenderException {
        final HubFixture hubAFixture = fixture.createSocketHub(EnumSet.noneOf(HubTestFixture.Features.class));
        HubFixture hubBFixture = fixture.createSocketHub(EnumSet.of(HubTestFixture.Features.Bridge));
        hubBFixture.getLoggingBridgeConfiguration().setImportEvents(false);
        hubBFixture.getLoggingBridgeConfiguration().setExportEvents(true);
        hubBFixture.getLoggingBridgeConfiguration().setPort(hubAFixture.getSocketHubConfiguration().getPort());
        final SocketHub hubA = hubAFixture.start();
        final SocketHub hubB = hubBFixture.start();
        // Wait until the bridge (hub B) has connected to hub A
        ThreadUtils.repeatUntilTrue(new Callable<Boolean>() {
            @Override public Boolean call() throws Exception {
                return hubA.getConnectionsList().size() == 1;
            }
        });
        SocketClient clientA = fixture.createClientAutoSubscribe("clientA", hubA);
        SocketClient clientB = fixture.createClientAutoSubscribe("clientB", hubB);
        clientA.send(new LogEventMessage(LogEventBuilder.create(0, Logger.info, "Test message from client A to hub A")));
        clientB.send(new LogEventMessage(LogEventBuilder.create(0, Logger.info, "Test message from client B to hub B")));
        Bucket<LogEvent> eventsA = fixture.createEventBucketFor(clientA);
        Bucket<LogEvent> eventsB = fixture.createEventBucketFor(clientB);
        eventsA.waitForMessages(1);
        assertThat(eventsA.get(0).getMessage(), is("Test message from client B to hub B"));
        // Import is disabled, so hub B must not see hub A's event
        assertThat(eventsB.size(), is(0));
    }
    @Ignore // jshaw - I'm pretty certain this test has race conditions
    @Test public void test_import_and_export_one_way() throws IOException, ConnectorException, LoggingMessageSenderException {
        final HubFixture hubAFixture = fixture.createSocketHub(EnumSet.noneOf(HubTestFixture.Features.class));
        HubFixture hubBFixture = fixture.createSocketHub(EnumSet.of(HubTestFixture.Features.Bridge));
        hubBFixture.getLoggingBridgeConfiguration().setImportEvents(true);
        hubBFixture.getLoggingBridgeConfiguration().setExportEvents(true);
        hubBFixture.getLoggingBridgeConfiguration().setPort(hubAFixture.getSocketHubConfiguration().getPort());
        final SocketHub hubA = hubAFixture.start();
        hubA.setName("HubA");
        final SocketHub hubB = hubBFixture.start();
        hubB.setName("HubB");
        // Wait for hubB to connect to hubA
        ThreadUtils.repeatUntilTrue(new Callable<Boolean>() {
            @Override public Boolean call() throws Exception {
                return hubA.getConnectionsList().size() == 1;
            }
        });
        assertThat(hubA.getConnectionsList().size(), is(1));
        assertThat(hubB.getConnectionsList().size(), is(0));
        SocketClient clientA = fixture.createClientAutoSubscribe("clientA", hubA);
        SocketClient clientB = fixture.createClientAutoSubscribe("clientB", hubB);
        // hub A now has the bridge plus clientA; hub B has just clientB
        assertThat(hubA.getConnectionsList().size(), is(2));
        assertThat(hubB.getConnectionsList().size(), is(1));
        Bucket<LogEvent> eventsA = fixture.createEventBucketFor(clientA);
        Bucket<LogEvent> eventsB = fixture.createEventBucketFor(clientB);
        clientA.send(new LogEventMessage(LogEventBuilder.create(0, Logger.info, "Test message from client A to hub A")));
        clientB.send(new LogEventMessage(LogEventBuilder.create(0, Logger.info, "Test message from client B to hub B")));
        eventsA.waitForMessages(1);
        assertThat(eventsA.get(0).getMessage(), is("Test message from client B to hub B"));
        eventsB.waitForMessages(1);
        assertThat(eventsB.get(0).getMessage(), is("Test message from client A to hub A"));
        // Exactly one event each way - no echo back to the sender
        assertThat(eventsA.size(), is(1));
        assertThat(eventsB.size(), is(1));
    }
    /**
     * Import with a filter: events containing "orange" are dropped by the bridge.
     * NOTE(review): both events are sent by clientA despite what the message
     * text says; only the "apple" event should survive the filter.
     */
    @Test public void test_import_filter() throws IOException, ConnectorException, LoggingMessageSenderException {
        final HubFixture hubAFixture = fixture.createSocketHub(EnumSet.noneOf(HubTestFixture.Features.class));
        HubFixture hubBFixture = fixture.createSocketHub(EnumSet.of(HubTestFixture.Features.Bridge));
        hubBFixture.getLoggingBridgeConfiguration().setImportEvents(true);
        hubBFixture.getLoggingBridgeConfiguration().setExportEvents(false);
        hubBFixture.getLoggingBridgeConfiguration().getFilters().add(FilterConfiguration.contains("orange"));
        hubBFixture.getLoggingBridgeConfiguration().setPort(hubAFixture.getSocketHubConfiguration().getPort());
        final SocketHub hubA = hubAFixture.start();
        final SocketHub hubB = hubBFixture.start();
        // Wait until the bridge (hub B) has connected to hub A
        ThreadUtils.repeatUntilTrue(new Callable<Boolean>() {
            @Override public Boolean call() throws Exception {
                return hubA.getConnectionsList().size() == 1;
            }
        });
        SocketClient clientA = fixture.createClientAutoSubscribe("clientA", hubA);
        SocketClient clientB = fixture.createClientAutoSubscribe("clientB", hubB);
        clientA.send(new LogEventMessage(LogEventBuilder.create(0, Logger.info, "Test message orange from client B to hub B")));
        clientA.send(new LogEventMessage(LogEventBuilder.create(0, Logger.info, "Test message apple from client A to hub A")));
        Bucket<LogEvent> eventsA = fixture.createEventBucketFor(clientA);
        Bucket<LogEvent> eventsB = fixture.createEventBucketFor(clientB);
        eventsB.waitForMessages(1);
        assertThat(eventsB.get(0).getMessage(), is("Test message apple from client A to hub A"));
        assertThat(eventsA.size(), is(0));
    }
}
|
tobyweston/learn-scala-java-devs
|
src/main/scala/s4j/scala/chapter20/Example.scala
|
/*
* Copyright (c) 2015-2017 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package s4j.scala.chapter20
import s4j.scala.chapter20.CustomerDatabase.database
/**
 * Shared in-memory fixture for the examples below: four customers, two of
 * which (Albert: 10 + 5, Carol: 10) have priced items in their baskets.
 */
object CustomerDatabase {
  val database = new CustomerSet()
  val customerA = Customer("Albert", Some(Address("1a Bridge St", None)))
  val customerB = Customer("Beatriz", None)
  val customerC = Customer("Carol", Some(Address("2a Finsbury Circus", Some("AL1 2PY"))))
  val customerD = Customer("Sherlock", Some(Address("21b Baker Street", Some("NW1 6XE"))))
  // populate baskets before registering the customers with the database
  customerA.add(PricedItem(10))
  customerA.add(PricedItem(5))
  customerC.add(PricedItem(10))
  database.add(customerA)
  database.add(customerB)
  database.add(customerC)
  database.add(customerD)
}
/**
 * Demonstrates several ways of summing customer basket totals, contrasting
 * null-returning lookups with Option-based ones.
 */
object Example extends App {
  // null case
  val albert = database.findOrNull("Albert")
  val basket = if (albert != null) albert.total else 0D
  // map over option
  val exists = database.find("Albert").map(customer => "customer exists")
  val basketValue = database.find("Albert").map(customer => customer.total).getOrElse(0D) // forced to handle the else? get would throw
  // note: "Dave" and "Erin" are not in the database, so lookups for them miss
  val customers = Set("Albert", "Beatriz", "Carol", "Dave", "Erin", "Sherlock")
  // explicit null check per lookup; misses contribute 0 to the sum
  def sumCustomerBaskets_NulLCheckVersion() = {
    // all.map(customers.findOrNull).map(_.total).sum // throws NPE
    // or
    customers.map(database.findOrNull).map(customer => if (customer != null) customer.total else 0D).sum
  }
  // pattern match over the possibly-null result; the wildcard case catches null
  def sumCustomerBaskets_NulLAvoidingPatternMatch() = {
    customers.map(database.findOrNull(_) match {
      case customer@Customer(_, _) => customer.total // why no if customer != null clause? it matches on null below
      case a @ _ => println(a); 0D
    }).sum
  }
  // single lookup matched on Some/None instead of calling .get
  def sumCustomerBaskets_MapCouldThrowAnException() = {
    // all.map(customers.find(_)).map(customer => customer.get.total) // could throw an exception
    // customers.find("Missing").map(customer => customer.total).get // throws an exception
    database.find("Missing") match {
      case Some(customer) => customer.total
      case None => 0D
    }
  }
  // map each lookup to an Option[Double], then flatten to drop the Nones
  def sumCustomerBaskets_MapThenFlattenVersion() = {
    customers.map(database.find(_).map(_.total)).flatten.sum
  }
  // idiomatic: flatMap discards the misses in a single step
  def sumCustomerBaskets_FlatMapVersion() = {
    customers.flatMap(name => database.find(name)).map(customer => customer.total).sum
  }
  val partial: Set[Option[Customer]] = customers.map(database.find)
  println("1. " + sumCustomerBaskets_NulLCheckVersion())
  println("2. " + sumCustomerBaskets_NulLAvoidingPatternMatch()) // yuk!
  println("3. " + sumCustomerBaskets_MapCouldThrowAnException())
  println("4. " + sumCustomerBaskets_MapThenFlattenVersion())
  println("5. " + sumCustomerBaskets_FlatMapVersion())
}
|
alexey-anufriev/intellij-community
|
platform/platform-impl/src/com/intellij/openapi/wm/impl/status/ProcessPopup.java
|
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.wm.impl.status;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.ui.LafManagerListener;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.ui.panel.ProgressPanel;
import com.intellij.openapi.ui.popup.*;
import com.intellij.openapi.ui.popup.util.MinimizeButton;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.wm.IdeFrame;
import com.intellij.openapi.wm.ex.StatusBarEx;
import com.intellij.ui.ScreenUtil;
import com.intellij.ui.components.JBPanelWithEmptyText;
import com.intellij.ui.components.JBScrollPane;
import com.intellij.ui.components.panels.VerticalLayout;
import com.intellij.ui.popup.AbstractPopup;
import com.intellij.util.IJSwingUtilities;
import com.intellij.util.ui.JBDimension;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import java.awt.*;
import static javax.swing.ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER;
import static javax.swing.ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED;
/**
 * Popup listing the currently running background processes. Owned by
 * {@link InfoAndProgressPanel}, which shows and hides it from the status bar.
 */
class ProcessPopup {
  /** Client property under which each indicator component stores its ProgressPanel. */
  public static final Key<ProgressPanel> KEY = new Key<>("ProgressPanel");
  private final InfoAndProgressPanel myProgressPanel;
  /** Vertical list of indicator components; shows empty text when no processes run. */
  private final JPanel myIndicatorPanel;
  private final JScrollPane myContentPanel;
  /** Non-null exactly while the popup is showing (see isShowing/hide). */
  private JBPopup myPopup;
  ProcessPopup(@NotNull InfoAndProgressPanel progressPanel) {
    myProgressPanel = progressPanel;
    myIndicatorPanel = new JBPanelWithEmptyText().withEmptyText(IdeBundle.message("progress.window.empty.text")).andTransparent();
    myIndicatorPanel.setLayout(new VerticalLayout(0));
    myIndicatorPanel.setBorder(JBUI.Borders.empty(10, 0, 18, 0));
    myIndicatorPanel.setFocusable(true);
    myContentPanel = new JBScrollPane(myIndicatorPanel, VERTICAL_SCROLLBAR_AS_NEEDED, HORIZONTAL_SCROLLBAR_NEVER) {
      @Override
      public Dimension getPreferredSize() {
        // With no indicators, fall back to a fraction of the main screen size
        if (myIndicatorPanel.getComponentCount() > 0) {
          return super.getPreferredSize();
        }
        return getEmptyPreferredSize();
      }
    };
    updateContentUI();
  }
  /** Adds the indicator's component to the list; the first entry hides its separator. */
  public void addIndicator(@NotNull InlineProgressIndicator indicator) {
    JComponent component = indicator.getComponent();
    if (myIndicatorPanel.getComponentCount() == 0) {
      hideSeparator(component);
    }
    myIndicatorPanel.add(component);
    revalidateAll();
  }
  /** Removes the indicator's component; the new first entry, if any, loses its separator. */
  public void removeIndicator(@NotNull InlineProgressIndicator indicator) {
    JComponent component = indicator.getComponent();
    int index = myIndicatorPanel.getComponentZOrder(component);
    if (index == -1) {
      return;
    }
    myIndicatorPanel.remove(component);
    if (index == 0 && myIndicatorPanel.getComponentCount() > 0) {
      hideSeparator(myIndicatorPanel.getComponent(0));
    }
    revalidateAll();
  }
  /**
   * Builds and shows the popup anchored near the bottom-right corner of the
   * IDE frame, shifted up above the status bar when one is visible.
   */
  public void show(boolean requestFocus) {
    updateContentUI();
    createPopup(myContentPanel, myIndicatorPanel, requestFocus);
    // Re-apply look-and-feel colors if the theme changes while the popup is open
    ApplicationManager.getApplication().getMessageBus().connect(myPopup).subscribe(LafManagerListener.TOPIC, source -> updateContentUI());
    JFrame frame = (JFrame)UIUtil.findUltimateParent(myProgressPanel);
    Dimension contentSize = myContentPanel.getPreferredSize();
    Rectangle bounds = frame.getBounds();
    // at least a quarter of the frame wide; at most a quarter of the frame tall
    int width = Math.max(bounds.width / 4, contentSize.width);
    int height = Math.min(bounds.height / 4, contentSize.height);
    myContentPanel.setPreferredSize(new Dimension(width, height));
    int x = bounds.x + bounds.width - width - JBUI.scale(20);
    int y = bounds.y + bounds.height - height - JBUI.scale(40);
    StatusBarEx sb = (StatusBarEx)((IdeFrame)frame).getStatusBar();
    if (sb != null && sb.isVisible()) {
      y -= sb.getSize().height;
    }
    myPopup.showInScreenCoordinates(myProgressPanel.getRootPane(), new Point(x, y));
  }
  public boolean isShowing() {
    return myPopup != null;
  }
  public void hide() {
    if (myPopup != null) {
      // clear the field before cancel() so re-entrant calls (e.g. via the
      // close listener in createPopup) are no-ops
      JBPopup popup = myPopup;
      myPopup = null;
      popup.cancel();
    }
  }
  private void revalidateAll() {
    myContentPanel.doLayout();
    myContentPanel.revalidate();
    myContentPanel.repaint();
  }
  private void updateContentUI() {
    IJSwingUtilities.updateComponentTreeUI(myContentPanel);
    myContentPanel.getViewport().setBackground(myIndicatorPanel.getBackground());
    myContentPanel.setBorder(null);
  }
  private static void hideSeparator(@NotNull Component component) {
    // NOTE(review): assumes the component always carries the KEY client
    // property; getClientProperty returning null would NPE here — confirm
    // the indicator side always installs it.
    UIUtil.getClientProperty(component, KEY).setSeparatorEnabled(false);
  }
  /** Fallback popup size when there are no indicators: 30% of the main screen. */
  @NotNull
  private static Dimension getEmptyPreferredSize() {
    Dimension size = ScreenUtil.getMainScreenBounds().getSize();
    size.width *= 0.3d;
    size.height *= 0.3d;
    return size;
  }
  private void createPopup(@NotNull JComponent content, @NotNull JComponent focus, boolean requestFocus) {
    ComponentPopupBuilder builder = JBPopupFactory.getInstance().createComponentPopupBuilder(content, focus);
    builder.addListener(new JBPopupListener() {
      @Override
      public void onClosed(@NotNull LightweightWindowEvent event) {
        // keep the owning panel's state in sync when the user closes the popup
        myProgressPanel.hideProcessPopup();
      }
    });
    builder.setMovable(true);
    builder.setResizable(true);
    builder.setTitle(IdeBundle.message("progress.window.title"));
    builder.setDimensionServiceKey(null, "ProcessPopupWindow", true);
    builder.setCancelOnClickOutside(false);
    builder.setRequestFocus(requestFocus);
    builder.setBelongsToGlobalPopupStack(false);
    builder.setLocateByContent(true);
    builder.setMinSize(new JBDimension(300, 100));
    builder.setCancelButton(new MinimizeButton(IdeBundle.message("tooltip.hide")));
    myPopup = builder.addUserData("SIMPLE_WINDOW").createPopup();
    myPopup.getContent().putClientProperty(AbstractPopup.FIRST_TIME_SIZE, new JBDimension(300, 0));
  }
}
|
leroy-dias/simulato
|
test/unit/lib/planner/test-planner-tests.js
|
<filename>test/unit/lib/planner/test-planner-tests.js
'use strict';
const mockery = require('mockery');
const sinon = require('sinon');
const expect = require('chai').expect;
describe('lib/planner/test-planner.js', function() {
// Verifies the module wires itself into the emitter at require time.
describe('on file require', function() {
  let Emitter;
  let testPlanner;
  let plannerEventDispatch;
  beforeEach(function() {
    // Fresh module cache so the require-time side effects run for every test.
    mockery.enable({useCleanCache: true});
    mockery.registerAllowable('../../../../lib/planner/test-planner.js');
    Emitter = {
      mixIn: function(myObject) {
        myObject.emit = sinon.stub();
      },
    };
    sinon.spy(Emitter, 'mixIn');
    plannerEventDispatch = sinon.stub();
    mockery.registerMock('../util/emitter.js', Emitter);
    mockery.registerMock('../util/config/config-handler.js', {});
    mockery.registerMock('./planner-event-dispatch/planner-event-dispatch.js', plannerEventDispatch);
  });
  afterEach(function() {
    mockery.resetCache();
    mockery.deregisterAll();
    mockery.disable();
  });
  it('should Emitter.mixIn once with testPlanner and plannerEventDispatch as parameters', function() {
    testPlanner = require('../../../../lib/planner/test-planner.js');
    expect(Emitter.mixIn.args).to.deep.equal([
      [
        testPlanner,
        plannerEventDispatch,
      ],
    ]);
  });
});
// Covers the algorithm dispatch performed by generateTests().
describe('generateTests', function() {
  let Emitter;
  let testPlanner;
  let plannerEventDispatch;
  let config;
  let actionTree;
  let forwardStateSpaceSearchHeuristic;
  beforeEach(function() {
    mockery.enable({useCleanCache: true});
    mockery.registerAllowable('../../../../lib/planner/test-planner.js');
    Emitter = {
      mixIn: function(myObject) {
        myObject.emit = sinon.stub();
      },
    };
    sinon.spy(Emitter, 'mixIn');
    plannerEventDispatch = sinon.stub();
    config = {
      get: sinon.stub(),
    };
    actionTree = sinon.stub();
    forwardStateSpaceSearchHeuristic = sinon.stub();
    mockery.registerMock('../util/emitter.js', Emitter);
    mockery.registerMock('../util/config/config-handler.js', config);
    mockery.registerMock('./planner-event-dispatch/planner-event-dispatch.js', plannerEventDispatch);
    testPlanner = require('../../../../lib/planner/test-planner.js');
    // Stub the two generators so only the dispatch logic is under test.
    testPlanner._generateActionTree = actionTree;
    testPlanner._generateStateSpace = forwardStateSpaceSearchHeuristic;
  });
  afterEach(function() {
    mockery.resetCache();
    mockery.deregisterAll();
    mockery.disable();
  });
  describe('when testPlanner._algorithm is actionTree', function() {
    it('should call _generateActionTree()', function() {
      config.get.onCall(0).returns('actionTree');
      testPlanner.generateTests();
      expect(testPlanner._generateActionTree.calledOnce).to.be.true;
    });
  });
  describe('when testPlanner._algorithm is forwardStateSpaceSearchHeuristic', function() {
    it('should call _generateStateSpace()', function() {
      config.get.onCall(0).returns('forwardStateSpaceSearchHeuristic');
      testPlanner.generateTests();
      expect(testPlanner._generateStateSpace.calledOnce).to.be.true;
    });
  });
});
describe('_generateActionTree', function() {
  let Emitter;
  let testPlanner;
  let plannerEventDispatch;
  let config;
  beforeEach(function() {
    mockery.enable({useCleanCache: true});
    mockery.registerAllowable('../../../../lib/planner/test-planner.js');
    Emitter = {
      mixIn: function(myObject) {
        myObject.emit = sinon.stub();
      },
    };
    sinon.spy(Emitter, 'mixIn');
    plannerEventDispatch = sinon.stub();
    config = {
      get: sinon.stub(),
    };
    mockery.registerMock('../util/emitter.js', Emitter);
    mockery.registerMock('../util/config/config-handler.js', config);
    mockery.registerMock('./planner-event-dispatch/planner-event-dispatch.js', plannerEventDispatch);
    testPlanner = require('../../../../lib/planner/test-planner.js');
  });
  afterEach(function() {
    mockery.resetCache();
    mockery.deregisterAll();
    mockery.disable();
  });
  // NOTE(review): '\t' in the description below is a tab escape; '\'' was
  // almost certainly intended (compare the describe text a few lines down).
  it('should call testPlanner.emit with the event' +
    '\testPlanner.createActionTreePlans\'', function() {
    testPlanner._generateActionTree();
    expect(testPlanner.emit.args[0][0]).to.deep.equal('testPlanner.createActionTreePlans');
  });
  describe('when the callback for testPlanner.emit with the event ' +
    '\'testPlanner.createActionTreePlans\' is called', function() {
    describe('if an error is passed in to the callback', function() {
      it('should throw an error', function() {
        testPlanner.emit.onCall(0).callsArgWith(1, new Error('An error occurred!'));
        expect(testPlanner._generateActionTree).to.throw('An error occurred!');
      });
    });
    describe('if a truthy value for done is pass in to the callback', function() {
      it('should call testPlanner.emit with the event \'testPlanner.reduceToMinimumSetOfPlans\', ' +
        'plans, and discoveredActions', function() {
        testPlanner.emit.onCall(0).callsArgWith(1, null, null, true, 'discoveredActions');
        // NOTE(review): this suite targets _generateActionTree, but the call
        // below exercises _generateStateSpace (and uses the 4-argument callback
        // shape of the state-space path) — likely a copy-paste slip from the
        // '_generateStateSpace' suite; confirm the intended target.
        testPlanner._generateStateSpace();
        expect(testPlanner.emit.args[1].slice(0, 2)).to.deep.equal([
          'testPlanner.reduceToMinimumSetOfPlans',
          [],
        ]);
      });
      // NOTE(review): description says 'reduceToMinimumSetOfTestPlans' but the
      // asserted event is 'testPlanner.reduceToMinimumSetOfPlans'.
      describe('when the callback is called for testPlanner.emit with the event ' +
        '\'testPlanner.reduceToMinimumSetOfTestPlans\'', function() {
        describe('if an error is passed in', function() {
          it('should throw the error', function() {
            testPlanner._algorithm = 'actionTree';
            testPlanner.emit.onCall(0).callsArgWith(1, null, null, true, 'discoveredActions');
            testPlanner.emit.onCall(1).callsArgWith(3, new Error('An error occurred!'));
            expect(testPlanner._generateActionTree).to.throw('An error occurred!');
          });
        });
        describe('if an error is not passed in', function() {
          it('should call config.get once with the string "plannerTestLength"', function() {
            testPlanner.emit.onCall(0).callsArgWith(1, null, null, 'discoveredActions');
            testPlanner.emit.onCall(1).callsArgWith(3, null, 'theFinalPlans');
            testPlanner._generateActionTree();
            expect(config.get.args[0]).to.deep.equal([
              'plannerTestLength',
            ]);
          });
          describe('if testLength is a truthy value', function() {
            it('should call testPlanner.emit with the event \'offlineReplanning.replan\', ' +
              'the finalPlans, discoveredActions, and an object with the testLength', function() {
              testPlanner.emit.onCall(0).callsArgWith(1, null, null, 'discoveredActions');
              testPlanner.emit.onCall(1).callsArgWith(3, null, 'theFinalPlans');
              testPlanner._algorithm = 'actionTree';
              config.get.onCall(0).returns(5);
              testPlanner._generateActionTree();
              expect(testPlanner.emit.args[2]).to.deep.equal([
                'offlineReplanning.replan',
                'theFinalPlans',
                'discoveredActions',
                'actionTree',
                {
                  testLength: 5,
                },
              ]);
            });
          });
          describe('if testLength is a falsy value', function() {
            it('should call testPlanner.emit with the event \'testPlanner.planningFinished\', ' +
              'the finalPlans, and discoveredActions', function() {
              testPlanner.emit.onCall(0).callsArgWith(1, null, null, 'discoveredActions');
              testPlanner.emit.onCall(1).callsArgWith(3, null, 'theFinalPlans');
              testPlanner._algorithm = 'actionTree';
              config.get.onCall(0).returns(undefined);
              testPlanner._generateActionTree();
              expect(testPlanner.emit.args[2]).to.deep.equal([
                'planner.planningFinished',
                'theFinalPlans',
                'discoveredActions',
                'actionTree',
              ]);
            });
          });
        });
      });
    });
  });
});
});
describe('_generateStateSpace', function() {
  let Emitter;
  let testPlanner;
  let plannerEventDispatch;
  let config;
  beforeEach(function() {
    mockery.enable({useCleanCache: true});
    mockery.registerAllowable('../../../../lib/planner/test-planner.js');
    Emitter = {
      mixIn: function(myObject) {
        myObject.emit = sinon.stub();
      },
    };
    sinon.spy(Emitter, 'mixIn');
    plannerEventDispatch = sinon.stub();
    config = {
      get: sinon.stub(),
    };
    mockery.registerMock('../util/emitter.js', Emitter);
    mockery.registerMock('../util/config/config-handler.js', config);
    mockery.registerMock('./planner-event-dispatch/planner-event-dispatch.js', plannerEventDispatch);
    testPlanner = require('../../../../lib/planner/test-planner.js');
  });
  afterEach(function() {
    mockery.resetCache();
    mockery.deregisterAll();
    mockery.disable();
  });
  // NOTE(review): '\t' in the description below is a tab escape; '\'' was
  // almost certainly intended.
  it('should call testPlanner.emit with the event' +
    '\testPlanner.createForwardStateSpaceSearchHeuristicPlans\'', function() {
    testPlanner._generateStateSpace();
    expect(testPlanner.emit.args[0][0]).to.deep.equal('testPlanner.createForwardStateSpaceSearchHeuristicPlans');
  });
  // NOTE(review): description says 'testPlanner.createPlans' but the event
  // under test is 'testPlanner.createForwardStateSpaceSearchHeuristicPlans'.
  describe('when the callback for testPlanner.emit with the event ' +
    '\'testPlanner.createPlans\' is called', function() {
    describe('if an error is passed in to the callback', function() {
      it('should throw an error', function() {
        testPlanner.emit.onCall(0).callsArgWith(1, new Error('An error occurred!'));
        expect(testPlanner._generateStateSpace).to.throw('An error occurred!');
      });
    });
    describe('if a plan is passed in to the callback', function() {
      it('should add it to the plans array', function() {
        testPlanner._generateStateSpace();
        const callback = testPlanner.emit.args[0][1];
        // Callback shape: (error, plan, done, discoveredActions).
        callback(null, 'myPlan');
        callback(null, null, true);
        expect(testPlanner.emit.args[1][1]).to.deep.equal(['myPlan']);
      });
    });
    describe('if a truthy value for done is pass in to the callback', function() {
      it('should call testPlanner.emit with the event \'testPlanner.reduceToMinimumSetOfPlans\', ' +
        'plans, and discoveredActions', function() {
        testPlanner.emit.onCall(0).callsArgWith(1, null, null, true, 'discoveredActions');
        testPlanner._generateStateSpace();
        expect(testPlanner.emit.args[1].slice(0, 2)).to.deep.equal([
          'testPlanner.reduceToMinimumSetOfPlans',
          [],
        ]);
      });
      // NOTE(review): description says 'reduceToMinimumSetOfTestPlans' but the
      // asserted event is 'testPlanner.reduceToMinimumSetOfPlans'.
      describe('when the callback is called for testPlanner.emit with the event ' +
        '\'testPlanner.reduceToMinimumSetOfTestPlans\'', function() {
        describe('if an error is passed in', function() {
          it('should throw the error', function() {
            testPlanner._algorithm = 'default';
            testPlanner.emit.onCall(0).callsArgWith(1, null, null, true, 'discoveredActions');
            testPlanner.emit.onCall(1).callsArgWith(3, new Error('An error occurred!'));
            expect(testPlanner._generateStateSpace).to.throw('An error occurred!');
          });
        });
        describe('if an error is not passed in', function() {
          it('should call config.get once with the string "plannerTestLength"', function() {
            testPlanner.emit.onCall(0).callsArgWith(1, null, null, true, 'discoveredActions');
            testPlanner.emit.onCall(1).callsArgWith(3, null, 'theFinalPlans');
            testPlanner._generateStateSpace();
            expect(config.get.args[0]).to.deep.equal([
              'plannerTestLength',
            ]);
          });
          describe('if testLength is a truthy value', function() {
            it('should call testPlanner.emit with the event \'offlineReplanning.replan\', ' +
              'the finalPlans, discoveredActions, and an object with the testLength', function() {
              testPlanner.emit.onCall(0).callsArgWith(1, null, null, true, 'discoveredActions');
              testPlanner.emit.onCall(1).callsArgWith(3, null, 'theFinalPlans');
              testPlanner._algorithm = 'forwardStateSpaceSearchHeuristic';
              config.get.onCall(0).returns(5);
              testPlanner._generateStateSpace();
              expect(testPlanner.emit.args[2]).to.deep.equal([
                'offlineReplanning.replan',
                'theFinalPlans',
                'discoveredActions',
                'forwardStateSpaceSearchHeuristic',
                {
                  testLength: 5,
                },
              ]);
            });
          });
          describe('if testLength is a falsy value', function() {
            it('should call testPlanner.emit with the event \'testPlanner.planningFinished\', ' +
              'the finalPlans, and discoveredActions', function() {
              testPlanner.emit.onCall(0).callsArgWith(1, null, null, true, 'discoveredActions');
              testPlanner.emit.onCall(1).callsArgWith(3, null, 'theFinalPlans');
              testPlanner._algorithm = 'forwardStateSpaceSearchHeuristic';
              testPlanner._generateStateSpace();
              expect(testPlanner.emit.args[2]).to.deep.equal([
                'planner.planningFinished',
                'theFinalPlans',
                'discoveredActions',
                'forwardStateSpaceSearchHeuristic',
              ]);
            });
          });
        });
      });
    });
  });
});
|
drunkwater/leetcode
|
hard/ruby/c0158_818_race-car/00_leetcode_0158.rb
|
<reponame>drunkwater/leetcode<filename>hard/ruby/c0158_818_race-car/00_leetcode_0158.rb<gh_stars>0
# DRUNKWATER TEMPLATE(add description and prototypes)
# Question Title and Description on leetcode.com
# Function Declaration and Function Prototypes on leetcode.com
#818. Race Car
#Your car starts at position 0 and speed +1 on an infinite number line. (Your car can go into negative positions.)
#Your car drives automatically according to a sequence of instructions A (accelerate) and R (reverse).
#When you get an instruction "A", your car does the following: position += speed, speed *= 2.
#When you get an instruction "R", your car does the following: if your speed is positive then speed = -1 , otherwise speed = 1. (Your position stays the same.)
#For example, after commands "AAR", your car goes to positions 0->1->3->3, and your speed goes to 1->2->4->-1.
#Now for some target position, say the length of the shortest sequence of instructions to get there.
#Example 1:
#Input:
#target = 3
#Output: 2
#Explanation:
#The shortest instruction sequence is "AA".
#Your position goes from 0->1->3.
#Example 2:
#Input:
#target = 6
#Output: 5
#Explanation:
#The shortest instruction sequence is "AAARA".
#Your position goes from 0->1->3->7->7->6.
# Note:
#1 <= target <= 10000.
## @param {Integer} target
## @return {Integer}
#def racecar(target)
#end
# Time Is Money
|
ambertests/scala_coursework
|
beginners/rock-the-jvm-scala-beginners/src/main/lectures/part3fp/WhatsAFunction.scala
|
<reponame>ambertests/scala_coursework<gh_stars>0
package lectures.part3fp
/** Lecture demo: functions as first-class values in Scala. */
object WhatsAFunction extends App {
  // DREAM: use functions as first class elements
  // problem: object-oriented programming sees everything as objects
  // so JVM would take functions wrapped up as class objects (ie Traits)
  val doubler = new MyFunction[Int, Int] {
    override def apply(element: Int): Int = element * 2
  }
  println(doubler(2)) // because of how apply is treated, the doubler object
  // can be called like a function

  // function types = Function[A,B]
  val stringToIntConverter = new Function1[String, Int] {
    override def apply(string: String): Int = string.toInt
  }
  println(stringToIntConverter("3") + 4)

  // NOTE: adder is defined but never invoked below.
  val adder = new Function2[Int, Int, Int] { // could be ((Int, Int) => Int)
    override def apply(a: Int, b: Int): Int = a+b
  }
  // all Scala functions are objects (Function1, Function2, etc)
  /*
    Exercises:
    1. define a function which takes two strings and concatenates
    2. transform the MyPredicate and MyTransformer into function types
    3. define a function which takes an Int and returns another function
       which takes an Int and returns an Int
       - what is the type of this function
       - how to actually implement
   */
}
/** Hand-rolled function type: `apply` lets instances be called like functions. */
trait MyFunction[A,B]{
  def apply(element: A): B
}
|
701Szc/CustomComponent
|
CustomComponentTest/app/src/main/java/com/example/CustomComponentTest/activity/PhotoViewActivity.java
|
package com.example.CustomComponentTest.activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.widget.ImageView;
import android.widget.TextView;
import androidx.viewpager.widget.ViewPager;
import com.example.CustomComponentTest.R;
import com.example.CustomComponentTest.activity.base.BaseActivity;
import com.example.CustomComponentTest.adapter.PhotoPagerAdapter;
import com.example.CustomComponentTest.share.ShareDialog;
import com.example.CustomComponentTest.util.Util;
import com.example.mysdk.adutil.Utils;
import java.util.ArrayList;
import cn.sharesdk.framework.Platform;
/**
 * Full-screen photo gallery: shows product images in a swipeable pager with a
 * "current/total" page indicator and a share entry point.
 *
 * <p>Launch with an {@code ArrayList<String>} of photo URLs under {@link #PHOTO_LIST}.</p>
 */
public class PhotoViewActivity extends BaseActivity implements View.OnClickListener {

    /** Intent extra key: {@code ArrayList<String>} of photo URLs to display. */
    public static final String PHOTO_LIST = "photo_list";

    /**
     * UI
     */
    private ViewPager mPager;
    private TextView mIndictorView;
    private ImageView mShareView;

    /**
     * Data
     */
    private PhotoPagerAdapter mAdapter;
    private ArrayList<String> mPhotoLists;
    private int mLength;
    // Last selected page index; currently only written (kept for future use/debugging).
    private int currentPos;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_photo_view_layout);
        initData();
        initView();
    }

    /** Reads the photo URL list passed through the launching intent. */
    private void initData() {
        // Fix: the local Intent was previously unused and getIntent() called twice.
        Intent intent = getIntent();
        mPhotoLists = intent.getStringArrayListExtra(PHOTO_LIST);
        mLength = mPhotoLists.size();
    }

    /** Binds views, wires the pager adapter and keeps the page indicator in sync. */
    private void initView() {
        mIndictorView = (TextView) findViewById(R.id.indictor_view);
        mIndictorView.setText("1/" + mLength);
        mShareView = (ImageView) findViewById(R.id.share_view);
        mShareView.setOnClickListener(this);
        mPager = (ViewPager) findViewById(R.id.photo_pager);
        mPager.setPageMargin(Utils.dip2px(this, 30));
        mAdapter = new PhotoPagerAdapter(this, mPhotoLists, false);
        mPager.setAdapter(mAdapter);
        mPager.addOnPageChangeListener(new ViewPager.OnPageChangeListener() {
            @Override
            public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
            }

            @Override
            public void onPageSelected(int position) {
                // Indicator is 1-based, e.g. "3/10".
                mIndictorView.setText(String.valueOf(position + 1).concat("/").concat(String.valueOf(mLength)));
                currentPos = position;
            }

            @Override
            public void onPageScrollStateChanged(int state) {
            }
        });
        Util.hideSoftInputMethod(this, mIndictorView);
    }

    @Override
    public void onClick(View v) {
        switch (v.getId()) {
            case R.id.share_view:
                ShareDialog dialog = new ShareDialog(this, true);
                dialog.setShareType(Platform.SHARE_IMAGE);
                // Fix: setShareTitle was called twice with the same value in the original.
                dialog.setShareTitle(getString(R.string.imooc));
                dialog.setShareTitleUrl(getString(R.string.imooc_site));
                dialog.setShareText(getString(R.string.imooc));
                dialog.setShareSite(getString(R.string.imooc));
                dialog.show();
                break;
            default:
                break;
        }
    }
}
|
ShunjiroOsada/jsk_visualization_package
|
jsk_rqt_plugins/src/jsk_rqt_plugins/plot.py
|
<reponame>ShunjiroOsada/jsk_visualization_package<filename>jsk_rqt_plugins/src/jsk_rqt_plugins/plot.py
#!/usr/bin/env python
from rqt_gui_py.plugin import Plugin
from python_qt_binding import loadUi
from python_qt_binding.QtCore import Qt, QTimer, qWarning, Slot
from python_qt_binding.QtGui import QAction, QIcon, QMenu, QWidget
from python_qt_binding.QtGui import QWidget, QVBoxLayout, QSizePolicy, QColor
from rqt_py_common.topic_completer import TopicCompleter
from matplotlib.colors import colorConverter
from rqt_py_common.topic_helpers import is_slot_numeric
from rqt_plot.rosplot import ROSData, RosPlotException
from matplotlib.collections import PolyCollection, PathCollection, LineCollection
import matplotlib
import rospkg
import rospy
import os, sys
import argparse
try:
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
except ImportError:
# work around bug in dateutil
import sys
import thread
sys.modules['_thread'] = thread
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.backends.backend_qt4agg import NavigationToolbar2QTAgg as NavigationToolbar
from matplotlib.figure import Figure
import numpy
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import axes3d, Axes3D #<-- Note the capitalization!
class MatDataPlot3D(QWidget):
    """Qt widget embedding a 3D matplotlib canvas.

    Each subscribed curve is rendered as one polygon/line ribbon, stacked
    along the y axis (one slot per curve); x is time, z is the sample value.
    """

    class Canvas(FigureCanvas):
        """Ultimately, this is a QWidget (as well as a FigureCanvasAgg, etc.)."""
        def __init__(self, parent=None):
            super(MatDataPlot3D.Canvas, self).__init__(Figure())
            # 3D axes: x = time, y = curve index, z = sample value.
            self.axes = self.figure.add_subplot(111, projection='3d')
            self.axes.set_xlabel('t')
            self.axes.set_xlim3d(0, 10)
            self.axes.set_ylabel('Y')
            self.axes.set_ylim3d(-1, 1)
            self.axes.set_zlabel('Z')
            self.axes.set_zlim3d(0, 1)
            self.figure.tight_layout()
            self.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
            self.updateGeometry()

        def resizeEvent(self, event):
            # Re-run tight_layout so axis labels stay visible after a resize.
            super(MatDataPlot3D.Canvas, self).resizeEvent(event)
            self.figure.tight_layout()

    # Fixed palette cycled through as curves are added.
    _colors = [QColor(c) for c in [Qt.red, Qt.blue, Qt.magenta, Qt.cyan, Qt.green, Qt.darkYellow, Qt.black, Qt.darkRed, Qt.gray, Qt.darkCyan]]

    def __init__(self, parent=None, buffer_length=100, use_poly=True,
                 no_legend=False):
        super(MatDataPlot3D, self).__init__(parent)
        self._canvas = MatDataPlot3D.Canvas()
        self._use_poly = use_poly
        self._buffer_length = buffer_length
        self._toolbar = NavigationToolbar(self._canvas, self._canvas)
        vbox = QVBoxLayout()
        vbox.addWidget(self._toolbar)
        vbox.addWidget(self._canvas)
        self.setLayout(vbox)
        # curve_id -> (data_x, data_y) as actually drawn on each redraw.
        self._curves_verts = {}
        self._color_index = 0
        # curve_id -> [data_x, data_y, line, [ymin, ymax], rgba-tuple]
        self._curves = {}
        self._no_legend = no_legend
        self._autoscroll = False

    def autoscroll(self, enabled=True):
        # When enabled, redraw() rescales axis bounds to follow incoming data.
        self._autoscroll = enabled

    def add_curve(self, curve_id, curve_name, x, y):
        """Registers a new curve and seeds it with the initial samples.

        NOTE(review): curve_name is unused — the legend is built from curve ids.
        """
        color = QColor(self._colors[self._color_index % len(self._colors)])
        self._color_index += 1
        line = None
        self._curves[curve_id] = [[], [], line, [None, None],
                                  (color.red() / 255.0,
                                   color.green() / 255.0,
                                   color.blue() / 255.0,
                                   0.6)]
        self.update_values(curve_id, x, y)
        self._update_legend()

    def remove_curve(self, curve_id):
        curve_id = str(curve_id)
        if curve_id in self._curves:
            del self._curves[curve_id]
            del self._curves_verts[curve_id]
            self._update_legend()

    def _update_legend(self):
        if self._no_legend:
            return
        # NOTE(review): indexing keys() is Python 2 only (py3 returns a view).
        labels = self._curves.keys()
        handles = [plt.Rectangle((0, 0), 1, 1, fc=self._curves[labels[i]][4] ) for i in range(len(labels))]
        self._canvas.axes.legend(handles, labels, loc='upper left')

    @Slot(str, list, list)
    def update_values(self, curve_id, x, y):
        """Appends samples to a curve, trims to buffer_length and tracks the y range."""
        data_x, data_y, line, range_y,c = self._curves[curve_id]
        data_x.extend(x)
        data_y.extend(y)
        if len(data_x) > self._buffer_length:
            data_x = data_x[-self._buffer_length:]
            data_y = data_y[-self._buffer_length:]
            self._curves[curve_id][0] = data_x
            self._curves[curve_id][1] = data_y
        self._curves_verts[curve_id] = (data_x, data_y)
        if y:
            # Track the running min/max of this curve's values.
            ymin = min(y)
            if range_y[0]:
                ymin = min(ymin, range_y[0])
            range_y[0] = ymin
            ymax = max(y)
            if range_y[1]:
                ymax = max(ymax, range_y[1])
            range_y[1] = ymax

    def redraw(self):
        """Rebuilds the whole 3D collection from the buffered data and redraws."""
        self._canvas.axes.grid(True, color='gray')
        # Set axis bounds
        ymin = ymax = None
        xmax = 0
        # NOTE(review): sys.maxint is Python 2 only (py3 would need sys.maxsize).
        xmin = sys.maxint
        for curve in self._curves.values():
            data_x, _, _, range_y, c = curve
            if len(data_x) == 0:
                continue
            xmax = max(xmax, data_x[-1])
            xmin = min(xmin, data_x[0])
            if ymin is None:
                ymin = range_y[0]
                ymax = range_y[1]
            else:
                ymin = min(range_y[0], ymin)
                ymax = max(range_y[1], ymax)
        if self._autoscroll and ymin is not None:
            self._canvas.axes.set_xbound(lower=xmin, upper=xmax)
            self._canvas.axes.set_zbound(lower=ymin, upper=ymax)
            self._canvas.axes.set_ybound(lower=0,
                                         upper=len(self._curves.keys()))
        # create poly object
        verts = []
        colors = []
        for curve_id in self._curves_verts.keys():
            (data_x, data_y) = self._curves_verts[curve_id]
            colors.append(self._curves[curve_id][4])
            if self._use_poly:
                # Close the ribbon down to the baseline for the filled look.
                verts.append([(xmin, ymin)] + list(zip(data_x, data_y))
                             + [(xmax, ymin)])
            else:
                verts.append(zip(data_x, data_y))
        line_num = len(self._curves.keys())
        if self._use_poly:
            poly = PolyCollection(verts, facecolors=colors, closed=False)
        else:
            poly = LineCollection(verts, colors=colors)
        poly.set_alpha(0.7)
        self._canvas.axes.cla()
        self._canvas.axes.add_collection3d(poly,
                                           zs=range(line_num), zdir='y')
        self._update_legend()
        self._canvas.draw()
class Plot3D(Plugin):
    """rqt plugin entry point; parses CLI args and hosts a Plot3DWidget."""

    def __init__(self, context):
        super(Plot3D, self).__init__(context)
        self.setObjectName('Plot3D')
        self._args = self._parse_args(context.argv())
        self._widget = Plot3DWidget(initial_topics=self._args.topics, start_paused=self._args.start_paused,
                                    buffer_length=self._args.buffer,
                                    use_poly=not self._args.show_line,
                                    no_legend=self._args.no_legend)
        context.add_widget(self._widget)

    def _parse_args(self, argv):
        """Parses argv and expands comma/colon topic shorthands into a flat list."""
        parser = argparse.ArgumentParser(prog='rqt_3d_plot', add_help=False)
        Plot3D.add_arguments(parser)
        args = parser.parse_args(argv)
        topic_list = []
        for t in args.topics:
            # c_topics is the list of topics to plot
            c_topics = []
            # compute combined topic list, t == '/foo/bar1,/baz/bar2'
            for sub_t in [x for x in t.split(',') if x]:
                # check for shorthand '/foo/field1:field2:field3'
                if ':' in sub_t:
                    base = sub_t[:sub_t.find(':')]
                    # the first prefix includes a field name, so save then strip it off
                    c_topics.append(base)
                    if not '/' in base:
                        parser.error("%s must contain a topic and field name" % sub_t)
                    base = base[:base.rfind('/')]
                    # compute the rest of the field names
                    fields = sub_t.split(':')[1:]
                    c_topics.extend(["%s/%s" % (base, f) for f in fields if f])
                else:
                    c_topics.append(sub_t)
            # #1053: resolve command-line topic names
            import rosgraph
            c_topics = [rosgraph.names.script_resolve_name('rqt_plot', n) for n in c_topics]
            if type(c_topics) == list:
                topic_list.extend(c_topics)
            else:
                topic_list.append(c_topics)
        args.topics = topic_list
        return args

    @staticmethod
    def add_arguments(parser):
        """Registers the plugin's command-line options on the given parser."""
        group = parser.add_argument_group('Options for rqt_plot plugin')
        group.add_argument('-P', '--pause', action='store_true', dest='start_paused',
                           help='Start in paused state')
        group.add_argument('-L', '--line', action='store_true', dest='show_line',
                           help='Show lines rather than polygon representation')
        group.add_argument('--no-legend', action='store_true', dest='no_legend',
                           help='do not show legend')
        group.add_argument('-B', '--buffer', dest='buffer', action="store",
                           help='the length of the buffer', default=100, type=int)
        group.add_argument('topics', nargs='*', default=[], help='Topics to plot')
class Plot3DWidget(QWidget):
    """Widget behind the Plot3D plugin: topic picker UI, the MatDataPlot3D
    canvas, and a timer that polls subscribed topics and triggers redraws."""

    _redraw_interval = 40  # milliseconds between plot refreshes

    def __init__(self, initial_topics=None, start_paused=False,
                 buffer_length=100, use_poly=True, no_legend=False):
        super(Plot3DWidget, self).__init__()
        self.setObjectName('Plot3DWidget')
        self._buffer_length = buffer_length
        self._initial_topics = initial_topics
        rp = rospkg.RosPack()
        ui_file = os.path.join(rp.get_path('jsk_rqt_plugins'),
                               'resource', 'plot3d.ui')
        loadUi(ui_file, self)
        self.subscribe_topic_button.setIcon(QIcon.fromTheme('add'))
        self.remove_topic_button.setIcon(QIcon.fromTheme('remove'))
        self.pause_button.setIcon(QIcon.fromTheme('media-playback-pause'))
        self.clear_button.setIcon(QIcon.fromTheme('edit-clear'))
        self.data_plot = MatDataPlot3D(self, self._buffer_length,
                                       use_poly, no_legend)
        self.data_plot_layout.addWidget(self.data_plot)
        self.data_plot.autoscroll(self.autoscroll_checkbox.isChecked())
        # Forward drag&drop from the canvas to this widget's handlers.
        self.data_plot.dropEvent = self.dropEvent
        self.data_plot.dragEnterEvent = self.dragEnterEvent
        self.subscribe_topic_button.setEnabled(False)
        if start_paused:
            self.pause_button.setChecked(True)
        self._topic_completer = TopicCompleter(self.topic_edit)
        self._topic_completer.update_topics()
        self.topic_edit.setCompleter(self._topic_completer)
        self._start_time = rospy.get_time()
        self._rosdata = {}
        self._remove_topic_menu = QMenu()
        # init and start update timer for plot
        self._update_plot_timer = QTimer(self)
        self._update_plot_timer.timeout.connect(self.update_plot)
        if self._initial_topics:
            for topic_name in self._initial_topics:
                self.add_topic(topic_name)
            self._initial_topics = None

    @Slot('QDragEnterEvent*')
    def dragEnterEvent(self, event):
        # get topic name
        if not event.mimeData().hasText():
            if not hasattr(event.source(), 'selectedItems') or len(event.source().selectedItems()) == 0:
                qWarning('Plot.dragEnterEvent(): not hasattr(event.source(), selectedItems) or len(event.source().selectedItems()) == 0')
                return
            item = event.source().selectedItems()[0]
            topic_name = item.data(0, Qt.UserRole)
            # NOTE(review): '== None' — 'is None' is the idiomatic check.
            if topic_name == None:
                qWarning('Plot.dragEnterEvent(): not hasattr(item, ros_topic_name_)')
                return
        else:
            topic_name = str(event.mimeData().text())
        # check for numeric field type
        is_numeric, is_array, message = is_slot_numeric(topic_name)
        if is_numeric and not is_array:
            event.acceptProposedAction()
        else:
            qWarning('Plot.dragEnterEvent(): rejecting: "%s"' % (message))

    @Slot('QDropEvent*')
    def dropEvent(self, event):
        if event.mimeData().hasText():
            topic_name = str(event.mimeData().text())
        else:
            droped_item = event.source().selectedItems()[0]
            topic_name = str(droped_item.data(0, Qt.UserRole))
        self.add_topic(topic_name)

    @Slot(str)
    def on_topic_edit_textChanged(self, topic_name):
        # on empty topic name, update topics
        if topic_name in ('', '/'):
            self._topic_completer.update_topics()
        is_numeric, is_array, message = is_slot_numeric(topic_name)
        self.subscribe_topic_button.setEnabled(is_numeric and not is_array)
        self.subscribe_topic_button.setToolTip(message)

    @Slot()
    def on_topic_edit_returnPressed(self):
        if self.subscribe_topic_button.isEnabled():
            self.add_topic(str(self.topic_edit.text()))

    @Slot()
    def on_subscribe_topic_button_clicked(self):
        self.add_topic(str(self.topic_edit.text()))

    @Slot(bool)
    def on_pause_button_clicked(self, checked):
        self.enable_timer(not checked)

    @Slot(bool)
    def on_autoscroll_checkbox_clicked(self, checked):
        self.data_plot.autoscroll(checked)

    @Slot()
    def on_clear_button_clicked(self):
        self.clean_up_subscribers()

    def update_plot(self):
        """Timer callback: pulls new samples from every topic and redraws once."""
        if self.data_plot is not None:
            needs_redraw = False
            for topic_name, rosdata in self._rosdata.items():
                try:
                    # NOTE(review): rosdata.next() is Python 2 iterator style.
                    data_x, data_y = rosdata.next()
                    if data_x or data_y:
                        self.data_plot.update_values(topic_name, data_x, data_y)
                        needs_redraw = True
                except RosPlotException as e:
                    qWarning('PlotWidget.update_plot(): error in rosplot: %s' % e)
            if needs_redraw:
                self.data_plot.redraw()

    def _subscribed_topics_changed(self):
        self._update_remove_topic_menu()
        if not self.pause_button.isChecked():
            # if pause button is not pressed, enable timer based on subscribed topics
            self.enable_timer(self._rosdata)

    def _update_remove_topic_menu(self):
        # Rebuild the "remove topic" dropdown, one action per subscribed topic.
        def make_remove_topic_function(x):
            return lambda: self.remove_topic(x)
        self._remove_topic_menu.clear()
        for topic_name in sorted(self._rosdata.keys()):
            action = QAction(topic_name, self._remove_topic_menu)
            action.triggered.connect(make_remove_topic_function(topic_name))
            self._remove_topic_menu.addAction(action)
        self.remove_topic_button.setMenu(self._remove_topic_menu)

    def add_topic(self, topic_name):
        """Subscribes to a topic and adds its curve; no-op if already subscribed."""
        if topic_name in self._rosdata:
            qWarning('PlotWidget.add_topic(): topic already subscribed: %s' % topic_name)
            return
        self._rosdata[topic_name] = ROSData(topic_name, self._start_time)
        if self._rosdata[topic_name].error is not None:
            qWarning(str(self._rosdata[topic_name].error))
            del self._rosdata[topic_name]
        else:
            data_x, data_y = self._rosdata[topic_name].next()
            self.data_plot.add_curve(topic_name, topic_name, data_x, data_y)
            self._subscribed_topics_changed()

    def remove_topic(self, topic_name):
        self._rosdata[topic_name].close()
        del self._rosdata[topic_name]
        self.data_plot.remove_curve(topic_name)
        self._subscribed_topics_changed()

    def clean_up_subscribers(self):
        """Unsubscribes everything and clears all curves from the plot."""
        for topic_name, rosdata in self._rosdata.items():
            rosdata.close()
            self.data_plot.remove_curve(topic_name)
        self._rosdata = {}
        self._subscribed_topics_changed()

    def enable_timer(self, enabled=True):
        if enabled:
            self._update_plot_timer.start(self._redraw_interval)
        else:
            self._update_plot_timer.stop()
|
logbee/keyscore
|
keyscore-model/src/main/scala/io/logbee/keyscore/model/pipeline/FilterState.scala
|
<reponame>logbee/keyscore<filename>keyscore-model/src/main/scala/io/logbee/keyscore/model/pipeline/FilterState.scala
package io.logbee.keyscore.model.pipeline

import java.util.UUID

import io.logbee.keyscore.model.data.Health

/**
 * Runtime snapshot of a pipeline filter: identity, health, the most recent and
 * cumulative throughput times, and its lifecycle status (defaults to Unknown).
 */
case class FilterState(id:UUID, health: Health, throughPutTime: Long = 0, totalThroughputTime: Long = 0, status: FilterStatus = Unknown)
|
q759729997/qytPytorch
|
qytPytorch/models/cv/image_classification/densenet.py
|
from torch import nn
from qytPytorch.modules.layer import DenseBlock
from qytPytorch.modules.layer import FlattenLayer
from qytPytorch.modules.layer import GlobalAvgPool2d
class DenseNet(nn.Module):
    """DenseNet (densely connected CNN) for single-channel images, 10 classes."""
    def __init__(self):
        super().__init__()
        # Stem: the same single conv + max-pool layers as ResNet.
        self.net = nn.Sequential(
            nn.Conv2d(1, 64, kernel_size=7, stride=2, padding=3),
            nn.BatchNorm2d(64),
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=3, stride=2, padding=1))
        # Like ResNet's four residual stages, DenseNet uses four dense blocks.
        # With growth rate 32, each 4-conv dense block adds 4 * 32 = 128 channels.
        num_channels, growth_rate = 64, 32  # num_channels tracks the running channel count
        num_convs_in_dense_blocks = [4, 4, 4, 4]
        for i, num_convs in enumerate(num_convs_in_dense_blocks):
            DB = DenseBlock(num_convs, num_channels, growth_rate)
            # NOTE(review): "DenseBlosk" typo kept — it is a registered module name.
            self.net.add_module("DenseBlosk_%d" % i, DB)
            # Channel count produced by the dense block just added.
            num_channels = DB.out_channels
            # Between dense blocks, insert a transition layer halving the channels.
            if i != len(num_convs_in_dense_blocks) - 1:
                self.net.add_module("transition_block_%d" % i, self._transition_block(num_channels, num_channels // 2))
                num_channels = num_channels // 2
        # Head: BN + ReLU, global average pooling, then a linear classifier.
        self.net.add_module("BN", nn.BatchNorm2d(num_channels))
        self.net.add_module("relu", nn.ReLU())
        self.net.add_module("global_avg_pool", GlobalAvgPool2d())  # output: (Batch, num_channels, 1, 1)
        self.net.add_module("fc", nn.Sequential(FlattenLayer(), nn.Linear(num_channels, 10)))
    def _transition_block(self, in_channels, out_channels):
        """Transition layer controlling model complexity: a 1x1 conv shrinks the
        channel count, and a stride-2 average pool halves height and width."""
        blk = nn.Sequential(
            nn.BatchNorm2d(in_channels),
            nn.ReLU(),
            nn.Conv2d(in_channels, out_channels, kernel_size=1),
            nn.AvgPool2d(kernel_size=2, stride=2))
        return blk
    def forward(self, img):
        # Returns class logits; final Linear layer maps to 10 outputs.
        output = self.net(img)
        return output
|
Sir-Branch/k64f-starter-template
|
sdk_k64f/components/timer/lptmr_adapter.c
|
<gh_stars>1-10
/*
* Copyright 2018-2019 NXP
* All rights reserved.
*
*
* SPDX-License-Identifier: BSD-3-Clause
*/
#include "fsl_common.h"
#include "fsl_device_registers.h"
#include "timer.h"
#include "fsl_lptmr.h"
/* Per-instance state for one LPTMR-backed HAL timer. */
typedef struct _hal_timer_handle_struct_t
{
    uint32_t timeout;              /* Timer period in microseconds. */
    uint32_t timerClock_Hz;        /* Clock feeding the LPTMR counter. */
    hal_timer_callback_t callback; /* User callback fired on compare match. */
    void *callbackParam;           /* Opaque argument handed to the callback. */
    uint8_t instance;              /* LPTMR peripheral instance index. */
} hal_timer_handle_struct_t;
/*******************************************************************************
* Variables
******************************************************************************/
/* LPTMR register bases per instance, and the registered HAL handle for each instance. */
static LPTMR_Type *const s_LptmrBase[] = LPTMR_BASE_PTRS;
static hal_timer_handle_t s_timerHandle[sizeof(s_LptmrBase) / sizeof(LPTMR_Type *)];
/************************************************************************************
*************************************************************************************
* Private memory declarations
*************************************************************************************
************************************************************************************/
/* Common ISR body: clear the compare-match flag of the given instance and
 * invoke the registered user callback, if any. Assumes HAL_TimerInit has
 * stored a handle in s_timerHandle[instance] before the IRQ is enabled. */
static void HAL_TimerInterruptHandle(uint8_t instance)
{
    hal_timer_handle_struct_t *halTimerState = (hal_timer_handle_struct_t *)s_timerHandle[instance];
    LPTMR_ClearStatusFlags(s_LptmrBase[halTimerState->instance], kLPTMR_TimerCompareFlag);
    if (halTimerState->callback != NULL)
    {
        halTimerState->callback(halTimerState->callbackParam);
    }
}
/* Vector entry for devices where LPTMR0 shares its IRQ with PWT. */
void PWT_LPTMR0_IRQHandler(void)
{
    HAL_TimerInterruptHandle(0);
    /* Add for ARM errata 838869, affects Cortex-M4, Cortex-M4F Store immediate overlapping
      exception return operation might vector to incorrect interrupt */
#if defined __CORTEX_M && (__CORTEX_M == 4U)
    __DSB();
#endif
}
/* Vector entry for devices with a dedicated LPTMR0 IRQ. */
void LPTMR0_IRQHandler(void)
{
    HAL_TimerInterruptHandle(0);
    /* Add for ARM errata 838869, affects Cortex-M4, Cortex-M4F Store immediate overlapping
      exception return operation might vector to incorrect interrupt */
#if defined __CORTEX_M && (__CORTEX_M == 4U)
    __DSB();
#endif
}
/************************************************************************************
*************************************************************************************
* Public functions
*************************************************************************************
************************************************************************************/
/* Initialise one LPTMR instance as a HAL timer: configure the peripheral,
 * program the period from the microsecond timeout, enable the compare
 * interrupt and its NVIC vector. The timer is NOT started here — call
 * HAL_TimerEnable for that. halTimerHandle must point to storage of at
 * least HAL_TIMER_HANDLE_SIZE bytes owned by the caller. */
hal_timer_status_t HAL_TimerInit(hal_timer_handle_t halTimerHandle, hal_timer_config_t *halTimerConfig)
{
    IRQn_Type mLptmrIrqId[] = LPTMR_IRQS;
    IRQn_Type irqId;
    hal_timer_handle_struct_t *halTimerState = halTimerHandle;
    /* Structure of initialize LPTMR */
    lptmr_config_t lptmrConfig;
    assert(sizeof(hal_timer_handle_struct_t) == HAL_TIMER_HANDLE_SIZE);
    assert(halTimerConfig);
    assert(halTimerHandle);
    assert(halTimerConfig->instance < (sizeof(s_LptmrBase) / sizeof(LPTMR_Type *)));
    halTimerState->timeout = halTimerConfig->timeout;
    halTimerState->instance = halTimerConfig->instance;
    irqId = mLptmrIrqId[halTimerState->instance];
    LPTMR_GetDefaultConfig(&lptmrConfig);
    /* Initialize the LPTMR */
    LPTMR_Init(s_LptmrBase[halTimerState->instance], &lptmrConfig);
    halTimerState->timerClock_Hz = halTimerConfig->srcClock_Hz;
    /*
     * Set timer period.
     * Note : the parameter "ticks" of LPTMR_SetTimerPeriod should be equal or greater than 1.
     */
    LPTMR_SetTimerPeriod(s_LptmrBase[halTimerState->instance],
                         USEC_TO_COUNT(halTimerState->timeout, halTimerState->timerClock_Hz));
    /* Enable timer interrupt */
    LPTMR_EnableInterrupts(s_LptmrBase[halTimerState->instance], kLPTMR_TimerInterruptEnable);
    /* Register the handle before unmasking the IRQ so the ISR never sees NULL. */
    s_timerHandle[halTimerState->instance] = halTimerHandle;
    NVIC_SetPriority((IRQn_Type)irqId, HAL_TIMER_ISR_PRIORITY);
    EnableIRQ(irqId);
    return kStatus_HAL_TimerSuccess;
}
/* Tear down the timer: unregister the handle first (so a late ISR finds no
 * callback slot) and then gate/deinitialise the LPTMR peripheral. */
void HAL_TimerDeinit(hal_timer_handle_t halTimerHandle)
{
    assert(halTimerHandle);
    hal_timer_handle_struct_t *halTimerState = halTimerHandle;
    s_timerHandle[halTimerState->instance] = NULL;
    LPTMR_Deinit(s_LptmrBase[halTimerState->instance]);
}
/*************************************************************************************/
/* Start counting; the compare interrupt configured in HAL_TimerInit fires periodically. */
void HAL_TimerEnable(hal_timer_handle_t halTimerHandle)
{
    assert(halTimerHandle);
    hal_timer_handle_struct_t *halTimerState = halTimerHandle;
    LPTMR_StartTimer(s_LptmrBase[halTimerState->instance]);
}
/*************************************************************************************/
/* Stop counting; the LPTMR counter is reset by hardware when stopped. */
void HAL_TimerDisable(hal_timer_handle_t halTimerHandle)
{
    assert(halTimerHandle);
    hal_timer_handle_struct_t *halTimerState = halTimerHandle;
    LPTMR_StopTimer(s_LptmrBase[halTimerState->instance]);
}
/*************************************************************************************/
/* Register the function (and its opaque argument) invoked from the ISR on
 * each compare match. Passing NULL effectively silences the callback. */
void HAL_TimerInstallCallback(hal_timer_handle_t halTimerHandle, hal_timer_callback_t callback, void *callbackParam)
{
    assert(halTimerHandle);
    hal_timer_handle_struct_t *halTimerState = halTimerHandle;
    halTimerState->callback = callback;
    halTimerState->callbackParam = callbackParam;
}
/* Largest timeout (in microseconds) this timer can represent: the 16-bit
 * compare range 0xFFFF minus a ~4 ms reserve kept as headroom. If even the
 * 1 ms count exceeds that reserve (i.e. the clock is extremely slow), a
 * fixed 1000 us is returned instead.
 * NOTE(review): returning 1000 when reserveCount < 1 ms count looks like a
 * degenerate-clock guard — confirm the intended semantics. */
uint32_t HAL_TimerGetMaxTimeout(hal_timer_handle_t halTimerHandle)
{
    uint32_t reserveCount;
    assert(halTimerHandle);
    hal_timer_handle_struct_t *halTimerState = halTimerHandle;
    reserveCount = (uint32_t)MSEC_TO_COUNT((4), (halTimerState->timerClock_Hz));
    if (reserveCount < MSEC_TO_COUNT((1), (halTimerState->timerClock_Hz)))
    {
        return 1000;
    }
    return COUNT_TO_USEC(0xFFFF - reserveCount, halTimerState->timerClock_Hz);
}
/* Return the current LPTMR counter value converted to microseconds. */
uint32_t HAL_TimerGetCurrentTimerCount(hal_timer_handle_t halTimerHandle)
{
    assert(halTimerHandle);
    hal_timer_handle_struct_t *halTimerState = halTimerHandle;
    return COUNT_TO_USEC(LPTMR_GetCurrentTimerCount(s_LptmrBase[halTimerState->instance]),
                         halTimerState->timerClock_Hz);
}
/* Change the timer period to `timeout` microseconds.
 * Returns kStatus_HAL_TimerOutOfRanger when the converted tick count falls
 * outside what the 16-bit LPTMR compare register can hold (LPTMR requires
 * ticks >= 1; 0xfff0 keeps a small headroom below the 0xFFFF maximum).
 * On success the new period takes effect per the LPTMR_SetTimerPeriod rules. */
hal_timer_status_t HAL_TimerUpdateTimeout(hal_timer_handle_t halTimerHandle, uint32_t timeout)
{
    uint32_t tickCount;
    assert(halTimerHandle);
    hal_timer_handle_struct_t *halTimerState = halTimerHandle;
    halTimerState->timeout = timeout;
    tickCount = USEC_TO_COUNT(halTimerState->timeout, halTimerState->timerClock_Hz);
    if ((tickCount < 1) || (tickCount > 0xfff0))
    {
        /* Braced per coding standard; previously a bare one-line if. */
        return kStatus_HAL_TimerOutOfRanger;
    }
    LPTMR_SetTimerPeriod(s_LptmrBase[halTimerState->instance], tickCount);
    return kStatus_HAL_TimerSuccess;
}
/* Low-power hooks are intentionally empty: the LPTMR keeps running in low
 * power modes, so no save/restore is needed for this backend. */
void HAL_TimerExitLowpower(hal_timer_handle_t halTimerHandle)
{
    assert(halTimerHandle);
}
void HAL_TimerEnterLowpower(hal_timer_handle_t halTimerHandle)
{
    assert(halTimerHandle);
}
|
code-krypt/maggi
|
maggi-core/src/main/java/com/drykode/maggi/core/domain/job/progress/JobProgressEvent.java
|
package com.drykode.maggi.core.domain.job.progress;
import com.drykode.maggi.core.domain.job.progress.support.JobStatus;
import java.io.Serializable;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.ToString;
/**
 * Immutable-style event describing the progress of a submitted job.
 *
 * <p>Serializable so it can be published over messaging/queue transports.
 * Lombok generates the builder, getters, constructors and {@code toString}.
 */
@Builder
@Getter
@AllArgsConstructor
@NoArgsConstructor
@ToString
public class JobProgressEvent implements Serializable {
    private static final long serialVersionUID = -7458299151557012644L;
    // Identifier of the job this event reports on.
    private String jobId;
    // API key of the client that submitted the job.
    // NOTE(review): this key travels inside the event; confirm it is not
    // logged or exposed downstream.
    private String submitterApiKey;
    // Current lifecycle status of the job.
    private JobStatus status;
}
|
sppp/Overlook
|
src/Overlook/Core.h
|
<reponame>sppp/Overlook
#ifndef _Overlook_Core_h_
#define _Overlook_Core_h_
namespace Overlook {
using namespace Upp;
class Overlook;
// Visual setting enumerators
enum {DRAW_LINE, DRAW_SECTION, DRAW_HISTOGRAM, DRAW_ARROW, DRAW_NONE};       // buffer draw styles
enum {WINDOW_SEPARATE, WINDOW_CHART};                                        // indicator window placement
enum {STYLE_SOLID, STYLE_DASH, STYLE_DOT, STYLE_DASHDOT, STYLE_DASHDOTDOT};  // line stroke styles
// Class for vertical levels on containers (horizontal guide lines drawn at a fixed value)
struct DataLevel : public Moveable<DataLevel> {
	int style, line_width;  // STYLE_* stroke style and pixel width
	double value; // height
	Color clr;    // line color
	DataLevel() : value(0), style(0), line_width(1) {}
	DataLevel& operator=(const DataLevel& src) {value = src.value; clr = src.clr; style = src.style; line_width = src.line_width; return *this;}
	void Serialize(Stream& s) {s % style % line_width % value % clr;}
};
// Class for registering input and output types of values of classes.
// A FilterFunction decides, in two passes (timeframe pass and symbol pass),
// whether a candidate (in_sym, in_tf) source matches a consumer (out_sym, out_tf).
typedef bool (*FilterFunction)(void* basesystem, bool match_tf, int in_sym, int in_tf, int out_sym, int out_tf);
// Default filter: require the same timeframe on the tf pass and the same symbol on the sym pass.
inline bool SymTfFilter(void* basesystem, bool match_tf, int in_sym, int in_tf, int out_sym, int out_tf) {
	if (match_tf)
		return in_tf == out_tf;
	else
		return in_sym == out_sym;
}
// Relaxed filter: accept any timeframe, but still require the same symbol.
inline bool AnyTf(void* basesystem, bool match_tf, int in_sym, int in_tf, int out_sym, int out_tf) {
	if (match_tf)
		return true;
	else
		return in_sym == out_sym;
}
// Classes for IO arguments — tag types passed to Core::IO to declare a
// block's inputs, outputs, label buffers, tunable args and persistent state.
// Declares an input slot of source type T with an optional match filter and args callback.
template <class T>
struct In : public ValueBase {
	In(FilterFunction fn, ArgsFn args=NULL) {data_type = IN_; data = (void*)fn; factory = System::Find<T>(); data2 = (void*)args;}
	In(ArgsFn args) {data_type = IN_; data = (void*)SymTfFilter; factory = System::Find<T>(); data2 = (void*)args;}
	In() {data_type = IN_; data = (void*)SymTfFilter; factory = System::Find<T>(); data2 = NULL;}
};
// Marks the previous input declaration as optional.
struct InOptional : public ValueBase {
	InOptional() {data_type = INOPT_;}
};
// Declares `count` output buffers, of which `visible` are drawn.
struct Out : public ValueBase {
	Out(int count, int visible) {this->count = count; this->visible = visible; data_type = OUT_;}
};
// Declares `count` label (signal) buffers.
struct Lbl : public ValueBase {
	Lbl(int count) {this->count = count; data_type = LBL_;}
};
// Declares an integer argument with key, storage reference and allowed range.
struct Arg : public ValueBase {
	Arg(const char* key, int& value, int min, int max=10000) {s0 = key; data = &value; data_type = INT_; this->min = min; this->max = max;}
};
// Declares a piece of persistent state serialized via the given callback.
struct Persistent : public ValueBase {
	Callback1<Stream&> serialize;
protected:
	Persistent() {data_type = PERS_;}
public:
	Persistent(Callback1<Stream&> serialize) : serialize(serialize) {data_type = PERS_;}
	Persistent(const Persistent& src) {*this = src;}
	virtual ~Persistent() {}
	void operator = (const Persistent& base) {
		serialize = base.serialize;
		ValueBase::operator=(base);
	}
	virtual void Serialize(Stream& s) {serialize(s);}
};
// The "easy" method for callbacks with partially template arguments and partly with constant
// arguments. It requires one single global object, but that's not too much to ask.
// Some purist would go with some static function method, but I don't have a clue what it is.
struct StreamSerializer {
	typedef StreamSerializer CLASSNAME;
	// Serialize *obj through U++'s % stream operator.
	template <class T> void ItemSerialize(Stream& s, T* obj) {s % *obj;}
	// Bind obj into a Callback1<Stream&> usable by Persistent.
	template <class T> Callback1<Stream&> GetSerializer(T& obj) {
		return THISBACK1(ItemSerialize<T>, &obj);
	}
};
// Process-wide singleton used by Mem() below.
inline StreamSerializer& GetStreamSerializer() {return Single<StreamSerializer>();}
// Convenience: wrap any %-serializable variable as Persistent state.
template <class T> inline Persistent Mem(T& t) {
	return Persistent(GetStreamSerializer().GetSerializer(t));
}
// Utility function for changing class arguments: visits a Core's declared
// INT_ args, either collecting current values (loading pass) or writing
// new values back into the object (storing pass).
// NOTE(review): the `storing` flag's polarity reads inverted relative to its
// name (SetLoading => storing=false => values are copied OUT of the object
// into `args`); behaviour is self-consistent — confirm naming intent.
struct ArgChanger : public ValueRegister {
	ArgChanger() : cursor(0), storing(0) {}
	virtual void IO(const ValueBase& base) {
		if (!storing) {
			// Collect: remember the key and current value of each declared arg.
			keys.SetCount(cursor+1);
			keys[cursor] = base.s0;
			args.SetCount(cursor+1);
			if (base.data_type == ValueBase::INT_)
				args[cursor++] = *(int*)base.data;
		} else {
			// Apply: write args[cursor] back into the object's storage.
			if (base.data_type == ValueBase::INT_)
				*(int*)base.data = args[cursor++];
		}
	}
	void SetLoading() {storing = false; cursor = 0;}
	void SetStoring() {storing = true; cursor = 0;}
	Vector<Value> args;    // parallel to keys: current/new argument values
	Vector<String> keys;   // declared argument keys
	int cursor;            // visit position within the current pass
	bool storing;          // pass selector; see NOTE above
};
// Raw pointer to an int argument plus its allowed [min, max] range.
struct ArgPtr : Moveable<ArgPtr> {
	int* ptr = NULL;
	int min = 0, max = 0;
};
// Base I/O layer of an indicator/strategy node: owns its input connections,
// output/label buffers, job list and persistent state, and handles caching
// to disk. Concrete behaviour lives in the Core subclass below.
class CoreIO : public ValueRegister, public Pte<CoreIO> {
protected:
	friend class System;
	friend class Buffer;
	friend class Job;
	typedef Ptr<CoreIO> CoreIOPtr;
	Vector<Input> inputs;          // connected upstream sources
	Vector<Output> outputs;        // produced value buffers
	Vector<Label> labels;          // produced label/signal buffers
	Vector<ArgPtr> args;           // declared integer arguments
	Vector<Buffer*> buffers;       // flat view over all output buffers
	Array<Job> jobs;               // background jobs owned by this node
	Array<Persistent> persistents; // extra serialized state
	Job* current_job = NULL;
	JobThread* current_thrd = NULL;
	SpinLock serialization_lock, refresh_lock;
	String cache_dir;
	int sym_id, tf_id, factory, hash; // identity within the System registry
	int counted, bars;                // processed bar count vs. total bars
	int db_src;
	bool serialized;
	bool is_init = false;
	typedef const Output ConstOutput;
	typedef const Input ConstInput;
	// Some utility functions for checking that indicator values are strictly L-R
	#if defined flagDEBUG && defined flagSAFETYLIMITS
	#define SAFETYASSERT(x) ASSERT(x)
	int read_safety_limit;
	void SafetyInspect(int i) {ASSERT(i <= read_safety_limit);}
	void SetSafetyLimit(int i) {read_safety_limit = i;}
	ConstBuffer& SafetyBuffer(ConstBuffer& cb) const {Buffer& b = (Buffer&)cb; b.SafetyInspect((CoreIO*)this); return cb;}
	Buffer& SafetyBuffer(Buffer& b) {b.SafetyInspect((CoreIO*)this); return b;}
	#else
	#define SAFETYASSERT(x)
	void SafetyInspect(int i) const {}
	void SetSafetyLimit(int i) const {}
	Buffer& SafetyBuffer(Buffer& cb) const {return cb;}
	ConstBuffer& SafetyBuffer(ConstBuffer& cb) const {return cb;}
	#endif
public:
	CoreIO();
	virtual ~CoreIO();
	void StoreCache();
	void LoadCache();
	void Put(Stream& out, const String& dir, int subcore_id);
	void Get(Stream& in, const String& dir, int subcore_id);
	virtual void IO(const ValueBase& base);
	void RefreshBuffers();
	// Depth-first search for a node of type T: checks this node, then every
	// upstream input recursively. Returns NULL when no match exists.
	template <class T> T* Get() {
		T* t = dynamic_cast<T*>(this);
		if (t) return t;
		for(int i = 0; i < inputs.GetCount(); i++) {
			ConstInput& in = inputs[i];
			for(int j = 0; j < in.GetCount(); j++) {
				CoreIO* c = in[j].core;
				if (c == this)
					continue;
				ASSERT(c);
				T* t = dynamic_cast<T*>(c);
				if (t) return t;
				t = c->Get<T>();
				if (t) return t;
			}
		}
		return NULL;
	}
	// Buffer/label accessors (SafetyBuffer enforces left-to-right reads in debug builds).
	Buffer& GetBuffer(int buffer) {return SafetyBuffer(*buffers[buffer]);}
	ConstBuffer& GetBuffer(int buffer) const {return SafetyBuffer(*buffers[buffer]);}
	ConstBuffer& GetInputBuffer(int input, int sym, int tf, int buffer) const;
	ConstLabelSignal& GetInputLabel(int input, int sym, int tf, int buf=0) const;
	CoreIO* GetInputCore(int input, int sym, int tf) const;
	CoreIO* GetInputCore(int input) const;
	Output& GetOutput(int output) {return outputs[output];}
	ConstOutput& GetOutput(int output) const {return outputs[output];}
	ConstLabel& GetLabel(int lbl) const {return labels[lbl];}
	ConstLabelSignal& GetLabelBuffer(int lbl, int buf) const {return labels[lbl].buffers[buf];}
	LabelSignal& GetLabelBuffer(int lbl, int buf) {return labels[lbl].buffers[buf];}
	const CoreIO& GetInput(int input, int sym, int tf) const;
	int GetTimeframe() const {return tf_id;}
	int GetTf() const {return tf_id;}
	int GetSymbol() const {return sym_id;}
	String GetCacheDirectory();
	Color GetBufferColor(int i) {return buffers[i]->clr;}
	double GetBufferValue(int i, int shift) {return buffers[i]->value[shift];}
	double GetBufferValue(int shift) {return outputs[0].buffers[0].value[shift];}
	int GetBufferStyle(int i) {return buffers[i]->style;}
	int GetBufferArrow(int i) {return buffers[i]->chr;}
	int GetBufferLineWidth(int i) {return buffers[i]->line_width;}
	int GetBufferType(int i) {return buffers[i]->line_style;}
	int GetBufferCount() {return buffers.GetCount();}
	int GetOutputCount() const {return outputs.GetCount();}
	int GetLabelCount() const {return labels.GetCount();}
	int GetLabelBufferCount(int l) const {return labels[l].buffers.GetCount();}
	int GetFactory() const {return factory;}
	bool IsInitialized() const {return is_init;}
	void SetInput(int input_id, int sym_id, int tf_id, CoreIO& core, int output_id);
	void SetBufferColor(int i, Color c) {buffers[i]->clr = c;}
	void SetBufferLineWidth(int i, int line_width) {buffers[i]->line_width = line_width;}
	void SetBufferType(int i, int style) {buffers[i]->line_style = style;}
	void SetBufferStyle(int i, int style) {buffers[i]->style = style;}
	void SetBufferShift(int i, int shift) {buffers[i]->shift = shift;}
	void SetBufferBegin(int i, int begin) {buffers[i]->begin = begin;}
	void SetBufferArrow(int i, int chr) {buffers[i]->chr = chr;}
	void SetSymbol(int i) {sym_id = i;}
	void SetTimeframe(int i) {tf_id = i;}
	void SetFactory(int i) {factory = i;}
	void SetHash(int i) {hash = i;}
};
typedef Ptr<CoreIO> CoreIOPtr;
typedef Ptr<CoreIO> CoreIOPtr;
// No-op argument sink: lets Core subclasses declare their tunable arguments
// through a uniform Arg() interface without a live reader/writer attached.
struct ArgumentBase {
	void Arg(const char* var, bool& v) {}
	void Arg(const char* var, int& v, int min=0, int max=0, int step=0) {}
	void Arg(const char* var, double& v, double min=0.0, double max=0.0, double step=0.0) {}
	void Arg(const char* var, Time& v, Time min=Time(1970,1,1), Time max=Time(2070,1,1), int step=0) {}
	void Arg(const char* var, String& v) {}
};
class DataBridge;
// Concrete indicator/strategy node: adds visual settings (levels, window
// type, min/max), sub-cores, and the Init/Start lifecycle on top of the
// CoreIO data layer. Subclasses implement IO() to declare inputs/outputs
// and Start() to compute values.
class Core : public CoreIO {
protected:
	friend class CoreIO;
	// Settings
	String short_name;
	// Visual settings
	Array<Core> subcores;          // owned child cores
	Vector<int> subcore_factories; // factory ids parallel to subcores
	Vector<DataLevel> levels;      // horizontal guide lines
	Color levels_clr;
	double minimum, maximum;       // fixed value range, when has_minimum/has_maximum
	int levels_style;
	int window_type = WINDOW_CHART;
	int next_count;
	int period;
	int end_offset;
	int future_bars;
	bool has_maximum, has_minimum;
	bool skip_setcount;
	bool skip_allocate;
	bool has_heatmap = false;
	bool avoid_refresh = false;
	bool ready = false;
	Core();
public:
	virtual ~Core();
	virtual void Arguments(ArgumentBase& args) {}
	virtual void Init() {}
	virtual void Deinit() {}
	virtual void Start() {}
	virtual void IO(ValueRegister& reg) = 0;
	virtual double GetHeatmapValue(int i, double price) {return 0;}
	void InitAll();
	void AllowJobs();
	// Create and attach a child core of registered type T.
	template <class T> Core& AddSubCore() {
		int i = System::Find<T>();
		ASSERT_(i != -1, "This class is not registered to the factory");
		subcore_factories.Add(i);
		return subcores.Add(new T);
	}
	void ResetSubCores();
	Core& At(int i) {return subcores[i];}
	Core& Set(String key, Value value);
	// Get settings
	int GetBars() {return bars;}
	int GetWindowType() {return window_type;}
	int GetCounted() {return counted;}
	int GetCoreLevelCount() const {return levels.GetCount();}
	int GetCoreLevelType(int i) const {return levels[i].style;}
	int GetCoreLevelLineWidth(int i) const {return levels[i].line_width;}
	double GetMaximum() const {return maximum;}
	double GetMinimum() const {return minimum;}
	double GetCoreLevelValue(int i) const {return levels[i].value;}
	double GetCoreMinimum() {return minimum;}
	double GetCoreMaximum() {return maximum;}
	bool IsCoreSeparateWindow() {return window_type == WINDOW_SEPARATE;}
	bool HasMaximum() const {return has_maximum;}
	bool HasMinimum() const {return has_minimum;}
	int GetMinutePeriod();
	int GetPeriod() const;
	int GetVisibleCount() const {return outputs[0].visible;}
	int GetFutureBars() const {return future_bars;}
	bool IsHeatmap() {return has_heatmap;}
	inline ConstBuffer& GetInputBuffer(int input, int buffer) const {return CoreIO::GetInputBuffer(input, GetSymbol(), GetTimeframe(), buffer);}
	inline ConstBuffer& GetInputBuffer(int input, int sym, int tf, int buffer) const {return CoreIO::GetInputBuffer(input, sym, tf, buffer);}
	inline ConstLabelSignal& GetInputLabel(int input) const {return CoreIO::GetInputLabel(input, GetSymbol(), GetTimeframe());}
	DataBridge* GetDataBridge();
	bool IsJobsFinished() const;
	bool IsDependencyJobsFinished() const;
	Job& GetCurrentJob() {return *current_job;}
	JobThread& GetCurrentThread() {return *current_thrd;}
	// Set settings
	void SetTimeframe(int i, int period);
	void SetWindowType(int i) {window_type = i;}
	void SetPoint(double d);
	void SetCoreLevelCount(int count) {levels.SetCount(count);}
	void SetCoreLevel(int i, double value) {levels[i].value = value;}
	void SetCoreLevelType(int i, int style) {levels[i].style = style;}
	void SetCoreLevelLineWidth(int i, int line_width) {levels[i].line_width = line_width;}
	void SetCoreLevelsColor(Color clr) {levels_clr = clr;}
	void SetCoreLevelsStyle(int style) {levels_style = style;}
	void SetCoreMinimum(double value) {minimum = value; has_minimum = true;}
	void SetCoreMaximum(double value) {maximum = value; has_maximum = true;}
	void SetCoreChartWindow() {window_type = WINDOW_CHART;}
	void SetCoreSeparateWindow() {window_type = WINDOW_SEPARATE;}
	void ForceSetCounted(int i) {counted = i; next_count = i;}
	void SetSkipSetCount(bool b=true) {skip_setcount = b;}
	void SetBufferLabel(int i, const String& s) {}
	void SetEndOffset(int i) {ASSERT(i > 0); end_offset = i;}
	void SetSkipAllocate(bool b=true) {skip_allocate = b;}
	void SetFutureBars(int i) {future_bars = i;}
	Job& SetJob(int i, String job_title);
	Job& GetJob(int i);
	void SetJobFinished(bool b=true);
	void SetJobCount(int i) {jobs.SetCount(i);}
	void EnterJob(Job* job, JobThread* thrd) {current_job = job; current_thrd = thrd;}
	void LeaveJob() {current_job = NULL; current_thrd = NULL;}
	void SetHeatmap(bool b=true) {has_heatmap=b;}
	void SetAvoidRefresh(bool b=true) {avoid_refresh = b;}
	void Ready() {ready = true;}
	// Visible main functions
	void Refresh();
	void RefreshSubCores();
	void RefreshSources();
	void RefreshSourcesOnlyDeep();
	void ClearContent();
	void RefreshIO() {IO(*this);}
	void InitSubCores();
protected:
	// Value data functions
	double GetAppliedValue ( int applied_value, int i );
	double Open(int shift);
	double High(int shift);
	double Low(int shift);
	double Volume(int shift);
	int HighestHigh(int period, int shift);
	int LowestLow(int period, int shift);
	int HighestOpen(int period, int shift);
	int LowestOpen(int period, int shift);
};
// Parallel-array registry of a ScriptCore's integer arguments: title,
// allowed range/step and a pointer to the backing storage, per argument.
struct ArgScript {
	Vector<String> titles;
	Vector<int> mins, maxs, steps;
	Vector<int*> args;
	void Add(String title, int min, int max, int step, int& arg) {titles.Add(title); mins.Add(min); maxs.Add(max); steps.Add(step); args.Add(&arg);}
};
// Base class for scripted automation units: like Core but driven by the
// script/automation subsystem. Subclasses must declare their arguments
// (Arg) and a title; Init/Run/GetSignal are optional overrides.
class ScriptCore {
protected:
	friend class System;
	friend class ScriptAutomationCtrl;
	// Persistent
	// Temporary
	Vector<ScriptCore*> input_cores; // upstream scripts, indexed slots (may contain NULLs)
	Vector<int> args;                // current argument values
	String qtf_test_result;          // QTF-formatted report of the last test run
	int factory = -1;                // factory id within the System registry
	int actual = 0, total = 1;       // progress counters (actual of total)
	ScriptCore& GetInputCore(int i) {return *input_cores[i];}
public:
	ScriptCore();
	virtual void Init() {};
	virtual void Run() {};
	virtual void Arg(ArgScript& arg) = 0;
	virtual void SerializeEvent(Stream& s) {}
	virtual String GetTitle() = 0;
	virtual void GetSignal(int symbol, LabelSignal& signal) {Panic("Not implemented");}
	void Load();
	void Store();
	void Serialize(Stream& s) {SerializeEvent(s);}
	// Grows the slot vector on demand; intermediate slots stay NULL.
	void SetInputCore(int i, ScriptCore& c) {if (i >= input_cores.GetCount()) input_cores.SetCount(i+1, NULL); input_cores[i] = &c;}
	int GetActual() const {return actual;}
	int GetTotal() const {return total;}
	String GetTestResultQTF() const {return qtf_test_result;}
};
}
#endif
|
Factoriall/GuardianTalesGuildApp
|
app/src/main/java/org/techtown/gtguildraid/fragments/MemberResignedFragment.java
|
package org.techtown.gtguildraid.fragments;
import android.app.Dialog;
import android.graphics.Color;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.fragment.app.Fragment;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import org.techtown.gtguildraid.R;
import org.techtown.gtguildraid.adapters.MemberCardAdapter;
import org.techtown.gtguildraid.models.entities.GuildMember;
import org.techtown.gtguildraid.utils.MySwipeHelper;
import org.techtown.gtguildraid.utils.RoomDB;
import java.util.ArrayList;
import java.util.List;
/**
 * Fragment listing guild members who have resigned.
 *
 * <p>Each row supports three swipe actions: delete permanently, restore to
 * the active roster, or edit name/remark via a dialog. The resigned-member
 * count label is refreshed after every mutation.
 */
public class MemberResignedFragment extends Fragment {
    TextView resignCnt;
    RecyclerView recyclerView;
    List<GuildMember> memberList = new ArrayList<>();
    LinearLayoutManager linearLayoutManager;
    RoomDB database;
    MemberCardAdapter adapter;
    // Maximum number of active members (excluding the user); restoring is
    // blocked when the roster is full.
    final int MAX_MEMBER = 29;
    @Nullable
    @Override
    public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
        ViewGroup view = (ViewGroup) inflater.inflate(R.layout.fragment_member_resigned, container, false);
        database = RoomDB.getInstance(getActivity());
        linearLayoutManager = new LinearLayoutManager(getActivity());
        recyclerView = view.findViewById(R.id.resignedRecyclerView);
        memberList = database.memberDao().getResignedMembers();
        recyclerView.setLayoutManager(linearLayoutManager);
        adapter = new MemberCardAdapter(memberList);
        recyclerView.setAdapter(adapter);
        resignCnt = view.findViewById(R.id.resignCnt);
        resignCnt.setText(getString(R.string.member_numbers_resign, memberList.size()));
        // Implement swipe actions via MySwipeHelper.
        new MySwipeHelper(getActivity(), recyclerView, 200) {
            // Populate the swipe buttons (label, text size, icon, color, action).
            @Override
            public void instantiateMyButton(List<MySwipeHelper.MyButton> buffer) {
                buffer.add(new MyButton(getActivity(),
                        "삭제", // "Delete"
                        50,
                        0,
                        Color.parseColor("#FF0000"),
                        pos -> deleteMember(pos)));
                buffer.add(new MyButton(getActivity(),
                        "복귀", // "Restore"
                        50,
                        0,
                        Color.parseColor("#FFFF00"),
                        pos -> recoverMember(pos)));
                buffer.add(new MyButton(getActivity(),
                        "수정", // "Edit"
                        50,
                        0,
                        Color.parseColor("#90ee90"),
                        pos -> updateMember(pos)));
            }
        };
        return view;
    }
    // Open the edit dialog for the member at `pos` and persist name/remark changes.
    private void updateMember(int pos) {
        GuildMember m = memberList.get(pos);
        final int sID = m.getID();
        String sName = m.getName();
        String sRemark = m.getRemark();
        final Dialog dialog = new Dialog(getActivity());
        dialog.setContentView(R.layout.dialog_member);
        int width = WindowManager.LayoutParams.MATCH_PARENT;
        int height = WindowManager.LayoutParams.WRAP_CONTENT;
        dialog.getWindow().setLayout(width, height);
        dialog.show();
        final EditText name = dialog.findViewById(R.id.name);
        final EditText remark = dialog.findViewById(R.id.remark);
        Button updateButton = dialog.findViewById(R.id.updateButton);
        name.setText(sName);
        remark.setText(sRemark);
        updateButton.setOnClickListener(view -> {
            dialog.dismiss();
            String uName = name.getText().toString().trim();
            String uRemark = remark.getText().toString().trim();
            database.memberDao().update(sID, uName, uRemark);
            // Reload the resigned list so the adapter reflects the edit.
            memberList.clear();
            memberList.addAll(database.memberDao().getResignedMembers());
            resignCnt.setText(getString(R.string.member_numbers_resign, memberList.size()));
            adapter.notifyDataSetChanged();
        });
    }
    // Move the member at `pos` back to the active roster, unless it is full.
    private void recoverMember(int pos) {
        int memberNum = database.memberDao().getCurrentMembersWithoutMe().size();
        // NOTE(review): uses == rather than >= — if the active roster could
        // ever exceed MAX_MEMBER this guard would be bypassed; confirm.
        if(memberNum == MAX_MEMBER){
            showToast();
            return;
        }
        GuildMember m = memberList.get(pos);
        database.memberDao().setIsResigned(m.getID(), false);
        memberList.remove(pos);
        resignCnt.setText(getString(R.string.member_numbers_resign, memberList.size()));
        adapter.notifyItemRemoved(pos);
        adapter.notifyItemRangeChanged(pos, memberList.size());
    }
    // Permanently delete the member at `pos` from the database and the list.
    private void deleteMember(int pos) {
        GuildMember m = memberList.get(pos);
        database.memberDao().delete(m);
        memberList.remove(pos);
        resignCnt.setText(getString(R.string.member_numbers_resign, memberList.size()));
        adapter.notifyItemRemoved(pos);
        adapter.notifyItemRangeChanged(pos, memberList.size());
    }
    // Toast shown when the active roster is full. (Message: "The roster is full!")
    private void showToast(){
        Toast.makeText(getActivity(), "인원이 가득찼습니다!", Toast.LENGTH_LONG).show();
    }
}
|
eltociear/material-ui
|
packages/material-ui-icons/lib/FontDownloadOffRounded.js
|
"use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
var _interopRequireWildcard = require("@babel/runtime/helpers/interopRequireWildcard");
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
var React = _interopRequireWildcard(require("react"));
var _createSvgIcon = _interopRequireDefault(require("./utils/createSvgIcon"));
var _jsxRuntime = require("react/jsx-runtime");
var _default = (0, _createSvgIcon.default)( /*#__PURE__*/(0, _jsxRuntime.jsx)("path", {
d: "m12.58 9.75-.87-.87.23-.66h.1l.54 1.53zm-2.23-2.23.2-.52c.23-.6.8-1 1.45-1s1.22.4 1.45 1l2.17 5.79L22 19.17V4c0-1.1-.9-2-2-2H4.83l5.52 5.52zm10.84 15.09c-.39.39-1.02.39-1.41 0l-.61-.61H4c-1.1 0-2-.9-2-2V4.83l-.61-.61a.9959.9959 0 0 1 0-1.41c.39-.39 1.02-.39 1.41 0l18.38 18.38c.4.39.4 1.03.01 1.42zm-9.09-7.68-3.3-3.3-1.9 5.07c-.23.63.23 1.3.9 1.3h.01c.41 0 .77-.26.9-.64l.86-2.43h2.53z"
}), 'FontDownloadOffRounded');
exports.default = _default;
|
avra-me/avra-me.github.io
|
plugins/generate-assets/generate-gif.js
|
<gh_stars>0
import PNG from "png-js";
import GIFEncoder from "gifencoder";
import fs from "fs";
import * as readline from "readline";
// Promisify PNG#decode's callback API: resolves with the decoded pixel
// buffer, rejects if decode() throws synchronously.
function decode(png) {
  return new Promise((resolve, reject) => {
    try {
      png.decode(resolve);
    } catch (error) {
      reject(error);
    }
  });
}
// Build a GIFEncoder streaming its output to `filePath`, configured for a
// `length`-second clip at `fps` frames per second.
export const createEncoder = ({
  filePath,
  width,
  height,
  length,
  fps,
  quality = 10,
}) => {
  const encoder = new GIFEncoder(width, height);
  encoder.createWriteStream().pipe(fs.createWriteStream(filePath));
  encoder.start();
  // Repeat the length in seconds * the number of frames in a given second
  // NOTE(review): gifencoder's setRepeat expects a repeat mode (0 = loop
  // forever, -1 = play once, n = loop n times); passing length * fps sets a
  // literal loop count — confirm this is the intended looping behaviour.
  encoder.setRepeat(length * fps);
  // Take a frame every 1000/frames per second
  encoder.setDelay(1000 / fps);
  encoder.setQuality(quality);
  return encoder;
};
// Capture `length * fps` screenshots of `page` on an interval, decode each
// PNG and append it as a GIF frame. Resolves once the capture window ends
// and all in-flight screenshots have settled.
export const generateGif = async ({
  encoder,
  page,
  width,
  height,
  length,
  fps,
}) => {
  let timeoutId, intervalId;
  const screenshotOptions = { clip: { width, height, x: 0, y: 0 } };
  const inFlightPromises = [];
  // Progress counters rendered to stdout after every capture/completion.
  const state = {
    processing: 0,
    completed: 0,
  };
  const logCurrentState = () => {
    readline.clearLine(process.stdout, 0);
    readline.cursorTo(process.stdout, 0, 1);
    process.stdout.write(
      `Captured ${state.processing} | Completed ${
        state.completed
      } | Expected ${length * fps}\n`
    );
  };
  const setInFlight = () => {
    state.processing += 1;
    logCurrentState();
  };
  const setDone = () => {
    state.completed += 1;
    logCurrentState();
  };
  // One capture: screenshot -> PNG decode -> encoder frame; tracked so we
  // can await stragglers at the end. Errors reject the outer promise.
  const addScreenToGif = (reject) => async () => {
    const promise = page
      .screenshot(screenshotOptions)
      .then((screen) => new PNG(screen))
      .then((png) => decode(png))
      .then((pixels) => encoder.addFrame(pixels))
      .then(() => setDone())
      .catch(reject);
    inFlightPromises.push(promise);
    setInFlight();
    return promise;
  };
  const onComplete = (resolve) => async () => {
    // Prevent new screen grabs being queued
    // NOTE(review): timeoutId comes from setTimeout; clearInterval happens to
    // work in Node but clearTimeout would be the correct call — confirm.
    clearInterval(intervalId);
    clearInterval(timeoutId);
    // Wait for in flight screenshots
    await Promise.allSettled(inFlightPromises);
    console.log("Image generation complete");
    if (resolve) {
      resolve();
    }
  };
  // NOTE(review): .finally(onComplete()) invokes onComplete immediately to
  // obtain the callback, so the completion routine also runs (without a
  // resolve) after the promise settles — appears to be a safety net; confirm.
  return new Promise((resolve, reject) => {
    timeoutId = setTimeout(onComplete(resolve), length * 1000);
    intervalId = setInterval(addScreenToGif(reject), 1000 / fps);
  }).finally(onComplete());
};
// Entry point: create an encoder streaming to `filePath`, capture frames
// from `page` for the configured duration, then finalize the GIF stream.
export default async ({ filePath, width, height, formatInfo, page }) => {
  const encoder = createEncoder({ filePath, width, height, ...formatInfo });
  await generateGif({ encoder, page, width, height, ...formatInfo });
  encoder.finish();
};
|
aherbert/GDSC-Core
|
gdsc-core/src/test/java/uk/ac/sussex/gdsc/core/utils/rng/MixersTest.java
|
<filename>gdsc-core/src/test/java/uk/ac/sussex/gdsc/core/utils/rng/MixersTest.java
/*-
* #%L
* Genome Damage and Stability Centre ImageJ Core Package
*
* Contains code used by:
*
* GDSC ImageJ Plugins - Microscopy image analysis
*
* GDSC SMLM ImageJ Plugins - Single molecule localisation microscopy (SMLM)
* %%
* Copyright (C) 2011 - 2022 <NAME>
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/gpl-3.0.html>.
* #L%
*/
package uk.ac.sussex.gdsc.core.utils.rng;
import java.util.function.IntUnaryOperator;
import java.util.function.LongUnaryOperator;
import org.apache.commons.rng.UniformRandomProvider;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import uk.ac.sussex.gdsc.test.junit5.SeededTest;
import uk.ac.sussex.gdsc.test.rng.RngFactory;
import uk.ac.sussex.gdsc.test.utils.RandomSeed;
@SuppressWarnings("javadoc")
class MixersTest {
  /** XOR involution sanity check: z = x ^ y implies x = y ^ z and y = x ^ z. */
  @SeededTest
  void testXor(RandomSeed seed) {
    final UniformRandomProvider rng = RngFactory.create(seed.get());
    for (int i = 0; i < 500; i++) {
      final int x = rng.nextInt();
      final int y = rng.nextInt();
      final int z = x ^ y;
      Assertions.assertEquals(x, y ^ z);
      Assertions.assertEquals(y, x ^ z);
    }
  }
  /**
   * Checks that reverseXorRightShift inverts {@code x ^ (x >>> shift)} for
   * every shift in [1, 63] on random inputs.
   */
  @SeededTest
  void testReverseXorRightShift(RandomSeed seed) {
    final UniformRandomProvider rng = RngFactory.create(seed.get());
    for (int shift = 1; shift < 64; shift++) {
      // Effectively-final copy for use inside the assertion message lambda.
      final int fshift = shift;
      for (int i = 0; i < 500; i++) {
        final long x = rng.nextLong();
        final long y = x ^ (x >>> shift);
        final long x2 = Mixers.reverseXorRightShift(y, shift);
        Assertions.assertEquals(x, x2, () -> "shift = " + fshift);
      }
    }
  }
@SeededTest
void testReverseXorLeftShift(RandomSeed seed) {
final UniformRandomProvider rng = RngFactory.create(seed.get());
for (int shift = 1; shift < 64; shift++) {
final int fshift = shift;
for (int i = 0; i < 500; i++) {
final long x = rng.nextLong();
final long y = x ^ (x << shift);
final long x2 = Mixers.reverseXorLeftShift(y, shift);
Assertions.assertEquals(x, x2, () -> "shift = " + fshift);
}
}
}
@SeededTest
void testRxsmxs(RandomSeed seed) {
assertUnmixer(seed, Mixers::rxsmxs, Mixers::rxsmxsUnmix);
}
@SeededTest
void testRrmxmx(RandomSeed seed) {
assertUnmixer(seed, Mixers::rrmxmx, Mixers::rrmxmxUnmix);
}
private static void assertUnmixer(RandomSeed seed, LongUnaryOperator mix,
LongUnaryOperator unmix) {
for (final long x : new long[] {Long.MIN_VALUE, -1, 0, 1, Long.MAX_VALUE}) {
final long y = mix.applyAsLong(x);
final long x2 = unmix.applyAsLong(y);
Assertions.assertEquals(x, x2);
}
for (int i = 0; i < 64; i++) {
final long x = 1L << i;
final long y = mix.applyAsLong(x);
final long x2 = unmix.applyAsLong(y);
Assertions.assertEquals(x, x2);
}
final UniformRandomProvider rng = RngFactory.create(seed.get());
for (int i = 0; i < 500; i++) {
final long x = rng.nextLong();
final long y = mix.applyAsLong(x);
final long x2 = unmix.applyAsLong(y);
Assertions.assertEquals(x, x2);
}
}
@Test
void testRxsmxsOutput() {
// Code generated using the reference c code obtained from:
// https://mostlymangling.blogspot.com/2018/07/on-mixing-functions-in-fast-splittable.html
final long[] values =
{0x4909bf4228b09f5dL, 0x62f1175aa2ac2becL, 0x0fff75f3a0f6eaa1L, 0x440055bc9b89eaf0L,
0x8d3954796546094bL, 0x541dc47bccef0e39L, 0xbcf2cf7ed5e3db25L, 0x3902cbf791fbac96L,
0x144101ff31d0bccdL, 0x5da6aec2faa5adceL, 0xbfff757b69be2784L, 0x6b3e67846edd0fd5L,
0x68192de4e987bdc4L, 0xb3fe34cee77a79a8L, 0xdea80e3b85df836dL, 0xd6276bdcf3d6d342L,
0x0b6cba29f4d2ad13L, 0xb6bfe1d2d013aa12L, 0xf08e347e079ba78cL, 0x0150ac7471e862fcL,
0x16457cc24205be12L, 0x1b3c87d7fc416c26L, 0xa7d1a4e56bbf95a9L, 0x44d605b26c0bbdaaL,
0xfe6167e96c66a310L, 0x09ecd862a1ef544bL, 0x8d83e2956e0da35dL, 0x268734a1a4d94cfdL,
0x104b3ca0ce4b772bL, 0x488689436f671ba8L, 0x3ae5b9b467cc827aL, 0xac0e0ba50f83e589L,
0xfd8d4041fb3350c6L, 0x2aebfa9bf4afe825L, 0xf7f840be98599b68L, 0x2c65c37d25d3f0ccL,
0xbf49fcbcef6acb81L, 0x0552891572d190a4L, 0x8292c9877a7a73dbL, 0x6377a3a02614593eL,};
assertMixer(Mixers::rrmxmx, values, 0x012de1babb3c4104L, 0xc8161b4202294965L);
}
@Test
void testRrxmrrxmsx0Output() {
// Code generated using the reference c code obtained from:
// http://mostlymangling.blogspot.com/2019/01/better-stronger-mixer-and-test-procedure.html
final long[] values =
{0x9ccbd09c5dc10ca7L, 0x3c8947159899882bL, 0xd893601ad724cbe5L, 0x72f26c4df3673ee6L,
0xfaf48ced65d211cfL, 0x8a44f467531be62aL, 0x09e0c1313a1f5d0aL, 0xf6187ebbdc0757f5L,
0xcb46619de4f97746L, 0x22144f07f748af25L, 0x80b16f5e7b47ea31L, 0x32b8afbb80754fc5L,
0xa3b5c4410d509a03L, 0xd168c5f50cce6993L, 0x0f82c25b1fe2b83cL, 0xaa9939e780d46a94L,
0xb243a1b70ccfd29fL, 0x8339e40046415384L, 0x330356935c8bd63eL, 0x409d8934b393c9ffL,
0x79e4e7213ca3b57eL, 0xb41093551559a3bcL, 0x6617421a35827962L, 0x07e578716190d472L,
0x658d3b2eac566bdcL, 0xf1d12ab9abe02ac8L, 0xd342993d81c80796L, 0x7db078f0750ff480L,
0xb293aa0fe53fe1a3L, 0x6e48ce5dbf834da5L, 0x4c53364beb947791L, 0xc6dbd1f80ce8ff08L,
0x35d26dadbb5a7380L, 0xa672343acf8264c8L, 0xac015d5d9bef0924L, 0x39b36602bbd90446L,
0x759517ff4a1ef0fbL, 0x0ef3fdcdbf413a46L, 0x991d24c8b6c9f04fL, 0x7640f103903507ecL,};
assertMixer(Mixers::rrxmrrxmsx0, values, 0x012de1babb3c4104L, 0xc8161b4202294965L);
}
@Test
void testMurmur3IntOutput() {
// Code generated using the original c code:
// https://github.com/aappleby/smhasher/blob/master/src/MurmurHash3.cpp
final int[] values = {0x44d89774, 0xe7488b4e, 0x6c1465b4, 0xef03ae52, 0x5b2fd1f4, 0x184498c9,
0x9a667364, 0x087ae15c, 0xcf91862a, 0x4bd74e5e, 0x709fbc40, 0x57d623de, 0x32a51e70,
0x35a5495b, 0xc2648d76, 0xcca068c5, 0xcf3774e5, 0xedc88ea5, 0x37d08241, 0x7470b7b8,
0xab52458a, 0x2d25df71, 0xa3349e19, 0x083a64a6, 0xa23389d2, 0xf4d447c9, 0xc9514d1e,
0xe92325c8, 0x8d7a890b, 0x135d4731, 0x1a3b17b6, 0x958f6b3b, 0xc054db4b, 0xbe6ecbca,
0x4530fdb7, 0xcaa56899, 0xe1c959d6, 0xe67d6e9a, 0xb7f13547, 0xa5e9f292,};
assertMixer(Mixers::murmur3, values, 0x012de1ba, 0xc8161b42);
}
@Test
void testMurmur3LongOutput() {
// Code generated using the reference c code provided by <NAME>:
// http://zimbry.blogspot.com/2011/09/better-bit-mixing-improving-on.html
final long[] values =
{0xdb3e232c1003b576L, 0x4da53e5b7c84173eL, 0xe2c498d0956e2a67L, 0x3a1a865a2c08ea34L,
0xb297e624901fea79L, 0x241a17faf3ee04bdL, 0x32b88f7c345c3c8aL, 0xbed326c79cde3d54L,
0x1173b9a40603f93fL, 0xc690ca776314d65cL, 0xe3753c2a2602094eL, 0x40d36f700f646e45L,
0xf03851d528a213e2L, 0x53acf12d51b641b4L, 0x91f382b63e0faee1L, 0x659dda2c787f7c9aL,
0xeddedb24f7d1f43eL, 0xe8a2dc137d7a84aeL, 0xc8cfa41cc2af0a4fL, 0xd2337b326e5c306eL,
0x88f0cf730266cf9bL, 0xdce208c02c2aaa04L, 0xbf2e5961057a35f8L, 0x5e136f9d03f7ccb0L,
0x6b64306b15c5fc39L, 0x04c6252c23bf5144L, 0x854ab124a4663d80L, 0xdb3746bcfc35ebceL,
0xb1f3fcc933315abbL, 0xc2cb15b0127ff726L, 0x6a5ebc92086ac3f2L, 0x9dfe71c717ac71aaL,
0x4d7856a84b1d4ee4L, 0x9fc002527767f050L, 0xc784f29e480b2aa2L, 0x1400dc5cbb74621eL,
0x63afda06ad81baeeL, 0x93ac5025caf335e4L, 0x988cbf34e715eec9L, 0x14318afff8984560L,};
assertMixer(Mixers::murmur3, values, 0x012de1babb3c4104L, 0xc8161b4202294965L);
}
@Test
void testStafford1Output() {
// Code generated using the reference c code provided by <NAME>:
// http://zimbry.blogspot.com/2011/09/better-bit-mixing-improving-on.html
final long[] values =
{0xe00854e38d780911L, 0xbb0ac46041e42918L, 0x10e69689ca4a9a11L, 0x17c6893b3cd5037fL,
0x52c12c13aea13d04L, 0x5c7eda3b1354483cL, 0x3da00e6700b4bd8dL, 0x989c64f349b85574L,
0xe27908f80f5754dbL, 0xd89e3d09f9bdfc73L, 0xec0859bf6f61c638L, 0x0a248688fedb698eL,
0x898dc39bd789089aL, 0x53f1dda084300687L, 0xccf410496922c3e9L, 0xc6bb4639b2759264L,
0x058cb1a89134d055L, 0xd85d9b39de7669beL, 0xbe4804c7ada87e09L, 0xc05408fb8d0a4b8cL,
0x4864108702020a36L, 0x6bc93bbbf6f1ef7dL, 0x74a538e6d9a238f7L, 0xb9204a971ef323fbL,
0xd1e4d8e08809efe8L, 0x669dc2e2c05fc928L, 0xdc5d71277d80e453L, 0x301282108a494599L,
0x11bb69f547d202e3L, 0xe4f5e136de914632L, 0xae25ab4c28031282L, 0xc1bbfc67c9ed36c1L,
0x37ef297a41970b69L, 0x2cf273e9261aa2e2L, 0xe50ec4c9e9c46763L, 0x3dae4f49c7b9803aL,
0x527d10e48f1804e2L, 0x4ddd2787e6088b0bL, 0x3c80ef127e4d4e8eL, 0xa5c012fe3273f5b5L,};
assertMixer(Mixers::stafford1, values, 0x012de1babb3c4104L, 0xc8161b4202294965L);
}
@Test
void testStafford13Output() {
// Code generated using the reference c code provided by <NAME>:
// http://zimbry.blogspot.com/2011/09/better-bit-mixing-improving-on.html
final long[] values =
{0xb6e5bb4394c07618L, 0xc22a2f0fedefdbc5L, 0xc8c692032976fb0dL, 0x5eeafdee96357649L,
0xac07551c38d612edL, 0x4c02325af6a2d1c6L, 0x419a0e82ffcb9bacL, 0x0429b1de8b800604L,
0x087870444a51ef1dL, 0x909c0bbbb26527d6L, 0x6127dc6b5184f343L, 0x8e1a3ce1a932ff2cL,
0x6111ce001b3d0c3cL, 0xcd6de6af46e8bb99L, 0xc9456a240b1475a7L, 0x4d3d1bfc44594264L,
0xe3030ad6a2ede854L, 0xfccb1eea6d681fa9L, 0x9ac3b08ef7a65e65L, 0x14548ff7c51b277dL,
0x80638702bc9817abL, 0xaf44c7426c48f1a5L, 0xe588effe5e90d5bdL, 0xf9debeb061581d31L,
0xe5783c539c916096L, 0xe70372dfeabf1da8L, 0x049ccefce30b5e55L, 0xb2fdd15229d06a64L,
0xe15967acded0e841L, 0x3ab056912a9c7f44L, 0x800f750d6f92ac6bL, 0x828beb2c6b65fa10L,
0x2c7ebbc82cdee193L, 0x5f5bd29b1fb5b4e2L, 0xba69ecf78275b12eL, 0x82aea2bebbd0caa5L,
0xf5ebd97835f7d5abL, 0x3513e0fc1af2b448L, 0x151b8e21cbf81789L, 0xdb17e9cfacf6b51eL,};
assertMixer(Mixers::stafford13, values, 0x012de1babb3c4104L, 0xc8161b4202294965L);
}
@Test
void testLea64Output() {
// Code generated using the reference java code provided by Steele and Vigna:
// https://doi.org/10.1145/3485525
final long[] values =
{0x45b8512f9ff46f10L, 0xd6ce3db0dd63efc3L, 0x47bf6058710f2a88L, 0x85b8c74e40981596L,
0xd77442e45944235eL, 0x3ea4255636bfb1c3L, 0x296ec3c9d3e0addcL, 0x6c285eb9694f6eb2L,
0x8121aeca2ba15b66L, 0x2b6d5c2848c4fdc4L, 0xcc99bc57f5e3e024L, 0xc00f59a3ad3666cbL,
0x74e5285467c20ae7L, 0xf4d51701e3ea9555L, 0x3aeb92e31a9b1a0eL, 0x5a1a0ce875c7dcaL,
0xb9a561fb7d82d0f3L, 0x97095f0ab633bf2fL, 0xfe74b5290c07c1d1L, 0x9dfd354727d45838L,
0xf6279a8801201eddL, 0x2db471b1d42860eeL, 0x4ee66ceb27bd34ecL, 0x2005875ad25bd11aL,
0x92eac4d1446a0204L, 0xa46087d5dd5fa38eL, 0x7967530c43faabe1L, 0xc53e1dd74fd9bd15L,
0x259001ab97cca8bcL, 0x5edf024ee6cb1d8bL, 0x3fc021bba7d0d7e6L, 0xf82cae56e00245dbL,
0xf1dc30974b524d02L, 0xe1f2f1db0af7ace9L, 0x853d5892ebccb9f6L, 0xe266f36a3121da55L,
0x3b034a81bad01622L, 0x852b53c14569ada2L, 0xee902ddc658c86c9L, 0xd9e926b766013254L,};
assertMixer(Mixers::lea64, values, 0x012de1babb3c4104L, 0xc8161b4202294965L);
}
private static void assertMixer(IntUnaryOperator mix, int[] expected, int state, int increment) {
for (int i = 0; i < expected.length; i++) {
Assertions.assertEquals(expected[i], mix.applyAsInt(state += increment));
}
}
private static void assertMixer(LongUnaryOperator mix, long[] expected, long state,
long increment) {
for (int i = 0; i < expected.length; i++) {
Assertions.assertEquals(expected[i], mix.applyAsLong(state += increment));
}
}
/**
* This is an example test to demonstrate that the MurmurHash3-style mix functions are bijections
* that can be reversed.
*/
@Test
void canUnmixStafford13() {
final long u1 = NumberUtils.computeInverse(0xbf58476d1ce4e5b9L);
final long u2 = NumberUtils.computeInverse(0x94d049bb133111ebL);
System.out.println(u1);
System.out.println(u2);
final UniformRandomProvider rng = RngFactory.createWithFixedSeed();
for (int i = 0; i < 200; i++) {
final long x = rng.nextLong();
final long y = Mixers.stafford13(x);
long z = Mixers.reverseXorRightShift(y, 31);
z *= u2;
z = Mixers.reverseXorRightShift(z, 27);
z *= u1;
z = Mixers.reverseXorRightShift(z, 30);
Assertions.assertEquals(x, z);
Assertions.assertEquals(x, unmixStafford13(y));
}
}
private static long unmixStafford13(long x) {
// Hard-coded inverse constants
final long u1 = -7575587736534282103L;
final long u2 = 3573116690164977347L;
x = x ^ (x >>> 31);
x ^= (x >>> 62);
x *= u2;
x = x ^ (x >>> 27);
x ^= (x >>> 54);
x *= u1;
x = x ^ (x >>> 30);
x ^= (x >>> 60);
return x;
}
}
|
sergeishay/Screenters
|
source-pack/pro/SideNav/SideNavNew/SideNavNav/index.js
|
// Barrel file: re-export the SideNavNav component (default export) and all of
// its named exports so consumers can import from this directory directly.
export { default } from './SideNavNav';
export * from './SideNavNav';
|
barnyard/pi
|
p2p-networkmanager/src/main/java/com/bt/pi/app/networkmanager/dhcp/DhcpRefreshFailedException.java
|
<gh_stars>1-10
/* (c) British Telecommunications plc, 2009, All Rights Reserved */
package com.bt.pi.app.networkmanager.dhcp;
/**
 * Indicates that a DHCP refresh operation could not be completed.
 *
 * <p>Unchecked so callers are not forced to handle it; the triggering error is preserved as the
 * cause for diagnostics.
 */
public class DhcpRefreshFailedException extends RuntimeException {
    private static final long serialVersionUID = 1L;

    /**
     * Creates the exception.
     *
     * @param message description of the failed refresh
     * @param t the underlying error that caused the refresh to fail
     */
    public DhcpRefreshFailedException(String message, Throwable t) {
        super(message, t);
    }
}
|
lingjf/h2un
|
test/test_matcher_matcher.cpp
|
<filename>test/test_matcher_matcher.cpp
#include "../source/h2_unit.cpp"
#include "test_cplusplus.hpp"
#include "test_types.hpp"
// Tests for h2_matcher construction and matching. matches() returns nullptr on
// a successful match and a failure description otherwise, so
// OK(nullptr == ...) asserts success and OK(nullptr != ...) asserts failure.
SUITE(Matcher)
{
   Case(Eq number)
   {
      // 65 is A
      OK(nullptr == h2::h2_matcher<int>(Eq(65)).matches(65));
      OK(nullptr == h2::h2_matcher<double>(Eq(65)).matches(65.0));
      // A bare floating-point value -- presumably an approximate-equality
      // matcher, since 65.000000001 is expected to match 65; confirm tolerance.
      OK(nullptr == h2::h2_matcher<float>(65.000000001).matches(65));
      OK(nullptr == h2::h2_matcher<double>(65.000000001).matches(65));
      OK(nullptr == h2::h2_matcher<char>(Eq(65)).matches('A'));
      OK(nullptr == h2::h2_matcher<unsigned long long>(Eq(65)).matches(65ULL));
      OK(nullptr != h2::h2_matcher<int>(Eq(65)).matches(66));
   }
   Case(Eq bool)
   {
      OK(nullptr == h2::h2_matcher<bool>(Eq(true)).matches(true));
      // Non-zero compares equal to true here.
      OK(nullptr == h2::h2_matcher<bool>(Eq(true)).matches(2));
      OK(nullptr != h2::h2_matcher<bool>(Eq(true)).matches(false));
   }
   Case(Eq string)
   {
      // Use a runtime buffer to ensure comparison is by content, not pointer.
      char a[1024];
      sprintf(a, "A");
      OK(nullptr == h2::h2_matcher<std::string>(Eq("A")).matches("A"));
      OK(nullptr == h2::h2_matcher<const char*>(Eq("A")).matches(a));
      OK(nullptr != h2::h2_matcher<const char*>(Eq("A")).matches("B"));
      // A bare string acts as an equality matcher.
      OK(nullptr == h2::h2_matcher<const char*>("abcd").matches("abcd"));
      std::string abcd = "abcd";
      OK(nullptr == h2::h2_matcher<std::string>(abcd).matches("abcd"));
   }
   Case(CaseLess)
   {
      // Case-insensitive string equality.
      OK(nullptr == h2::h2_matcher<const char*>(CaseLess("AbCd")).matches("ABcd"));
      std::string AbCd = "AbCd";
      OK(nullptr == h2::h2_matcher<const char*>(CaseLess(AbCd)).matches("ABcd"));
      OK(nullptr == h2::h2_matcher<std::string>(CaseLess("ABcd")).matches(AbCd));
   }
   Case(Regex [re])
   {
      OK(nullptr == h2::h2_matcher<const char*>(Re("abc.*")).matches("abcdef"));
      // Regex matching is case-sensitive.
      OK(nullptr != h2::h2_matcher<const char*>(Re("A.*")).matches("abcdef"));
   }
   Case(Wildcard)
   {
      // '*' matches any run of characters, '?' matches exactly one.
      OK(nullptr == h2::h2_matcher<const char*>(We("abc*")).matches("abcdef"));
      OK(nullptr != h2::h2_matcher<const char*>(We("abc?yz")).matches("abcdef"));
   }
   Case(Substr)
   {
      OK(nullptr == h2::h2_matcher<const char*>(Substr("cd")).matches("abcdef"));
      OK(nullptr != h2::h2_matcher<const char*>(Substr("cc")).matches("abcdef"));
      std::string cd = "cd";
      OK(nullptr == h2::h2_matcher<const char*>(Substr(cd)).matches("abcdef"));
      // String matchers compose with CaseLess.
      OK(nullptr == h2::h2_matcher<const char*>(CaseLess(Substr("cd"))).matches("ABCDEF"));
   }
   Case(StartsWith)
   {
      OK(nullptr == h2::h2_matcher<const char*>(StartsWith("abc")).matches("abcdef"));
      std::string abc = "abc";
      OK(nullptr == h2::h2_matcher<const char*>(StartsWith(abc)).matches("abcdef"));
      OK(nullptr == h2::h2_matcher<const char*>(CaseLess(StartsWith("abc"))).matches("ABCDEF"));
   }
   Case(EndsWith)
   {
      OK(nullptr == h2::h2_matcher<const char*>(EndsWith("def")).matches("abcdef"));
      OK(nullptr == h2::h2_matcher<const char*>(CaseLess(EndsWith("def"))).matches("ABCDEF"));
   }
   Case(Me)
   {
      // Byte-wise memory comparison of two equal buffers.
      unsigned char t1[] = {1, 2, 3, 4, 5, 6, 7, 8};
      unsigned char t2[] = {1, 2, 3, 4, 5, 6, 7, 8};
      OK(nullptr == h2::h2_matcher<unsigned char*>(Me(t1, 8)).matches(t2));
   }
   Case(Pointee)
   {
      // 65 is A
      int a65 = 65;
      int a66 = 66;
      // Pointee applies the inner matcher to the dereferenced pointer.
      OK(nullptr == h2::h2_matcher<int*>(Pointee(65)).matches(&a65));
      OK(nullptr == h2::h2_matcher<int*>(Pointee(Gt(65))).matches(&a66));
      OK(nullptr != h2::h2_matcher<int*>(Pointee(65)).matches(&a66));
   }
   Case(Not)
   {
      // 65 is A
      OK(nullptr == h2::h2_matcher<int>(Not(65)).matches(11));
      OK(nullptr == h2::h2_matcher<int>(Not(Gt(65))).matches(11));
#if !defined __clang__
      OK(nullptr == h2::h2_matcher<const char*>(Not(Lt("A"))).matches("B"));
#endif
      OK(nullptr != h2::h2_matcher<int>(Not(65)).matches(65));
   }
   Case(AllOf)
   {
      // (65, 75)
      OK(nullptr == h2::h2_matcher<int>(AllOf(Gt(65), Lt(75))).matches(66));
      OK(nullptr != h2::h2_matcher<int>(AllOf(Gt(65), Lt(75))).matches(11));
   }
   Case(AnyOf)
   {
      // (65, 75)
      OK(nullptr == h2::h2_matcher<int>(AnyOf(Gt(65), Lt(75))).matches(66));
      OK(nullptr == h2::h2_matcher<int>(AnyOf(Gt(65), Lt(55))).matches(66));
      OK(nullptr != h2::h2_matcher<int>(AnyOf(Gt(65), Lt(55))).matches(60));
   }
   Case(NoneOf)
   {
      // <65, 75<
      OK(nullptr == h2::h2_matcher<int>(NoneOf(Lt(65), Gt(75))).matches(70));
      OK(nullptr != h2::h2_matcher<int>(NoneOf(Lt(65), Gt(75))).matches(60));
   }
   Case(ListOf)
   {
      // Element-wise matching against raw arrays and containers.
      int a[] = {1, 3, 5};
      OK(nullptr == h2::h2_matcher<int*>(ListOf(1, 3, 5)).matches(a));
      std::vector<int> b = {1, 3, 5};
      OK(nullptr == h2::h2_matcher<std::vector<int>>(ListOf(1, 3, 5)).matches(b));
   }
}
// A brace-initialized {} argument matcher accepts any value for that argument.
CASE(Any matcher{})
{
   MOCK(foobar2, int(int, const char*)).Once({}, {}).Return(11);
   OK(11, foobar2(1, "A"));
}
|
kkrull/gohttp
|
capability/doc.go
|
// Package capability handles requests about generic capabilities of the server.
// Any request specific to a resource is handled by that resource's Route.
package capability
|
Ezeer/VegaStrike_win32FR
|
vegastrike/boost/1_28/boost/graph/smallest_last_ordering.hpp
|
//=======================================================================
// Copyright 1997, 1998, 1999, 2000 University of Notre Dame.
// Authors: <NAME>, <NAME>, <NAME>
//
// This file is part of the Boost Graph Library
//
// You should have received a copy of the License Agreement for the
// Boost Graph Library along with the software; see the file LICENSE.
// If not, contact Office of Research, University of Notre Dame, Notre
// Dame, IN 46556.
//
// Permission to modify the code and to distribute modified code is
// granted, provided the text of this NOTICE is retained, a notice that
// the code was modified is included with the above COPYRIGHT NOTICE and
// with the COPYRIGHT NOTICE in the LICENSE file, and that the LICENSE
// file is distributed with the modified code.
//
// LICENSOR MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED.
// By way of example, but not limitation, Licensor MAKES NO
// REPRESENTATIONS OR WARRANTIES OF MERCHANTABILITY OR FITNESS FOR ANY
// PARTICULAR PURPOSE OR THAT THE USE OF THE LICENSED SOFTWARE COMPONENTS
// OR DOCUMENTATION WILL NOT INFRINGE ANY PATENTS, COPYRIGHTS, TRADEMARKS
// OR OTHER RIGHTS.
//=======================================================================
#ifndef BOOST_SMALLEST_LAST_VERTEX_ORDERING_HPP
#define BOOST_SMALLEST_LAST_VERTEX_ORDERING_HPP
/*
The smallest-last ordering is defined for the loopless graph G with
vertices a(j), j = 1,2,...,n where a(j) is the j-th column of A and
with edge (a(i),a(j)) if and only if columns i and j have a
non-zero in the same row position. The smallest-last ordering is
determined recursively by letting list(k), k = n,...,1 be a column
with least degree in the subgraph spanned by the un-ordered
columns.
*/
#include <vector>
#include <algorithm>
#include <boost/graph/graph_traits.hpp>
#include <boost/pending/bucket_sorter.hpp>
namespace boost {

  // Computes a smallest-last vertex ordering: positions n-1 .. 0 are assigned
  // by repeatedly removing a vertex of minimum degree in the subgraph spanned
  // by the not-yet-ordered vertices.
  //
  // This overload builds the degree-keyed bucket sorter and delegates to the
  // overload below.
  template <class VertexListGraph, class Order, class Degree, class Marker>
  void
  smallest_last_vertex_ordering(const VertexListGraph& G, Order order,
                                Degree degree, Marker marker) {
    // NOTE(review): `typename` before an unqualified template-id is
    // non-standard; retained as-is for the legacy compilers this header targets.
    typedef typename graph_traits<VertexListGraph> GraphTraits;
    typedef typename GraphTraits::vertex_descriptor Vertex;
    //typedef typename GraphTraits::size_type size_type;
    typedef std::size_t size_type;

    const size_type num = num_vertices(G);

    typedef typename vertex_property_map<VertexListGraph, vertex_index_t>::type ID;
    typedef bucket_sorter<size_type, Vertex, Degree, ID> BucketSorter;

    // One bucket per possible degree value (0 .. num-1).
    BucketSorter degree_bucket_sorter(num, num, degree,
                                      get_vertex_property(G, vertex_index));

    smallest_last_vertex_ordering(G, order, degree, marker, degree_bucket_sorter);
  }

  // Worker overload: `degree_buckets` keeps each unordered vertex in a bucket
  // keyed by its current degree, so a minimum-degree vertex is found cheaply.
  template <class VertexListGraph, class Order, class Degree,
            class Marker, class BucketSorter>
  void
  smallest_last_vertex_ordering(const VertexListGraph& G, Order order,
                                Degree degree, Marker marker,
                                BucketSorter& degree_buckets) {
    typedef typename graph_traits<VertexListGraph> GraphTraits;
    typedef typename GraphTraits::vertex_descriptor Vertex;
    //typedef typename GraphTraits::size_type size_type;
    typedef std::size_t size_type;

    const size_type num = num_vertices(G);

    // Initialise: marker == num means "not yet ordered"; degrees seed the buckets.
    typename GraphTraits::vertex_iterator v, vend;
    for (boost::tie(v, vend) = vertices(G); v != vend; ++v) {
      put(marker, *v, num);
      put(degree, *v, out_degree(*v, G));
      degree_buckets.push(*v);
    }

    size_type minimum_degree = 1;
    size_type current_order = num - 1;

    while ( 1 ) {
      typedef typename BucketSorter::stack MDStack;
      // Advance to the first non-empty bucket at or above minimum_degree.
      MDStack minimum_degree_stack = degree_buckets[minimum_degree];
      while (minimum_degree_stack.empty())
        minimum_degree_stack = degree_buckets[++minimum_degree];

      Vertex node = minimum_degree_stack.top();
      put(order, current_order, node);

      if ( current_order == 0 ) //find all vertices
        break;

      minimum_degree_stack.pop();
      put(marker, node, 0); //node has been ordered.

      // Lower the degree of each still-unordered neighbour of `node`.
      typename GraphTraits::adjacency_iterator v, vend;
      for (boost::tie(v,vend) = adjacent_vertices(node, G); v != vend; ++v)
        if ( get(marker,*v) > current_order ) { //*v is unordered vertex
          put(marker, *v, current_order); //mark the columns adjacent to node

          //It is possible minimum degree goes down
          //Here we keep tracking it.
          put(degree, *v, get(degree, *v) - 1);
          minimum_degree = std::min(minimum_degree, get(degree, *v));

          //update the position of *v in the bucket sorter
          degree_buckets.update(*v);
        }

      current_order--;
    }

    //at this point, order[i] = v_i;
  }

}
#endif
|
jdavidberger/libsurvive
|
src/survive_kalman_lighthouses.h
|
#pragma once
#include "survive_kalman.h"
#include "survive_kalman_tracker.h"
// Kalman filter tracking the pose of a single lighthouse (base station).
typedef struct SurviveKalmanLighthouse {
	SurvivePose state;            // Current pose estimate of the lighthouse.
	survive_kalman_state_t model; // Underlying kalman filter state.
	SurviveContext *ctx;          // Owning libsurvive context.
	int lh;                       // Index of the lighthouse this filter tracks.

	// Tuning parameters -- presumably process/measurement noise weights and
	// stationarity time bounds (units/semantics defined elsewhere; confirm).
	FLT process_weight_pos;
	FLT process_weight_rotation;
	FLT light_variance;
	FLT light_stationary_mintime;
	FLT light_stationary_maxtime;
} SurviveKalmanLighthouse;
// Folds a light measurement observed by object `so` into the lighthouse estimate.
SURVIVE_EXPORT void survive_kalman_lighthouse_integrate_light(SurviveKalmanLighthouse *tracker, SurviveObject *so,
															  PoserDataLight *data);

// Initializes `tracker` for lighthouse index `lh` within context `ctx`.
SURVIVE_EXPORT void survive_kalman_lighthouse_init(SurviveKalmanLighthouse *tracker, SurviveContext *ctx, int lh);

// Releases resources held by `tracker`.
SURVIVE_EXPORT void survive_kalman_lighthouse_free(SurviveKalmanLighthouse *tracker);

// Folds a direct pose observation (with per-component variance) into the estimate.
SURVIVE_EXPORT void survive_kalman_lighthouse_integrate_observation(SurviveKalmanLighthouse *tracker,
																	const SurvivePose *pose, const FLT *variance);
|
arushmangal/Hack-CP-DSA
|
Codechef/Cutting Recipes/solution.cpp
|
#include <iostream>
#include <vector>
using namespace std;
// Greatest common divisor of two non-negative integers (Euclid's algorithm).
// gcd(0, x) == x, so 0 serves as the identity for folding over a sequence.
static int gcdOf(int a, int b) {
    while (b != 0) {
        int r = a % b;
        a = b;
        b = r;
    }
    return a;
}

// Codechef "Cutting Recipes": for each test case, read N ingredient
// quantities and print them divided by their greatest common divisor,
// i.e. the smallest proportional recipe.
//
// Improvements over the trial-division original: Euclid's algorithm replaces
// the O(min*N) downward scan, the magic upper bound of 2000 is gone (the old
// code returned a wrong answer when every quantity exceeded 2000), and a
// vector replaces the fixed a[50] buffer that overflowed for N > 50.
int main() {
    int T;
    cin >> T; // number of test cases
    while (T--) {
        int N;
        cin >> N; // number of ingredients
        vector<int> a(N);
        int hcf = 0; // running gcd of all quantities read so far
        for (int i = 0; i < N; i++) {
            cin >> a[i];
            hcf = gcdOf(hcf, a[i]);
        }
        if (hcf == 0) {
            hcf = 1; // guard: all quantities zero -> avoid division by zero
        }
        // Output each quantity scaled down by the gcd, space separated.
        for (int i = 0; i < N; i++) {
            cout << a[i] / hcf << " ";
        }
        cout << endl;
    }
    return 0;
}
|
NanmiaoWu/phylanx
|
src/execution_tree/primitives/node_data_helpers2d.cpp
|
// Copyright (c) 2017-2018 <NAME>
//
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
#include <phylanx/config.hpp>
#include <phylanx/execution_tree/compiler/primitive_name.hpp>
#include <phylanx/execution_tree/primitives/base_primitive.hpp>
#include <phylanx/execution_tree/primitives/node_data_helpers.hpp>
#include <phylanx/execution_tree/primitives/primitive_argument_type.hpp>
#include <phylanx/ir/node_data.hpp>
#include <hpx/errors/throw_exception.hpp>
#include <algorithm>
#include <array>
#include <cstddef>
#include <cstdint>
#include <string>
#include <utility>
#include <vector>
namespace phylanx { namespace execution_tree
{
    ///////////////////////////////////////////////////////////////////////////
    // return argument as a matrix, scalars and vectors are properly broadcast
    //
    // Fills `result` (rows x columns) from `rhs`, broadcasting lower-rank or
    // size-one operands. Throws hpx::bad_parameter (annotated with `name` and
    // `codename` for diagnostics) when the shapes are incompatible.
    template <typename T>
    PHYLANX_EXPORT void extract_value_matrix(
        typename ir::node_data<T>::storage2d_type& result,
        ir::node_data<T>&& rhs, std::size_t rows, std::size_t columns,
        std::string const& name, std::string const& codename)
    {
        // NOTE(review): this alias appears unused in this function body.
        using storage2d_type = typename ir::node_data<T>::storage2d_type;

        switch (rhs.num_dimensions())
        {
        case 0:
            {
                // scalar: every element takes the scalar value
                result.resize(rows, columns);
                result = rhs.scalar();
            }
            return;

        case 1:
            {
                // vectors of size one can be broadcast into any matrix
                if (rhs.size() == 1)
                {
                    result.resize(rows, columns);
                    result = rhs[0];
                    return;
                }

                if (columns != rhs.size())
                {
                    HPX_THROW_EXCEPTION(hpx::bad_parameter,
                        "phylanx::execution_tree::extract_value_matrix",
                        util::generate_error_message(
                            "cannot broadcast a vector into a matrix with a "
                            "different number of columns",
                            name, codename));
                }

                // replicate the vector across every row
                result.resize(rows, columns);
                for (std::size_t i = 0; i != rows; ++i)
                {
                    blaze::row(result, i) = blaze::trans(rhs.vector());
                }
                return;
            }

        case 2:
            {
                // matrices of size one can be broadcast into any other matrix
                if (rhs.size() == 1)
                {
                    result.resize(rows, columns);
                    result = rhs[0];
                    return;
                }

                // matrices with one column can be broadcast into any other
                // matrix with the same number of rows
                if (rhs.dimension(0) == 1 && columns == rhs.dimension(1))
                {
                    result.resize(rows, columns);
                    auto m = rhs.matrix();
                    auto row = blaze::row(m, 0);
                    for (std::size_t i = 0; i != rows; ++i)
                    {
                        blaze::row(result, i) = row;
                    }
                    return;
                }

                // matrices with one row can be broadcast into any other
                // matrix with the same number of columns
                if (rhs.dimension(1) == 1 && rows == rhs.dimension(0))
                {
                    result.resize(rows, columns);
                    auto m = rhs.matrix();
                    auto column = blaze::column(m, 0);
                    for (std::size_t i = 0; i != columns; ++i)
                    {
                        blaze::column(result, i) = column;
                    }
                    return;
                }

                if (rows != rhs.dimension(0) || columns != rhs.dimension(1))
                {
                    HPX_THROW_EXCEPTION(hpx::bad_parameter,
                        "phylanx::execution_tree::extract_value_matrix",
                        util::generate_error_message(
                            "cannot broadcast a matrix into a differently "
                            "sized matrix",
                            name, codename));
                }

                // exact shape match: copy a reference, move owned storage
                if (rhs.is_ref())
                {
                    result = rhs.matrix();
                }
                else
                {
                    result = std::move(rhs.matrix_non_ref());
                }
                return;
            }

        case 3:
            {
                // tensors of size one can be broadcast into any other matrix
                if (rhs.size() == 1)
                {
                    result.resize(rows, columns);
                    result = rhs.at(0, 0, 0);
                    return;
                }

                // tensors with one column can be broadcast into any other
                // matrix with the same number of rows
                if (rhs.dimension(0) == 1 && rhs.dimension(1) == rows &&
                    rhs.dimension(2) == 1)
                {
                    result.resize(rows, columns);
                    auto t = rhs.tensor();
                    auto column = blaze::column(blaze::pageslice(t, 0), 0);
                    for (std::size_t j = 0; j != columns; ++j)
                    {
                        blaze::column(result, j) = column;
                    }
                    return;
                }

                // tensors with one row can be broadcast into any other
                // matrix with the same number of columns
                if (rhs.dimension(0) == 1 && rhs.dimension(1) == 1 &&
                    rhs.dimension(2) == columns)
                {
                    result.resize(rows, columns);
                    auto t = rhs.tensor();
                    auto row = blaze::row(blaze::pageslice(t, 0), 0);
                    for (std::size_t i = 0; i != rows; ++i)
                    {
                        blaze::row(result, i) = row;
                    }
                    return;
                }

                // pageslice matches
                if (rhs.dimension(0) == 1 && rhs.dimension(1) == rows &&
                    rhs.dimension(2) == columns)
                {
                    result = blaze::pageslice(rhs.tensor(), 0);
                    return;
                }

                HPX_THROW_EXCEPTION(hpx::bad_parameter,
                    "phylanx::execution_tree::extract_value_matrix",
                    util::generate_error_message(
                        "cannot broadcast a tensor into a differently "
                        "sized matrix",
                        name, codename));
            }

        default:
            break;
        }

        // rhs held no numeric data of a supported rank
        HPX_THROW_EXCEPTION(hpx::bad_parameter,
            "phylanx::execution_tree::extract_value_matrix",
            util::generate_error_message(
                "primitive_argument_type does not hold a numeric "
                "value type",
                name, codename));
    }

    // Convenience overload: returns the broadcast result as a node_data.
    template <typename T>
    PHYLANX_EXPORT ir::node_data<T> extract_value_matrix(ir::node_data<T>&& arg,
        std::size_t rows, std::size_t columns, std::string const& name,
        std::string const& codename)
    {
        typename ir::node_data<T>::storage2d_type result;
        extract_value_matrix(
            result, std::move(arg), rows, columns, name, codename);
        return ir::node_data<T>{std::move(result)};
    }

    // Convenience overload: extracts the numeric data from a primitive
    // argument (const ref) and broadcasts it to the requested shape.
    template <typename T>
    PHYLANX_EXPORT ir::node_data<T> extract_value_matrix(
        primitive_argument_type const& val, std::size_t rows,
        std::size_t columns, std::string const& name,
        std::string const& codename)
    {
        return extract_value_matrix(
            extract_node_data<T>(val, name, codename),
            rows, columns, name, codename);
    }

    // Convenience overload: extracts the numeric data from a primitive
    // argument (rvalue) and broadcasts it to the requested shape.
    template <typename T>
    PHYLANX_EXPORT ir::node_data<T> extract_value_matrix(
        primitive_argument_type&& val, std::size_t rows, std::size_t columns,
        std::string const& name, std::string const& codename)
    {
        return extract_value_matrix(
            extract_node_data<T>(std::move(val), name, codename),
            rows, columns, name, codename);
    }

    // explicitly instantiate necessary functions
    template PHYLANX_EXPORT ir::node_data<double> extract_value_matrix<double>(
        primitive_argument_type const& val, std::size_t rows,
        std::size_t columns, std::string const& name,
        std::string const& codename);
    template PHYLANX_EXPORT ir::node_data<std::int64_t>
    extract_value_matrix<std::int64_t>(primitive_argument_type const& val,
        std::size_t rows, std::size_t columns, std::string const& name,
        std::string const& codename);
    template PHYLANX_EXPORT ir::node_data<std::uint8_t>
    extract_value_matrix<std::uint8_t>(primitive_argument_type const& val,
        std::size_t rows, std::size_t columns, std::string const& name,
        std::string const& codename);

    template PHYLANX_EXPORT ir::node_data<double> extract_value_matrix<double>(
        primitive_argument_type&& val, std::size_t rows, std::size_t columns,
        std::string const& name, std::string const& codename);
    template PHYLANX_EXPORT ir::node_data<std::int64_t>
    extract_value_matrix<std::int64_t>(primitive_argument_type&& val,
        std::size_t rows, std::size_t columns, std::string const& name,
        std::string const& codename);
    template PHYLANX_EXPORT ir::node_data<std::uint8_t>
    extract_value_matrix<std::uint8_t>(primitive_argument_type&& val,
        std::size_t rows, std::size_t columns, std::string const& name,
        std::string const& codename);
}}
|
ollystephens/benthos
|
lib/metrics/statsd_legacy.go
|
package metrics
import (
"fmt"
"time"
"github.com/Jeffail/benthos/v3/lib/log"
"github.com/quipo/statsd"
)
//------------------------------------------------------------------------------
// wrappedLogger adapts a log.Modular to the Println-style logger expected by
// the statsd client.
type wrappedLogger struct {
	m log.Modular
}
// Println forwards the formatted arguments to the wrapped logger at warn level.
func (w *wrappedLogger) Println(v ...interface{}) {
	w.m.Warnf(fmt.Sprintln(v...))
}
//------------------------------------------------------------------------------
// StatsdLegacyStat is a representation of a single metric stat. Interactions with
// this stat are thread safe.
type StatsdLegacyStat struct {
	path string        // fully resolved metric path sent to statsd
	s    statsd.Statsd // shared statsd client owned by the parent StatsdLegacy
}
// Incr increments a metric by an amount.
// The result of the client call is discarded; this always returns nil.
func (s *StatsdLegacyStat) Incr(count int64) error {
	s.s.Incr(s.path, count)
	return nil
}
// Decr decrements a metric by an amount.
// The result of the client call is discarded; this always returns nil.
func (s *StatsdLegacyStat) Decr(count int64) error {
	s.s.Decr(s.path, count)
	return nil
}
// Timing sets a timing metric.
// The result of the client call is discarded; this always returns nil.
func (s *StatsdLegacyStat) Timing(delta int64) error {
	s.s.Timing(s.path, delta)
	return nil
}
// Set sets a gauge metric (mapped onto the statsd client's Gauge call).
// The result of the client call is discarded; this always returns nil.
func (s *StatsdLegacyStat) Set(value int64) error {
	s.s.Gauge(s.path, value)
	return nil
}
//------------------------------------------------------------------------------
// StatsdLegacy is a stats object with capability to hold internal stats as a JSON
// endpoint.
type StatsdLegacy struct {
	config      Config        // full metrics configuration (Statsd section is used)
	s           statsd.Statsd // buffered statsd client created in NewStatsdLegacy
	log         log.Modular   // logger, replaceable via SetLogger before construction completes
	pathMapping *pathMapping  // optional remapping/filtering of metric paths
}
// NewStatsdLegacy creates and returns a new StatsdLegacy object.
//
// It parses the flush period, applies the functional options (which may set
// the logger), initialises the path mapping, warns that this legacy client is
// in use, and finally connects a buffered statsd client over UDP or TCP
// depending on config.Statsd.Network.
func NewStatsdLegacy(config Config, opts ...func(Type)) (Type, error) {
	flushPeriod, err := time.ParseDuration(config.Statsd.FlushPeriod)
	if err != nil {
		return nil, fmt.Errorf("failed to parse flush period: %s", err)
	}
	s := &StatsdLegacy{
		config: config,
		log:    log.Noop(),
	}
	// Options run before anything that logs so a custom logger takes effect.
	for _, opt := range opts {
		opt(s)
	}
	if s.pathMapping, err = newPathMapping(config.Statsd.PathMapping, s.log); err != nil {
		return nil, fmt.Errorf("failed to init path mapping: %v", err)
	}
	// Deprecation warnings: this constructor is only reached on legacy
	// configurations; point users at the replacement fields.
	if config.Statsd.Network == "tcp" {
		s.log.Warnf(
			"Network set to 'tcp', falling back to legacy statsd client. The " +
				"network field is due to be removed in the next major release, " +
				" if you are relying on this field please raise an issue at: " +
				"https://github.com/Jeffail/benthos/issues\n",
		)
	} else {
		s.log.Warnf(
			"Falling back to legacy statsd client. To use the new client set " +
				"the 'tag_format' field to 'none', 'datadog' or 'influxdb'. The " +
				"network field is due to be removed in the next major release, " +
				"if you are relying on this field please raise an issue at: " +
				"https://github.com/Jeffail/benthos/issues\n",
		)
	}
	// NOTE(review): this error message mentions 'tcp' even though the check
	// runs for every network value -- confirm the wording is intended.
	if config.Statsd.TagFormat != TagFormatNone && config.Statsd.TagFormat != TagFormatLegacy {
		return nil, fmt.Errorf("tag format '%v' is not supported when using 'tcp' network traffic", config.Statsd.TagFormat)
	}
	// Ensure the prefix ends with a '.' separator.
	prefix := config.Statsd.Prefix
	if len(prefix) > 0 && prefix[len(prefix)-1] != '.' {
		prefix = prefix + "."
	}
	statsdclient := statsd.NewStatsdBuffer(
		flushPeriod,
		statsd.NewStatsdClient(config.Statsd.Address, prefix),
	)
	statsdclient.Logger = &wrappedLogger{m: s.log}
	// "udp" gets a datagram socket; anything else falls back to TCP.
	if config.Statsd.Network == "udp" {
		if err := statsdclient.CreateSocket(); err != nil {
			return nil, err
		}
	} else {
		if err := statsdclient.CreateTCPSocket(); err != nil {
			return nil, err
		}
	}
	s.s = statsdclient
	return s, nil
}
//------------------------------------------------------------------------------
// GetCounter returns a stat counter object for a path.
func (h *StatsdLegacy) GetCounter(path string) StatCounter {
	mapped := h.pathMapping.mapPathNoTags(path)
	if mapped == "" {
		return DudStat{}
	}
	return &StatsdLegacyStat{path: mapped, s: h.s}
}
// GetCounterVec returns a stat counter object for a path with the labels
// discarded.
func (h *StatsdLegacy) GetCounterVec(path string, n []string) StatCounterVec {
	mapped := h.pathMapping.mapPathNoTags(path)
	return fakeCounterVec(func([]string) StatCounter {
		if mapped == "" {
			return DudStat{}
		}
		return &StatsdLegacyStat{path: mapped, s: h.s}
	})
}
// GetTimer returns a stat timer object for a path.
func (h *StatsdLegacy) GetTimer(path string) StatTimer {
	mapped := h.pathMapping.mapPathNoTags(path)
	if mapped == "" {
		return DudStat{}
	}
	return &StatsdLegacyStat{path: mapped, s: h.s}
}
// GetTimerVec returns a stat timer object for a path with the labels
// discarded.
func (h *StatsdLegacy) GetTimerVec(path string, n []string) StatTimerVec {
	mapped := h.pathMapping.mapPathNoTags(path)
	return fakeTimerVec(func([]string) StatTimer {
		if mapped == "" {
			return DudStat{}
		}
		return &StatsdLegacyStat{path: mapped, s: h.s}
	})
}
// GetGauge returns a stat gauge object for a path.
func (h *StatsdLegacy) GetGauge(path string) StatGauge {
	mapped := h.pathMapping.mapPathNoTags(path)
	if mapped == "" {
		return DudStat{}
	}
	return &StatsdLegacyStat{path: mapped, s: h.s}
}
// GetGaugeVec returns a stat gauge object for a path with the labels
// discarded.
func (h *StatsdLegacy) GetGaugeVec(path string, n []string) StatGaugeVec {
	mapped := h.pathMapping.mapPathNoTags(path)
	return fakeGaugeVec(func([]string) StatGauge {
		if mapped == "" {
			return DudStat{}
		}
		return &StatsdLegacyStat{path: mapped, s: h.s}
	})
}
// SetLogger sets the logger used to print connection errors.
// No synchronization is visible here, so callers should set the logger
// before the metrics object is shared across goroutines.
func (h *StatsdLegacy) SetLogger(log log.Modular) {
	h.log = log
}
// Close stops the StatsdLegacy object from aggregating metrics and cleans up
// resources.
func (h *StatsdLegacy) Close() error {
	// NOTE(review): any error from the underlying client's Close is discarded
	// and nil is always returned — confirm whether it should be propagated.
	h.s.Close()
	return nil
}
//------------------------------------------------------------------------------
|
rrvt/ICS-214a
|
Library/Library.prj/qsort.h
|
// Qsort Template for an array of something
// Copyright Software Design & Engineering, <NAME>, 2013. All rights reserved.
// Requires the following operations for node N* p, N* q, N* s
// *p == *q
// *p > *q
// *p <= *q
// *s = *p
// Call as follows:
// qsort(&array[firstIndex], &array[lastIndex]); or equivalent
#pragma once
template <class N>
void qsort(N* p, N* q) {
  enum {split = 15};                   // below this size insertion sort is used
  N* r;
  N* s;
  N* pivot;
  N temp;
  if (p >= q) return;                  // zero or one element -- nothing to sort
  do {
    // Small ranges: insertion sort avoids partition overhead.
    if (q - p <= split) {
      for (r = p+1; r <= q; r++) {     // Insertion Sort is faster
        if (*(r-1) > *r) {             // for small vectors
          temp = *r;
          // Shift larger elements one slot right; stop at the front (s == p)
          // or at the first element <= temp, then drop temp into the gap.
          for (s = r-1; s >= p; s--) {
            *(s+1) = *s;
            if (s == p || *(s-1) <= temp) break;
          }
          *s = temp;
        }
      }
      return;
    }
    // Partition using the last element (*q) as the pivot.  Because the pivot
    // sits at the right end, the inner ++r scan needs no bounds check: the
    // pivot itself is the sentinel that stops it.
    r = p - 1; s = q; pivot = s;
    while (r < s) {
      while (*pivot > *(++r)) ;        // scan right for an element >= pivot
      while (--s > r && *s > *pivot) ; // scan left for an element <= pivot
      if (r < s) {temp = *r; *r = *s; *s = temp;}
    }
    // Move the pivot into its final position r.
    temp = *r; *r = *pivot; *pivot = temp;
    // Recurse on the smaller half and iterate on the larger one, bounding
    // the recursion depth at O(log n).
    if (r - p < q - r) {qsort(p, r-1); p = r+1;}
    else {qsort(r+1, q); q = r-1;}
  } while (p < q);
}
|
SilencerWeb/karma
|
src/ui/atoms/switch/switch.js
|
<reponame>SilencerWeb/karma<filename>src/ui/atoms/switch/switch.js
import * as React from 'react';
import styled, { css } from 'styled-components';
import PropTypes from 'prop-types';
import { color, transition } from 'ui/theme';
// Background rail of the switch; tinted via the checked-state selector in
// Wrapper below.
const Track = styled.div`
  width: 3.4rem;
  height: 1.4rem;
  background: #3c4858;
  border-radius: 10rem;
  opacity: 0.24;
  transition: ${transition};
`;

// Circular knob; slides right (translate(100%, -50%)) when the hidden
// checkbox is checked.
const Thumb = styled.span`
  position: absolute;
  top: 50%;
  left: -0.4rem;
  width: 2rem;
  height: 2rem;
  background: #f1f1f1;
  border-radius: 50%;
  box-shadow: 0 0.1rem 0.2rem rgba(60, 72, 88, 0.24);
  transform: translateY(-50%);
  transition: ${transition};
`;

// Positioning context for the absolutely-placed Thumb; also provides the
// horizontal gaps between the track and the two text labels.
const IconWrapper = styled.div`
  position: relative;
  margin-right: 0.8rem;
  margin-left: 0.8rem;
`;

// Clickable label wrapping the whole control.  The real checkbox is visually
// hidden; its :checked state restyles the sibling Track and Thumb via the
// general-sibling (~) selectors below.
const Wrapper = styled.label`
  position: relative;
  display: flex;
  width: fit-content;
  align-items: center;
  transition: ${transition};
  input {
    position: absolute;
    top: 0;
    left: 0;
    opacity: 0;
    visibility: hidden;
    &:checked {
      ~ ${Track} {
        background-color: ${color.primary};
      }
      ~ ${Thumb} {
        left: -0.2rem;
        background-color: ${color.primary};
        box-shadow: 0 0.1rem 0.2rem rgba(0, 181, 255, 0.48);
        transform: translate(100%, -50%);
      }
    }
  }
  ${p => css`
    ${p.disabled && css`
      opacity: 0.5;
      cursor: not-allowed;
    `}
  `}
`;
export const Switch = (props) => {
return (
<Wrapper className={ props.className } disabled={ props.disabled }>
<span>{ props.content.off }</span>
<IconWrapper>
<input type={ 'checkbox' } checked={ props.checked } disabled={ props.disabled }/>
<Track/>
<Thumb/>
</IconWrapper>
<span>{ props.content.on }</span>
</Wrapper>
);
};
// Runtime prop validation: `content` supplies the off/on label texts.
Switch.propTypes = {
  className: PropTypes.string,
  content: PropTypes.shape({
    off: PropTypes.string.isRequired,
    on: PropTypes.string.isRequired,
  }),
  checked: PropTypes.bool,
  disabled: PropTypes.bool,
};

// Switch renders enabled unless explicitly disabled.
Switch.defaultProps = {
  disabled: false,
};
|
pydata-apis/python-api-record
|
data/typing/numpy.lib.scimath.py
|
<reponame>pydata-apis/python-api-record
from typing import *
# Auto-generated API-usage record: each stub captures argument types observed
# for a numpy.lib.scimath function in downstream test suites; the docstrings
# record usage counts per project.
# NOTE(review): `numpy` is referenced in annotations but not imported in this
# chunk — presumably resolved by the record generator; confirm.
def arccos(x: List[float]):
    """
    usage.scipy: 2
    """
    ...


@overload
def sqrt(x: numpy.matrix):
    """
    usage.networkx: 2
    """
    ...


@overload
def sqrt(x: numpy.ndarray):
    """
    usage.networkx: 1
    """
    ...


# NOTE(review): this final `sqrt` lacks @overload and so shadows the two
# overloads above at runtime — likely intentional for a generated record,
# but confirm against the generator's conventions.
def sqrt(x: Union[numpy.ndarray, numpy.matrix]):
    """
    usage.networkx: 3
    """
    ...
|
weltam/dremio-oss
|
sabot/kernel/src/test/java/com/dremio/exec/fn/impl/TestContextFunctions.java
|
<gh_stars>1000+
/*
* Copyright (C) 2017-2019 Dremio Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dremio.exec.fn.impl;
import java.util.Properties;
import org.junit.Test;
import com.dremio.PlanTestBase;
import com.dremio.QueryTestUtil;
import com.dremio.TestResult;
import com.dremio.common.utils.protos.QueryIdHelper;
import com.dremio.exec.client.DremioClient;
import com.dremio.sabot.rpc.user.UserSession;
/**
 * Tests for SQL context functions such as {@code user}, {@code session_user},
 * {@code current_schema} and {@code last_query_id}.
 */
public class TestContextFunctions extends PlanTestBase {

  /** Projects every user-identity context function in a single row. */
  private static final String USER_CONTEXT_QUERY =
      "select user, session_user, system_user, query_user() as query_user from cp.\"employee.json\" limit 1";

  @Test
  public void userUDFForAnonymousConnection() throws Exception {
    final String user = "anonymous";
    updateClient(user);
    testBuilder()
        .sqlQuery(USER_CONTEXT_QUERY)
        .unOrdered()
        .baselineColumns("user", "session_user", "system_user", "query_user")
        .baselineValues(user, user, user, user)
        .go();
  }

  @Test
  public void userUDFForNamedConnection() throws Exception {
    final String testUserName = "testUser1";
    updateClient(testUserName);
    testBuilder()
        .sqlQuery(USER_CONTEXT_QUERY)
        .unOrdered()
        .baselineColumns("user", "session_user", "system_user", "query_user")
        .baselineValues(testUserName, testUserName, testUserName, testUserName)
        .go();
  }

  @Test
  public void userUDFInFilterCondition() throws Exception {
    final String testUserName = "testUser2";
    updateClient(testUserName);
    final String query = String.format(
        "select employee_id from cp.\"employee.json\" where '%s' = user order by employee_id limit 1", testUserName);
    testBuilder()
        .sqlQuery(query)
        .unOrdered()
        .baselineColumns("employee_id")
        .baselineValues(1L)
        .go();
  }

  /**
   * query_user() must be reduced to a constant at plan time, so the
   * user-dependent disjunct collapses into a simple filter for each user.
   */
  @Test
  public void queryUserConstantReduction() throws Exception {
    final String caSalesUser = "testUser1";
    final String waSalesUser = "testUser2";
    final String query = String.format("SELECT sales_city " +
        "FROM cp.\"region.json\" t WHERE " +
        "(query_user() = '%s' and t.sales_state_province = 'CA') OR " +
        "(query_user() = '%s' and t.sales_state_province = 'WA')" +
        "ORDER BY sales_city LIMIT 2", caSalesUser, waSalesUser);

    updateClient(caSalesUser);
    testPhysicalPlan(query, "Filter(condition=[=($1, 'CA')])");
    testBuilder()
        .sqlQuery(query)
        .unOrdered()
        .baselineColumns("sales_city")
        .baselineValues("Altadena")
        .baselineValues("Arcadia")
        .go();

    updateClient(waSalesUser);
    testPhysicalPlan(query, "Filter(condition=[=($1, 'WA')])");
    testBuilder()
        .sqlQuery(query)
        .unOrdered()
        .baselineColumns("sales_city")
        .baselineValues("Anacortes")
        .baselineValues("Ballard")
        .go();
  }

  @Test
  public void currentSchemaUDFWhenDefaultSchemaNotSet() throws Exception {
    testBuilder()
        .sqlQuery("select current_schema from cp.\"employee.json\" limit 1")
        .unOrdered()
        .baselineColumns("current_schema")
        .baselineValues("")
        .go();
  }

  @Test
  public void currentSchemaUDFWithSingleLevelDefaultSchema() throws Exception {
    testBuilder()
        .optionSettingQueriesForTestQuery("USE dfs_test")
        .sqlQuery("select current_schema from cp.\"employee.json\" limit 1")
        .unOrdered()
        .baselineColumns("current_schema")
        .baselineValues("dfs_test")
        .go();
  }

  /**
   * We want to return the old client to the class state when we are done
   * running tests with a different client.
   *
   * @param newClient client to install
   * @return the previously installed (default) client
   */
  private static DremioClient swapClient(DremioClient newClient) {
    DremioClient old = client;
    client = newClient;
    return old;
  }

  /**
   * We need a client with no prior queries performed.
   * However, this means we are foregoing defaults like maximum
   * execution width.
   *
   * @return a client with no prior queries run
   * @throws Exception if the client cannot be created
   */
  private static DremioClient createCleanClientHelper() throws Exception {
    final Properties props = new Properties();
    props.setProperty(UserSession.USER, "testUser");
    return QueryTestUtil.createCleanClient(config, clusterCoordinator, props);
  }

  /**
   * We need to put back the original client so the defaults
   * are the same from test to test.
   *
   * @param cleanClient client we used for the test
   * @param oldClient client we swapped out
   */
  private static void teardownCleanClientTest(DremioClient cleanClient, DremioClient oldClient) {
    cleanClient.close();
    client = oldClient;
  }

  @Test
  public void lastQueryIdUDFWithNoPriorQueries() throws Exception {
    final DremioClient cleanClient = createCleanClientHelper();
    final DremioClient oldClient = swapClient(cleanClient);
    // try/finally guarantees the default client is restored even when the
    // assertion fails; otherwise later tests would run against the wrong
    // client.
    try {
      testBuilder()
          .sqlQuery("select last_query_id from cp.\"employee.json\" limit 1")
          .unOrdered()
          .baselineColumns("last_query_id")
          .baselineValues(null)
          .go();
    } finally {
      teardownCleanClientTest(cleanClient, oldClient);
    }
  }

  @Test
  public void lastQueryIdUDFWithPriorQuery() throws Exception {
    final DremioClient cleanClient = createCleanClientHelper();
    final DremioClient oldClient = swapClient(cleanClient);
    try {
      // First query: no predecessor, so last_query_id is null...
      final TestResult res = testBuilder()
          .sqlQuery("select last_query_id from cp.\"employee.json\" limit 1")
          .unOrdered()
          .baselineColumns("last_query_id")
          .baselineValues(null)
          .go();
      // ...second query must report the first query's id.
      testBuilder()
          .sqlQuery("select last_query_id from cp.\"employee.json\" limit 1")
          .unOrdered()
          .baselineColumns("last_query_id")
          .baselineValues(QueryIdHelper.getQueryId(res.getQueryId()))
          .go();
    } finally {
      teardownCleanClientTest(cleanClient, oldClient);
    }
  }
}
|
hasridha/react-native
|
ReactCommon/cxxreact/PlatformBundleInfo.h
|
#pragma once
#include <cxxreact/JSBigString.h>
namespace facebook { namespace react {
// Plain data holder for a platform-provided JS bundle and its metadata.
struct PlatformBundleInfo
{
	std::unique_ptr<const JSBigString> Bundle;  // bundle contents (owned)
	std::string BundleUrl;                      // presumably the source URL of the bundle -- confirm with callers
	std::string BytecodePath;                   // presumably an on-disk bytecode cache location -- confirm with callers
	uint64_t Version;                           // version number; semantics defined by the host platform
};
}}//namespace facebook::react
|
marcgrue/molecule
|
molecule/shared/src/main/scala/molecule/core/macros/rowAttr/JsonOptNested.scala
|
package molecule.core.macros.rowAttr
import java.lang.{Long => jLong}
import java.util.{Date, Iterator => jIterator, List => jList, Map => jMap, Set => jSet}
private[molecule] trait JsonOptNested extends JsonBase {
// One ===========================================================================================
// Card-one attributes.  Each helper consumes exactly one value from `it` and
// appends a `"field": value` pair to `sb` (quoting depends on the variant).

/** Single value rendered quoted. */
protected def jsonOptNestedOneQuoted(sb: StringBuffer, field: String, it: jIterator[_]): StringBuffer =
  quotedPair(sb, field, it.next.toString)

/** Single value rendered unquoted, as-is. */
protected def jsonOptNestedOne(sb: StringBuffer, field: String, it: jIterator[_]): StringBuffer =
  pair(sb, field, it.next)

/** Single value rendered unquoted via its `toString`. */
protected def jsonOptNestedOneToString(sb: StringBuffer, field: String, it: jIterator[_]): StringBuffer =
  pair(sb, field, it.next.toString)

/** Single Date rendered quoted with `date2str`. */
protected def jsonOptNestedOneDate(sb: StringBuffer, field: String, it: jIterator[_]): StringBuffer =
  quotedPair(sb, field, date2str(it.next.asInstanceOf[Date]))

/** Value of unknown static type: quoting decided by its runtime class. */
protected def jsonOptNestedOneAny(sb: StringBuffer, field: String, it: jIterator[_]): StringBuffer = {
  it.next match {
    case value: String         => quotedPair(sb, field, value)
    case value: Int            => pair(sb, field, value)
    case value: Float          => pair(sb, field, value)
    case value: Boolean        => pair(sb, field, value)
    case value: Long           => pair(sb, field, value)
    case value: Double         => pair(sb, field, value)
    case value: java.util.Date => quotedPair(sb, field, date2str(value))
    case value: java.util.UUID => quotedPair(sb, field, value.toString)
    case value: java.net.URI   => quotedPair(sb, field, value.toString)
    case value: BigInt         => quotedPair(sb, field, value.toString)
    case value: BigDecimal     => quotedPair(sb, field, value.toString)
    case valueOfUnknownType    => quotedPair(sb, field, valueOfUnknownType.toString)
  }
}

/** Enum: either a plain keyword string or one wrapped in a pull-result map. */
protected def jsonOptNestedOneEnum(sb: StringBuffer, field: String, it: jIterator[_]): StringBuffer = {
  it.next match {
    case s: String => quotedPair(sb, field, getKwName(s))
    case vs        => quotedPair(sb, field, getKwName(vs.asInstanceOf[jMap[_, _]].values.iterator.next.toString))
  }
}

/** Ref attribute: bare entity id (Long) or an id wrapped in a pull-result map. */
protected def jsonOptNestedOneRefAttr(sb: StringBuffer, field: String, it: jIterator[_]): StringBuffer = {
  it.next match {
    case l: jLong => pair(sb, field, l.toLong)
    case vs       =>
      val refAttr = vs
        .asInstanceOf[jMap[_, _]].values.iterator.next
        .asInstanceOf[jLong].toLong
      pair(sb, field, refAttr)
  }
}
// Many ===========================================================================================
// Card-many attributes.  Datomic may deliver the collection either as a
// java Set or as a java List, so every helper handles both branches with
// otherwise identical rendering into a JSON array.

/** Many values, each element quoted. */
protected def jsonOptNestedManyQuoted(sb: StringBuffer, field: String, it: jIterator[_], tabs: Int): StringBuffer = {
  it.next match {
    case vs: jSet[_] =>
      quote(sb, field)
      sb.append(": [")
      var next = false
      val it1  = vs.iterator
      while (it1.hasNext) {
        if (next) sb.append(",") else next = true
        sb.append(indent(tabs + 2))
        quote(sb, it1.next.toString)
      }
      if (next) sb.append(indent(tabs + 1))
      sb.append("]")
    case vs =>
      quote(sb, field)
      sb.append(": [")
      var next = false
      val it1  = vs.asInstanceOf[jList[_]].iterator
      while (it1.hasNext) {
        if (next) sb.append(",") else next = true
        sb.append(indent(tabs + 2))
        quote(sb, it1.next.toString)
      }
      if (next) sb.append(indent(tabs + 1))
      sb.append("]")
  }
}

/** Many values, each element appended unquoted. */
protected def jsonOptNestedMany(sb: StringBuffer, field: String, it: jIterator[_], tabs: Int): StringBuffer = {
  it.next match {
    case vs: jSet[_] =>
      quote(sb, field)
      sb.append(": [")
      var next = false
      val it1  = vs.iterator
      while (it1.hasNext) {
        if (next) sb.append(",") else next = true
        sb.append(indent(tabs + 2))
        sb.append(it1.next)
      }
      if (next) sb.append(indent(tabs + 1))
      sb.append("]")
    case vs =>
      quote(sb, field)
      sb.append(": [")
      var next = false
      val it1  = vs.asInstanceOf[jList[_]].iterator
      while (it1.hasNext) {
        if (next) sb.append(",") else next = true
        sb.append(indent(tabs + 2))
        sb.append(it1.next)
      }
      if (next) sb.append(indent(tabs + 1))
      sb.append("]")
  }
}

/** Many values, each element appended unquoted via `toString`. */
protected def jsonOptNestedManyToString(sb: StringBuffer, field: String, it: jIterator[_], tabs: Int): StringBuffer = {
  it.next match {
    case vs: jSet[_] =>
      quote(sb, field)
      sb.append(": [")
      var next = false
      val it1  = vs.iterator
      while (it1.hasNext) {
        if (next) sb.append(",") else next = true
        sb.append(indent(tabs + 2))
        sb.append(it1.next.toString)
      }
      if (next) sb.append(indent(tabs + 1))
      sb.append("]")
    case vs =>
      quote(sb, field)
      sb.append(": [")
      var next = false
      val it1  = vs.asInstanceOf[jList[_]].iterator
      while (it1.hasNext) {
        if (next) sb.append(",") else next = true
        sb.append(indent(tabs + 2))
        sb.append(it1.next.toString)
      }
      if (next) sb.append(indent(tabs + 1))
      sb.append("]")
  }
}

/** Many Dates, each rendered quoted with `date2str`. */
protected def jsonOptNestedManyDate(sb: StringBuffer, field: String, it: jIterator[_], tabs: Int): StringBuffer = {
  it.next match {
    case vs: jSet[_] =>
      quote(sb, field)
      sb.append(": [")
      var next = false
      val it1  = vs.iterator
      while (it1.hasNext) {
        if (next) sb.append(",") else next = true
        sb.append(indent(tabs + 2))
        quote(sb, date2str(it1.next.asInstanceOf[Date]))
      }
      if (next) sb.append(indent(tabs + 1))
      sb.append("]")
    case vs =>
      quote(sb, field)
      sb.append(": [")
      var next = false
      val it1  = vs.asInstanceOf[jList[_]].iterator
      while (it1.hasNext) {
        if (next) sb.append(",") else next = true
        sb.append(indent(tabs + 2))
        quote(sb, date2str(it1.next.asInstanceOf[Date]))
      }
      if (next) sb.append(indent(tabs + 1))
      sb.append("]")
  }
}

/** Many enums. */
protected def jsonOptNestedManyEnum(sb: StringBuffer, field: String, it: jIterator[_], tabs: Int): StringBuffer = {
  it.next match {
    case vs: jSet[_] =>
      quote(sb, field)
      sb.append(": [")
      var next = false
      val it1  = vs.iterator
      while (it1.hasNext) {
        if (next) sb.append(",") else next = true
        sb.append(indent(tabs + 2))
        // NOTE(review): unlike the jList branch below, this branch does not
        // unwrap a pull-map with getKwName — confirm whether Set-shaped enum
        // results are always plain keyword strings.
        quote(sb, it1.next.toString)
      }
      if (next) sb.append(indent(tabs + 1))
      sb.append("]")
    case vs =>
      quote(sb, field)
      sb.append(": [")
      var next = false
      val it1  = vs.asInstanceOf[jList[_]].iterator
      while (it1.hasNext) {
        if (next) sb.append(",") else next = true
        sb.append(indent(tabs + 2))
        quote(sb, getKwName(it1.next.asInstanceOf[jMap[_, _]].values.iterator.next.toString))
      }
      if (next) sb.append(indent(tabs + 1))
      sb.append("]")
  }
}

/** Many ref attributes rendered as entity ids. */
protected def jsonOptNestedManyRefAttr(sb: StringBuffer, field: String, it: jIterator[_], tabs: Int): StringBuffer = {
  it.next match {
    case vs: jSet[_] =>
      quote(sb, field)
      sb.append(": [")
      var next = false
      val it1  = vs.iterator
      while (it1.hasNext) {
        if (next) sb.append(",") else next = true
        sb.append(indent(tabs + 2))
        // NOTE(review): the pull-map unwrap is disabled in this branch but
        // active in the jList branch below — confirm this asymmetry is
        // intended for Set-shaped results.
        val refAttr = it1.next
          //            .asInstanceOf[jMap[_, _]].values.iterator.next
          .asInstanceOf[jLong].toLong
        sb.append(refAttr)
      }
      if (next) sb.append(indent(tabs + 1))
      sb.append("]")
    case vs =>
      quote(sb, field)
      sb.append(": [")
      var next = false
      val it1  = vs.asInstanceOf[jList[_]].iterator
      while (it1.hasNext) {
        if (next) sb.append(",") else next = true
        sb.append(indent(tabs + 2))
        val refAttr = it1.next
          .asInstanceOf[jMap[_, _]].values.iterator.next
          .asInstanceOf[jLong].toLong
        sb.append(refAttr)
      }
      if (next) sb.append(indent(tabs + 1))
      sb.append("]")
  }
}
// Optional card one ===========================================================================================
// Optional card-one attributes.  A missing value arrives as the sentinel
// string "__none__" (or null) and is rendered as JSON null; a present value
// may additionally be wrapped in a pull-result map.

/** Optional single value, quoted when present. */
protected def jsonOptNestedOptOneQuoted(sb: StringBuffer, field: String, it: jIterator[_]): StringBuffer = {
  it.next match {
    case "__none__" | null => pair(sb, field, "null")
    case m: jMap[_, _]     => quotedPair(sb, field, m.values.iterator.next.toString)
    case v                 => quotedPair(sb, field, v.toString)
  }
}

/** Optional single value, unquoted when present. */
protected def jsonOptNestedOptOne(sb: StringBuffer, field: String, it: jIterator[_]): StringBuffer = {
  it.next match {
    case "__none__" | null => pair(sb, field, "null")
    case m: jMap[_, _]     => pair(sb, field, m.values.iterator.next.toString)
    case v                 => pair(sb, field, v)
  }
}

/** Optional single value, unquoted via `toString` when present. */
protected def jsonOptNestedOptOneToString(sb: StringBuffer, field: String, it: jIterator[_]): StringBuffer = {
  it.next match {
    case "__none__" | null => pair(sb, field, "null")
    case m: jMap[_, _]     => pair(sb, field, m.values.iterator.next.toString)
    case v                 => pair(sb, field, v.toString)
  }
}

/** Optional Date, quoted with `date2str` when present. */
protected def jsonOptNestedOptOneDate(sb: StringBuffer, field: String, it: jIterator[_]): StringBuffer = {
  it.next match {
    case "__none__" | null => pair(sb, field, "null")
    case m: jMap[_, _]     => quotedPair(sb, field, date2str(m.values.iterator.next.asInstanceOf[Date]))
    case v                 => quotedPair(sb, field, date2str(v.asInstanceOf[Date]))
  }
}

/** Optional enum; present values may be singly or doubly map-wrapped. */
protected def jsonOptNestedOptOneEnum(sb: StringBuffer, field: String, it: jIterator[_]): StringBuffer = {
  it.next match {
    case "__none__" | null => pair(sb, field, "null")
    case v                 => v.asInstanceOf[jMap[_, _]].values.iterator.next match {
      case m: jMap[_, _] => quotedPair(sb, field, getKwName(m.values.iterator.next.toString))
      case v             => quotedPair(sb, field, getKwName(v.toString))
    }
  }
}

/** Optional ref attribute; unwraps one or two pull-map layers to the id. */
protected def jsonOptNestedOptOneRefAttr(sb: StringBuffer, field: String, it: jIterator[_]): StringBuffer = {
  it.next match {
    case "__none__" | null => pair(sb, field, "null")
    case l: jLong          => pair(sb, field, l)
    case vs                =>
      val refAttr = vs.asInstanceOf[jMap[_, _]].values.iterator.next match {
        case l: jLong => l.toLong
        case m        => m.asInstanceOf[jMap[_, _]].values.iterator.next.asInstanceOf[jLong].toLong
      }
      pair(sb, field, refAttr)
  }
}
// Optional card many ===========================================================================================
// Optional card-many attributes.  Missing collections arrive as "__none__"
// (or null) and render as JSON null; present collections arrive either
// wrapped in a pull-result map or as a bare java List.

/** Optional many values, each quoted. */
protected def jsonOptNestedOptManyQuoted(sb: StringBuffer, field: String, it: jIterator[_], tabs: Int): StringBuffer = {
  it.next match {
    case "__none__" | null => pair(sb, field, "null")
    case vs: jMap[_, _] =>
      quote(sb, field)
      sb.append(": [")
      var next = false
      val it1  = vs.values.iterator.next.asInstanceOf[jList[_]].iterator
      while (it1.hasNext) {
        if (next) sb.append(",") else next = true
        sb.append(indent(tabs + 2))
        quote(sb, it1.next.toString)
      }
      if (next) sb.append(indent(tabs + 1))
      sb.append("]")
    case vs =>
      quote(sb, field)
      sb.append(": [")
      var next = false
      val it1  = vs.asInstanceOf[jList[_]].iterator
      while (it1.hasNext) {
        if (next) sb.append(",") else next = true
        sb.append(indent(tabs + 2))
        quote(sb, it1.next.toString)
      }
      if (next) sb.append(indent(tabs + 1))
      sb.append("]")
  }
}

/** Optional many values, each unquoted. */
protected def jsonOptNestedOptMany(sb: StringBuffer, field: String, it: jIterator[_], tabs: Int): StringBuffer = {
  it.next match {
    case "__none__" | null => pair(sb, field, "null")
    case vs: jMap[_, _] =>
      quote(sb, field)
      sb.append(": [")
      var next = false
      val it1  = vs.values.iterator.next.asInstanceOf[jList[_]].iterator
      while (it1.hasNext) {
        if (next) sb.append(",") else next = true
        sb.append(indent(tabs + 2))
        sb.append(it1.next)
      }
      if (next) sb.append(indent(tabs + 1))
      sb.append("]")
    case vs =>
      quote(sb, field)
      sb.append(": [")
      var next = false
      val it1  = vs.asInstanceOf[jList[_]].iterator
      while (it1.hasNext) {
        if (next) sb.append(",") else next = true
        sb.append(indent(tabs + 2))
        sb.append(it1.next)
      }
      if (next) sb.append(indent(tabs + 1))
      sb.append("]")
  }
}

/** Optional many values, each unquoted via `toString`. */
protected def jsonOptNestedOptManyToString(sb: StringBuffer, field: String, it: jIterator[_], tabs: Int): StringBuffer = {
  it.next match {
    case "__none__" | null => pair(sb, field, "null")
    case vs: jMap[_, _] =>
      quote(sb, field)
      sb.append(": [")
      var next = false
      val it1  = vs.values.iterator.next.asInstanceOf[jList[_]].iterator
      while (it1.hasNext) {
        if (next) sb.append(",") else next = true
        sb.append(indent(tabs + 2))
        sb.append(it1.next.toString)
      }
      if (next) sb.append(indent(tabs + 1))
      sb.append("]")
    case vs =>
      quote(sb, field)
      sb.append(": [")
      var next = false
      val it1  = vs.asInstanceOf[jList[_]].iterator
      while (it1.hasNext) {
        if (next) sb.append(",") else next = true
        sb.append(indent(tabs + 2))
        sb.append(it1.next.toString)
      }
      if (next) sb.append(indent(tabs + 1))
      sb.append("]")
  }
}

/** Optional many Dates, each quoted with `date2str`. */
protected def jsonOptNestedOptManyDate(sb: StringBuffer, field: String, it: jIterator[_], tabs: Int): StringBuffer = {
  it.next match {
    case "__none__" | null => pair(sb, field, "null")
    case vs: jMap[_, _] =>
      quote(sb, field)
      sb.append(": [")
      var next = false
      val it1  = vs.values.iterator.next.asInstanceOf[jList[_]].iterator
      while (it1.hasNext) {
        if (next) sb.append(",") else next = true
        sb.append(indent(tabs + 2))
        quote(sb, date2str(it1.next.asInstanceOf[Date]))
      }
      if (next) sb.append(indent(tabs + 1))
      sb.append("]")
    case vs =>
      quote(sb, field)
      sb.append(": [")
      var next = false
      val it1  = vs.asInstanceOf[jList[_]].iterator
      while (it1.hasNext) {
        if (next) sb.append(",") else next = true
        sb.append(indent(tabs + 2))
        quote(sb, date2str(it1.next.asInstanceOf[Date]))
      }
      if (next) sb.append(indent(tabs + 1))
      sb.append("]")
  }
}

/** Optional many enums; each element unwrapped via getKwName. */
protected def jsonOptNestedOptManyEnum(sb: StringBuffer, field: String, it: jIterator[_], tabs: Int): StringBuffer = {
  it.next match {
    case "__none__" | null => pair(sb, field, "null")
    case vs: jMap[_, _] =>
      quote(sb, field)
      sb.append(": [")
      var next = false
      val it1  = vs.values.iterator.next.asInstanceOf[jList[_]].iterator
      while (it1.hasNext) {
        if (next) sb.append(",") else next = true
        sb.append(indent(tabs + 2))
        quote(sb, getKwName(it1.next.asInstanceOf[jMap[_, _]].values.iterator.next.toString))
      }
      if (next) sb.append(indent(tabs + 1))
      sb.append("]")
    case vs =>
      quote(sb, field)
      sb.append(": [")
      var next = false
      val it1  = vs.asInstanceOf[jList[_]].iterator
      while (it1.hasNext) {
        if (next) sb.append(",") else next = true
        sb.append(indent(tabs + 2))
        quote(sb, getKwName(it1.next.asInstanceOf[jMap[_, _]].values.iterator.next.toString))
      }
      if (next) sb.append(indent(tabs + 1))
      sb.append("]")
  }
}

/** Optional many ref attributes; each element unwrapped to its entity id. */
protected def jsonOptNestedOptManyRefAttr(sb: StringBuffer, field: String, it: jIterator[_], tabs: Int): StringBuffer = {
  it.next match {
    case "__none__" | null => pair(sb, field, "null")
    case vs: jMap[_, _] =>
      quote(sb, field)
      sb.append(": [")
      var next = false
      val it1  = vs.asInstanceOf[jMap[String, jList[_]]].values.iterator.next.iterator
      while (it1.hasNext) {
        if (next) sb.append(",") else next = true
        sb.append(indent(tabs + 2))
        val refAttr = it1.next
          .asInstanceOf[jMap[_, _]].values.iterator.next
          .asInstanceOf[jLong].toLong
        sb.append(refAttr)
      }
      if (next) sb.append(indent(tabs + 1))
      sb.append("]")
    case vs =>
      quote(sb, field)
      sb.append(": [")
      var next = false
      val it1  = vs.asInstanceOf[jList[_]].iterator
      while (it1.hasNext) {
        if (next) sb.append(",") else next = true
        sb.append(indent(tabs + 2))
        val refAttr = it1.next
          .asInstanceOf[jMap[_, _]].values.iterator.next
          .asInstanceOf[jLong].toLong
        sb.append(refAttr)
      }
      if (next) sb.append(indent(tabs + 1))
      sb.append("]")
  }
}
// Map ===========================================================================================
// Map attributes are stored as "key@value" strings; each element is split on
// the first '@' and rendered as a JSON object entry.
// NOTE(review): the local `pair` Array shadows the `pair(sb, ...)` helper
// method within these bodies — intentional but easy to misread; confirm
// before renaming either.

/** Map attribute with quoted values. */
protected def jsonOptNestedMapQuoted(sb: StringBuffer, field: String, it: jIterator[_], tabs: Int): StringBuffer = {
  it.next match {
    case vs: jSet[_] =>
      quote(sb, field)
      sb.append(": {")
      var next = false
      val it1  = vs.iterator
      var pair = new Array[String](2)
      while (it1.hasNext) {
        if (next) sb.append(",") else next = true
        sb.append(indent(tabs + 2))
        // Split only on the first '@' so values may themselves contain '@'.
        pair = it1.next.toString.split("@", 2)
        quote(sb, pair(0))
        sb.append(": ")
        quote(sb, pair(1))
      }
      if (next) sb.append(indent(tabs + 1))
      sb.append("}")
    case vs =>
      quote(sb, field)
      sb.append(": {")
      var next = false
      val it1  = vs.asInstanceOf[jList[_]].iterator
      var pair = new Array[String](2)
      while (it1.hasNext) {
        if (next) sb.append(",") else next = true
        sb.append(indent(tabs + 2))
        pair = it1.next.toString.split("@", 2)
        quote(sb, pair(0))
        sb.append(": ")
        quote(sb, pair(1))
      }
      if (next) sb.append(indent(tabs + 1))
      sb.append("}")
  }
}

/** Map attribute with unquoted values. */
protected def jsonOptNestedMap(sb: StringBuffer, field: String, it: jIterator[_], tabs: Int): StringBuffer = {
  it.next match {
    case vs: jSet[_] =>
      quote(sb, field)
      sb.append(": {")
      var next = false
      val it1  = vs.iterator
      var pair = new Array[String](2)
      while (it1.hasNext) {
        if (next) sb.append(",") else next = true
        sb.append(indent(tabs + 2))
        pair = it1.next.toString.split("@", 2)
        quote(sb, pair(0))
        sb.append(": ")
        sb.append(pair(1))
      }
      if (next) sb.append(indent(tabs + 1))
      sb.append("}")
    case vs =>
      quote(sb, field)
      sb.append(": {")
      var next = false
      val it1  = vs.asInstanceOf[jList[_]].iterator
      var pair = new Array[String](2)
      while (it1.hasNext) {
        if (next) sb.append(",") else next = true
        sb.append(indent(tabs + 2))
        pair = it1.next.toString.split("@", 2)
        quote(sb, pair(0))
        sb.append(": ")
        sb.append(pair(1))
      }
      if (next) sb.append(indent(tabs + 1))
      sb.append("}")
  }
}
// Optional Map ===========================================================================================
// Optional map attributes: "__none__"/null renders as JSON null, otherwise
// "key@value" elements are rendered as object entries.
// NOTE(review): the inner `val it` shadows the `it` parameter (already
// consumed by `it.next`), and the `pair` Array shadows the `pair` method —
// both intentional here but worth renaming if this code is ever touched.

/** Optional map attribute with quoted values. */
protected def jsonOptNestedOptMapQuoted(sb: StringBuffer, field: String, it: jIterator[_], tabs: Int): StringBuffer = {
  it.next match {
    case "__none__" | null => pair(sb, field, "null")
    case m: jMap[_, _] =>
      quote(sb, field)
      sb.append(": {")
      var next = false
      val it   = m.values.iterator.next.asInstanceOf[jList[_]].iterator
      var pair = new Array[String](2)
      while (it.hasNext) {
        if (next) sb.append(",") else next = true
        sb.append(indent(tabs + 2))
        pair = it.next.toString.split("@", 2)
        quote(sb, pair(0))
        sb.append(": ")
        quote(sb, pair(1))
      }
      if (next) sb.append(indent(tabs + 1))
      sb.append("}")
    case v =>
      quote(sb, field)
      sb.append(": {")
      var next = false
      val it   = v.asInstanceOf[jList[_]].iterator
      var pair = new Array[String](2)
      while (it.hasNext) {
        if (next) sb.append(",") else next = true
        sb.append(indent(tabs + 2))
        pair = it.next.toString.split("@", 2)
        quote(sb, pair(0))
        sb.append(": ")
        quote(sb, pair(1))
      }
      if (next) sb.append(indent(tabs + 1))
      sb.append("}")
  }
}

/** Optional map attribute with unquoted values. */
protected def jsonOptNestedOptMap(sb: StringBuffer, field: String, it: jIterator[_], tabs: Int): StringBuffer = {
  it.next match {
    case "__none__" | null => pair(sb, field, "null")
    case m: jMap[_, _] =>
      quote(sb, field)
      sb.append(": {")
      var next = false
      val it   = m.values.iterator.next.asInstanceOf[jList[_]].iterator
      var pair = new Array[String](2)
      while (it.hasNext) {
        if (next) sb.append(",") else next = true
        sb.append(indent(tabs + 2))
        pair = it.next.toString.split("@", 2)
        quote(sb, pair(0))
        sb.append(": ")
        sb.append(pair(1))
      }
      if (next) sb.append(indent(tabs + 1))
      sb.append("}")
    case v =>
      quote(sb, field)
      sb.append(": {")
      var next = false
      val it   = v.asInstanceOf[jList[_]].iterator
      var pair = new Array[String](2)
      while (it.hasNext) {
        if (next) sb.append(",") else next = true
        sb.append(indent(tabs + 2))
        pair = it.next.toString.split("@", 2)
        quote(sb, pair(0))
        sb.append(": ")
        sb.append(pair(1))
      }
      if (next) sb.append(indent(tabs + 1))
      sb.append("}")
  }
}
}
|
isuhao/RaidenFree
|
Classes/Bmob/baseobject/bmobcloud.cpp
|
#include "bmobcloud.h"
#include "network/HttpClient.h"
#include "network/HttpResponse.h"
#include "Common/Macro.h"
using namespace network;
// Default-constructed; all request state is set per-call in execCloudCode().
BmobCloud::BmobCloud(){

}
BmobCloud::~BmobCloud(){

}
// Parameterless overload intentionally does nothing; the typed overload
// below performs the actual request.
void BmobCloud::send(){

}
// Builds and dispatches the HTTP request for the current m_url/m_mapData
// state, serializing the parameters to JSON.  The response is delivered
// asynchronously to onHttpRequestCompleted.
void BmobCloud::send(network::HttpRequest::Type type){
    HttpRequest* req = new HttpRequest;
    req->setUrl(this->m_url.c_str());
    //req->setTag(m_sTag.c_str());
    req->setResponseCallback(this, cocos2d::SEL_CallFuncND(&BmobCloud::onHttpRequestCompleted));
    req->setRequestType(type);
    req->setHeaders(getHeader());
    //set request header
    Json::Value params;
    std::string data;
    this->enJson(&params);
    data = params.toStyledString();
    // jsoncpp renders an unset Value as "null..."; the server expects an
    // empty JSON object instead.
    if (data.substr(0,4) == "null") {
        /* code */
        data = "{}";
    }
    cout<<data<<":"<<endl;
    // NOTE(review): strlen(data.c_str()) truncates at an embedded NUL;
    // data.size() would be the safer equivalent — confirm and switch.
    req->setRequestData(data.c_str(), strlen(data.c_str()));
    cout<<"request data is:"<<data<<endl;
    DEBUG_LOG("send data: %s", data.c_str());
    // NOTE(review): cocos2d-x timeouts are in seconds — 3000 looks like it
    // was meant as milliseconds; confirm intended value.
    HttpClient::getInstance()->setTimeoutForConnect(3000);
    HttpClient::getInstance()->setTimeoutForRead(3000);
    HttpClient::getInstance()->send(req);
    req->release();
}
// Response callback for send(): forwards failures (transport errors and
// invalid JSON) to onCloudFailure and well-formed responses to
// onCloudSuccess.  The delegate is null-checked on every path — the original
// only checked it on the failure path and would crash on success with no
// delegate set.
void BmobCloud::onHttpRequestCompleted(cocos2d::Node *pSender,void *data){
    HttpResponse *response = (HttpResponse *)data;
    if (!response->isSucceed()) {
        int errorCode = response->getResponseCode();
        string errorInfo = response->getErrorBuffer();
        if (this->m_delegate != NULL) {
            this->m_delegate->onCloudFailure(errorCode,errorInfo.c_str());
        }
        return;
    }
    std::vector<char> *buffer = response->getResponseData();
    std::string str((*buffer).begin(),(*buffer).end());
    DEBUG_LOG("request success: %s", str.c_str());
    Json::Reader reader;
    Json::Value value;
    if (!reader.parse(str, value)) {
        // Malformed body: report it instead of silently dropping the result.
        if (this->m_delegate != NULL) {
            this->m_delegate->onCloudFailure(-1, "response is not valid JSON");
        }
        return;
    }
    if (this->m_delegate != NULL) {
        this->m_delegate->onCloudSuccess(str.c_str());
    }
}
// Lazily builds and caches the HTTP headers shared by every cloud request:
// the Bmob application credentials plus the JSON content type.
vector<string> BmobCloud::getHeader(){
    if (!this->m_header.empty()) {
        return this->m_header;
    }
    vector<string> headers;
    headers.push_back("X-Bmob-Application-Id:"+BmobSDKInit::APP_ID);
    headers.push_back("X-Bmob-REST-API-Key:"+BmobSDKInit::APP_KEY);
    headers.push_back("Content-Type: application/json");
    this->m_header = headers;
    return this->m_header;
}
// Executes (POST), deletes (DELETE) or creates (PUT) a Bmob cloud-code
// function named cloudName, reporting the result to delegate.
//  - cloudName: function name; may be empty only for EXEC_Create.
//  - param:     request parameters; for EXEC_Create exactly one entry
//               (the new function's code) is required.
// Silently returns if the SDK is not initialized or preconditions fail.
void BmobCloud::execCloudCode(string cloudName, std::map<string, Ref*> param,
        BmobCloudDelegate *delegate, BmobCloud::EXEC_Type type /*= EXEC_Type::EXEC_Exec*/){
    if (!BmobSDKInit::isInitialize()) {
        return;
    }
    // A function name is required except when creating new cloud code.
    if (cloudName.empty() && type != EXEC_Type::EXEC_Create) {
        return;
    }
    // Assignment replaces the old value, so the prior clear() was redundant.
    this->m_url = BmobSDKInit::CLOUD_CODE_URL;
    this->m_url += cloudName;
    this->m_delegate = delegate;

    // All case labels are now qualified consistently with EXEC_Type::
    // (the original mixed qualified and bare labels, which only compiles
    // if the enum is unscoped).
    switch (type) {
        case EXEC_Type::EXEC_Exec:
            if (!param.empty()) {
                m_mapData = param;
            }
            this->send(HttpRequest::Type::POST);
            break;
        case EXEC_Type::EXEC_Delete:
            this->send(HttpRequest::Type::DELETE);
            break;
        case EXEC_Type::EXEC_Create:
            // Creating cloud code requires exactly one name->content entry.
            if (param.size() != 1) {
                return;
            }
            m_mapData = param;
            this->send(HttpRequest::Type::PUT);
            break;
        default:
            break;
    }
}
|
enriqueescobar-askida/Kinito.Ruby.Patterns
|
chap08/ex2_button_with_cmd_demo.rb
|
#!/usr/bin/env ruby
# Demo for the Command pattern (chapter 8, example 2): constructs a
# SlickButton wired to a SaveCommand and pretty-prints the button.
# `example` (from ../example) evaluates and displays the quoted snippet.
require '../example'
require 'ex2_button_with_cmd'
example %q{
save_button = SlickButton.new( SaveCommand.new )
pp save_button
}
|
thomasregnet/kleinodien
|
db/migrate/20150303192009_index_compilation_identifiers_on_code_disambiguation.rb
|
<gh_stars>0
# Adds a unique index on compilation_identifiers covering
# (compilation_release_id, identifier_type_id, code, LOWER(disambiguation)).
# Raw DDL is used because the index includes the LOWER() expression.
class IndexCompilationIdentifiersOnCodeDisambiguation < ActiveRecord::Migration[4.2]
  def change
    reversible do |direction|
      # Forward: create the expression index via raw SQL.
      direction.up do
        execute <<-DDL
          CREATE UNIQUE INDEX
            index_compilation_identifiers_on_code_disambiguation
            ON compilation_identifiers
            (compilation_release_id, identifier_type_id,
             code, LOWER(disambiguation));
        DDL
      end
      # Rollback: drop the index by name.
      direction.down do
        remove_index :compilation_identifiers,
                     name: :index_compilation_identifiers_on_code_disambiguation
      end
    end
  end
end
|
meunice/azure-sdk-for-java
|
sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/TextAnalyticsClient.java
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.ai.textanalytics;
import com.azure.ai.textanalytics.models.AnalyzeActionsResult;
import com.azure.ai.textanalytics.models.AnalyzeActionsOperationDetail;
import com.azure.ai.textanalytics.models.AnalyzeActionsOptions;
import com.azure.ai.textanalytics.models.AnalyzeHealthcareEntitiesOperationDetail;
import com.azure.ai.textanalytics.models.AnalyzeHealthcareEntitiesOptions;
import com.azure.ai.textanalytics.models.AnalyzeSentimentOptions;
import com.azure.ai.textanalytics.models.CategorizedEntity;
import com.azure.ai.textanalytics.models.CategorizedEntityCollection;
import com.azure.ai.textanalytics.models.DetectLanguageInput;
import com.azure.ai.textanalytics.models.DetectedLanguage;
import com.azure.ai.textanalytics.models.DocumentSentiment;
import com.azure.ai.textanalytics.models.KeyPhrasesCollection;
import com.azure.ai.textanalytics.models.LinkedEntity;
import com.azure.ai.textanalytics.models.LinkedEntityCollection;
import com.azure.ai.textanalytics.models.PiiEntityCollection;
import com.azure.ai.textanalytics.models.RecognizeEntitiesOptions;
import com.azure.ai.textanalytics.models.RecognizeLinkedEntitiesOptions;
import com.azure.ai.textanalytics.models.RecognizePiiEntitiesOptions;
import com.azure.ai.textanalytics.models.TextAnalyticsActions;
import com.azure.ai.textanalytics.models.TextAnalyticsError;
import com.azure.ai.textanalytics.models.TextAnalyticsException;
import com.azure.ai.textanalytics.models.TextAnalyticsRequestOptions;
import com.azure.ai.textanalytics.models.TextDocumentInput;
import com.azure.ai.textanalytics.util.AnalyzeHealthcareEntitiesResultCollection;
import com.azure.ai.textanalytics.util.AnalyzeSentimentResultCollection;
import com.azure.ai.textanalytics.util.DetectLanguageResultCollection;
import com.azure.ai.textanalytics.util.ExtractKeyPhrasesResultCollection;
import com.azure.ai.textanalytics.util.RecognizeEntitiesResultCollection;
import com.azure.ai.textanalytics.util.RecognizeLinkedEntitiesResultCollection;
import com.azure.ai.textanalytics.util.RecognizePiiEntitiesResultCollection;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceClient;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.Response;
import com.azure.core.util.Context;
import com.azure.core.util.polling.SyncPoller;
import java.util.Objects;
import static com.azure.ai.textanalytics.implementation.Utility.inputDocumentsValidation;
import static com.azure.ai.textanalytics.implementation.Utility.mapByIndex;
/**
* This class provides a synchronous client that contains all the operations that apply to Azure Text Analytics.
* Operations allowed by the client are language detection, entities recognition, linked entities recognition,
* key phrases extraction, and sentiment analysis of a document or a list of documents.
*
* <p><strong>Instantiating a synchronous Text Analytics Client</strong></p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.instantiation}
*
* <p>View {@link TextAnalyticsClientBuilder this} for additional ways to construct the client.</p>
*
* @see TextAnalyticsClientBuilder
*/
@ServiceClient(builder = TextAnalyticsClientBuilder.class)
public final class TextAnalyticsClient {
private final TextAnalyticsAsyncClient client;
    /**
     * Creates a {@code TextAnalyticsClient} that sends requests to the Text Analytics service's endpoint.
     * Each service call goes through the {@link TextAnalyticsClientBuilder#pipeline http pipeline}.
     *
     * @param client The {@link TextAnalyticsAsyncClient} that this synchronous client routes its requests through.
     */
    TextAnalyticsClient(TextAnalyticsAsyncClient client) {
        this.client = client;
    }
    /**
     * Gets the default country hint code.
     *
     * @return The default country hint code.
     */
    public String getDefaultCountryHint() {
        return client.getDefaultCountryHint();
    }

    /**
     * Gets the default language configured when the builder was set up.
     *
     * @return The default language.
     */
    public String getDefaultLanguage() {
        return client.getDefaultLanguage();
    }
    /**
     * Returns the detected language and a confidence score between zero and one. Scores close to one indicate 100%
     * certainty that the identified language is true.
     *
     * This method uses the default country hint set in
     * {@link TextAnalyticsClientBuilder#defaultCountryHint(String)}. If none is specified, the service uses 'US' as
     * the country hint.
     *
     * <p><strong>Code Sample</strong></p>
     * <p>Detects the language of a single document.</p>
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.detectLanguage#String}
     *
     * @param document The document to be analyzed.
     * For text length limits, maximum batch size, and supported text encoding, see
     * <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/overview#data-limits">data limits</a>.
     *
     * @return The {@link DetectedLanguage detected language} of the document.
     *
     * @throws NullPointerException if {@code document} is null.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public DetectedLanguage detectLanguage(String document) {
        // Delegate to the two-argument overload with the builder's default hint.
        return detectLanguage(document, client.getDefaultCountryHint());
    }

    /**
     * Returns the detected language and a confidence score between zero and one.
     * Scores close to one indicate 100% certainty that the identified language is true.
     *
     * <p><strong>Code Sample</strong></p>
     * <p>Detects the language of documents with a provided country hint.</p>
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.detectLanguage#String-String}
     *
     * @param document The document to be analyzed.
     * For text length limits, maximum batch size, and supported text encoding, see
     * <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/overview#data-limits">data limits</a>.
     * @param countryHint Accepts two letter country codes specified by ISO 3166-1 alpha-2. Defaults to "US" if not
     * specified. To remove this behavior you can reset this parameter by setting this value to empty string
     * {@code countryHint} = "" or "none".
     *
     * @return The {@link DetectedLanguage detected language} of the document.
     *
     * @throws NullPointerException if {@code document} is null.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public DetectedLanguage detectLanguage(String document, String countryHint) {
        // Synchronous facade: block on the async client's result.
        return client.detectLanguage(document, countryHint).block();
    }
    /**
     * Detects the language for a batch of documents with the provided country hint and request options.
     *
     * <p><strong>Code Sample</strong></p>
     * <p>Detects the language in a list of documents with a provided country hint and request options.</p>
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.detectLanguageBatch#Iterable-String-TextAnalyticsRequestOptions}
     *
     * @param documents The list of documents to detect languages for.
     * For text length limits, maximum batch size, and supported text encoding, see
     * <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/overview#data-limits">data limits</a>.
     * @param countryHint Accepts two letter country codes specified by ISO 3166-1 alpha-2. Defaults to "US" if not
     * specified. To remove this behavior you can reset this parameter by setting this value to empty string
     * {@code countryHint} = "" or "none".
     * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
     * and show statistics.
     *
     * @return A {@link DetectLanguageResultCollection}.
     *
     * @throws NullPointerException if {@code documents} is null.
     * @throws IllegalArgumentException if {@code documents} is empty.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public DetectLanguageResultCollection detectLanguageBatch(
        Iterable<String> documents, String countryHint, TextAnalyticsRequestOptions options) {
        // Fail fast on null/empty input before issuing the service call.
        inputDocumentsValidation(documents);
        return client.detectLanguageBatch(documents, countryHint, options).block();
    }

    /**
     * Detects the language for a batch of {@link DetectLanguageInput document} with provided request options.
     *
     * <p><strong>Code Sample</strong></p>
     * <p>Detects the languages with http response in a list of {@link DetectLanguageInput document} with provided
     * request options.</p>
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.detectLanguageBatch#Iterable-TextAnalyticsRequestOptions-Context}
     *
     * @param documents The list of {@link DetectLanguageInput documents} to be analyzed.
     * For text length limits, maximum batch size, and supported text encoding, see
     * <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/overview#data-limits">data limits</a>.
     * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
     * and show statistics.
     * @param context Additional context that is passed through the Http pipeline during the service call.
     *
     * @return A {@link Response} that contains a {@link DetectLanguageResultCollection}.
     *
     * @throws NullPointerException if {@code documents} is null.
     * @throws IllegalArgumentException if {@code documents} is empty.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<DetectLanguageResultCollection> detectLanguageBatchWithResponse(
        Iterable<DetectLanguageInput> documents, TextAnalyticsRequestOptions options, Context context) {
        inputDocumentsValidation(documents);
        // Bypass the public async facade to thread Context through the pipeline.
        return client.detectLanguageAsyncClient.detectLanguageBatchWithContext(documents, options, context).block();
    }
    // Categorized Entity
    /**
     * Returns a list of general categorized entities in the provided document.
     *
     * For a list of supported entity types, check: <a href="https://aka.ms/taner">this</a>
     *
     * This method uses the default language that can be set by using method
     * {@link TextAnalyticsClientBuilder#defaultLanguage(String)}. If none is specified, the service uses 'en' as
     * the language.
     *
     * <p><strong>Code Sample</strong></p>
     * <p>Recognize the entities of documents</p>
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeCategorizedEntities#String}
     *
     * @param document The document to recognize entities for.
     * For text length limits, maximum batch size, and supported text encoding, see
     * <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/overview#data-limits">data limits</a>.
     *
     * @return A {@link CategorizedEntityCollection} contains a list of
     * {@link CategorizedEntity recognized categorized entities} and warnings.
     *
     * @throws NullPointerException if {@code document} is null.
     * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public CategorizedEntityCollection recognizeEntities(String document) {
        // Delegate to the two-argument overload with the builder's default language.
        return recognizeEntities(document, client.getDefaultLanguage());
    }

    /**
     * Returns a list of general categorized entities in the provided document with provided language code.
     *
     * For a list of supported entity types, check: <a href="https://aka.ms/taner">this</a>
     * For a list of enabled languages, check: <a href="https://aka.ms/talangs">this</a>
     *
     * <p><strong>Code Sample</strong></p>
     * <p>Recognizes the entities in a document with a provided language code.</p>
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeCategorizedEntities#String-String}
     *
     * @param document The document to recognize entities for.
     * For text length limits, maximum batch size, and supported text encoding, see
     * <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/overview#data-limits">data limits</a>.
     * @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for English as default.
     *
     * @return The {@link CategorizedEntityCollection} contains a list of
     * {@link CategorizedEntity recognized categorized entities} and warnings.
     *
     * @throws NullPointerException if {@code document} is null.
     * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public CategorizedEntityCollection recognizeEntities(String document, String language) {
        // Synchronous facade: block on the async client's result.
        return client.recognizeEntities(document, language).block();
    }
    /**
     * Returns a list of general categorized entities for the provided list of documents with provided language code
     * and request options.
     *
     * <p><strong>Code Sample</strong></p>
     * <p>Recognizes the entities in a list of documents with a provided language code and request options.</p>
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeCategorizedEntitiesBatch#Iterable-String-TextAnalyticsRequestOptions}
     *
     * @param documents A list of documents to recognize entities for.
     * For text length limits, maximum batch size, and supported text encoding, see
     * <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/overview#data-limits">data limits</a>.
     * @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for English as default.
     * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
     * and show statistics.
     *
     * @return A {@link RecognizeEntitiesResultCollection}.
     *
     * @throws NullPointerException if {@code documents} is null.
     * @throws IllegalArgumentException if {@code documents} is empty.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public RecognizeEntitiesResultCollection recognizeEntitiesBatch(
        Iterable<String> documents, String language, TextAnalyticsRequestOptions options) {
        // Fail fast on null/empty input before issuing the service call.
        inputDocumentsValidation(documents);
        return client.recognizeEntitiesBatch(documents, language, options).block();
    }
/**
* Returns a list of general categorized entities for the provided list of {@link TextDocumentInput document} with
* provided request options.
*
* <p><strong>Code Sample</strong></p>
* <p>Recognizes the entities with http response in a list of {@link TextDocumentInput document} with provided
* request options.</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeEntitiesBatch#Iterable-TextAnalyticsRequestOptions-Context}
*
* @param documents A list of {@link TextDocumentInput documents} to recognize entities for.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/overview#data-limits">data limits</a>.
* @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
* and show statistics.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A {@link Response} that contains a {@link RecognizeEntitiesResultCollection}.
*
* @throws NullPointerException if {@code documents} is null.
* @throws IllegalArgumentException if {@code documents} is empty.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<RecognizeEntitiesResultCollection> recognizeEntitiesBatchWithResponse(
Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {
inputDocumentsValidation(documents);
final RecognizeEntitiesOptions recognizeEntitiesOptions = new RecognizeEntitiesOptions();
String modelVersion = null;
boolean includeStatistics = false;
if (options != null) {
modelVersion = options.getModelVersion();
includeStatistics = options.isIncludeStatistics();
recognizeEntitiesOptions.setModelVersion(modelVersion).setIncludeStatistics(includeStatistics);
}
return client.recognizeEntityAsyncClient.recognizeEntitiesBatchWithContext(documents,
recognizeEntitiesOptions,
context).block();
}
    /**
     * Returns a list of general categorized entities for the provided list of {@link TextDocumentInput document} with
     * provided request options.
     *
     * <p><strong>Code Sample</strong></p>
     * <p>Recognizes the entities with http response in a list of {@link TextDocumentInput document} with provided
     * request options.</p>
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeEntitiesBatch#Iterable-RecognizeEntitiesOptions-Context}
     *
     * @param documents A list of {@link TextDocumentInput documents} to recognize entities for.
     * For text length limits, maximum batch size, and supported text encoding, see
     * <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/overview#data-limits">data limits</a>.
     * @param options The {@link RecognizeEntitiesOptions options} to configure the scoring model for documents
     * and show statistics.
     * @param context Additional context that is passed through the Http pipeline during the service call.
     *
     * @return A {@link Response} that contains a {@link RecognizeEntitiesResultCollection}.
     *
     * @throws NullPointerException if {@code documents} is null.
     * @throws IllegalArgumentException if {@code documents} is empty.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<RecognizeEntitiesResultCollection> recognizeEntitiesBatchWithResponse(
        Iterable<TextDocumentInput> documents, RecognizeEntitiesOptions options, Context context) {
        inputDocumentsValidation(documents);
        // Options type already matches the async client; delegate directly.
        return client.recognizeEntityAsyncClient.recognizeEntitiesBatchWithContext(documents, options, context).block();
    }
    // PII Entity
    /**
     * Returns a list of Personally Identifiable Information(PII) entities in the provided document.
     *
     * For a list of supported entity types, check: <a href="https://aka.ms/tanerpii">this</a>
     * For a list of enabled languages, check: <a href="https://aka.ms/talangs">this</a>. This method uses the
     * default language that is set using {@link TextAnalyticsClientBuilder#defaultLanguage(String)}. If none is
     * specified, the service uses 'en' as the language.
     *
     * <p><strong>Code Sample</strong></p>
     * <p>Recognize the PII entities details in a document.</p>
     *
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizePiiEntities#String}
     *
     * @param document The document to recognize PII entities details for.
     * For text length limits, maximum batch size, and supported text encoding, see
     * <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/overview#data-limits">data limits</a>.
     *
     * @return A {@link PiiEntityCollection recognized PII entities collection}.
     *
     * @throws NullPointerException if {@code document} is null.
     * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public PiiEntityCollection recognizePiiEntities(String document) {
        // Delegate to the two-argument overload with the builder's default language.
        return recognizePiiEntities(document, client.getDefaultLanguage());
    }

    /**
     * Returns a list of Personally Identifiable Information(PII) entities in the provided document
     * with provided language code.
     *
     * For a list of supported entity types, check: <a href="https://aka.ms/tanerpii">this</a>
     * For a list of enabled languages, check: <a href="https://aka.ms/talangs">this</a>
     *
     * <p><strong>Code Sample</strong></p>
     * <p>Recognizes the PII entities details in a document with a provided language code.</p>
     *
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizePiiEntities#String-String}
     *
     * @param document The document to recognize PII entities details for.
     * For text length limits, maximum batch size, and supported text encoding, see
     * <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/overview#data-limits">data limits</a>.
     * @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for English as default.
     *
     * @return The {@link PiiEntityCollection recognized PII entities collection}.
     *
     * @throws NullPointerException if {@code document} is null.
     * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public PiiEntityCollection recognizePiiEntities(String document, String language) {
        // Synchronous facade: block on the async client's result.
        return client.recognizePiiEntities(document, language).block();
    }

    /**
     * Returns a list of Personally Identifiable Information(PII) entities in the provided document
     * with provided language code.
     *
     * For a list of supported entity types, check: <a href="https://aka.ms/tanerpii">this</a>
     * For a list of enabled languages, check: <a href="https://aka.ms/talangs">this</a>
     *
     * <p><strong>Code Sample</strong></p>
     * <p>Recognizes the PII entities details in a document with a provided language code and
     * {@link RecognizePiiEntitiesOptions}.</p>
     *
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizePiiEntities#String-String-RecognizePiiEntitiesOptions}
     *
     * @param document The document to recognize PII entities details for.
     * For text length limits, maximum batch size, and supported text encoding, see
     * <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/overview#data-limits">data limits</a>.
     * @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for English as default.
     * @param options The additional configurable {@link RecognizePiiEntitiesOptions options} that may be passed when
     * recognizing PII entities.
     *
     * @return The {@link PiiEntityCollection recognized PII entities collection}.
     *
     * @throws NullPointerException if {@code document} is null.
     * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public PiiEntityCollection recognizePiiEntities(String document, String language,
        RecognizePiiEntitiesOptions options) {
        // Synchronous facade: block on the async client's result.
        return client.recognizePiiEntities(document, language, options).block();
    }
/**
* Returns a list of Personally Identifiable Information(PII) entities for the provided list of documents with
* provided language code and request options.
*
* <p><strong>Code Sample</strong></p>
* <p>Recognizes the PII entities details in a list of documents with a provided language code
* and request options.</p>
*
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizePiiEntitiesBatch#Iterable-String-RecognizePiiEntitiesOptions}
*
* @param documents A list of documents to recognize PII entities for.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/overview#data-limits">data limits</a>.
* @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for English as default.
* @param options The additional configurable {@link RecognizePiiEntitiesOptions options} that may be passed when
* recognizing PII entities.
*
* @return A {@link RecognizePiiEntitiesResultCollection}.
*
* @throws NullPointerException if {@code documents} is null.
* @throws IllegalArgumentException if {@code documents} is empty.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public RecognizePiiEntitiesResultCollection recognizePiiEntitiesBatch(
Iterable<String> documents, String language, RecognizePiiEntitiesOptions options) {
return client.recognizePiiEntitiesBatch(documents, language, options).block();
}
/**
* Returns a list of Personally Identifiable Information(PII) entities for the provided list of
* {@link TextDocumentInput document} with provided request options.
*
* <p><strong>Code Sample</strong></p>
* <p>Recognizes the PII entities details with http response in a list of {@link TextDocumentInput document}
* with provided request options.</p>
*
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizePiiEntitiesBatch#Iterable-RecognizePiiEntitiesOptions-Context}
*
* @param documents A list of {@link TextDocumentInput documents} to recognize PII entities for.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/overview#data-limits">data limits</a>.
* @param options The additional configurable {@link RecognizePiiEntitiesOptions options} that may be passed when
* recognizing PII entities.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A {@link Response} that contains a {@link RecognizePiiEntitiesResultCollection}.
*
* @throws NullPointerException if {@code documents} is null.
* @throws IllegalArgumentException if {@code documents} is empty.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<RecognizePiiEntitiesResultCollection> recognizePiiEntitiesBatchWithResponse(
Iterable<TextDocumentInput> documents, RecognizePiiEntitiesOptions options, Context context) {
return client.recognizePiiEntityAsyncClient.recognizePiiEntitiesBatchWithContext(documents, options,
context).block();
}
    // Linked Entities
    /**
     * Returns a list of recognized entities with links to a well-known knowledge base for the provided document.
     * See <a href="https://aka.ms/talangs">this</a> for supported languages in Text Analytics API.
     *
     * This method uses the default language that can be set by using method
     * {@link TextAnalyticsClientBuilder#defaultLanguage(String)}. If none is specified, the service uses 'en' as
     * the language.
     *
     * <p><strong>Code Sample</strong></p>
     * <p>Recognize the linked entities of documents</p>
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeLinkedEntities#String}
     *
     * @param document The document to recognize linked entities for.
     * For text length limits, maximum batch size, and supported text encoding, see
     * <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/overview#data-limits">data limits</a>.
     *
     * @return A {@link LinkedEntityCollection} contains a list of {@link LinkedEntity recognized linked entities}.
     *
     * @throws NullPointerException if {@code document} is null.
     * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public LinkedEntityCollection recognizeLinkedEntities(String document) {
        // Delegate to the two-argument overload with the builder's default language.
        return recognizeLinkedEntities(document, client.getDefaultLanguage());
    }

    /**
     * Returns a list of recognized entities with links to a well-known knowledge base for the provided document with
     * language code.
     *
     * See <a href="https://aka.ms/talangs">this</a> for supported languages in Text Analytics API.
     *
     * <p><strong>Code Sample</strong></p>
     * <p>Recognizes the linked entities in a document with a provided language code.</p>
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeLinkedEntities#String-String}
     *
     * @param document The document to recognize linked entities for.
     * For text length limits, maximum batch size, and supported text encoding, see
     * <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/overview#data-limits">data limits</a>.
     * @param language The 2 letter ISO 639-1 representation of language for the document. If not set, uses "en" for
     * English as default.
     *
     * @return A {@link LinkedEntityCollection} contains a list of {@link LinkedEntity recognized linked entities}.
     *
     * @throws NullPointerException if {@code document} is null.
     * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public LinkedEntityCollection recognizeLinkedEntities(String document, String language) {
        // Explicit null check here (the async client would surface it later).
        Objects.requireNonNull(document, "'document' cannot be null.");
        return client.recognizeLinkedEntities(document, language).block();
    }
    /**
     * Returns a list of recognized entities with links to a well-known knowledge base for the list of documents with
     * provided language code and request options.
     *
     * See <a href="https://aka.ms/talangs">this</a> for supported languages in Text Analytics API.
     *
     * <p><strong>Code Sample</strong></p>
     * <p>Recognizes the linked entities in a list of documents with a provided language code and request options.
     * </p>
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeLinkedEntitiesBatch#Iterable-String-TextAnalyticsRequestOptions}
     *
     * @param documents A list of documents to recognize linked entities for.
     * For text length limits, maximum batch size, and supported text encoding, see
     * <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/overview#data-limits">data limits</a>.
     * @param language The 2 letter ISO 639-1 representation of language for the documents. If not set, uses "en" for
     * English as default.
     * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
     * and show statistics.
     *
     * @return A {@link RecognizeLinkedEntitiesResultCollection}.
     *
     * @throws NullPointerException if {@code documents} is null.
     * @throws IllegalArgumentException if {@code documents} is empty.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public RecognizeLinkedEntitiesResultCollection recognizeLinkedEntitiesBatch(
        Iterable<String> documents, String language, TextAnalyticsRequestOptions options) {
        // Fail fast on null/empty input before issuing the service call.
        inputDocumentsValidation(documents);
        return client.recognizeLinkedEntitiesBatch(documents, language, options).block();
    }
/**
 * Returns a list of recognized entities with links to a well-known knowledge base for the batch of
 * {@link TextDocumentInput documents} with request options.
 *
 * See <a href="https://aka.ms/talangs">this</a> for supported languages in Text Analytics API.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Recognizes the linked entities with http response in a list of {@link TextDocumentInput} with request options.
 * </p>
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeLinkedEntitiesBatch#Iterable-TextAnalyticsRequestOptions-Context}
 *
 * @param documents A list of {@link TextDocumentInput documents} to recognize linked entities for.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/overview#data-limits">data limits</a>.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
 * and show statistics.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 *
 * @return A {@link Response} that contains a {@link RecognizeLinkedEntitiesResultCollection}.
 *
 * @throws NullPointerException if {@code documents} is null.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<RecognizeLinkedEntitiesResultCollection> recognizeLinkedEntitiesBatchWithResponse(
    Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {
    inputDocumentsValidation(documents);
    // Translate the generic request options into the linked-entity specific options bean,
    // applying the same defaults (null model version, statistics off) when options is null.
    return client.recognizeLinkedEntityAsyncClient.recognizeLinkedEntitiesBatchWithContext(documents,
        new RecognizeLinkedEntitiesOptions()
            .setModelVersion(options == null ? null : options.getModelVersion())
            .setIncludeStatistics(options != null && options.isIncludeStatistics()),
        context).block();
}
/**
 * Returns a list of recognized entities with links to a well-known knowledge base for the batch of
 * {@link TextDocumentInput documents} with request options.
 *
 * See <a href="https://aka.ms/talangs">this</a> for supported languages in Text Analytics API.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Recognizes the linked entities with http response in a list of {@link TextDocumentInput} with request options.
 * </p>
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeLinkedEntitiesBatch#Iterable-RecognizeLinkedEntitiesOptions-Context}
 *
 * @param documents A list of {@link TextDocumentInput documents} to recognize linked entities for.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/overview#data-limits">data limits</a>.
 * @param options The {@link RecognizeLinkedEntitiesOptions options} to configure the scoring model for documents
 * and show statistics.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 *
 * @return A {@link Response} that contains a {@link RecognizeLinkedEntitiesResultCollection}.
 *
 * @throws NullPointerException if {@code documents} is null.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<RecognizeLinkedEntitiesResultCollection> recognizeLinkedEntitiesBatchWithResponse(
    Iterable<TextDocumentInput> documents, RecognizeLinkedEntitiesOptions options, Context context) {
    inputDocumentsValidation(documents);
    final Response<RecognizeLinkedEntitiesResultCollection> response =
        client.recognizeLinkedEntityAsyncClient
            .recognizeLinkedEntitiesBatchWithContext(documents, options, context)
            .block();
    return response;
}
// Key Phrase
/**
 * Returns a list of strings denoting the key phrases in the document.
 *
 * This method uses the default language that can be set via
 * {@link TextAnalyticsClientBuilder#defaultLanguage(String)}. If none is specified, the service
 * falls back to 'en'.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Extracts key phrases of documents</p>
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.extractKeyPhrases#String}
 *
 * @param document The document to be analyzed.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/overview#data-limits">data limits</a>.
 *
 * @return A {@link KeyPhrasesCollection} contains a list of extracted key phrases.
 *
 * @throws NullPointerException if {@code document} is null.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public KeyPhrasesCollection extractKeyPhrases(String document) {
    // Delegate to the language-aware overload with the client-wide default language.
    final String defaultLanguage = client.getDefaultLanguage();
    return extractKeyPhrases(document, defaultLanguage);
}
/**
 * Returns a list of strings denoting the key phrases in the document.
 * See <a href="https://aka.ms/talangs">this</a> for the list of enabled languages.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Extracts key phrases in a document with a provided language representation.</p>
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.extractKeyPhrases#String-String-Context}
 *
 * @param document The document to be analyzed.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/overview#data-limits">data limits</a>.
 * @param language The 2 letter ISO 639-1 representation of language for the document. If not set, uses "en" for
 * English as default.
 *
 * @return A {@link KeyPhrasesCollection} contains a list of extracted key phrases.
 *
 * @throws NullPointerException if {@code document} is null.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public KeyPhrasesCollection extractKeyPhrases(String document, String language) {
    Objects.requireNonNull(document, "'document' cannot be null.");
    final KeyPhrasesCollection keyPhrases = client.extractKeyPhrases(document, language).block();
    return keyPhrases;
}
/**
 * Returns a list of strings denoting the key phrases in the documents with provided language code and
 * request options.
 *
 * See <a href="https://aka.ms/talangs">this</a> for the list of enabled languages.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Extracts key phrases in a list of documents with a provided language code and request options.</p>
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.extractKeyPhrasesBatch#Iterable-String-TextAnalyticsRequestOptions}
 *
 * @param documents A list of documents to be analyzed.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/overview#data-limits">data limits</a>.
 * @param language The 2 letter ISO 639-1 representation of language for the documents. If not set, uses "en" for
 * English as default.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
 * and show statistics.
 *
 * @return A {@link ExtractKeyPhrasesResultCollection}.
 *
 * @throws NullPointerException if {@code documents} is null.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public ExtractKeyPhrasesResultCollection extractKeyPhrasesBatch(
    Iterable<String> documents, String language, TextAnalyticsRequestOptions options) {
    // Reject null/empty batches up front.
    inputDocumentsValidation(documents);
    final ExtractKeyPhrasesResultCollection resultCollection =
        client.extractKeyPhrasesBatch(documents, language, options).block();
    return resultCollection;
}
/**
 * Returns a list of strings denoting the key phrases in a batch of {@link TextDocumentInput documents} with
 * request options.
 *
 * See <a href="https://aka.ms/talangs">this</a> for the list of enabled languages.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Extracts key phrases with http response in a list of {@link TextDocumentInput} with request options.</p>
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.extractKeyPhrasesBatch#Iterable-TextAnalyticsRequestOptions-Context}
 *
 * @param documents A list of {@link TextDocumentInput documents} to be analyzed.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/overview#data-limits">data limits</a>.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
 * and show statistics.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 *
 * @return A {@link Response} that contains a {@link ExtractKeyPhrasesResultCollection}.
 *
 * @throws NullPointerException if {@code documents} is null.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<ExtractKeyPhrasesResultCollection> extractKeyPhrasesBatchWithResponse(
    Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {
    inputDocumentsValidation(documents);
    final Response<ExtractKeyPhrasesResultCollection> response =
        client.extractKeyPhraseAsyncClient
            .extractKeyPhrasesBatchWithContext(documents, options, context)
            .block();
    return response;
}
// Sentiment
/**
 * Returns a sentiment prediction, as well as confidence scores for each sentiment label
 * (Positive, Negative, and Neutral) for the document and each sentence within it.
 *
 * This method uses the default language that can be set via
 * {@link TextAnalyticsClientBuilder#defaultLanguage(String)}. If none is specified, the service
 * falls back to 'en'.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Analyze the sentiments of documents</p>
 *
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.analyzeSentiment#String}
 *
 * @param document The document to be analyzed.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/overview#data-limits">data limits</a>.
 *
 * @return A {@link DocumentSentiment analyzed document sentiment} of the document.
 *
 * @throws NullPointerException if {@code document} is null.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public DocumentSentiment analyzeSentiment(String document) {
    // Delegate to the language-aware overload with the client-wide default language.
    final String defaultLanguage = client.getDefaultLanguage();
    return analyzeSentiment(document, defaultLanguage);
}
/**
 * Returns a sentiment prediction, as well as confidence scores for each sentiment label
 * (Positive, Negative, and Neutral) for the document and each sentence within it.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Analyze the sentiments in a document with a provided language representation.</p>
 *
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.analyzeSentiment#String-String}
 *
 * @param document The document to be analyzed.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/overview#data-limits">data limits</a>.
 * @param language The 2 letter ISO 639-1 representation of language for the document. If not set, uses "en" for
 * English as default.
 *
 * @return A {@link DocumentSentiment analyzed document sentiment} of the document.
 *
 * @throws NullPointerException if {@code document} is null.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public DocumentSentiment analyzeSentiment(String document, String language) {
    // Validate eagerly so the documented NullPointerException is thrown here, matching the
    // behavior of the sibling single-document methods (e.g. extractKeyPhrases).
    Objects.requireNonNull(document, "'document' cannot be null.");
    return client.analyzeSentiment(document, language).block();
}
/**
 * Returns a sentiment prediction, as well as confidence scores for each sentiment label (Positive, Negative, and
 * Neutral) for the document and each sentence within it. If the {@code includeOpinionMining} of
 * {@link AnalyzeSentimentOptions} set to true, the output will include the opinion mining results. It mines the
 * opinions of a sentence and conducts more granular analysis around the aspects in the text
 * (also known as aspect-based sentiment analysis).
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Analyze the sentiment and mine the opinions for each sentence in a document with a provided language
 * representation and {@link AnalyzeSentimentOptions} options.</p>
 *
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.analyzeSentiment#String-String-AnalyzeSentimentOptions}
 *
 * @param document The document to be analyzed.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/overview#data-limits">data limits</a>.
 * @param language The 2 letter ISO 639-1 representation of language for the document. If not set, uses "en" for
 * English as default.
 * @param options The additional configurable {@link AnalyzeSentimentOptions options} that may be passed when
 * analyzing sentiments.
 *
 * @return A {@link DocumentSentiment analyzed document sentiment} of the document.
 *
 * @throws NullPointerException if {@code document} is null.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public DocumentSentiment analyzeSentiment(String document, String language, AnalyzeSentimentOptions options) {
    // Validate eagerly so the documented NullPointerException is thrown here, consistent with
    // the other single-document methods in this client.
    Objects.requireNonNull(document, "'document' cannot be null.");
    return client.analyzeSentiment(document, language, options).block();
}
/**
 * Returns a sentiment prediction, as well as confidence scores for each sentiment label
 * (Positive, Negative, and Neutral) for the document and each sentence within it.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Analyze the sentiments in a list of documents with a provided language representation and request options.</p>
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.analyzeSentimentBatch#Iterable-String-TextAnalyticsRequestOptions}
 *
 * @param documents A list of documents to be analyzed.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/overview#data-limits">data limits</a>.
 * @param language The 2 letter ISO 639-1 representation of language for the documents. If not set, uses "en" for
 * English as default.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
 * and show statistics.
 *
 * @return A {@link AnalyzeSentimentResultCollection}.
 *
 * @throws NullPointerException if {@code documents} is null.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public AnalyzeSentimentResultCollection analyzeSentimentBatch(
    Iterable<String> documents, String language, TextAnalyticsRequestOptions options) {
    // Validate the batch up front, consistent with the other *Batch methods in this client
    // (recognizeLinkedEntitiesBatch, extractKeyPhrasesBatch) and with the documented @throws.
    inputDocumentsValidation(documents);
    return client.analyzeSentimentBatch(documents, language, options).block();
}
/**
 * Returns a sentiment prediction, as well as confidence scores for each sentiment label (Positive, Negative, and
 * Neutral) for the document and each sentence within it. If the {@code includeOpinionMining} of
 * {@link AnalyzeSentimentOptions} set to true, the output will include the opinion mining results. It mines the
 * opinions of a sentence and conducts more granular analysis around the aspects in the text
 * (also known as aspect-based sentiment analysis).
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Analyze the sentiments and mine the opinions for each sentence in a list of documents with a provided language
 * representation and {@link AnalyzeSentimentOptions} options.</p>
 *
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.analyzeSentimentBatch#Iterable-String-AnalyzeSentimentOptions}
 *
 * @param documents A list of documents to be analyzed.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/overview#data-limits">data limits</a>.
 * @param language The 2 letter ISO 639-1 representation of language for the documents. If not set, uses "en" for
 * English as default.
 * @param options The additional configurable {@link AnalyzeSentimentOptions options} that may be passed when
 * analyzing sentiments.
 *
 * @return A {@link AnalyzeSentimentResultCollection}.
 *
 * @throws NullPointerException if {@code documents} is null.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public AnalyzeSentimentResultCollection analyzeSentimentBatch(Iterable<String> documents,
    String language, AnalyzeSentimentOptions options) {
    // Validate the batch up front, consistent with the other *Batch methods in this client
    // and with the documented @throws.
    inputDocumentsValidation(documents);
    return client.analyzeSentimentBatch(documents, language, options).block();
}
/**
 * Returns a sentiment prediction, as well as confidence scores for each sentiment label
 * (Positive, Negative, and Neutral) for the document and each sentence within it.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Analyze sentiment in a list of {@link TextDocumentInput document} with provided request options.</p>
 *
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.analyzeSentimentBatch#Iterable-TextAnalyticsRequestOptions-Context}
 *
 * @param documents A list of {@link TextDocumentInput documents} to be analyzed.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/overview#data-limits">data limits</a>.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
 * and show statistics.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 *
 * @return A {@link Response} that contains a {@link AnalyzeSentimentResultCollection}.
 *
 * @throws NullPointerException if {@code documents} is null.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AnalyzeSentimentResultCollection> analyzeSentimentBatchWithResponse(
    Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {
    // Validate the batch up front, consistent with the other *WithResponse methods in this
    // client and with the documented @throws.
    inputDocumentsValidation(documents);
    // Map the generic request options onto the sentiment-specific options bean, preserving the
    // defaults (statistics off, null model version) when options is null.
    return client.analyzeSentimentAsyncClient.analyzeSentimentBatchWithContext(documents,
        new AnalyzeSentimentOptions()
            .setIncludeStatistics(options == null ? false : options.isIncludeStatistics())
            .setModelVersion(options == null ? null : options.getModelVersion()), context).block();
}
/**
 * Returns a sentiment prediction, as well as confidence scores for each sentiment label (Positive, Negative, and
 * Neutral) for the document and each sentence within it. If the {@code includeOpinionMining} of
 * {@link AnalyzeSentimentOptions} set to true, the output will include the opinion mining results. It mines the
 * opinions of a sentence and conducts more granular analysis around the aspects in the text
 * (also known as aspect-based sentiment analysis).
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Analyze sentiment and mine the opinions for each sentence in a list of
 * {@link TextDocumentInput document} with provided {@link AnalyzeSentimentOptions} options.</p>
 *
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.analyzeSentimentBatch#Iterable-AnalyzeSentimentOptions-Context}
 *
 * @param documents A list of {@link TextDocumentInput documents} to be analyzed.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/overview#data-limits">data limits</a>.
 * @param options The additional configurable {@link AnalyzeSentimentOptions options} that may be passed when
 * analyzing sentiments.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 *
 * @return A {@link Response} that contains a {@link AnalyzeSentimentResultCollection}.
 *
 * @throws NullPointerException if {@code documents} is null.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AnalyzeSentimentResultCollection> analyzeSentimentBatchWithResponse(
    Iterable<TextDocumentInput> documents, AnalyzeSentimentOptions options, Context context) {
    // Validate the batch up front, consistent with the other *WithResponse methods in this
    // client and with the documented @throws.
    inputDocumentsValidation(documents);
    return client.analyzeSentimentAsyncClient.analyzeSentimentBatchWithContext(documents, options, context).block();
}
/**
 * Analyze healthcare entities, entity data sources, and entity relations in a list of
 * {@link String documents} with provided request options.
 *
 * Note: In order to use this functionality, request to access public preview is required.
 * Azure Active Directory (AAD) is not currently supported. For more information see
 * <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/how-tos/text-analytics-for-health?tabs=ner#request-access-to-the-public-preview">this</a>.
 *
 * See <a href="https://aka.ms/talangs">this</a> supported languages in Text Analytics API.
 *
 * @param documents A list of documents to be analyzed.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/overview#data-limits">data limits</a>.
 * @param language The 2 letter ISO 639-1 representation of language for the documents. If not set, uses "en" for
 * English as default.
 * @param options The additional configurable {@link AnalyzeHealthcareEntitiesOptions options} that may be passed
 * when analyzing healthcare entities.
 * @return A {@link SyncPoller} that polls the analyze healthcare operation until it has completed, has failed,
 * or has been cancelled. The completed operation returns a {@link PagedIterable} of
 * {@link AnalyzeHealthcareEntitiesResultCollection}.
 *
 * @throws NullPointerException if {@code documents} is null.
 * @throws IllegalArgumentException if {@code documents} is empty.
 * @throws TextAnalyticsException If analyze operation fails.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
public SyncPoller<AnalyzeHealthcareEntitiesOperationDetail, PagedIterable<AnalyzeHealthcareEntitiesResultCollection>>
    beginAnalyzeHealthcareEntities(Iterable<String> documents, String language,
        AnalyzeHealthcareEntitiesOptions options) {
    // Wrap each plain string in a TextDocumentInput keyed by its position, tagging all of
    // them with the caller-provided language, then delegate to the typed overload.
    final Iterable<TextDocumentInput> documentInputs = mapByIndex(documents, (id, text) -> {
        final TextDocumentInput documentInput = new TextDocumentInput(id, text);
        documentInput.setLanguage(language);
        return documentInput;
    });
    return beginAnalyzeHealthcareEntities(documentInputs, options, Context.NONE);
}
/**
 * Analyze healthcare entities, entity data sources, and entity relations in a list of
 * {@link TextDocumentInput documents} with provided request options.
 *
 * Note: In order to use this functionality, request to access public preview is required.
 * Azure Active Directory (AAD) is not currently supported. For more information see
 * <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/how-tos/text-analytics-for-health?tabs=ner#request-access-to-the-public-preview">this</a>.
 *
 * See <a href="https://aka.ms/talangs">this</a> supported languages in Text Analytics API.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Analyze healthcare entities, entity data sources, and entity relations in a list of
 * {@link TextDocumentInput document} and provided request options to
 * show statistics.</p>
 *
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.beginAnalyzeHealthcareEntities#Iterable-AnalyzeHealthcareEntitiesOptions-Context}
 *
 * @param documents A list of {@link TextDocumentInput documents} to be analyzed.
 * @param options The additional configurable {@link AnalyzeHealthcareEntitiesOptions options} that may be passed
 * when analyzing healthcare entities.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 *
 * @return A {@link SyncPoller} that polls the analyze healthcare operation until it has completed, has failed,
 * or has been cancelled. The completed operation returns a {@link PagedIterable} of
 * {@link AnalyzeHealthcareEntitiesResultCollection}.
 *
 * @throws NullPointerException if {@code documents} is null.
 * @throws IllegalArgumentException if {@code documents} is empty.
 * @throws TextAnalyticsException If analyze operation fails.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
public SyncPoller<AnalyzeHealthcareEntitiesOperationDetail,
    PagedIterable<AnalyzeHealthcareEntitiesResultCollection>>
    beginAnalyzeHealthcareEntities(Iterable<TextDocumentInput> documents, AnalyzeHealthcareEntitiesOptions options,
        Context context) {
    // Bridge the async long-running operation to a blocking SyncPoller.
    return client.analyzeHealthcareEntityAsyncClient
        .beginAnalyzeHealthcarePagedIterable(documents, options, context)
        .getSyncPoller();
}
/**
 * Execute actions, such as, entities recognition, PII entities recognition and key phrases extraction for a list of
 * {@link String documents} with provided request options.
 *
 * See <a href="https://aka.ms/talangs">this</a> supported languages in Text Analytics API.
 *
 * <p><strong>Code Sample</strong></p>
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.beginAnalyzeActions#Iterable-TextAnalyticsActions-String-AnalyzeActionsOptions}
 *
 * @param documents A list of documents to be analyzed.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://docs.microsoft.com/azure/cognitive-services/text-analytics/overview#data-limits">data limits</a>.
 * @param actions The {@link TextAnalyticsActions actions} that contains all actions to be executed.
 * An action is one task of execution, such as a single task of 'Key Phrases Extraction' on the given document
 * inputs.
 * @param language The 2 letter ISO 639-1 representation of language for the documents. If not set, uses "en" for
 * English as default.
 * @param options The additional configurable {@link AnalyzeActionsOptions options} that may be passed when
 * analyzing a collection of actions.
 *
 * @return A {@link SyncPoller} that polls the analyze a collection of actions operation until it has completed,
 * has failed, or has been cancelled. The completed operation returns a {@link PagedIterable} of
 * {@link AnalyzeActionsResult}.
 *
 * @throws NullPointerException if {@code documents} is null.
 * @throws IllegalArgumentException if {@code documents} is empty.
 * @throws TextAnalyticsException If analyze operation fails.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
public SyncPoller<AnalyzeActionsOperationDetail, PagedIterable<AnalyzeActionsResult>> beginAnalyzeActions(
    Iterable<String> documents, TextAnalyticsActions actions, String language, AnalyzeActionsOptions options) {
    // Wrap each plain string in a TextDocumentInput keyed by its position, tagging all of
    // them with the caller-provided language.
    final Iterable<TextDocumentInput> documentInputs = mapByIndex(documents, (id, text) -> {
        final TextDocumentInput documentInput = new TextDocumentInput(id, text);
        documentInput.setLanguage(language);
        return documentInput;
    });
    return client.analyzeActionsAsyncClient
        .beginAnalyzeActionsIterable(documentInputs, actions, options, Context.NONE)
        .getSyncPoller();
}
/**
 * Execute actions, such as, entities recognition, PII entities recognition and key phrases extraction for a list of
 * {@link TextDocumentInput documents} with provided request options.
 *
 * See <a href="https://aka.ms/talangs">this</a> supported languages in Text Analytics API.
 *
 * <p><strong>Code Sample</strong></p>
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.beginAnalyzeActions#Iterable-TextAnalyticsActions-AnalyzeActionsOptions-Context}
 *
 * @param documents A list of {@link TextDocumentInput documents} to be analyzed.
 * @param actions The {@link TextAnalyticsActions actions} that contains all actions to be executed.
 * An action is one task of execution, such as a single task of 'Key Phrases Extraction' on the given document
 * inputs.
 * @param options The additional configurable {@link AnalyzeActionsOptions options} that may be passed when
 * analyzing a collection of actions.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 *
 * @return A {@link SyncPoller} that polls the analyze a collection of actions operation until it has completed,
 * has failed, or has been cancelled. The completed operation returns a {@link PagedIterable} of
 * {@link AnalyzeActionsResult}.
 *
 * @throws NullPointerException if {@code documents} is null.
 * @throws IllegalArgumentException if {@code documents} is empty.
 * @throws TextAnalyticsException If analyze operation fails.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
public SyncPoller<AnalyzeActionsOperationDetail, PagedIterable<AnalyzeActionsResult>> beginAnalyzeActions(
    Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeActionsOptions options,
    Context context) {
    // Bridge the async long-running operation to a blocking SyncPoller.
    return client.analyzeActionsAsyncClient
        .beginAnalyzeActionsIterable(documents, actions, options, context)
        .getSyncPoller();
}
}
|
ramaneswaran/mantisshrimp
|
tests/models/mantis_rcnn/test_rcnn_build_training_sample.py
|
<gh_stars>0
import pytest
from mantisshrimp import *
from mantisshrimp.imports import first, torch, tensor, Tensor
_fake_box = [0, 1, 2, 3]
@pytest.fixture(scope="module")
def data_sample(record):
    # Module-scoped: the prepared record is computed once and shared by all tests below.
    # `record` is itself a fixture defined elsewhere (conftest) — assumed to be a raw
    # annotation record; TODO confirm against conftest.
    return default_prepare_record(record)
def test_build_training_sample_maskrcnn(data_sample):
    """Mask R-CNN training sample: float CHW image plus a target dict with masks."""
    x, y = MantisMaskRCNN.build_training_sample(**data_sample)
    # Image tensor: float32, channels-first, 427x640.
    assert x.dtype == torch.float32
    assert x.shape == (3, 427, 640)
    # Target dict carries ids, labels, boxes and per-instance masks.
    assert isinstance(y, dict)
    assert set(y.keys()) == {"image_id", "labels", "boxes", "masks"}
    assert y["image_id"].dtype == torch.int64
    assert y["image_id"] == 0
    assert y["labels"].dtype == torch.int64
    assert y["labels"].shape == (16,)
    assert (
        y["labels"] == tensor([6, 1, 1, 1, 1, 1, 1, 1, 31, 31, 1, 3, 31, 1, 31, 31])
    ).all()
    # Fixed: this dtype assertion was accidentally duplicated; asserted once.
    assert y["boxes"].dtype == torch.float32
    assert y["boxes"].shape == (16, 4)
    assert y["masks"].dtype == torch.uint8
    assert y["masks"].shape == (16, 427, 640)
def test_build_training_sample_fasterrcnn(data_sample):
    # Faster R-CNN training sample: same contract as the Mask R-CNN test above,
    # except the target dict carries no "masks" entry (box-only detector).
    x, y = MantisFasterRCNN.build_training_sample(**data_sample)
    # Image tensor: float32, channels-first, 427x640.
    assert x.dtype == torch.float32
    assert x.shape == (3, 427, 640)
    # Target dict carries ids, labels and boxes only.
    assert isinstance(y, dict)
    assert set(y.keys()) == {"image_id", "labels", "boxes"}
    assert y["image_id"].dtype == torch.int64
    assert y["image_id"] == 0
    assert y["labels"].dtype == torch.int64
    assert y["labels"].shape == (16,)
    assert (
        y["labels"] == tensor([6, 1, 1, 1, 1, 1, 1, 1, 31, 31, 1, 3, 31, 1, 31, 31])
    ).all()
    assert y["boxes"].dtype == torch.float32
    assert y["boxes"].shape == (16, 4)
def test_rcnn_empty_training_sample(data_sample):
    # A record with no annotations must still produce a valid training sample:
    # the implementation substitutes a single placeholder box with the
    # background label (0) instead of returning empty tensors.
    data_sample["bboxes"] = []
    data_sample["labels"] = []
    x, y = MantisMaskRCNN.build_training_sample(**data_sample)
    # _fake_box = [0, 1, 2, 3] is the module-level placeholder defined above.
    assert (y["boxes"] == tensor([_fake_box], dtype=torch.float)).all()
    assert y["labels"] == tensor([0])
|
alixedi/data-hub-api-cd-poc
|
datahub/omis/order/views.py
|
<gh_stars>0
from django.http import Http404
from oauth2_provider.contrib.rest_framework.permissions import IsAuthenticatedOrTokenHasScope
from rest_framework import status
from rest_framework.decorators import action
from rest_framework.response import Response
from rest_framework.views import APIView
from datahub.core.schemas import StubSchema
from datahub.core.viewsets import CoreViewSet
from datahub.oauth.scopes import Scope
from datahub.omis.order.models import Order, OrderAssignee, OrderSubscriber
from datahub.omis.order.serializers import (
CancelOrderSerializer,
CompleteOrderSerializer,
OrderAssigneeSerializer,
OrderSerializer,
PublicOrderSerializer,
SubscribedAdviserSerializer,
)
class OrderViewSet(CoreViewSet):
    """Order ViewSet."""

    required_scopes = (Scope.internal_front_end,)
    serializer_class = OrderSerializer
    queryset = Order.objects.select_related(
        'company',
        'contact',
        'primary_market',
    )

    def _apply_transition(self, serializer_class, data, transition_name):
        """Validate `data` with `serializer_class` and run the named transition.

        Shared implementation for the `complete` and `cancel` actions, which
        previously duplicated the validate/transition/respond sequence.

        :param serializer_class: transition serializer (e.g. CompleteOrderSerializer).
        :param data: payload passed to the serializer for validation.
        :param transition_name: name of the serializer method performing the
            transition; it must return the updated order instance.
        :returns: 200 Response with the serialised updated order.
        """
        instance = self.get_object()
        serializer = serializer_class(
            instance,
            data=data,
            context=self.get_serializer_context(),
        )
        serializer.is_valid(raise_exception=True)
        instance = getattr(serializer, transition_name)()
        return Response(
            self.get_serializer(instance=instance).data,
            status=status.HTTP_200_OK,
        )

    @action(methods=['post'], detail=True, schema=StubSchema())
    def complete(self, request, *args, **kwargs):
        """Complete an order."""
        # The complete transition takes no payload.
        return self._apply_transition(CompleteOrderSerializer, {}, 'complete')

    @action(methods=['post'], detail=True, schema=StubSchema())
    def cancel(self, request, *args, **kwargs):
        """Cancel an order."""
        # The cancel transition validates the request body (e.g. cancellation reason).
        return self._apply_transition(CancelOrderSerializer, request.data, 'cancel')

    def get_serializer_context(self):
        """Extra context provided to the serializer class."""
        return {
            **super().get_serializer_context(),
            # self.request may be None when the serializer is used outside a request cycle.
            'current_user': self.request.user if self.request else None,
        }
class PublicOrderViewSet(CoreViewSet):
    """ViewSet for public facing order endpoint."""

    # Orders are looked up by their public token rather than primary key,
    # so internal ids are not exposed to external clients.
    lookup_field = 'public_token'
    permission_classes = (IsAuthenticatedOrTokenHasScope,)
    required_scopes = (Scope.public_omis_front_end,)
    serializer_class = PublicOrderSerializer
    # Only publicly accessible orders are exposed; reopened orders are included.
    queryset = Order.objects.publicly_accessible(
        include_reopened=True,
    ).select_related(
        'company',
        'contact',
    )
class SubscriberListView(APIView):
    """API view exposing the advisers subscribed to an order."""

    # queryset is only inspected by DjangoCrudPermissions to resolve the model.
    queryset = OrderSubscriber.objects.all()
    required_scopes = (Scope.internal_front_end,)

    def get_order(self, order_pk):
        """Return the order owning the subscriber list, or raise Http404."""
        try:
            return Order.objects.get(pk=order_pk)
        except Order.DoesNotExist:
            raise Http404

    def get_list_response(self, order):
        """Build a Response with the serialised subscriber list of the order."""
        subscriptions = order.subscribers.select_related('adviser').all()
        advisers = [subscription.adviser for subscription in subscriptions]
        return Response(SubscribedAdviserSerializer(advisers, many=True).data)

    def get(self, request, order_pk, format=None):
        """List the advisers subscribed to the order."""
        return self.get_list_response(self.get_order(order_pk))

    def put(self, request, order_pk, format=None):
        """Replace the subscriber list.

        Advisers are added/kept/removed so the stored list matches the one
        passed in the request body.
        """
        order = self.get_order(order_pk)
        serializer = SubscribedAdviserSerializer(
            data=request.data,
            many=True,
            context={
                'order': order,
                'modified_by': self.request.user,
            },
        )
        serializer.is_valid(raise_exception=True)
        serializer.save()
        return self.get_list_response(order)
class AssigneeView(APIView):
    """API View for advisers assigned to an order."""

    # queryset is used only by DjangoCrudPermissions (to get the model for the view)
    queryset = OrderAssignee.objects.all()
    FORCE_DELETE_PARAM = 'force-delete'
    required_scopes = (Scope.internal_front_end,)

    def get_order(self, order_pk):
        """
        Return the related order.

        :raises Http404: if no order with the given pk exists.
        """
        try:
            return Order.objects.get(pk=order_pk)
        except Order.DoesNotExist:
            raise Http404

    def get_list_response(self, order):
        """Return a Response with the serialised list of assigned advisers."""
        assignees = order.assignees.select_related('adviser').all()
        return Response(OrderAssigneeSerializer(assignees, many=True).data)

    def get(self, request, order_pk, format=None):
        """Return a serialised list of advisers assigned to the order."""
        return self.get_list_response(self.get_order(order_pk))

    def patch(self, request, order_pk, format=None):
        """
        Update the list of assignees.

        Advisers are added/kept/updated/deleted so the stored list matches the
        list passed in.
        """
        order = self.get_order(order_pk)
        raw_force_delete = request.query_params.get(self.FORCE_DELETE_PARAM, '0')
        incoming = OrderAssigneeSerializer(
            many=True,
            data=request.data,
            context={
                'order': order,
                'modified_by': self.request.user,
                'force_delete': raw_force_delete.strip() == '1',
            },
        )
        incoming.is_valid(raise_exception=True)
        incoming.save()
        return self.get_list_response(order)
class BaseNestedOrderViewSet(CoreViewSet):
    """
    Base class for nested viewsets with order as parent.

    E.g. /order/<order-id>/<child>
    """

    serializer_class = None

    # How the parent order is resolved from the URL kwargs.
    order_lookup_field = 'pk'
    order_lookup_url_kwarg = 'order_pk'
    order_queryset = Order.objects

    def get_order(self):
        """
        :returns: the main order from url kwargs (or None if a matching order
            is not found).
        """
        try:
            return self.order_queryset.get(
                **{self.order_lookup_field: self.kwargs[self.order_lookup_url_kwarg]},
            )
        except (Order.DoesNotExist, KeyError):
            # KeyError: self.kwargs may be empty (e.g. when DRF's built-in
            # docs feature inspects the view outside a real request).
            return None

    def get_order_or_404(self):
        """
        :returns: the main order from url kwargs.
        :raises Http404: if the order doesn't exist
        """
        order = self.get_order()
        if not order:
            raise Http404('The specified order does not exist.')
        return order

    def initial(self, request, *args, **kwargs):
        """
        Makes sure that the order_pk in the URL path refers to an existent order.

        :raises Http404: if a matching order cannot be found
        """
        super().initial(request, *args, **kwargs)
        self.get_order_or_404()

    def get_serializer_context(self):
        """
        Extra context provided to the serializer class.

        Note: The DRF built-in docs feature will call this function with an empty dict in
        self.kwargs. The function should not fail in this case.
        """
        return {
            **super().get_serializer_context(),
            'order': self.get_order(),
            'current_user': self.request.user if self.request else None,
        }
|
hanya/aobook-haiku
|
src/style.h
|
<filename>src/style.h
/*$
Copyright (c) 2014-2017, Azel
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the <organization> nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
$*/
/********************************
 * Style data
 ********************************/

#ifndef STYLE_H
#define STYLE_H

#include "mStr.h"
#include "aoStyle.h"

typedef struct _aoFont aoFont;

/* A display style: the raw AO_STYLE settings plus the resources
 * (fonts, background image) resolved from them. */
typedef struct _StyleData
{
    AO_STYLE b;

    /* configured names and file paths */
    mStr strName,
        strFontText,
        strFontBold,
        strFontRuby,
        strFontInfo,
        strBkgndFile;

    mRgbCol colBkgnd;    //background color
    int bkgnd_imgtype;

    /* font objects created from the settings above */
    aoFont *fontText,
        *fontBold,
        *fontRuby,
        *fontHalf,
        *fontInfo;

    mImageBuf *imgBkgnd;
}StyleData;

/* allocation / release */
void StyleFree(StyleData *p);
void StyleFreeBuf(StyleData *p);
void StyleFreeData(StyleData *p);
void StyleFreeFont(StyleData *p);
void StyleFreeAO(StyleData *p);
StyleData *StyleAlloc();

/* configuration load/save and initialisation */
void StyleGetFilePath(mStr *str,const char *name);
void StyleLoadConf(StyleData *p,const char *name);
void StyleSaveConf(StyleData *p);
void StyleInit(StyleData *p);
void StyleSetDefault(StyleData *p);
void StyleCopyData(StyleData *dst,StyleData *src);

/* NOTE(review): only the declarations below are wrapped in extern "C";
 * the ones above would be name-mangled if called from C++. Presumably
 * only these four are referenced from C++ code - confirm. */
#ifdef __cplusplus
extern "C" {
#endif

mBool StyleChange(StyleData *pd,StyleData *ps);
mBool StyleChangeByName(const char *name);
void StyleCreateFont(StyleData *p);
void StyleSetUTF32Text(uint32_t **ppbuf,const char *text);

#ifdef __cplusplus
}
#endif

#endif
|
zealoussnow/chromium
|
third_party/android_crazy_linker/src/src/crazy_linker_elf_symbols.cpp
|
<filename>third_party/android_crazy_linker/src/src/crazy_linker_elf_symbols.cpp
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "crazy_linker_elf_symbols.h"
#include "crazy_linker_debug.h"
#include "crazy_linker_elf_view.h"
namespace crazy {
// Builds the symbol-table wrapper from pre-located table addresses.
// |dt_elf_hash| / |dt_gnu_hash| are the addresses of the DT_HASH /
// DT_GNU_HASH tables; either may be 0, leaving that hash table
// uninitialized (IsValid() requires at least one to be set).
ElfSymbols::ElfSymbols(const ELF::Sym* symbol_table,
                       const char* string_table,
                       uintptr_t dt_elf_hash,
                       uintptr_t dt_gnu_hash)
    : symbol_table_(symbol_table), string_table_(string_table) {
  if (dt_elf_hash)
    elf_hash_.Init(dt_elf_hash);
  if (dt_gnu_hash)
    gnu_hash_.Init(dt_gnu_hash);
}
// A symbol table is usable once both raw tables are known and at least
// one hash table (ELF or GNU) has been initialized.
bool ElfSymbols::IsValid() const {
  if (!symbol_table_ || !string_table_)
    return false;
  return gnu_hash_.IsValid() || elf_hash_.IsValid();
}
// Walks |view|'s dynamic table to locate the symbol table, string table
// and hash tables, then reports whether the result is usable.
bool ElfSymbols::Init(const ElfView* view) {
  LOG("Parsing dynamic table");
  ElfView::DynamicIterator dyn(view);
  for (; dyn.HasNext(); dyn.GetNext()) {
    uintptr_t dyn_addr = dyn.GetAddress(view->load_bias());
    switch (dyn.GetTag()) {
      case DT_HASH:
        // Cast to void*: %p requires a pointer argument, and passing a
        // uintptr_t through printf-style varargs is undefined behavior.
        LOG("  DT_HASH addr=%p", reinterpret_cast<void*>(dyn_addr));
        elf_hash_.Init(dyn_addr);
        break;
      case DT_GNU_HASH:
        LOG("  DT_GNU_HASH addr=%p", reinterpret_cast<void*>(dyn_addr));
        gnu_hash_.Init(dyn_addr);
        break;
      case DT_STRTAB:
        LOG("  DT_STRTAB addr=%p", reinterpret_cast<void*>(dyn_addr));
        string_table_ = reinterpret_cast<const char*>(dyn_addr);
        break;
      case DT_SYMTAB:
        LOG("  DT_SYMTAB addr=%p", reinterpret_cast<void*>(dyn_addr));
        symbol_table_ = reinterpret_cast<const ELF::Sym*>(dyn_addr);
        break;
      default:
        // All other dynamic tags are irrelevant here.
        break;
    }
  }
  return IsValid();
}
// Returns the defined dynamic symbol whose [st_value, st_value+st_size)
// range covers |address| (a runtime address; |load_bias| is subtracted
// to recover the link-time ELF address), or nullptr if none does.
const ELF::Sym* ElfSymbols::LookupByAddress(void* address,
                                            size_t load_bias) const {
  const ELF::Addr target =
      reinterpret_cast<ELF::Addr>(address) - static_cast<ELF::Addr>(load_bias);
  for (const ELF::Sym& sym : GetDynSymbols()) {
    if (sym.st_shndx == SHN_UNDEF)
      continue;  // skip undefined (imported) symbols
    if (target < sym.st_value || target >= sym.st_value + sym.st_size)
      continue;
    return &sym;
  }
  return nullptr;
}
// Finds the defined symbol nearest to |address| (a runtime address;
// |load_bias| is subtracted to recover the link-time ELF address). On
// success, returns true and fills |sym_name|, |sym_addr| (re-biased) and
// |sym_size|; returns false if the symbol table holds no defined symbols.
bool ElfSymbols::LookupNearestByAddress(void* address,
                                        size_t load_bias,
                                        const char** sym_name,
                                        void** sym_addr,
                                        size_t* sym_size) const {
  ELF::Addr elf_addr =
      reinterpret_cast<ELF::Addr>(address) - static_cast<ELF::Addr>(load_bias);
  const ELF::Sym* nearest_sym = nullptr;
  size_t nearest_diff = ~size_t(0);  // start at the maximum distance
  for (const ELF::Sym& sym : GetDynSymbols()) {
    if (sym.st_shndx == SHN_UNDEF)
      continue;  // skip undefined (imported) symbols
    if (elf_addr >= sym.st_value && elf_addr < sym.st_value + sym.st_size) {
      // This is a perfect match.
      nearest_sym = &sym;
      break;
    }
    // Otherwise, compute distance.
    size_t diff;
    if (elf_addr < sym.st_value)
      diff = sym.st_value - elf_addr;
    else
      // NOTE(review): for addresses past the symbol, the distance is
      // measured from the symbol's end (st_value + st_size).
      diff = elf_addr - sym.st_value - sym.st_size;
    if (diff < nearest_diff) {
      nearest_sym = &sym;
      nearest_diff = diff;
    }
  }
  if (!nearest_sym)
    return false;
  *sym_name = string_table_ + nearest_sym->st_name;
  *sym_addr = reinterpret_cast<void*>(nearest_sym->st_value + load_bias);
  *sym_size = nearest_sym->st_size;
  return true;
}
// Looks up |symbol_name| through whichever hash table is available
// (GNU hash preferred). Only defined global or weak symbols are
// returned; anything else yields nullptr.
const ELF::Sym* ElfSymbols::LookupByName(const char* symbol_name) const {
  const ELF::Sym* sym;
  if (gnu_hash_.IsValid())
    sym = gnu_hash_.LookupByName(symbol_name, symbol_table_, string_table_);
  else
    sym = elf_hash_.LookupByName(symbol_name, symbol_table_, string_table_);
  // Ignore undefined symbols or those that are not global or weak definitions.
  if (!sym || sym->st_shndx == SHN_UNDEF)
    return nullptr;
  const uint8_t binding = ELF_ST_BIND(sym->st_info);
  return (binding == STB_GLOBAL || binding == STB_WEAK) ? sym : nullptr;
}
// Returns an iterable view over the dynamic symbol array. Offset and
// count come from whichever hash table is in use, preferring the GNU
// hash table (matching LookupByName).
ElfSymbols::DynSymbols ElfSymbols::GetDynSymbols() const {
  if (gnu_hash_.IsValid()) {
    return {symbol_table_, gnu_hash_.dyn_symbols_offset(),
            gnu_hash_.dyn_symbols_count()};
  } else {
    return {symbol_table_, elf_hash_.dyn_symbols_offset(),
            elf_hash_.dyn_symbols_count()};
  }
}
} // namespace crazy
|
harshit9715/serverless-stack
|
packages/cli/test/start-demo/src/sns/sub-folder/sns.js
|
exports.handler = async function (event) {
console.log(
`Logging from inside the SNS Lambda with event message: "${event.Records[0].Sns.Message}"`
);
return { status: true };
};
|
GregDevProjects/carbon-header-fix
|
node_modules/@carbon/icons-react/es/chevron--left/20.js
|
import { ChevronLeft20 } from '..';
export default ChevronLeft20;
|
kirikcoin/IPS
|
src/main/java/mobi/eyeline/ips/web/IPSViewHandler.java
|
package mobi.eyeline.ips.web;
import javax.faces.application.ViewHandler;
import javax.faces.application.ViewHandlerWrapper;
import javax.faces.context.FacesContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import java.util.Locale;
/**
 * View handler that resolves the page locale from a per-user session
 * attribute (set elsewhere under {@link #LOCALE_PARAMETER}), falling back
 * to the default JSF locale calculation when no preference is stored.
 */
public class IPSViewHandler extends ViewHandlerWrapper {

    /** Session attribute name holding the user's chosen {@link Locale}. */
    public final static String LOCALE_PARAMETER = "ips_user_locale";

    private final ViewHandler delegate;

    public IPSViewHandler(ViewHandler delegate) {
        this.delegate = delegate;
    }

    @Override
    public ViewHandler getWrapped() {
        return delegate;
    }

    @Override
    public Locale calculateLocale(FacesContext context) {
        // Use the context passed in instead of re-fetching
        // FacesContext.getCurrentInstance(): both refer to the current
        // request, and using the parameter keeps the method consistent
        // and testable.
        final HttpSession session =
                ((HttpServletRequest) context.getExternalContext().getRequest()).getSession();
        final Locale locale = (Locale) session.getAttribute(LOCALE_PARAMETER);
        return (locale != null) ? locale : super.calculateLocale(context);
    }
}
|
ScalablyTyped/SlinkyTyped
|
v/vso-node-api/src/main/scala/typingsSlinky/vsoNodeApi/anon/ActionRequired.scala
|
package typingsSlinky.vsoNodeApi.anon
import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}
/** Auto-generated ScalablyTyped facade for an anonymous type from the
  * vso-node-api TypeScript typings. Do not edit by hand; fields mirror
  * the numeric properties of the original type.
  */
@js.native
trait ActionRequired extends StObject {
  
  var actionRequired: scala.Double = js.native
  
  var actionResolved: scala.Double = js.native
  
  var disabled: scala.Double = js.native
  
  var enabled: scala.Double = js.native
  
  var installed: scala.Double = js.native
  
  var uninstalled: scala.Double = js.native
  
  var versionUpdated: scala.Double = js.native
}
/** Generated companion: literal construction and copy-style setters for
  * the [[ActionRequired]] facade.
  */
object ActionRequired {
  
  /** Builds a JS object literal with every field set. */
  @scala.inline
  def apply(
    actionRequired: scala.Double,
    actionResolved: scala.Double,
    disabled: scala.Double,
    enabled: scala.Double,
    installed: scala.Double,
    uninstalled: scala.Double,
    versionUpdated: scala.Double
  ): ActionRequired = {
    val __obj = js.Dynamic.literal(actionRequired = actionRequired.asInstanceOf[js.Any], actionResolved = actionResolved.asInstanceOf[js.Any], disabled = disabled.asInstanceOf[js.Any], enabled = enabled.asInstanceOf[js.Any], installed = installed.asInstanceOf[js.Any], uninstalled = uninstalled.asInstanceOf[js.Any], versionUpdated = versionUpdated.asInstanceOf[js.Any])
    __obj.asInstanceOf[ActionRequired]
  }
  
  /** Mutating setters, generated for ergonomic updates from Scala.js. */
  @scala.inline
  implicit class ActionRequiredMutableBuilder[Self <: ActionRequired] (val x: Self) extends AnyVal {
    
    @scala.inline
    def setActionRequired(value: scala.Double): Self = StObject.set(x, "actionRequired", value.asInstanceOf[js.Any])
    
    @scala.inline
    def setActionResolved(value: scala.Double): Self = StObject.set(x, "actionResolved", value.asInstanceOf[js.Any])
    
    @scala.inline
    def setDisabled(value: scala.Double): Self = StObject.set(x, "disabled", value.asInstanceOf[js.Any])
    
    @scala.inline
    def setEnabled(value: scala.Double): Self = StObject.set(x, "enabled", value.asInstanceOf[js.Any])
    
    @scala.inline
    def setInstalled(value: scala.Double): Self = StObject.set(x, "installed", value.asInstanceOf[js.Any])
    
    @scala.inline
    def setUninstalled(value: scala.Double): Self = StObject.set(x, "uninstalled", value.asInstanceOf[js.Any])
    
    @scala.inline
    def setVersionUpdated(value: scala.Double): Self = StObject.set(x, "versionUpdated", value.asInstanceOf[js.Any])
  }
}
|
Mrjoy832/qit.cloud
|
website/src/models/AudioPlayerModel.js
|
import mirror, { actions } from 'mirrorx'
import { arrayMove } from 'react-sortable-hoc'
import API from '../adapters/API'
import { nextPlaybackRate, setPlaybackRate } from '../helpers'
// mirrorx model for the audio player: tracks the current episode, the
// playback queue and the playback speed.
export default mirror.model({
  name: 'player',
  initialState: {
    nowPlaying: {},   // episode currently loaded in the player
    queue: [],        // episodes queued for playback
    playbackrate: 1   // playback speed multiplier
  },
  reducers: {
    // Start playing an episode and persist it so it survives reloads.
    play (state, episode) {
      window.localStorage.setItem('nowPlaying', JSON.stringify(episode))
      return { ...state, nowPlaying: episode }
    },
    addToQueue (state, episode) {
      return { ...state, queue: [...state.queue, episode] }
    },
    // NOTE(review): removal matches on episode.id while playNext /
    // playNextEpisode match on audioUrl - confirm both uniquely identify
    // an episode.
    removeFromQueue (state, episodeToRemove) {
      const queue = state.queue.filter(
        episode => episode.id !== episodeToRemove.id
      )
      return { ...state, queue }
    },
    // Reorder the queue after a drag-and-drop (react-sortable-hoc indices).
    resortQueue (state, data) {
      const queue = arrayMove(state.queue, data.oldIndex, data.newIndex)
      return { ...state, queue }
    },
    // Replace the queue wholesale (e.g. with episodes fetched from the API).
    hydrateQueue (state, episodes = []) {
      return { ...state, queue: episodes }
    },
    // Drop the current episode from the queue and promote the next one;
    // falls back to an empty object when the queue is exhausted.
    playNextEpisode (state) {
      const currentlyPlaying = state.nowPlaying
      const queue = state.queue
        .slice()
        .filter(episode => episode.audioUrl !== currentlyPlaying.audioUrl)
      const nowPlaying = queue.shift() || {}
      window.localStorage.setItem('nowPlaying', JSON.stringify(nowPlaying))
      return { ...state, nowPlaying, queue }
    },
    // Move an episode to the front of the queue, removing any duplicate.
    playNext (state, episode) {
      const newQueue = state.queue.filter(
        item => item.audioUrl !== episode.audioUrl
      )
      return { ...state, queue: [episode, ...newQueue] }
    },
    // Cycle to the next playback speed and apply it to the audio element.
    nextPlaybackRate (state) {
      const playbackrate = nextPlaybackRate(state.playbackrate)
      setPlaybackRate(playbackrate)
      return { ...state, playbackrate }
    },
    updateWidth (state, width) {
      return { ...state, containerWidth: width }
    }
  },
  effects: {
    // Fetch the signed-in user's saved episodes and hydrate both the
    // player queue and the user model; failures are reported, not thrown.
    async getRemoteEpisodes () {
      try {
        const { episodes, username } = await API.getUser()
        actions.player.hydrateQueue(episodes)
        actions.user.signin(username)
      } catch (e) {
        console.log(`API Error: ${e}`)
        window.errorReporting.notify({
          error: `API error: ${e}`
        })
      }
    }
  }
})
|
codehuanglei/-
|
leetcode/Array/46.cpp
|
<reponame>codehuanglei/-
// LeetCode 46: return all permutations of a list of distinct integers.
class Solution {
public:
    vector<vector<int>> res;

    // Depth-first backtracking: |vec| holds the permutation built so far
    // and |used[i]| marks whether nums[i] is already in it.
    // |used| is taken by reference: the loop below undoes its own marks
    // (used[i] = false) before returning, so the per-call copy the
    // original made was unnecessary overhead.
    void backTrack(vector<int>& nums, vector<int>& vec, vector<bool>& used) {
        if (nums.size() == vec.size()) {
            res.push_back(vec);
            return;
        }
        for (size_t i = 0; i < nums.size(); i++) {  // size_t: avoid signed/unsigned mismatch
            if (used[i] == false) {
                used[i] = true;
                vec.push_back(nums[i]);
                backTrack(nums, vec, used);
                vec.pop_back();     // undo the choice before trying the next index
                used[i] = false;
            }
        }
    }

    // Returns every permutation of |nums| (order: lexicographic by index).
    vector<vector<int>> permute(vector<int>& nums) {
        vector<int> vec;
        vector<bool> used(nums.size(), false);
        backTrack(nums, vec, used);
        return res;
    }
};
|
geekreflex/budget-tracker
|
client/src/redux/userSlice.js
|
import { createAsyncThunk, createSlice } from '@reduxjs/toolkit';
import axios from 'axios';
import { token } from '../helper/token';
import { BASE_URL } from '../helper/baseUrl';
// Shape of the auth/user slice state.
const initialState = {
  user: {},        // profile data returned by the API
  token: '',       // bearer token mirrored from localStorage
  status: 'idle',  // request lifecycle: 'idle' | 'loading'
  error: null,     // last error message from a rejected request
  isAuth: false,   // whether the user is authenticated
};
// Register a new user; resolves with the API response body, or rejects
// with the server's error message (falling back to the axios message).
export const registerUserAsync = createAsyncThunk(
  'user/registerUserAsync',
  async (payload, thunkAPI) => {
    try {
      const response = await axios.post(`${BASE_URL}/auth/register`, payload, {
        headers: {
          'Content-Type': 'application/json',
        },
      });
      return response.data;
    } catch (error) {
      const apiMessage =
        error.response && error.response.data.message
          ? error.response.data.message
          : error.message;
      return thunkAPI.rejectWithValue(apiMessage);
    }
  }
);
// Log an existing user in.
// Fix: the action type previously duplicated registerUserAsync's type
// ('user/registerUserAsync'), so both thunks dispatched identical action
// types and their extraReducers keys collided (later entries silently
// overwrote earlier ones).
export const loginUserAsync = createAsyncThunk(
  'user/loginUserAsync',
  async (payload, thunkAPI) => {
    try {
      const config = {
        headers: {
          'Content-Type': 'application/json',
        },
      };
      const { data } = await axios.post(
        `${BASE_URL}/auth/login`,
        payload,
        config
      );
      return data;
    } catch (error) {
      // Prefer the server-provided message; fall back to the axios error.
      return thunkAPI.rejectWithValue(
        error.response && error.response.data.message
          ? error.response.data.message
          : error.message
      );
    }
  }
);
// Fetch the authenticated user's profile using the stored bearer token;
// rejects with the server's error message when available.
export const getUserData = createAsyncThunk(
  'user/getUserData',
  async (payload, thunkAPI) => {
    try {
      const response = await axios.get(`${BASE_URL}/auth/user`, {
        headers: {
          'Content-Type': 'application/json',
          Authorization: `Bearer ${token}`,
        },
      });
      return response.data;
    } catch (error) {
      const apiMessage =
        error.response && error.response.data.message
          ? error.response.data.message
          : error.message;
      return thunkAPI.rejectWithValue(apiMessage);
    }
  }
);
// Auth/user slice: synchronous reducers hydrate state from localStorage;
// extraReducers track the async thunks' lifecycle.
export const userSlice = createSlice({
  name: 'user',
  initialState,
  reducers: {
    // Hydrate the user profile from localStorage on app start.
    getUserFromStorage(state) {
      const user = localStorage.getItem('userData')
        ? JSON.parse(localStorage.getItem('userData'))
        : null;
      if (user) {
        state.user = user.user;
      }
    },
    // Hydrate the auth token from localStorage on app start.
    getTokenFromStorage(state) {
      const authToken = localStorage.getItem('authToken')
        ? JSON.parse(localStorage.getItem('authToken'))
        : null;
      if (authToken) {
        state.isAuth = true;
        state.token = authToken;
      }
    },
    // Clear persisted credentials and return to the landing page.
    logoutUser() {
      localStorage.removeItem('userData');
      localStorage.removeItem('authToken');
      window.location.href = '/';
    },
  },
  extraReducers: {
    // Register
    [registerUserAsync.pending]: (state) => {
      state.status = 'loading';
    },
    [registerUserAsync.fulfilled]: (state, action) => {
      state.status = 'idle';
      state.isAuth = true;
      localStorage.setItem('authToken', JSON.stringify(action.payload.token));
      window.location.href = '/dashboard';
    },
    [registerUserAsync.rejected]: (state, action) => {
      state.status = 'idle';
      state.error = action.payload;
    },
    // Login
    [loginUserAsync.pending]: (state) => {
      state.status = 'loading';
    },
    [loginUserAsync.fulfilled]: (state, action) => {
      state.status = 'idle';
      state.isAuth = true;
      localStorage.setItem('authToken', JSON.stringify(action.payload.token));
      window.location.href = '/dashboard';
    },
    // Previously missing: without this, a failed login left status stuck
    // at 'loading' and never surfaced the error.
    [loginUserAsync.rejected]: (state, action) => {
      state.status = 'idle';
      state.error = action.payload;
    },
    // Get user data
    [getUserData.pending]: (state) => {
      state.status = 'loading';
    },
    [getUserData.fulfilled]: (state, action) => {
      state.status = 'idle';
      state.user = action.payload.user;
      localStorage.setItem('userData', JSON.stringify(action.payload));
    },
    // Previously missing: reset the loading flag and record the error if
    // fetching the user fails.
    [getUserData.rejected]: (state, action) => {
      state.status = 'idle';
      state.error = action.payload;
    },
  },
});
export const { getUserFromStorage, getTokenFromStorage, logoutUser } =
userSlice.actions;
export default userSlice.reducer;
|
LiaoyuanSHEN/NoTest
|
src/main/java/com/github/zszlly/recorder/asm/NoTestClassVisitor.java
|
<gh_stars>0
package com.github.zszlly.recorder.asm;
import com.github.zszlly.mark.NoTestMark;
import org.objectweb.asm.*;
import java.util.*;
import static org.objectweb.asm.Opcodes.ACC_STATIC;
/**
 * ASM class visitor that selects methods for NoTest instrumentation.
 *
 * <p>For every visited method whose name and descriptor appear in
 * {@code methods}, a {@link NoTestMethodVisitor} is returned so the method
 * body gets instrumented; all other methods pass through unchanged. Field
 * declarations are collected first so the method visitor can describe the
 * class's instance state.
 */
public class NoTestClassVisitor extends ClassVisitor {

    /**
     * Set when the visited class already implements the NoTest marker
     * interface, in which case instrumentation is skipped.
     * NOTE(review): the detection code is commented out in {@link #visit},
     * so this flag is currently always {@code false}.
     */
    private boolean isNoTest = false;

    /** Method name -> descriptors of the methods to instrument. */
    private final Map<String, Set<String>> methods;

    /** Internal (JVM) name of the class being visited. */
    private String classInternalName;

    // ArrayList instead of LinkedList: the list is append-only and then
    // iterated, which ArrayList handles with less overhead.
    private final List<FieldDescription> fieldDescriptions = new ArrayList<>();

    public NoTestClassVisitor(int api, ClassVisitor cv, Map<String, Set<String>> methods) {
        super(api, cv);
        this.methods = methods;
    }

    @Override
    public void visit(int version, int access, String name, String signature, String superName, String[] interfaces) {
        // An earlier revision added the NoTestMark interface to the visited
        // class; kept commented out for reference.
//        for (String interf : interfaces) {
//            if (NoTestMark.INTERNAL_NAME.equals(interf)) {
//                isNoTest = true;
//                break;
//            }
//        }
//        if (isNoTest) {
//            super.visit(version, access, name, signature, superName, interfaces);
//            return;
//        }
//        String[] newInterfaces = Arrays.copyOf(interfaces, interfaces.length + 1);
//        newInterfaces[interfaces.length] = NoTestMark.INTERNAL_NAME;
//        super.visit(version, access, name, signature, superName, newInterfaces);
        super.visit(version, access, name, signature, superName, interfaces);
        classInternalName = name;
    }

    @Override
    public FieldVisitor visitField(int access, String name, String desc, String signature, Object value) {
        fieldDescriptions.add(new FieldDescription(name, classInternalName, desc));
        return super.visitField(access, name, desc, signature, value);
    }

    @Override
    public MethodVisitor visitMethod(int access, String name, String desc, String signature, String[] exceptions) {
        MethodVisitor mv = super.visitMethod(access, name, desc, signature, exceptions);
        if (isNoTest) {
            return mv;
        }
        Set<String> methodDescriptions = methods.get(name);
        if (methodDescriptions != null && methodDescriptions.contains(desc)) {
            return new NoTestMethodVisitor(Opcodes.ASM5, access, desc, mv,
                    new MethodDescription(name, classInternalName, desc),
                    fieldDescriptions, (access & ACC_STATIC) != 0);
        }
        return mv;
    }
}
|
CMPUT301F18T06/ManTracker
|
app/src/test/java/project/ece301/mantracker/MedicalProblemTest.java
|
<filename>app/src/test/java/project/ece301/mantracker/MedicalProblemTest.java
package project.ece301.mantracker;
/**
 * Unit tests for the MedicalProblem model.
 *
 * NOTE(review): every test in this class is commented out, leaving the
 * model without coverage - re-enable the tests (restoring the needed
 * JUnit imports) or delete the class once the model's API stabilises.
 */
public class MedicalProblemTest {
//    @Test
//    public void testsetDescription() {
//        String testDescription = "Add this new description";
//        MedicalProblem problem = new MedicalProblem("",
//                "noTitle", "");
//        problem.setDescription("Add this new description");
//        assertEquals(testDescription, problem.getDescription());
//    }
//    @Test
//    public void testgetDescription() {
//        String testDescription = "This string should match";
//        MedicalProblem problem = new MedicalProblem("This string should match",
//                "noTitle", "");
//        String returnedDescription = problem.getDescription();
//        assertEquals(testDescription, returnedDescription);
//    }
//    @Test
//    public void testsetDate() {
//        String testDate = "2018-06-15";
//        MedicalProblem problem = new MedicalProblem("", "",
//                "");
//        problem.setDate("2018-06-15");
//        String returnedDate = problem.getDate();
//        assertEquals(testDate, returnedDate);
//
//    }
//    @Test
//    public void testgetDate() {
//        String testDate = "2018-06-15";
//        MedicalProblem problem = new MedicalProblem("", "",
//                "2018-06-15");
//        String returnedDate = problem.getDate();
//        assertEquals(testDate, returnedDate);
//    }
//    @Test
//    public void testsetTitle() {
//        String testTitle = "Add this new title";
//        MedicalProblem problem = new MedicalProblem("no description",
//                "", "");
//        problem.setTitle("Add this new title");
//        assertEquals(testTitle, problem.getTitle());
//    }
//    @Test
//    public void testgetTitle() {
//        String testTitle = "Get this title";
//        MedicalProblem problem = new MedicalProblem("no desciption",
//                "Get this title", "");
//        String returnedTitle = problem.getTitle();
//        assertEquals(testTitle, returnedTitle);
//    }
//    @Test
//    public void testaddRecord() {
//        MedicalProblem problem = new MedicalProblem("",
//                "", "");
//        Record record = new Record();
//        problem.addRecord(record);
//        assertTrue(problem.hasRecord(record));
//    }
//    @Test
//    public void testgetRecord() {
//        MedicalProblem problem = new MedicalProblem("",
//                "", "");
//        Record record = new Record();
//        problem.addRecord(record);
//        assertTrue(problem.hasRecord(record));
//
//        Record returnedRecord = problem.getRecord(0);
//        assertEquals(record, returnedRecord);
//
//    }
//    @Test
//    public void testdeleteRecord() {
//        MedicalProblem problem = new MedicalProblem("",
//                "", "");
//        Record record = new Record();
//        problem.addRecord(record);
//        assertTrue(problem.hasRecord(record));
//
//        problem.deleteRecord(record);
//        assertFalse(problem.hasRecord(record));
//    }
//    @Test
//    public void testhasRecord() {
//        MedicalProblem problem = new MedicalProblem("",
//                "", "");
//        Record record = new Record();
//        problem.addRecord(record);
//        assertTrue(problem.hasRecord(record));
//    }
}
|
mnpas/sponge
|
sponge-jython/src/main/java/org/openksavi/sponge/jython/package-info.java
|
<filename>sponge-jython/src/main/java/org/openksavi/sponge/jython/package-info.java
/**
* Abstract implementations of processors for Jython support.
*/
package org.openksavi.sponge.jython;
|
rhjain100/Card24_Android
|
app/libs/jep-java-3.4-trial/jep-java-3.4-trial/src/com/singularsys/jeptests/system/ExpressionEvalTest.java
|
package com.singularsys.jeptests.system;
import java.io.*;
import org.junit.Assert;
import org.junit.Test;
import com.singularsys.jep.EvaluationException;
import com.singularsys.jep.Jep;
import com.singularsys.jep.ParseException;
import com.singularsys.jep.standard.Complex;
import com.singularsys.jep.standard.FastEvaluator;
import com.singularsys.jep.walkers.PostfixEvaluator;
import com.singularsys.jep.configurableparser.StandardConfigurableParser;
import com.singularsys.jep.reals.RealEvaluator;
/**
* This class is designed for testing the validity of Jep evaluations.
* Expressions from a text file are evaluated with Jep in pairs of two, and
* the results are compared. If they do not match, the two expressions are
* printed to standard output.<p>
* Take for example an input text file containing the two lines
* <pre>1+2
*3.</pre>
* The expressions '1+2' and '3' are evaluated with Jep and the results compared.
*
* @author <NAME>
*/
public class ExpressionEvalTest {
/** Switch for printing expressions while running tests */
static final boolean printExpressions = true;
/** Current line position */
protected int lineCount;
/**
* Creates a new ExpressionEvalTest instance
*/
public ExpressionEvalTest() {
}
/**
* The main method checks the arguments and creates an instance
* and runs a test
*/
public static void main(String args[]) {
String fileName;
// get filename from argument, or use default
if (args!=null && args.length>0) {
fileName = args[0];
} else {
fileName = "JEPTestExpressions.txt";
println("Using default input file: " + fileName);
}
// Create an instance of this class and analyse the file
ExpressionEvalTest jt = new ExpressionEvalTest();
try {
jt.testStandard();
} catch (Exception e) {
e.printStackTrace();
}
}
@Test
public void testStandard() throws Exception {
Jep jep;
System.out.println("--------------------------------------------------------------");
jep = new Jep();
jep.setImplicitMul(true);
jep.getVariableTable().remove("true");
jep.getVariableTable().remove("false");
jep.addVariable("true", Boolean.TRUE);
jep.addVariable("false", Boolean.FALSE);
lineCount = 0;
testJepInstance(jep, "JEPTestExpressions.txt");
}
@Test
public void testFast() throws Exception {
Jep jep;
System.out.println("--------------------------------------------------------------");
jep = new Jep(new FastEvaluator());
jep.setImplicitMul(true);
jep.getVariableTable().remove("true");
jep.getVariableTable().remove("false");
jep.addVariable("true", Boolean.TRUE);
jep.addVariable("false", Boolean.FALSE);
lineCount = 0;
testJepInstance(jep, "JEPTestExpressions.txt");
}
@Test
public void testConfParser() throws Exception {
Jep jep;
System.out.println("--------------------------------------------------------------");
jep = new Jep(new StandardConfigurableParser());
jep.setImplicitMul(true);
jep.getVariableTable().remove("true");
jep.getVariableTable().remove("false");
jep.addVariable("true", Boolean.TRUE);
jep.addVariable("false", Boolean.FALSE);
lineCount = 0;
testJepInstance(jep, "JEPTestExpressions.txt");
}
@Test
public void testPostfixEvaluator() throws Exception {
Jep jep;
System.out.println("--------------------------------------------------------------");
jep = new Jep(new PostfixEvaluator());
jep.setImplicitMul(true);
jep.getVariableTable().remove("true");
jep.getVariableTable().remove("false");
jep.addVariable("true", Boolean.TRUE);
jep.addVariable("false", Boolean.FALSE);
lineCount = 0;
testJepInstance(jep, "JEPTestExpressions.txt");
}
@Test
public void testReals() throws Exception {
Jep jep;
System.out.println("--------------------------------------------------------------");
jep = new Jep(new RealEvaluator());
jep.setImplicitMul(true);
jep.getVariableTable().remove("true");
jep.getVariableTable().remove("false");
jep.addVariable("true", Boolean.TRUE);
jep.addVariable("false", Boolean.FALSE);
lineCount = 0;
testJepInstance(jep, "JEPTestExpressionsReals.txt");
}
/**
* Loads the file specified in fileName. Evaluates the expressions listed
* in it and compares the expressions with the results.
*/
public void testJepInstance(Jep jep, String fileName) {
BufferedReader reader;
Object v1, v2;
String expression1, expression2;
boolean hasError = false;
// Load the input file
try {
reader = new BufferedReader(new FileReader(fileName));
println("Reading from "+fileName);
} catch (Exception e) {
println("File \""+fileName+"\" not found");
Assert.fail("File \""+fileName+"\" not found");
return;
}
// reset the line count
lineCount = 0;
// cycle through the expressions in pairs of two
println("Evaluating and comparing expressions...");
while (true) {
v1 = null;
v2 = null;
// get values of a pair of two lines
try {
expression1 = getNextLine(reader);
expression2 = getNextLine(reader);
if (expression1 != null && expression2 != null) {
if (printExpressions)
System.out.println("Checking \"" + expression1 + "\" == \"" + expression2 + "\"?");
v1 = parseNextLine(jep, expression1);
v2 = parseNextLine(jep, expression2);
}
} catch (Exception e) {
println("Exception occured: "+e.getMessage());
e.printStackTrace();
hasError = true;
//break;
continue;
}
// expression1 or expression2 is null when end of file is reached
if (expression1 == null || expression2 == null) {
println("Reached end of file.");
break;
}
// compare the results
if (!equal(v1, v2)) {
hasError = true;
print("Line: " + lineCount + ": ");
println("\"" + expression1 + "\" (" + v1.toString() + ") != \""
+ expression2 + "\" (" + v2.toString()+")");
}
}
// Closing remarks
print("\n" + lineCount + " lines processed. ");
if (hasError) {
print("Errors were found.\n\n");
} else {
print("No errors were found.\n\n");
}
// Fail if errors are found
Assert.assertTrue("Errors were found.", !hasError);
}
/**
* Reads the next line from the Reader into a String.
* @throws Exception when IOException occurs, parsing fails, or when
* evaluation fails
*/
private String getNextLine(BufferedReader reader) throws Exception {
String line;
// cycle till a valid line is found
do {
line = reader.readLine(); // returns null on end of file
if (line == null) return null;
lineCount++;
} while (line.length() == 0 || line.trim().charAt(0) == '#');
return line;
}
/**
 * Parses a single line and returns the evaluation of that line.
 *
 * @param jep  the parser/evaluator instance to use
 * @param line the expression to parse and evaluate
 * @return the value of the evaluated line (never null; a null evaluation
 *         result is reported as an error)
 * @throws Exception when parsing fails, or when evaluation fails; the
 *         original ParseException/EvaluationException is preserved as the
 *         cause so the full stack trace is not lost
 */
private Object parseNextLine(Jep jep, String line) throws Exception {
    Object value;
    // parse the expression
    try {
        jep.parse(line);
    } catch (ParseException e) {
        // an error occurred while parsing; chain the cause instead of
        // flattening it to a message string
        throw new Exception("Error while parsing line " + lineCount + ": " + e.getMessage(), e);
    }
    // evaluate the expression
    try {
        value = jep.evaluate();
        if (value == null) throw new EvaluationException("Evaluated as null");
    } catch (EvaluationException e) {
        throw new Exception("Error while evaluating line " + lineCount + ": " + e.getMessage(), e);
    }
    return value;
}
/**
 * Compares param1 and param2 for approximate equality.
 * Complex and Number values are compared numerically with a small
 * tolerance; all other types fall back to Object.equals.
 * Copied from Comparative.java.
 *
 * @param param1 first value (may be null)
 * @param param2 second value (may be null)
 * @return true if param1 and param2 are equal. false otherwise.
 */
private boolean equal(Object param1, Object param2)
{
    double tolerance = 1e-15;
    // Guard against NPE: two nulls are equal, null vs non-null is not.
    if (param1 == null || param2 == null) {
        return param1 == param2;
    }
    if ((param1 instanceof Complex) && (param2 instanceof Complex)) {
        return ((Complex)param1).equals((Complex)param2, tolerance);
    }
    if ((param1 instanceof Complex) && (param2 instanceof Number)) {
        return ((Complex)param1).equals(new Complex((Number) param2), tolerance);
    }
    if ((param2 instanceof Complex) && (param1 instanceof Number)) {
        return ((Complex)param2).equals(new Complex((Number) param1), tolerance);
    }
    if ((param1 instanceof Number) && (param2 instanceof Number)) {
        return Math.abs(((Number)param1).doubleValue()-((Number)param2).doubleValue())
               < tolerance;
    }
    // test any other types here
    return param1.equals(param2);
}
/**
 * Writes the given text to standard output without a trailing newline.
 */
private static void print(String text) {
    System.out.print(text);
}
/**
 * Writes the given text to standard output, followed by a newline.
 */
protected static void println(String text) {
    System.out.println(text);
}
}
|
lailuboy/cockroach
|
pkg/storage/bulk/row_counter.go
|
// Copyright 2017 The Cockroach Authors.
//
// Use of this software is governed by the Business Source License included
// in the file licenses/BSL.txt and at www.mariadb.com/bsl11.
//
// Change Date: 2022-10-01
//
// On the date above, in accordance with the Business Source License, use
// of this software will be governed by the Apache License, Version 2.0,
// included in the file licenses/APL.txt and at
// https://www.apache.org/licenses/LICENSE-2.0
package bulk
import (
"bytes"
"github.com/cockroachdb/cockroach/pkg/keys"
"github.com/cockroachdb/cockroach/pkg/roachpb"
"github.com/cockroachdb/cockroach/pkg/util/encoding"
)
// RowCounter is a helper that counts how many distinct rows appear in the KVs
// that it is shown via `Count`. Note: the `DataSize` field of the BulkOpSummary
// is *not* populated by this and should be set separately.
type RowCounter struct {
	roachpb.BulkOpSummary
	// prev holds the key prefix of the most recently seen row, so repeated
	// keys from the same row are not double-counted.
	prev roachpb.Key
}
// Count examines each key passed to it and increments the running count when it
// sees a key that belongs to a new row.
func (r *RowCounter) Count(key roachpb.Key) error {
	// EnsureSafeSplitKey is usually used to avoid splitting a row across ranges,
	// by returning the row's key prefix.
	// We reuse it here to count "rows" by counting when it changes.
	// Non-SQL keys are returned unchanged or may error -- we ignore them, since
	// non-SQL keys are obviously thus not SQL rows.
	row, err := keys.EnsureSafeSplitKey(key)
	if err != nil || len(key) == len(row) {
		return nil
	}
	// no change in row key prefix => no new row.
	if bytes.Equal(row, r.prev) {
		return nil
	}
	// Remember this row's prefix, reusing the previous slice's storage.
	r.prev = append(r.prev[:0], row...)
	rest, tbl, err := keys.DecodeTablePrefix(row)
	if err != nil {
		return err
	}
	if tbl < keys.MaxReservedDescID {
		// Keys below the reserved-descriptor boundary belong to system tables.
		r.SystemRecords++
	} else {
		// NOTE(review): indexID == 1 is assumed to be the table's primary
		// index (i.e. an actual row); other IDs are secondary-index entries.
		if _, indexID, err := encoding.DecodeUvarintAscending(rest); err != nil {
			return err
		} else if indexID == 1 {
			r.Rows++
		} else {
			r.IndexEntries++
		}
	}
	return nil
}
|
mithro/chromium-infra
|
go/src/infra/monitoring/sysmon/cmd/sysmon/main.go
|
<reponame>mithro/chromium-infra
// Copyright (c) 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package main
import (
"flag"
"fmt"
"os"
"github.com/luci/luci-go/common/logging"
"github.com/luci/luci-go/common/logging/gologger"
"github.com/luci/luci-go/common/tsmon"
"golang.org/x/net/context"
"infra/monitoring/sysmon/android"
"infra/monitoring/sysmon/cipd"
"infra/monitoring/sysmon/puppet"
"infra/monitoring/sysmon/system"
)
// main wires up flags, logging and tsmon, registers the per-subsystem metric
// callbacks, then either blocks forever (auto-flush mode) or flushes once.
func main() {
	// Register tsmon and logging flags on a private FlagSet and parse argv.
	fs := flag.NewFlagSet("", flag.ExitOnError)
	tsmonFlags := tsmon.NewFlags()
	tsmonFlags.Flush = "auto"
	tsmonFlags.Register(fs)
	loggingConfig := logging.Config{Level: logging.Info}
	loggingConfig.AddFlags(fs)
	fs.Parse(os.Args[1:])
	// Build the base context with the configured logger installed.
	c := context.Background()
	c = gologger.StdConfig.Use(c)
	c = loggingConfig.Set(c)
	if err := tsmon.InitializeFromFlags(c, &tsmonFlags); err != nil {
		panic(fmt.Sprintf("Failed to initialize tsmon: %s", err))
	}
	// Register metric callbacks.
	android.Register()
	cipd.Register()
	puppet.Register()
	system.Register() // Should be registered last.
	if tsmonFlags.Flush == "auto" {
		// tsmon's auto-flusher goroutine will call the metric callbacks and flush
		// the metrics every minute; block this goroutine forever.
		select {}
	} else {
		// Flush once and exit.
		tsmon.Flush(c)
	}
}
|
Bluestar123/Vue3-Admin-Plus
|
public/js/config.js
|
/*
* Copyright (c) 2021
* 项目名称:Vue3-Admin-Plus
* 文件名称:config.js
* 创建日期:2021/1/31 上午11:48
* 创建作者:Jaxson
*/
// Runtime configuration exposed on window so it can be edited after build
// without recompiling the bundle.
window.VUE_APP = {
  // Base URL of the backend API host — presumably an internal address; confirm per deployment.
  VUE_APP_BASE_API: 'http://172.16.17.32:28081',
  // Service path prefix appended to the base API URL.
  VUE_APP_URI: '/jy-service',
  // Path for static data resources.
  VUE_APP_DATA: '/data'
}
|
wfnex/OpenBRAS
|
src/ace/ACE_wrappers/tests/Recursive_Condition_Bug_Test.cpp
|
//=============================================================================
/**
* @file Recursive_Condition_Bug_Test.cpp
*
* This test program validates the functionality of the
* ACE_Condition<ACE_Recursive_Thread_Mutex> template
* specialization when combined with the
* ACE_Thread_Timer_Queue_Adapter on Win32 and Posix pthreads.
* It was added to test for bugs with the ACE_OS recursive
* condition implementation.
*
* @author <NAME> <<EMAIL>> and <NAME> <<EMAIL>>
*/
//=============================================================================
#include "test_config.h"
#include "ace/OS_NS_sys_time.h"
#include "ace/Task_T.h"
#include "ace/Activation_Queue.h"
#include "ace/Timer_Heap.h"
#include "ace/Timer_Queue_Adapters.h"
#include "ace/Condition_Recursive_Thread_Mutex.h"
#if defined (ACE_HAS_THREADS)
// Number of iterations for the performance tests.
// (Decremented by the Test_Task service thread; the test ends when it hits 0.)
static int max_iterations = 30;
// Timer queue driven by its own thread (ACE_Thread_Timer_Queue_Adapter).
typedef ACE_Thread_Timer_Queue_Adapter<ACE_Timer_Heap> Thread_Timer_Queue;
class Test_Handler;
// Active object that dequeues Test_Handler pointers from its message queue
// and re-schedules them on its thread-driven timer queue, looping until
// max_iterations reaches zero. Exercises the recursive-condition path of the
// timer queue adapter.
class Test_Task : public ACE_Task<ACE_MT_SYNCH>
{
public:
  // Service thread: repeatedly take a handler off the queue and schedule a
  // 1-second timeout for it; deactivate the timer queue when done.
  virtual int svc (void)
  {
    while (--max_iterations > 0)
      {
        // dequeue the next object
        ACE_Message_Block * mb = 0;
        if (this->getq (mb) == -1)
          ACE_ERROR_RETURN ((LM_ERROR,
                             ACE_TEXT ("(%t) %p\n"),
                             ACE_TEXT ("getq failed")),
                            -1);
        // The handler pointer was stored as the message block's data base.
        Test_Handler *handler = reinterpret_cast<Test_Handler *> (mb->base ());
        mb->release ();
        // Fire 1 second from now; `this` is passed as the timeout act/arg.
        ACE_Time_Value timeout = ACE_OS::gettimeofday () + ACE_Time_Value (1, 0);
        if (timer_queue_.schedule (reinterpret_cast<ACE_Event_Handler *> (handler),
                                   this,
                                   timeout) == -1)
          ACE_ERROR_RETURN ((LM_ERROR,
                             ACE_TEXT ("(%t) %p\n"),
                             ACE_TEXT ("schedule failed")),
                            -1);
        ACE_DEBUG ((LM_INFO,
                    ACE_TEXT ("(%t) %d iteration(s) remain\n"),
                    max_iterations));
      }
    timer_queue_.deactivate ();
    ACE_DEBUG ((LM_INFO,
                ACE_TEXT ("(%t) leaving the Test_task\n")));
    return 0;
  }
  //FUZZ: disable check_for_lack_ACE_OS
  // Spawn the service thread and activate the timer queue's thread.
  virtual int open (void * = 0)
  {
    //FUZZ: enable check_for_lack_ACE_OS
    if (ACE_Task<ACE_MT_SYNCH>::activate (THR_NEW_LWP, 1) != 0)
      ACE_ERROR_RETURN ((LM_ERROR,
                         ACE_TEXT ("(%t) %p\n"),
                         ACE_TEXT ("Test_Task::activate")),
                        -1);
    if (0 != timer_queue_.activate ())
      ACE_ERROR_RETURN ((LM_ERROR,
                         ACE_TEXT ("(%t) %p\n"),
                         ACE_TEXT ("Test_Task::queue activate")),
                        -1);
    return 0;
  }
private:
  // Timer queue serviced by its own dedicated thread.
  Thread_Timer_Queue timer_queue_;
};
// Event handler that, when its timer fires, enqueues itself back onto the
// Test_Task's message queue (closing the schedule/dequeue loop).
class Test_Handler : public ACE_Event_Handler
{
public:
  // Timer callback: `arg` is the Test_Task passed at schedule() time; wrap
  // `this` in a message block and put it back on the task's queue.
  virtual int handle_timeout (const ACE_Time_Value &,
                              const void *arg)
  {
    ACE_DEBUG ((LM_DEBUG,
                ACE_TEXT ("(%t) Test_Handler::handle_timeout\n")));
    void *nc_arg = const_cast<void *> (arg);
    Test_Task *test_task =
      reinterpret_cast<Test_Task *> (nc_arg);
    ACE_Message_Block *mb = 0;
    // Allocate a message block whose data pointer is this handler itself.
    ACE_NEW_MALLOC_RETURN (mb,
                           static_cast<ACE_Message_Block *> (ACE_Allocator::instance()->malloc (sizeof (ACE_Message_Block))),
                           ACE_Message_Block (sizeof (*this), // size
                                              ACE_Message_Block::MB_DATA, // type
                                              0, // cont
                                              (char *) this, // data
                                              0,
                                              0,
                                              ACE_DEFAULT_MESSAGE_BLOCK_PRIORITY,
                                              ACE_Time_Value::zero,
                                              ACE_Time_Value::max_time,
                                              0,
                                              ACE_Allocator::instance()), // data
                           -1);
    test_task->putq (mb);
    return 0;
  }
};
#endif /* ACE_HAS_THREADS */
// Test entry point: open the task (starting its service and timer threads),
// seed the loop with one handler wrapped in a message block, then wait for
// all threads to finish. On platforms without threads, just report and exit.
int
run_main (int, ACE_TCHAR *[])
{
  ACE_START_TEST (ACE_TEXT ("Recursive_Condition_Bug_Test"));
#if defined (ACE_HAS_THREADS)
  // Timer queue usage.
  Test_Handler handler;
  Test_Task task;
  if (0 != task.open ())
    ACE_ERROR_RETURN ((LM_ERROR,
                       ACE_TEXT ("(%t) %p\n"),
                       ACE_TEXT ("open")),
                      -1);
  // Seed the ping-pong: enqueue the handler so svc() schedules its first timer.
  ACE_Message_Block *mb = 0;
  ACE_NEW_MALLOC_RETURN (mb,
                         static_cast<ACE_Message_Block *> (ACE_Allocator::instance()->malloc (sizeof (ACE_Message_Block))),
                         ACE_Message_Block (sizeof (handler), // size
                                            ACE_Message_Block::MB_DATA, // type
                                            0, // cont
                                            (char *) &handler,
                                            0,
                                            0,
                                            ACE_DEFAULT_MESSAGE_BLOCK_PRIORITY,
                                            ACE_Time_Value::zero,
                                            ACE_Time_Value::max_time,
                                            0,
                                            ACE_Allocator::instance()), // data
                         -1);
  if (-1 == task.putq (mb))
    ACE_ERROR_RETURN ((LM_ERROR,
                       ACE_TEXT ("(%t) %p\n"),
                       ACE_TEXT ("putq")),
                      -1);
  // Block until the service thread and timer-queue thread both exit.
  if (ACE_Thread_Manager::instance ()->wait () == -1)
    ACE_ERROR_RETURN ((LM_ERROR,
                       "wait on Thread_Manager failed"),
                      -1);
#else
  ACE_ERROR ((LM_ERROR,
              ACE_TEXT ("ACE doesn't support recursive condition variables on this platform\n")));
#endif /* ACE_HAS_THREADS */
  ACE_END_TEST;
  return 0;
}
|
emelnychenko/xtra
|
src/eval.h
|
//
// Created by eugen on 5/15/2017.
//
#ifndef XTRA_EVAL_H
#define XTRA_EVAL_H
/* Evaluate input and return the resulting token.
 * NOTE(review): from the f/s naming, xtra_feval presumably evaluates the
 * contents of a file (path `f`) and xtra_seval evaluates a source string
 * `s` directly — confirm against the implementation. */
xtra_token_p
xtra_feval(char * f);
xtra_token_p
xtra_seval(char * s);
#endif //XTRA_EVAL_H
|
hijkzzz/mini-interpreter
|
ast/block_stmnt.go
|
package ast
import (
"stone/environment"
)
// BlockStmnt is the AST node for a block of statements.
type BlockStmnt struct {
	astList
}

// NewBlockStmnt wraps the given child nodes in a BlockStmnt.
func NewBlockStmnt(list []ASTree) *BlockStmnt{
	return &BlockStmnt{astList{list}}
}
// Eval executes every child statement in order and returns the value of the
// last one (nil for an empty block).
func (self *BlockStmnt) Eval(env environment.Environment, args... interface{}) interface{} {
	var result interface{}
	// return the result of the last executed statement
	for _, value := range self.children {
		result = value.Eval(env)
	}
	return result
}
|
AnushaPoojaryNEU/INFO6205-ProgramsAndAlgorithms
|
src/test/java/edu/neu/coe/info6205/life/base/GroupTest.java
|
<reponame>AnushaPoojaryNEU/INFO6205-ProgramsAndAlgorithms
package edu.neu.coe.info6205.life.base;
import edu.neu.coe.info6205.util.PrivateMethodTester;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import static edu.neu.coe.info6205.life.base.Grid.Origin;
import static edu.neu.coe.info6205.life.library.Library.*;
import static org.junit.Assert.*;
/**
 * Unit tests for {@link Group}: construction, extents, rendering, origin
 * changes, add/remove/merge/overlap, and Game-of-Life generation evolution
 * for well-known patterns (still lifes, the blinker oscillator, gliders).
 */
public class GroupTest {

    // An empty group has no extents, zero cells, and does not overlap itself.
    @Test
    public void testConstructor() {
        Group target = new Group(0L);
        assertNull(target.getExtent1());
        assertFalse(target.overlap(target));
        assertEquals(0, target.getCount());
    }

    // A single-point group gets extents one cell beyond the point on each side.
    @Test
    public void testConstructor1() {
        final List<Point> points = new ArrayList<>();
        final Point point = new Point(1, 1);
        points.add(point);
        Group target = new Group(0L, Origin, points);
        assertEquals(1, target.getCount());
        assertEquals(point.move(-1, -1), target.getExtent1());
        assertEquals(point.move(1, 1), target.getExtent2());
        assertEquals(point, target.pointsAbsolute().get(0));
    }

    // Extents of a multi-point group are derived from the min/max points.
    @Test
    public void testConstructor2() throws LifeException {
        final List<Point> points = new ArrayList<>();
        points.add(Origin);
        points.add(new Point(1, 1));
        points.add(new Point(2, 0));
        points.add(new Point(2, 2));
        points.add(new Point(3, 1));
        points.add(new Point(4, 1));
        points.add(new Point(4, 2));
        points.add(new Point(5, 0));
        final Point last = new Point(5, 2);
        points.add(last);
        Group target = new Group(0L, Origin, points);
        assertEquals(0L, target.getGeneration());
        assertEquals(9, target.getCount());
        assertEquals(Origin, target.getOrigin());
        assertEquals(Origin.move(-1, -1), target.getExtent1());
        assertEquals(last.move(1, 1), target.getExtent2());
    }

    // CellsAndNeighbors renders the origin cell as 'O' and others as '*'.
    @Test
    public void testCellsAndNeighbors0() {
        Group target = Group.create(0L, Origin);
        final Group.CellsAndNeighbors can = Group.CellsAndNeighbors.create(target);
        assertEquals("O\n", can.toString());
        final PrivateMethodTester targetTester = new PrivateMethodTester(target);
        final Point vector = new Point(1, 1);
        final Group changed = (Group) targetTester.invokePrivate("changeOrigin", 1L, vector);
        assertEquals("*\n", Group.CellsAndNeighbors.create(changed).toString());
    }

    @Test
    public void testCellsAndNeighbors1() {
        Group target = Group.create(0L, Origin);
        target.add(new Point(-1, -1));
        final Group.CellsAndNeighbors can = Group.CellsAndNeighbors.create(target);
        assertEquals(".O\n" + "*.\n", can.toString());
        final PrivateMethodTester targetTester = new PrivateMethodTester(target);
        final Point vector = new Point(1, 1);
        final Group changed = (Group) targetTester.invokePrivate("changeOrigin", 1L, vector);
        assertEquals(".*\n" + "*.\n", Group.CellsAndNeighbors.create(changed).toString());
    }

    @Test
    public void testToString() {
        final List<Point> points = new ArrayList<>();
        final Point point = new Point(1, 1);
        points.add(Origin);
        points.add(point);
        Group target = new Group(0L, Origin, points);
        assertEquals("generation 0, origin = {0, 0}, extents = [{-1, -1}, {2, 2}]\n" +
                " [{0, 0}, {1, 1}]", target.toString());
    }

    @Test
    public void testToStringInGrid() {
        final List<Point> points = new ArrayList<>();
        final Point point = new Point(1, 1);
        points.add(point);
        Group target = new Group(0L, Origin, points);
        assertEquals("generation 0: extents = [{0, 0}, {2, 2}]\n" +
                " [{1, 1}]", target.toStringInGrid());
    }

    @Test
    public void testRender() {
        final List<Point> points = new ArrayList<>();
        final Point point = new Point(1, 1);
        points.add(point);
        Group target = new Group(0L, Origin, points);
        assertEquals("O\nOrigin: {1, 1}\n", target.render());
    }

    // changeOrigin moves the origin but preserves the absolute cell positions.
    @Test
    public void testChangeOrigin() {
        Group target = Group.create(0L, Origin);
        final PrivateMethodTester targetTester = new PrivateMethodTester(target);
        final Point point1 = new Point(1, 1);
        target.add(point1);
        final String targetRendered = target.render();
        assertEquals(".*\n" + "O.\nOrigin: {0, 0}\n", targetRendered);
        final Group changed = (Group) targetTester.invokePrivate("changeOrigin", 1L, point1);
        assertEquals(point1, changed.getOrigin());
        assertEquals(target.getExtent1(), changed.getExtent1());
        assertEquals(target.getExtent2(), changed.getExtent2());
        assertEquals(target.pointsAbsolute(), changed.pointsAbsolute());
        assertEquals(".O\n" + "*.\nOrigin: {1, 1}\n", changed.render());
        assertEquals("generation 1, origin = {1, 1}, extents = [{-2, -2}, {1, 1}]\n" +
                " [{-1, -1}, {0, 0}]", changed.toString());
    }

    // add(Point): count and extents update; group now overlaps itself.
    @Test
    public void testAdd() throws LifeException {
        Group target = new Group(0L);
        final Point point = Origin;
        assertTrue(target.add(point));
        assertEquals(1, target.getCount());
        assertEquals(point.move(-1, -1), target.getExtent1());
        assertEquals(point.move(1, 1), target.getExtent2());
        assertTrue(target.overlap(target));
    }

    // add(x, y) overload.
    @Test
    public void testAdd0() {
        final int x = 0;
        final int y = 1;
        final Point point = new Point(x, y);
        Group target = new Group(0L);
        assertTrue(target.add(x, y));
        assertEquals(1, target.getCount());
        assertEquals(point.move(-1, -1), target.getExtent1());
        assertEquals(point.move(1, 1), target.getExtent2());
    }

    // add("x y") string overload.
    @Test
    public void testAdd1() {
        final int x = 1;
        final int y = 3;
        String s = x + " " + y;
        final Point point = new Point(x, y);
        Group target = new Group(0L);
        assertTrue(target.add(s));
        assertEquals(1, target.getCount());
        assertEquals(point.move(-1, -1), target.getExtent1());
        assertEquals(point.move(1, 1), target.getExtent2());
    }

    // add(List<Point>) overload, parsed from a comma-separated string.
    @Test
    public void testAdd2() {
        final String s = "1 2, 2 3";
        List<Point> points = Point.points(s);
        Group target = new Group(0L);
        assertTrue(target.add(points));
        assertEquals(2, target.getCount());
        assertEquals(points.get(0).move(-1, -1), target.getExtent1());
        assertEquals(points.get(1).move(1, 1), target.getExtent2());
    }

    // getPoints returns cells relative to the group, offset by the extent.
    @SuppressWarnings("unchecked")
    @Test
    public void testGetAbsolute0() throws LifeException {
        Group target = new Group(0L);
        final PrivateMethodTester targetTester = new PrivateMethodTester(target);
        final Point point0 = new Point(1, 1);
        final Point point1 = new Point(2, 2);
        target.add(point0);
        target.add(point1);
        assertEquals(point0.move(-1, -1), target.getExtent1());
        assertEquals(point1.move(1, 1), target.getExtent2());
        final List<Point> cellsTarget = (List<Point>) targetTester.invokePrivate("getPoints");
        assertEquals(point0.move(-1, -1), cellsTarget.get(0));
        assertEquals(point1.move(-1, -1), cellsTarget.get(1));
    }

    // pointsAbsolute applies the (non-zero) origin to every cell.
    @Test
    public void testGetAbsolute1() throws LifeException {
        final Point origin = new Point(10, 10);
        final Point point0 = new Point(1, 1);
        final Point point1 = new Point(2, 2);
        final List<Point> points = new ArrayList<>();
        points.add(point0);
        points.add(point1);
        Group target = new Group(0L, origin, points);
        final PrivateMethodTester targetTester = new PrivateMethodTester(target);
        assertEquals(point0.move(origin).move(-1, -1), target.getExtent1());
        assertEquals(point1.move(origin).move(1, 1), target.getExtent2());
        final List<Point> cellsTarget = target.pointsAbsolute();
        assertEquals(point0.move(origin), cellsTarget.get(0));
        assertEquals(point1.move(origin), cellsTarget.get(1));
    }

    // Two empty groups never overlap.
    @Test
    public void testOverlap0() {
        Group target = new Group(0L, Origin, null, null, null);
        Group other = new Group(0L, Origin, null, null, null);
        assertFalse(target.overlap(other));
    }

    // Adjacent cells overlap (extents extend one cell beyond each point).
    @Test
    public void testOverlap1() {
        final Point point11 = new Point(1, 1);
        final List<Point> points1 = new ArrayList<>();
        points1.add(Origin);
        final List<Point> points2 = new ArrayList<>();
        points2.add(point11);
        Group target = new Group(0L, Origin, points1);
        Group other = new Group(0L, Origin, points2);
        assertTrue(target.overlap(other));
    }

    // Sufficiently separated groups do not overlap.
    @Test
    public void testOverlap2() {
        final Point point11 = new Point(-1, -1);
        final Point point33 = new Point(3, 3);
        final Point point44 = new Point(4, 4);
        final List<Point> cells1 = new ArrayList<>();
        cells1.add(Origin);
        cells1.add(point11);
        final List<Point> cells2 = new ArrayList<>();
        cells2.add(point33);
        cells2.add(point44);
        Group target = new Group(0L, Origin, cells1);
        Group other = new Group(0L, Origin, cells2);
        assertFalse(target.overlap(other));
    }

    @Test
    public void testOverlap3() {
        final Point point22 = new Point(2, 2);
        final Point point33 = new Point(3, 3);
        final Point point44 = new Point(4, 4);
        final List<Point> cells1 = new ArrayList<>();
        cells1.add(Origin);
        cells1.add(point22);
        final List<Point> cells2 = new ArrayList<>();
        cells2.add(point33);
        cells2.add(point44);
        Group target = new Group(0L, Origin, cells1);
        Group other = new Group(0L, Origin, cells2);
        assertTrue(target.overlap(other));
    }

    // Same relative cells as testOverlap3, but a distant origin separates them.
    @Test
    public void testOverlap4() {
        final Point point22 = new Point(2, 2);
        final Point point33 = new Point(3, 3);
        final Point point44 = new Point(4, 4);
        final List<Point> cells1 = new ArrayList<>();
        cells1.add(Origin);
        cells1.add(point22);
        final List<Point> cells2 = new ArrayList<>();
        cells2.add(point33);
        cells2.add(point44);
        Group target = new Group(0L, Origin, cells1);
        Group other = new Group(0L, new Point(10, 10), cells2);
        assertFalse(target.overlap(other));
    }

    // Merging a group with itself is illegal.
    @Test(expected = LifeException.class)
    public void testMerge0() throws LifeException {
        Group target = new Group(0L);
        target.merge(target);
    }

    // Merging with an empty group yields a group equal to the original.
    @Test
    public void testMerge1() throws LifeException {
        Group target = new Group(0L);
        final Point point = Origin;
        assertTrue(target.add(point));
        assertEquals(1, target.getCount());
        assertEquals(point.move(-1, -1), target.getExtent1());
        assertEquals(point.move(1, 1), target.getExtent2());
        assertTrue(target.overlap(target));
        Group merged = target.merge(new Group(0L));
        assertEquals(target, merged);
    }

    // Merging two disjoint groups combines their cell counts.
    @Test
    public void testMerge2() throws LifeException {
        final String s = "1 2, 2 3";
        Collection<Point> points = Point.points(s);
        Group target1 = new Group(0L);
        assertTrue(target1.add(points));
        final Point offset = new Point(10, 10);
        Group target2 = target1.move(offset);
        assertFalse(target1.overlap(target2));
        Group merged = target1.merge(target2);
        assertEquals(4, merged.getCount());
    }

    @Test
    public void testRemove() {
        final List<Point> cells = new ArrayList<>();
        cells.add(Origin);
        Group target = new Group(0L, Origin, Origin, Origin, cells);
        assertEquals(1, target.getCount());
        assertTrue(target.remove(Origin));
        assertEquals(0, target.getCount());
    }

    @Test
    public void testForEach() {
        final List<Point> cells = new ArrayList<>();
        cells.add(Origin);
        Group target = new Group(0L, Origin, Origin, Origin, cells);
        final List<Point> result = new ArrayList<>();
        target.forEach(result::add);
        assertEquals(1, result.size());
        assertEquals(Origin, result.get(0));
    }

    // NOTE: despite the name, this checks the explicitly supplied extent1.
    @Test
    public void testGetOrigin() {
        Group target = new Group(0L, Origin, Origin, null, null);
        assertEquals(Origin, target.getExtent1());
    }

    @Test
    public void testGetExtent() {
        Group target = new Group(0L, Origin, null, Origin, null);
        assertEquals(Origin, target.getExtent2());
    }

    // withinExtents is inclusive of the one-cell border around the cells.
    @Test
    public void testWithinExtents() {
        final Point point11 = new Point(1, 1);
        final Point point01 = new Point(0, 1);
        final Point point02 = new Point(0, 2);
        final Point point03 = new Point(0, 3);
        Group target = new Group(0L);
        final PrivateMethodTester targetTester = new PrivateMethodTester(target);
        assertTrue(target.add(Origin));
        assertTrue(target.add(point11));
        assertTrue((Boolean) targetTester.invokePrivate("withinExtents", Origin));
        assertTrue((Boolean) targetTester.invokePrivate("withinExtents", point01));
        assertTrue((Boolean) targetTester.invokePrivate("withinExtents", point02));
        assertFalse((Boolean) targetTester.invokePrivate("withinExtents", point03));
        assertTrue((Boolean) targetTester.invokePrivate("withinExtents", point11));
    }

    // One generation of a 9-cell pattern produces the expected 8-cell result.
    @Test
    public void testNewGeneration0() throws LifeException {
        final List<Point> cells1 = new ArrayList<>();
        cells1.add(Origin);
        cells1.add(new Point(1, 1));
        cells1.add(new Point(2, 0));
        cells1.add(new Point(2, 2));
        cells1.add(new Point(3, 1));
        cells1.add(new Point(4, 1));
        cells1.add(new Point(4, 2));
        cells1.add(new Point(5, 0));
        cells1.add(new Point(5, 2));
        Group target = new Group(0L, Origin, cells1);
        assertEquals(9, target.getCount());
        assertEquals("..*.**\n" + ".*.**.\n" + "O.*..*\nOrigin: {0, 0}\n", target.render());
        final Group newGeneration = target.newGeneration(1L);
        assertEquals(".*.**\n" + "O....\n" + "****.\nOrigin: {1, 1}\n", newGeneration.render());
        assertEquals(8, newGeneration.getCount());
    }

    // Two diagonal cells die out after one generation.
    @Test
    public void testNewGeneration1() throws LifeException {
        Group target = new Group(0L);
        target.add(Origin);
        target.add(new Point(-1, -1));
        final Group newGeneration = target.newGeneration(1L);
        assertEquals("Origin: {0, 0}\n", newGeneration.render());
        assertEquals(0, newGeneration.getCount());
    }

    // transpose swaps x and y; transposing twice is the identity.
    @Test
    public void transpose() {
        Group target = new Group(0L);
        target.add(Origin);
        target.add(new Point(0, 1));
        final Group transposed = target.transpose();
        final List<Point> pointsT = transposed.pointsAbsolute();
        assertEquals(Origin, pointsT.get(0));
        assertEquals(new Point(1, 0), pointsT.get(1));
        final Group transposedAgain = transposed.transpose();
        assertEquals(target, transposedAgain);
    }

    // Still life: the Block is stable across generations.
    @Test
    public void testBlock() throws LifeException {
        Group target = Group.create(0L, Block);
        assertEquals(new Point(0, 0), target.getExtent1());
        assertEquals(new Point(3, 3), target.getExtent2());
        final Group newGeneration = target.newGeneration(1L);
        final Point p11 = new Point(1, 1);
        assertEquals(p11, newGeneration.getOrigin());
        final int count = newGeneration.getCount();
        assertEquals(4, count);
        final List<Point> cellsTarget = target.pointsAbsolute();
        final List<Point> cellsNG = newGeneration.pointsAbsolute();
        for (int i = 0; i < count; i++) assertEquals(cellsTarget.get(i), cellsNG.get(i));
        final Group gen2 = newGeneration.newGeneration(1L);
        assertEquals(p11, gen2.getOrigin());
        assertEquals(4, gen2.getCount());
        final List<Point> cellsGen2 = gen2.pointsAbsolute();
        for (int i = 0; i < count; i++) assertEquals(cellsTarget.get(i), cellsGen2.get(i));
    }

    // Still life: the Beehive is stable across generations.
    @SuppressWarnings("unchecked")
    @Test
    public void testBeehive() throws LifeException {
        Group target = Group.create(0L, Beehive);
        final PrivateMethodTester targetTester = new PrivateMethodTester(target);
        assertEquals(".**.\n" + "O..*\n" + ".**.\nOrigin: {1, 2}\n", target.render());
        assertEquals(6, target.getCount());
        assertEquals(Origin, target.getExtent1());
        assertEquals(new Point(5, 4), target.getExtent2());
        final Group newGeneration = target.newGeneration(1L);
        final PrivateMethodTester ngTester = new PrivateMethodTester(newGeneration);
        final Point point12 = new Point(1, 2);
        assertEquals(point12, newGeneration.getOrigin());
        final int count = newGeneration.getCount();
        assertEquals(6, count);
        final List<Point> cellsTarget = (List<Point>) targetTester.invokePrivate("getPoints");
        final List<Point> cellsNG = (List<Point>) ngTester.invokePrivate("getPoints");
        for (int i = 0; i < count; i++) assertEquals(cellsTarget.get(i), cellsNG.get(i));
        final Group gen2 = newGeneration.newGeneration(1L);
        final PrivateMethodTester gen2Tester = new PrivateMethodTester(gen2);
        assertEquals(point12, gen2.getOrigin());
        assertEquals(".**.\n" + "O..*\n" + ".**.\nOrigin: {1, 2}\n", newGeneration.render());
        assertEquals(6, gen2.getCount());
        final List<Point> cellsGen2 = (List<Point>) gen2Tester.invokePrivate("getPoints");
        for (int i = 0; i < count; i++) assertEquals(cellsTarget.get(i), cellsGen2.get(i));
    }

    // Still life: the Loaf is stable across generations.
    @SuppressWarnings("unchecked")
    @Test
    public void testLoaf() throws LifeException {
        Group target = Group.create(0L, Loaf);
        final PrivateMethodTester targetTester = new PrivateMethodTester(target);
        assertEquals(".**.\n" + "O..*\n" + ".*.*\n" + "..*.\nOrigin: {1, 3}\n", target.render());
        assertEquals(Origin, target.getExtent1());
        assertEquals(new Point(5, 5), target.getExtent2());
        final Group newGeneration = target.newGeneration(1L);
        final PrivateMethodTester ngTester = new PrivateMethodTester(newGeneration);
        final Point p13 = new Point(1, 3);
        assertEquals(p13, newGeneration.getOrigin());
        final int count = newGeneration.getCount();
        assertEquals(7, count);
        final List<Point> cellsTarget = (List<Point>) targetTester.invokePrivate("getPoints");
        final List<Point> cellsNG = (List<Point>) ngTester.invokePrivate("getPoints");
        for (int i = 0; i < count; i++) assertEquals(cellsTarget.get(i), cellsNG.get(i));
        final Group gen2 = newGeneration.newGeneration(1L);
        final PrivateMethodTester gen2Tester = new PrivateMethodTester(gen2);
        assertEquals(p13, gen2.getOrigin());
        assertEquals(".**.\n" + "O..*\n" + ".*.*\n" + "..*.\nOrigin: {1, 3}\n", newGeneration.render());
        assertEquals(7, gen2.getCount());
        final List<Point> cellsGen2 = (List<Point>) gen2Tester.invokePrivate("getPoints");
        for (int i = 0; i < count; i++) assertEquals(cellsTarget.get(i), cellsGen2.get(i));
    }

    // Oscillator: the Blinker alternates between vertical and horizontal.
    @Test
    public void testBlinker() throws LifeException {
        Group target = Group.create(0L, Blinker);
        assertEquals("*\n" + "O\n" + "*\nOrigin: {0, 0}\n", target.render());
        assertEquals(new Point(-1, -2), target.getExtent1());
        assertEquals(new Point(1, 2), target.getExtent2());
        final Group newGeneration = target.newGeneration(1L);
        assertEquals(Origin, newGeneration.getOrigin());
        assertEquals("*O*\nOrigin: {0, 0}\n", newGeneration.render());
        final int count = newGeneration.getCount();
        assertEquals(3, count);
        final List<Point> cellsNG = newGeneration.pointsAbsolute();
        for (int i = 0; i < count; i++) assertEquals(0, cellsNG.get(i).getY());
        for (int i = 0; i < count; i++) assertTrue(Math.abs(cellsNG.get(i).getX()) <= 1);
        final Group gen2 = newGeneration.newGeneration(2L);
        assertEquals("*\n" + "O\n" + "*\nOrigin: {0, 0}\n", gen2.render());
        assertEquals(3, gen2.getCount());
        final List<Point> cellsGen2 = gen2.pointsAbsolute();
        for (int i = 0; i < count; i++) assertEquals(0, cellsGen2.get(i).getX());
        for (int i = 0; i < count; i++) assertTrue(Math.abs(cellsNG.get(i).getY()) <= 1);
    }

    // Spaceship: Glider1 returns to its shape after 4 generations, moved (-1, 1).
    @Test
    public void testGlider1() throws LifeException {
        Group glider0 = Group.create(0L, Glider1);
        System.out.println(glider0.toStringInGrid());
        final String renderGlider0 = glider0.render();
        System.out.println(renderGlider0);
        assertEquals(5, glider0.getCount());
        assertEquals("O**\n" + "..*\n" + ".*.\nOrigin: {0, 0}\n", renderGlider0);
        assertEquals(new Point(-1, -3), glider0.getExtent1());
        assertEquals(new Point(3, 1), glider0.getExtent2());
        Group expected = glider0.move(-1, 1);
        final Group glider1 = glider0.newGeneration(1L);
        System.out.println(glider1.toStringInGrid());
        final Point p10 = new Point(1, 0);
        assertEquals(p10, glider1.getOrigin());
        assertEquals(".*.\n" + ".O*\n" + "*.*\nOrigin: {1, 0}\n", glider1.render());
        assertEquals(5, glider1.getCount());
        final Group glider2 = glider1.newGeneration(2L);
        System.out.println(glider2.toStringInGrid());
        assertEquals(".**\n" + "*.O\n" + "..*\nOrigin: {1, 0}\n", glider2.render());
        assertEquals(5, glider2.getCount());
        final Group glider3 = glider2.newGeneration(3L);
        System.out.println(glider3.toStringInGrid());
        assertEquals("**.\n" + ".O*\n" + "*..\nOrigin: {1, 0}\n", glider3.render());
        assertEquals(5, glider3.getCount());
        final Group glider4 = glider3.newGeneration(4L);
        System.out.println(glider4.toStringInGrid());
        final String glider4Render = glider4.render();
        System.out.println(glider4Render);
        assertEquals("O**\n" + "..*\n" + ".*.\n" + "Origin: {-1, 1}\n", glider4Render);
        assertEquals(5, glider4.getCount());
        final List<Point> expectedPoints = expected.pointsAbsolute();
        Collections.sort(expectedPoints);
        final List<Point> glider4Points = glider4.pointsAbsolute();
        Collections.sort(glider4Points);
        assertEquals(expectedPoints, glider4Points);
    }

    // Spaceship: Glider2 returns to its shape after 4 generations, moved (-1, 1).
    @Test
    public void testGlider2() throws LifeException {
        Group glider0 = Group.create(0L, Glider2);
        System.out.println(glider0.toStringInGrid());
        final String renderGlider0 = glider0.render();
        System.out.println(renderGlider0);
        assertEquals(5, glider0.getCount());
        assertEquals("O**\n" + "*..\n" + ".*.\n" + "Origin: {0, 0}\n", renderGlider0);
        assertEquals(new Point(-1, -3), glider0.getExtent1());
        assertEquals(new Point(3, 1), glider0.getExtent2());
        Group expected = glider0.move(-1, 1);
        final Group glider1 = glider0.newGeneration(1L);
        System.out.println(glider1.toStringInGrid());
        assertEquals(Origin, glider1.getOrigin());
        assertEquals(".*.\n" + "O*.\n" + "*.*\n" + "Origin: {0, 0}\n", glider1.render());
        assertEquals(5, glider1.getCount());
        final Group glider2 = glider1.newGeneration(2L);
        System.out.println(glider2.toStringInGrid());
        assertEquals("**.\n" + "O.*\n" + "*..\n" + "Origin: {0, 0}\n", glider2.render());
        assertEquals(5, glider2.getCount());
        final Group glider3 = glider2.newGeneration(3L);
        System.out.println(glider3.toStringInGrid());
        assertEquals(".**\n" + "*O.\n" + "..*\n" + "Origin: {0, 0}\n", glider3.render());
        assertEquals(5, glider3.getCount());
        final Group glider4 = glider3.newGeneration(4L);
        System.out.println(glider4.toStringInGrid());
        final String glider4Render = glider4.render();
        System.out.println(glider4Render);
        assertEquals("**O\n" + "*..\n" + ".*.\n" + "Origin: {1, 1}\n", glider4Render);
        assertEquals(5, glider4.getCount());
        final List<Point> expectedPoints = expected.pointsAbsolute();
        Collections.sort(expectedPoints);
        final List<Point> glider4Points = glider4.pointsAbsolute();
        Collections.sort(glider4Points);
        assertEquals(expectedPoints, glider4Points);
    }

    // Spaceship: Glider3 returns to its shape after 4 generations, moved (-1, -1).
    @Test
    public void testGlider3() throws LifeException {
        Group glider0 = Group.create(0L, Glider3);
        // System.out.println(glider0.toStringInGrid());
        final PrivateMethodTester privateMethodTester = new PrivateMethodTester(glider0);
        final String renderGlider0 = (String) privateMethodTester.invokePrivate("doRender", false);
        // final String renderGlider0 = glider0.render();
        assertEquals(5, glider0.getCount());
        assertEquals(".*.\n" + "..*\n" + "O**\n", renderGlider0);
        assertEquals(new Point(-1, -1), glider0.getExtent1());
        assertEquals(new Point(3, 3), glider0.getExtent2());
        Group expected = glider0.move(-1, -1);
        final Group glider1 = glider0.newGeneration(1L);
        // System.out.println(glider1.toStringInGrid());
        final Point p10 = new Point(1, 0);
        assertEquals(p10, glider1.getOrigin());
        assertEquals("*.*\n" + ".O*\n" + ".*.\nOrigin: {1, 0}\n", glider1.render());
        assertEquals(5, glider1.getCount());
        final Group glider2 = glider1.newGeneration(2L);
        // System.out.println(glider2.toStringInGrid());
        assertEquals("..*\n" + "*.O\n" + ".**\nOrigin: {1, 0}\n", glider2.render());
        assertEquals(5, glider2.getCount());
        final Group glider3 = glider2.newGeneration(3L);
        // System.out.println(glider3.toStringInGrid());
        assertEquals("*..\n" + ".O*\n" + "**.\nOrigin: {1, 0}\n", glider3.render());
        assertEquals(5, glider3.getCount());
        final Group glider4 = glider3.newGeneration(4L);
        // System.out.println(glider4.toStringInGrid());
        assertEquals(renderGlider0 + "Origin: {-1, -1}\n", glider4.render());
        assertEquals(5, glider4.getCount());
        final List<Point> expectedPoints = expected.pointsAbsolute();
        Collections.sort(expectedPoints);
        final List<Point> glider4Points = glider4.pointsAbsolute();
        Collections.sort(glider4Points);
        assertEquals(expectedPoints, glider4Points);
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.